hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringdate 2015-01-01 00:00:47 2022-03-31 23:42:18 ⌀ | max_issues_repo_issues_event_max_datetime stringdate 2015-01-01 17:43:30 2022-03-31 23:59:58 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
959ac1baff7cea9daabf593760b72f74cd08cb19 | 778 | py | Python | porcupine/plugins/gotoline.py | rscales02/porcupine | 91b3c90d19d2291c0a60ddb9dffac931147cde3c | [
"MIT"
] | null | null | null | porcupine/plugins/gotoline.py | rscales02/porcupine | 91b3c90d19d2291c0a60ddb9dffac931147cde3c | [
"MIT"
] | null | null | null | porcupine/plugins/gotoline.py | rscales02/porcupine | 91b3c90d19d2291c0a60ddb9dffac931147cde3c | [
"MIT"
] | null | null | null | from tkinter import simpledialog
from porcupine import actions, get_tab_manager, tabs
def gotoline():
    """Ask the user for a line number and move the insert cursor there."""
    current_tab = get_tab_manager().select()
    # simpledialog is plain tk rather than ttk, which is acceptable here
    target_line = simpledialog.askinteger(
        "Go to Line", "Type a line number and press Enter:")
    if target_line is None:
        # dialog was cancelled
        return
    # keep the current column; tk silently clamps out-of-bounds text
    # indexes, so no explicit bounds check is needed
    widget = current_tab.textwidget
    current_column = widget.index('insert').split('.')[1]
    widget.mark_set('insert', '%d.%s' % (target_line, current_column))
    widget.see('insert')
    current_tab.on_focus()
def setup():
    """Register the Edit/Go to Line action, bound to Ctrl+L for file tabs."""
    actions.add_command("Edit/Go to Line", gotoline, '<Control-l>',
                        tabtypes=[tabs.FileTab])
| 31.12 | 79 | 0.638817 |
959aea6673bc315fd2a49870629b49b87e1b393a | 4,634 | py | Python | preprocessing.py | JackAndCole/Detection-of-sleep-apnea-from-single-lead-ECG-signal-using-a-time-window-artificial-neural-network | 692bb7d969b7eb4a0ad9b221660901a863bc76e2 | [
"Apache-2.0"
] | 7 | 2020-01-22T03:23:39.000Z | 2021-12-26T05:02:10.000Z | preprocessing.py | JackAndCole/Detection-of-sleep-apnea-from-single-lead-ECG-signal-using-a-time-window-artificial-neural-network | 692bb7d969b7eb4a0ad9b221660901a863bc76e2 | [
"Apache-2.0"
] | null | null | null | preprocessing.py | JackAndCole/Detection-of-sleep-apnea-from-single-lead-ECG-signal-using-a-time-window-artificial-neural-network | 692bb7d969b7eb4a0ad9b221660901a863bc76e2 | [
"Apache-2.0"
] | 1 | 2020-05-29T06:32:24.000Z | 2020-05-29T06:32:24.000Z | import os
import pickle
import sys
import warnings
from collections import OrderedDict
import biosppy.signals.tools as st
import numpy as np
import wfdb
from biosppy.signals.ecg import correct_rpeaks, hamilton_segmenter
from hrv.classical import frequency_domain, time_domain
from scipy.signal import medfilt
from tqdm import tqdm
warnings.filterwarnings(action="ignore")
base_dir = "dataset"  # directory holding the Apnea-ECG records
fs = 100  # ECG sample frequency
# physiological heart-rate bounds (bpm) used to reject impossible segments
hr_min = 20
hr_max = 300
def feature_extraction(recording, signal, labels):
    """Extract per-minute HRV/EDR features from one ECG recording.

    recording -- record name (used only to label the progress bar)
    signal -- raw single-lead ECG samples at `fs` Hz
    labels -- per-minute annotations; "N" is normal (0), anything else apnea (1)

    Returns a float ndarray with one row per minute: 18 feature columns
    (6 RRI time-domain, 6 RRI frequency-domain, 6 EDR frequency-domain)
    followed by the binary label.  Segments with unusable R peaks or an
    impossible heart rate get NaN features but keep their label.
    """
    data = []
    for i in tqdm(range(len(labels)), desc=recording, file=sys.stdout):
        # one 60-second ECG segment per annotation
        segment = signal[i * fs * 60:(i + 1) * fs * 60]
        # band-pass 3-45 Hz FIR filter to suppress baseline wander and noise
        segment, _, _ = st.filter_signal(segment, ftype='FIR', band='bandpass', order=int(0.3 * fs), frequency=[3, 45],
                                         sampling_rate=fs)
        # Finding R peaks
        rpeaks, = hamilton_segmenter(segment, sampling_rate=fs)
        rpeaks, = correct_rpeaks(segment, rpeaks, sampling_rate=fs, tol=0.1)
        # Extracting feature
        label = 0 if labels[i] == "N" else 1
        if 40 <= len(rpeaks) <= 200:  # Remove abnormal R peaks
            # RR intervals (seconds) and their timestamps
            rri_tm, rri = rpeaks[1:] / float(fs), np.diff(rpeaks, axis=-1) / float(fs)
            # median filter smooths isolated detection glitches
            rri = medfilt(rri, kernel_size=3)
            # ECG-derived respiration: R-peak amplitudes over time
            edr_tm, edr = rpeaks / float(fs), segment[rpeaks]
            # Remove physiologically impossible HR signal
            if np.all(np.logical_and(60 / rri >= hr_min, 60 / rri <= hr_max)):
                # time_domain expects milliseconds, hence rri * 1000
                rri_time_features, rri_frequency_features = time_domain(rri * 1000), frequency_domain(rri, rri_tm)
                edr_frequency_features = frequency_domain(edr, edr_tm)
                # 6 + 6 + 6 + 1 = 19
                data.append([
                    rri_time_features["rmssd"], rri_time_features["sdnn"], rri_time_features["nn50"],
                    rri_time_features["pnn50"], rri_time_features["mrri"], rri_time_features["mhr"],
                    rri_frequency_features["vlf"] / rri_frequency_features["total_power"],
                    rri_frequency_features["lf"] / rri_frequency_features["total_power"],
                    rri_frequency_features["hf"] / rri_frequency_features["total_power"],
                    rri_frequency_features["lf_hf"], rri_frequency_features["lfnu"], rri_frequency_features["hfnu"],
                    edr_frequency_features["vlf"] / edr_frequency_features["total_power"],
                    edr_frequency_features["lf"] / edr_frequency_features["total_power"],
                    edr_frequency_features["hf"] / edr_frequency_features["total_power"],
                    edr_frequency_features["lf_hf"], edr_frequency_features["lfnu"], edr_frequency_features["hfnu"],
                    label
                ])
            else:
                # impossible heart rate: keep the label, blank the features
                data.append([np.nan] * 18 + [label])
        else:
            # too few / too many R peaks: keep the label, blank the features
            data.append([np.nan] * 18 + [label])
    data = np.array(data, dtype="float")
    return data
if __name__ == "__main__":
    # Extract features for every record and pickle them as an OrderedDict
    # keyed by record name.
    apnea_ecg = OrderedDict()

    # train data
    recordings = [
        "a01", "a02", "a03", "a04", "a05", "a06", "a07", "a08", "a09", "a10",
        "a11", "a12", "a13", "a14", "a15", "a16", "a17", "a18", "a19", "a20",
        "b01", "b02", "b03", "b04", "b05",
        "c01", "c02", "c03", "c04", "c05", "c06", "c07", "c08", "c09", "c10"
    ]
    for recording in recordings:
        # channel 0 of the record; rdann supplies the per-minute labels
        signal = wfdb.rdrecord(os.path.join(base_dir, recording), channels=[0]).p_signal[:, 0]
        labels = wfdb.rdann(os.path.join(base_dir, recording), extension="apn").symbol
        apnea_ecg[recording] = feature_extraction(recording, signal, labels)
    print()

    # test data
    recordings = [
        "x01", "x02", "x03", "x04", "x05", "x06", "x07", "x08", "x09", "x10",
        "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
        "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x29", "x30",
        "x31", "x32", "x33", "x34", "x35"
    ]
    # Test labels come from the released answer file: one blank-line-separated
    # block per record, record name in the first 3 chars, labels at every
    # other whitespace token starting from index 2.
    answers = {}
    filename = os.path.join(base_dir, "event-2-answers")
    with open(filename, "r") as f:
        for answer in f.read().split("\n\n"):
            answers[answer[:3]] = list("".join(answer.split()[2::2]))
    for recording in recordings:
        signal = wfdb.rdrecord(os.path.join(base_dir, recording), channels=[0]).p_signal[:, 0]
        labels = answers[recording]
        apnea_ecg[recording] = feature_extraction(recording, signal, labels)
    # protocol=2 keeps the pickle readable from Python 2
    with open(os.path.join(base_dir, "apnea-ecg.pkl"), "wb") as f:
        pickle.dump(apnea_ecg, f, protocol=2)
    print("ok")
| 44.990291 | 120 | 0.579197 |
959b3935838082e9b39f90f0dbe7ce84722264d7 | 3,904 | py | Python | tiddlywebplugins/tiddlyspace/openid.py | FND/tiddlyspace | 7b26e5b4e0b0a817b3ea0a357613c59705d016d4 | [
"BSD-3-Clause"
] | 2 | 2015-12-15T00:40:36.000Z | 2019-04-22T16:54:41.000Z | tiddlywebplugins/tiddlyspace/openid.py | jdlrobson/tiddlyspace | 70f500687fcd26e3fa4ef144297a05203ccf0f35 | [
"BSD-3-Clause"
] | null | null | null | tiddlywebplugins/tiddlyspace/openid.py | jdlrobson/tiddlyspace | 70f500687fcd26e3fa4ef144297a05203ccf0f35 | [
"BSD-3-Clause"
] | null | null | null | """
Subclass of tiddlywebplugins.openid2 to support
tiddlyweb_secondary_user cookie.
"""
import urlparse
from tiddlyweb.web.util import server_host_url, make_cookie
from tiddlywebplugins.openid2 import Challenger as OpenID
FRAGMENT_PREFIX = 'auth:OpenID:'
class Challenger(OpenID):
    """OpenID challenger that also sets a tiddlyweb_secondary_user cookie.

    Subclasses tiddlywebplugins.openid2's Challenger, overriding the
    success handler and the login form rendering.
    """

    def __init__(self):
        # challenger name used by the framework to identify this handler
        self.name = __name__

    def _domain_path(self, environ):
        """Return the cookie domain: the server host prefixed with a dot."""
        return "." + environ['tiddlyweb.config']['server_host']['host']

    def _success(self, environ, start_response, info):
        """
        After successful validation of an openid generate
        and send a cookie with the value of that openid.
        If this is a normal auth scenario make the name
        of the cookie the normal 'tiddlyweb_user'. If this
        is auth addition, where a fragment of 'auth:OpenID' is
        set, then name the cookie 'tiddlyweb_secondary_user'.
        """
        usersign = info.getDisplayIdentifier()
        if info.endpoint.canonicalID:
            usersign = info.endpoint.canonicalID
        # canonicolize usersign to tiddlyweb form
        # (strip the URI scheme and any trailing slash)
        if usersign.startswith('http'):
            usersign = usersign.split('://', 1)[1]
        usersign = usersign.rstrip('/')
        redirect = environ['tiddlyweb.query'].get(
            'tiddlyweb_redirect', ['/'])[0]
        uri = urlparse.urljoin(server_host_url(environ), redirect)
        cookie_name = 'tiddlyweb_user'
        cookie_age = environ['tiddlyweb.config'].get('cookie_age', None)
        # a fragment on the redirect URI signals the "auth addition" flow
        try:
            fragment = uri.rsplit('#', 1)[1]
        except (ValueError, IndexError):
            fragment = None
        secondary_cookie_name = 'tiddlyweb_secondary_user'
        secondary_cookie_age = None
        secondary_cookie_only = False
        if fragment:
            # rewrite the placeholder openid in the fragment with the
            # validated usersign and skip setting the primary cookie
            openid = fragment[len(FRAGMENT_PREFIX):]
            uri = uri.replace(FRAGMENT_PREFIX + openid,
                              FRAGMENT_PREFIX + usersign)
            secondary_cookie_only = True
        secret = environ['tiddlyweb.config']['secret']
        cookie_header_string = make_cookie(cookie_name, usersign,
                                           mac_key=secret, path=self._cookie_path(environ),
                                           expires=cookie_age)
        # the secondary cookie is always set, scoped to the whole domain
        secondary_cookie_header_string = make_cookie(
            secondary_cookie_name, usersign,
            mac_key=secret, path=self._cookie_path(environ),
            expires=cookie_age, domain=self._domain_path(environ))
        headers = [('Location', uri.encode('utf-8')),
                   ('Content-Type', 'text/plain'),
                   ('Set-Cookie', secondary_cookie_header_string)]
        if not secondary_cookie_only:
            headers.append(('Set-Cookie', cookie_header_string))
        start_response('303 See Other', headers)
        return [uri]

    def _render_form(self, environ, start_response, openid='',
                     message='', form=''):
        """Render the OpenID login form, with a CSRF token filled client-side."""
        redirect = environ['tiddlyweb.query'].get(
            'tiddlyweb_redirect', ['/'])[0]
        start_response('200 OK', [(
            'Content-Type', 'text/html')])
        environ['tiddlyweb.title'] = 'OpenID Login'
        return ["""
<div id='content'>
<div class='message'>%s</div>
<pre>
<form action="" method="POST">
OpenID: <input name="openid" size="60" value="%s"/>
<input type="hidden" name="tiddlyweb_redirect" value="%s" />
<input type="hidden" id="csrf_token" name="csrf_token" />
<input type="submit" value="submit" />
</form>
<script type="text/javascript"
src="%s/bags/tiddlyspace/tiddlers/TiddlySpaceCSRF"></script>
<script type="text/javascript">
var csrfToken = window.getCSRFToken(),
el = null;
if (csrfToken) {
el = document.getElementById('csrf_token');
el.value = csrfToken;
}
</script>
</pre>
</div>""" % (message, openid, redirect,
             environ['tiddlyweb.config']['server_prefix'])]
| 36.148148 | 72 | 0.615523 |
959b55108828b137a9e2c7ce659d11e247c56fff | 226 | py | Python | tests/__init__.py | tltx/iommi | a0ca5e261040cc0452d7452e9320a88af5222b30 | [
"BSD-3-Clause"
] | 192 | 2020-01-30T14:29:56.000Z | 2022-03-28T19:55:30.000Z | tests/__init__.py | tltx/iommi | a0ca5e261040cc0452d7452e9320a88af5222b30 | [
"BSD-3-Clause"
] | 105 | 2020-03-29T21:59:01.000Z | 2022-03-24T12:29:09.000Z | tests/__init__.py | tltx/iommi | a0ca5e261040cc0452d7452e9320a88af5222b30 | [
"BSD-3-Clause"
] | 28 | 2020-02-02T20:51:09.000Z | 2022-03-08T16:23:42.000Z | from datetime import datetime
import freezegun
# Initialize freezegun once at import time: constructing freeze_time objects
# is expensive, so share a single module-level instance across the test suite.
initialize_freezegun = freezegun.freeze_time(datetime(2021, 1, 1))
initialize_freezegun.start()
| 28.25 | 80 | 0.836283 |
959bcca51833c2423f463ff10fb943bd7f71b93f | 9,047 | py | Python | pyacoustics/morph/intensity_morph.py | UNIST-Interactions/pyAcoustics | f22d19d258b4e359fec365b30f11af261dee1b5c | [
"MIT"
] | 72 | 2015-12-10T20:00:04.000Z | 2022-03-31T05:42:17.000Z | pyacoustics/morph/intensity_morph.py | alivalehi/pyAcoustics | ab446681d7a2267063afb6a386334dcaefd0d93b | [
"MIT"
] | 5 | 2017-08-08T05:13:15.000Z | 2020-11-26T00:58:04.000Z | pyacoustics/morph/intensity_morph.py | alivalehi/pyAcoustics | ab446681d7a2267063afb6a386334dcaefd0d93b | [
"MIT"
] | 16 | 2016-05-09T07:36:15.000Z | 2021-08-30T14:23:25.000Z | '''
Created on Apr 2, 2015
@author: tmahrt
'''
import os
from os.path import join
import math
import copy
from pyacoustics.morph.morph_utils import common
from pyacoustics.morph.morph_utils import plot_morphed_data
from pyacoustics.utilities import utils
from pyacoustics.utilities import sequences
from pyacoustics.signals import audio_scripts
from pyacoustics.utilities import my_math
def intensityMorph(fromWavFN, toWavFN, fromWavTGFN, toWavTGFN, tierName,
                   numSteps, coreChunkSize, plotFlag):
    """Morph the intensity of fromWavFN toward toWavFN.

    Interval boundaries are read from the named tier of each textgrid;
    the actual morphing is delegated to _intensityMorph().
    """
    sourceIntervals = common.getIntervals(fromWavTGFN, tierName)
    targetIntervals = common.getIntervals(toWavTGFN, tierName)
    baseName = os.path.splitext(fromWavFN)[0] + "_int_" + tierName
    _intensityMorph(fromWavFN, toWavFN, sourceIntervals, targetIntervals,
                    numSteps, coreChunkSize, plotFlag, baseName)
def _intensityMorph(fromWavFN, toWavFN, fromDataTupleList,
                    toDataTupleList, numSteps, coreChunkSize, plotFlag,
                    outputName=None):
    """Core intensity-morphing routine: writes one wav per interpolation step.

    NOTE(review): as written this function cannot run -- it references
    several names that are never defined in this module (fromSubWav and
    toSubWav before any assignment, fromWavObj, the `audio` module, and
    plotMorphedData, while the import is `plot_morphed_data`), and it uses
    the Python-2-only `xrange`.  It appears to be incomplete or to depend
    on code that was removed; verify against the project history before use.
    """
    if outputName is None:
        outputName = os.path.splitext(fromWavFN)[0] + "_int"
    outputDir = join(os.path.split(fromWavFN)[0], "output")
    utils.makeDir(outputDir)
    # Determine the multiplication values to be used in normalization
    # - this extracts one value per chunk
    expectedLength = 0
    normFactorList = []
    truncatedToList = []
    chunkSizeList = []
    fromDataList = []
    fromParams = audio_scripts.getParams(fromWavFN)
    toParams = audio_scripts.getParams(toWavFN)  # NOTE(review): unused
    for fromTuple, toTuple in zip(fromDataTupleList, toDataTupleList):
        fromStart, fromEnd = fromTuple[:2]
        toStart, toEnd = toTuple[:2]
        # fromParams[2] is presumably the sample rate -- TODO confirm
        expectedLength += (fromEnd - fromStart) * fromParams[2]
        # NOTE(review): fromSubWav/toSubWav are used here but never assigned
        fromDataList.extend(fromSubWav.rawDataList)
        normFactorListTmp, a = getRelativeNormalizedFactors(fromSubWav,
                                                            toSubWav,
                                                            coreChunkSize)
        tmpChunkList = [tmpChunkSize
                        for value, tmpChunkSize in normFactorListTmp]
        chunkSizeList.append(sum(tmpChunkList))
        normFactorList.extend(normFactorListTmp)
        truncatedToList.extend(a)
    interpolatedResults = []
    # one interpolation generator per chunk, from 1.0 to the chunk's factor
    normFactorGen = [sequences.interp(1.0, factor[0], numSteps)
                     for factor in normFactorList]
    tmpChunkSizeList = [factor[1] for factor in normFactorList]
    for i in xrange(numSteps):
        outputFN = "%s_s%d_%d_%d.wav" % (outputName,
                                         coreChunkSize,
                                         numSteps - 1, i)
        tmpNormFactorList = [next(normFactorGen[j])
                             for j in xrange(len(normFactorGen))]
        # Skip the first value (same as the input value)
        if i == 0:
            continue
        tmpInputList = zip(tmpNormFactorList, tmpChunkSizeList)
        normalizationTuple = expandNormalizationFactors(tmpInputList)
        expandedNormFactorList = normalizationTuple[0]
        # It happened once that the expanded factor list was off by one value
        # -- I could not determine why, so this is just a cheap hack
        if len(expandedNormFactorList) == (expectedLength - 1):
            expandedNormFactorList.append(expandedNormFactorList[-1])
        # print("Diff: ", expectedLength, len(expandedNormFactorList))
        assert(expectedLength == len(expandedNormFactorList))
        # NOTE(review): fromWavObj is never defined in this function
        newWavObj = copy.deepcopy(fromWavObj)
        newRawDataList = []
        # Apply the normalization and reinsert the data back
        # into the original file
        offset = 0
        for fromTuple, chunkSize in zip(fromDataTupleList, chunkSizeList):
            fromStart, fromEnd = fromTuple[:2]
            fromSubWav = fromWavObj.extractSubsegment(fromStart, fromEnd)
            assert(len(fromSubWav.rawDataList) ==
                   len(expandedNormFactorList[offset:offset + chunkSize]))
            tmpList = [fromSubWav.rawDataList,
                       expandedNormFactorList[offset:offset + chunkSize]]
            subRawDataList = [value * normFactor for value, normFactor in
                              utils.safeZip(tmpList, enforceLength=True)]
            newRawDataList.extend(subRawDataList)
            offset += chunkSize
        # NOTE(review): `audio` is not imported in this module
        newWavObj = audio.WavObj(newRawDataList, fromWavObj.samplingRate)
        newWavObj.save(join(outputDir, outputFN))
        interpolatedResults.append(newWavObj.rawDataList)
    plotFN = "%s_s%d_%d.png" % (outputFN, coreChunkSize, numSteps)
    if plotFlag:
        # NOTE(review): the module imports plot_morphed_data, not
        # plotMorphedData -- this call would raise NameError
        plotMorphedData.plotIntensity(fromDataList,
                                      truncatedToList,
                                      interpolatedResults,
                                      expandedNormFactorList,
                                      os.path.join(outputDir, plotFN))
def getNormalizationFactor(lst, refLst=None):
    """Return the largest gain that maps *lst* into the range of *refLst*.

    The factor is limited by whichever of the positive or negative peak
    would clip first.  When *refLst* is omitted the full 16-bit PCM range
    (+/-32767) serves as the reference.  Zeros are ignored on both sides.
    """
    sourceValues = [v for v in set(lst) if v != 0]
    sourceHigh = float(max(sourceValues))
    sourceLow = float(min(sourceValues))

    if refLst is None:
        # default reference: signed 16-bit sample limits
        refHigh, refLow = 32767.0, -32767.0
    else:
        refValues = [v for v in set(refLst) if v != 0]
        refHigh = float(max(refValues))
        refLow = float(min(refValues))

    # take the smaller of the positive-side and negative-side headroom
    return min(refHigh / sourceHigh, abs(refLow) / abs(sourceLow))
def getRelativeNormalizedFactors(fromDataList, toDataList, chunkSize):
    '''
    Determines the factors to be used to normalize sourceWav from targetWav

    This can be used to relatively normalize the source based on the target
    on an iterative basis (small chunks are normalized rather than the entire
    wav.

    Returns (normFactorList, newTargetRawDataList): one
    (factor, distToNextControlPoint) pair per chunk, and the target data
    resampled to the source's length.
    '''
    # Sample proportionately from the targetWav
    # - if the two lists are the same length, there is no change
    # - if /target/ is shorter, it will be lengthened with some repeated values
    # - if /target/ is longer, it will be shortened with some values dropped
    # NOTE(review): sequences.interp is called elsewhere with an int step
    # count but here receives fromDataList -- confirm its signature
    tmpIndexList = sequences.interp(0, len(toDataList) - 1,
                                    fromDataList)
    newTargetRawDataList = [toDataList[int(round(i))]
                            for i in tmpIndexList]
    assert(len(fromDataList) == len(newTargetRawDataList))
    # iterate both signals chunk by chunk in lockstep
    fromGen = sequences.subsequenceGenerator(fromDataList,
                                             chunkSize,
                                             sequences.sampleMiddle,
                                             sequences.DO_SAMPLE_GATED)
    toGen = sequences.subsequenceGenerator(newTargetRawDataList,
                                           chunkSize,
                                           sequences.sampleMiddle,
                                           sequences.DO_SAMPLE_GATED)
    normFactorList = []
    i = 0
    for fromTuple, toTuple in zip(fromGen, toGen):
        fromDataChunk = fromTuple[0]
        toDataChunk = toTuple[0]
        distToNextControlPoint = fromTuple[2]
        # per-chunk gain that fits the source chunk into the target's range
        normFactor = getNormalizationFactor(fromDataChunk, toDataChunk)
        normFactorList.append((normFactor, distToNextControlPoint))

#         i += 1
#         if i >= 38:
#             print("hello")

#     print(len(sourceWav.rawDataList), allChunks)
#     assert(len(sourceWav.rawDataList) == allChunks)

    return normFactorList, newTargetRawDataList
def expandNormalizationFactors(normFactorList):
    '''
    Expands the normFactorList from being chunk-based to sample-based

    E.g. A wav with 1000 samples may be represented by a factorList of 5 chunks
    (5 factor values). This function will expand that to 1000.

    normFactorList -- list of (factor, chunkSize) tuples.
    Returns (normFactorsFull, controlPoints), both sample-length lists.
    '''
    i = 0
    normFactorsFull = []
    controlPoints = []
    # interpolate between successive chunk factors over each chunk's samples
    while i < len(normFactorList) - 1:
        startVal, chunkSize = normFactorList[i]
        endVal = normFactorList[i + 1][0]
        normFactorsFull.extend(my_math.linspace(startVal, endVal, chunkSize))
        controlPoints.append(startVal)
        controlPoints.extend(my_math.linspace(startVal, startVal,
                                              chunkSize - 1))
        i += 1

    # We have no more data, so just repeat the final norm factor at the tail
    # of the file
    value, finalChunkSize = normFactorList[i]
    controlPoints.append(value)
    # BUG FIX: the tail control points previously repeated `startVal` (the
    # penultimate chunk's factor) instead of the final factor `value`, and
    # raised NameError when normFactorList held a single entry.
    controlPoints.extend(my_math.linspace(value, value,
                                          finalChunkSize - 1))
    normFactorsFull.extend(my_math.linspace(value, value, finalChunkSize))
    print('Norm factors full: %d' % len(normFactorsFull))

    return normFactorsFull, controlPoints
| 36.776423 | 79 | 0.606831 |
959ca1652d25eeda188d0626465d82a0647c2777 | 1,886 | py | Python | algorithms/library/metricscontroller.py | heitor57/poi-rss | 12990af118f19595be01bf80e26a7ee93f9d05d8 | [
"MIT"
] | 1 | 2021-09-01T23:55:27.000Z | 2021-09-01T23:55:27.000Z | algorithms/library/metricscontroller.py | heitor57/poi-rss | 12990af118f19595be01bf80e26a7ee93f9d05d8 | [
"MIT"
] | 1 | 2021-09-09T06:21:48.000Z | 2021-09-14T02:08:33.000Z | algorithms/library/metricscontroller.py | heitor57/poi-rss | 12990af118f19595be01bf80e26a7ee93f9d05d8 | [
"MIT"
] | null | null | null | import numpy as np
class MetricsController:
def __init__(self,metrics=None,algorithm="generic",metrics_path="/home/heitor/recsys/data/metrics",k=10):
self.algorithm=algorithm
self.metrics_path=metrics_path
self.k=k
if metrics==None:
self.fload_metrics()
else:
self.metrics={}
for i in metrics:
self.metrics[i]={}
def append_data(self,uid,data):
c=0
for i in self.metrics.keys():
self.metrics[i][uid]=data[c]
c+=1
def get_metrics_mean(self):
result=str()
for c,i in enumerate(self.metrics.keys()):
result+=("%s→%.6f"+ ('' if (c==len(self.metrics.keys())-1) else ',')) % (i,np.mean(self.metrics[i]))
return result
def print_metrics(self):
#for c,i in enumerate(self.metrics.keys()):
# print("%s→%.6f" % (i,np.mean(self.metrics[i])),end=((c==len(self.metrics.keys())-1)?'':','))
print(self.get_metrics_mean())
pass
def __str__(self):
return f"Metrics-{list(self.metrics.keys())}\nAlgorithm-{self.algorithm}\nMetricsPath-{self.metrics_path}\nRecSize-{self.k}\n"+self.get_metrics_mean()
def fwrite_metrics(self):
fname=self.metrics_path+"/"+self.algorithm+"_at_"+str(self.k)
result=str()
result=f"""{str(self.metrics)}"""
# for c,i in enumerate(self.metrics.keys()):
# result+=("%s\t%.6f" % (i,np.mean(self.metrics[i]))+('' if (c==len(self.metrics.keys())-1) else '\n'))
f=open(fname,"w+")
f.write(result)
f.close()
print("File "+fname+" written with success")
def fload_metrics(self):
fname=self.metrics_path+"/"+self.algorithm+"_at_"+str(self.k)
f=open(fname,"r")
self.metrics=eval(f.read())
f.close() | 37.72 | 158 | 0.559915 |
959cbddc7a775bd66392c574ba57d0e444a033d9 | 736 | py | Python | backend-service/users-service/app/app/models/user.py | abhishek70/python-petclinic-microservices | e15a41a668958f35f1b962487cd2360c5c150f0b | [
"MIT"
] | 2 | 2021-05-19T07:21:59.000Z | 2021-09-15T17:30:08.000Z | backend-service/users-service/app/app/models/user.py | abhishek70/python-petclinic-microservices | e15a41a668958f35f1b962487cd2360c5c150f0b | [
"MIT"
] | null | null | null | backend-service/users-service/app/app/models/user.py | abhishek70/python-petclinic-microservices | e15a41a668958f35f1b962487cd2360c5c150f0b | [
"MIT"
] | null | null | null | from typing import TYPE_CHECKING
from sqlalchemy import Boolean, Column, Integer, String
from sqlalchemy.orm import relationship
from app.db.base_class import Base
if TYPE_CHECKING:
from .pet import Pet # noqa: F401
class User(Base):
    """SQLAlchemy model for an application user who may own pets."""

    # surrogate primary key
    id = Column(Integer, primary_key=True, index=True, autoincrement=True, nullable=False)
    first_name = Column(String(20), index=True, nullable=False)
    last_name = Column(String(20), index=True, nullable=False)
    # unique login identifier
    email = Column(String, unique=True, index=True, nullable=False)
    # password hash only -- never the plaintext password
    hashed_password = Column(String, nullable=False)
    is_active = Column(Boolean(), default=True)
    is_superuser = Column(Boolean(), default=False)
    # one-to-many: mirrors Pet.owner
    pets = relationship("Pet", back_populates="owner")
| 38.736842 | 90 | 0.744565 |
959f88de24a529a6005e19e9f3a68842519cdb55 | 930 | py | Python | slackbot/admin.py | surface-security/django-slackbot | 8d22fb922cf5365284d7a4836bb095eeeb8c7e90 | [
"MIT"
] | 1 | 2022-01-24T10:29:09.000Z | 2022-01-24T10:29:09.000Z | slackbot/admin.py | surface-security/django-slack-processor | 8d22fb922cf5365284d7a4836bb095eeeb8c7e90 | [
"MIT"
] | 4 | 2022-02-21T15:59:08.000Z | 2022-03-26T00:33:13.000Z | slackbot/admin.py | surface-security/django-slack-processor | 8d22fb922cf5365284d7a4836bb095eeeb8c7e90 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.utils.html import format_html
from . import get_user_model
@admin.register(get_user_model())
class UserAdmin(admin.ModelAdmin):
    """Read-only Django admin for Slack users (all fields synced externally)."""

    list_display = ['ext_id', 'username', 'name', 'email', 'active', 'is_bot', 'is_admin', 'get_photo', 'last_seen']
    list_filter = ('active', 'is_bot', 'is_admin')
    search_fields = ['username', 'name', 'email', 'ext_id']
    # every field is read-only: records are managed by the sync, not the admin
    readonly_fields = ['ext_id', 'username', 'name', 'email', 'active', 'is_bot', 'is_admin', 'get_photo', 'last_seen']
    exclude = ['photo', 'photo_thumb']

    def get_photo(self, obj):
        """Render the thumbnail as a link to the full-size photo."""
        if obj.photo:
            return format_html(
                '<a href="{}" target="_blank"><img src="{}" style="width:50px;"></a>', obj.photo, obj.photo_thumb
            )
        return None

    get_photo.short_description = 'Photo'
    get_photo.allow_tags = True

    def has_add_permission(self, _):
        # users can never be created by hand in the admin
        return False
| 34.444444 | 119 | 0.634409 |
95a0896392ae42746732acf467a7a7dc9ad52617 | 1,476 | py | Python | touroute/tourouteapp/migrations/0001_initial.py | oscarlamasrios/toroute | 5b00c0f606f438229e7857f25a23c4d51ff34293 | [
"Apache-2.0"
] | null | null | null | touroute/tourouteapp/migrations/0001_initial.py | oscarlamasrios/toroute | 5b00c0f606f438229e7857f25a23c4d51ff34293 | [
"Apache-2.0"
] | null | null | null | touroute/tourouteapp/migrations/0001_initial.py | oscarlamasrios/toroute | 5b00c0f606f438229e7857f25a23c4d51ff34293 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-04-24 14:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: Place, Route, and the RP join table linking them.

    Auto-generated by Django 1.11; do not hand-edit once applied.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Place',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('lat', models.FloatField()),
                ('lon', models.FloatField()),
                ('identifier', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Route',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='RP',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('place_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tourouteapp.Place')),
                ('route_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tourouteapp.Route')),
            ],
        ),
    ]
| 34.325581 | 117 | 0.571816 |
95a1633d9ce1bb6f212d67d9111c6397f243ba02 | 19,691 | py | Python | catalog/application.py | gevannmullins/catalog-category-items | 850c77e17d5123511c954e3705f522228c6574ea | [
"MIT"
] | null | null | null | catalog/application.py | gevannmullins/catalog-category-items | 850c77e17d5123511c954e3705f522228c6574ea | [
"MIT"
] | null | null | null | catalog/application.py | gevannmullins/catalog-category-items | 850c77e17d5123511c954e3705f522228c6574ea | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from flask import Flask, render_template, request, redirect, jsonify, url_for, flash
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Category, Item, User
from flask import session as login_session
import random
import string
import collections
import json
import requests
from flask import make_response
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
# from dict2xml import dict2xml
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
import psycopg2
# from page_views import *
app = Flask(__name__)
CLIENT_ID = json.loads(open('/vagrant/catalog/client_secret.json', 'r').read())['web']['client_id']
APPLICATION_NAME = "Catalog Category Items Application"
engine = create_engine('sqlite:///catalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
# User Helper Functions
def createUser(login_session):
    """Persist a new User built from the login session and return its id."""
    user = User(name=login_session['username'],
                email=login_session['email'],
                picture=login_session['picture'])
    session.add(user)
    session.commit()
    # query the row back so the database-assigned id is returned
    stored = session.query(User).filter_by(email=login_session['email']).one()
    return stored.id
def getUserInfo(user_id):
    """Return the User row with the given primary key."""
    return session.query(User).filter_by(id=user_id).one()
def getUserID(email):
    """Return the id of the user with *email*, or None if no such user.

    Catches Exception instead of the original bare except so that
    system-exiting exceptions (KeyboardInterrupt, SystemExit) are not
    silently swallowed.
    """
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except Exception:
        # .one() raises when no (or more than one) row matches
        return None
# login page
@app.route('/login')
def showLogin():
    """Render the login page with a fresh anti-forgery state token."""
    # 32-char random token stored in the session and embedded in the page.
    # range() replaces the Python-2-only xrange (identical behavior here).
    # NOTE(review): random is not cryptographically secure; consider
    # random.SystemRandom for the state token.
    state = ''.join(random.choice(string.ascii_uppercase + string.digits)
                    for x in range(32))
    login_session['state'] = state
    return render_template('user_login.html', STATE=state)
# display home page / categories page
# Categories
@app.route('/')
@app.route('/categories/')
def showCategories():
    """Home page: list all categories (public view when not logged in)."""
    categories = session.query(Category).order_by(asc(Category.name))
    items = session.query(Item).all()
    if 'username' not in login_session:
        # anonymous visitors get the read-only template
        return render_template(
            'public_categories.html',
            categories=categories,
            items=items
        )
    else:
        return render_template('categories.html', categories=categories, items=items)
# Show Category Items
@app.route('/categories/<int:category_id>/')
@app.route('/categories/<int:category_id>/items/')
def showItems(category_id):
    """List the items of one category; owners get the editable template."""
    category = session.query(Category).filter_by(id=category_id).one()
    creator = getUserInfo(category.user_id)
    items = session.query(Item).filter_by(
        category_id=category_id).all()
    # read-only view for anonymous visitors and non-owners
    if ('username' not in login_session or creator.id != login_session['user_id']):
        return render_template(
            'public_items.html',
            items=items,
            category=category,
            creator=creator
        )
    else:
        return render_template(
            'items.html',
            items=items,
            category=category,
            creator=creator
        )
# Create a new category
@app.route('/categories/new/', methods=['GET', 'POST'])
def newCategory():
    """Create a new category (GET shows the form, POST saves it)."""
    if 'username' not in login_session:
        return redirect('/login')
    if request.method == 'POST':
        if request.form['name']:
            # note: the local shadows the function name, which works but
            # prevents recursion within this branch
            newCategory = Category(
                name=request.form['name'], user_id=login_session['user_id'])
            session.add(newCategory)
            flash(
                'New Category %s Successfully Created'
                % newCategory.name
            )
            session.commit()
            return redirect(url_for('showCategories'))
        else:
            # empty name: re-show the form with an error flash
            flash('Please Complete Name Field')
            return render_template('new_category.html')
    else:
        return render_template('new_category.html')
# Edit a categories
@app.route('/categories/<int:category_id>/edit/', methods=['GET', 'POST'])
def editCategory(category_id):
    """Rename a category; only its owner may edit it."""
    editedCategory = session.query(Category).filter_by(id=category_id).one()
    if 'username' not in login_session:
        return redirect('/login')
    # non-owners get a client-side alert instead of the form
    if editedCategory.user_id != login_session['user_id']:
        return """<script>function myFunction()
        {alert('You are not authorized to edit
        this category. Please create your own
        category in order to edit.');}</script>
        <body onload='myFunction()''>"""
    if request.method == 'POST':
        if request.form['name']:
            editedCategory.name = request.form['name']
            flash('Successfully edited your Category %s' % editedCategory.name)
            return redirect(url_for('showItems', category_id=category_id))
        else:
            # empty name: bounce back to the edit form
            return redirect(url_for('editCategory', category_id=category_id))
    else:
        return render_template(
            'edit_category.html',
            category=editedCategory,
            category_id=category_id
        )
# Delete a category
@app.route('/categories/<int:category_id>/delete/', methods=['GET', 'POST'])
def deleteCategory(category_id):
    """Delete a category; only its owner may delete it."""
    categoryToDelete = session.query(
        Category).filter_by(id=category_id).one()
    if 'username' not in login_session:
        return redirect('/login')
    # non-owners get a client-side alert instead of the confirmation page
    if categoryToDelete.user_id != login_session['user_id']:
        return """<script>function myFunction()
        {alert('You do not have permission to delete this category. Please create your own
        category in order to delete.');}</script>
        <body onload='myFunction()''>"""
    if request.method == 'POST':
        session.delete(categoryToDelete)
        flash('%s Successfully Deleted' % categoryToDelete.name)
        session.commit()
        return redirect(url_for(
            'showCategories',
            category_id=category_id)
        )
    else:
        # GET: show the delete-confirmation page
        return render_template(
            'delete_category.html',
            category=categoryToDelete
        )
# Item Services
# Create a new item
@app.route('/categories/<int:category_id>/items/new/', methods=['GET', 'POST'])
def newItem(category_id):
    """Add an item to a category; only the category owner may add items."""
    if 'username' not in login_session:
        return redirect('/login')
    category = session.query(Category).filter_by(id=category_id).one()
    # non-owners get a client-side alert instead of the form
    if login_session['user_id'] != category.user_id:
        return """<script>function myFunction()
        {alert('You are not authorized to add
        items to this category. Please create
        your own category in order to add items.');
        }</script><body onload='myFunction()''>"""
    if request.method == 'POST':
        if (request.form['name'] and request.form['description']):
            newItem = Item(
                name=request.form['name'],
                description=request.form['description'],
                category_id=category_id,
                user_id=category.user_id
            )
            session.add(newItem)
            session.commit()
            flash('New %s Item Successfully Created' % (newItem.name))
            return redirect(url_for('showItems', category_id=category_id))
        else:
            # missing name or description: bounce back to the form
            flash("Please Complete Form")
            return redirect(url_for('newItem',
                                    category_id=category_id,
                                    category=category))
    else:
        return render_template(
            'new_item.html',
            category_id=category_id,
            category=category
        )
# Edit a item
@app.route('/categories/<int:category_id>/items/<int:item_id>/edit', methods=['GET', 'POST'])
def editItem(category_id, item_id):
if 'username' not in login_session:
return redirect('/login')
editedItem = session.query(Item).filter_by(id=item_id).one()
category = session.query(Category).filter_by(id=category_id).one()
if login_session['user_id'] != category.user_id:
return """<script>function myFunction()
{alert('You are not authorized to edit
items to this category. Please create
your own category in order to edit items.');
}</script><body onload='myFunction()''>"""
if request.method == 'POST':
if (request.form['name'] and request.form['description']):
editedItem.name = request.form['name']
editedItem.description = request.form['description']
editedItem.category_id = category.id
session.add(editedItem)
session.commit()
flash('Item Successfully Edited')
return redirect(url_for('showItems',
category_id=category_id))
else:
flash("Do Not Leave Any Blanks")
return redirect(url_for('editItem',
category_id=category_id,
item_id=item_id,
item=editedItem,
category=category))
else:
return render_template(
'edit_item.html',
category_id=category_id,
item_id=item_id,
item=editedItem,
category=category
)
# Delete a item
@app.route(
'/categories/<int:category_id>/items/<int:item_id>/delete',
methods=['GET', 'POST']
)
def deleteItem(category_id, item_id):
if 'username' not in login_session:
return redirect('/login')
category = session.query(Category).filter_by(id=category_id).one()
itemToDelete = session.query(Item).filter_by(id=item_id).one()
if login_session['user_id'] != category.user_id:
return """<script>function myFunction()
{alert('You are not authorized to delete
items to this category. Please create
your own category in order to delete items.')
;}</script><body onload='myFunction()''>"""
if request.method == 'POST':
session.delete(itemToDelete)
session.commit()
flash('Item Successfully Deleted')
return redirect(url_for('showItems', category_id=category_id))
else:
return render_template(
'delete_item.html',
item=itemToDelete,
category=category
)
# Disconnect based on provider
@app.route('/logout')
@app.route('/disconnect')
def disconnect():
if 'provider' in login_session:
if login_session['provider'] == 'google':
gdisconnect()
del login_session['gplus_id']
del login_session['access_token']
if login_session['provider'] == 'facebook':
fbdisconnect()
del login_session['facebook_id']
del login_session['username']
del login_session['email']
del login_session['picture']
del login_session['user_id']
del login_session['provider']
flash("You have successfully been logged out.")
return redirect(url_for('showCategories'))
else:
flash("You were not logged in")
return redirect(url_for('showCategories'))
##### JSON APIs to view Category Information
@app.route('/categories/<int:category_id>/items/JSON')
def categoryItemJSON(category_id):
category = session.query(Category).filter_by(id=category_id).one()
items = session.query(Item).filter_by(
category_id=category_id).all()
return jsonify(Items=[i.serialize for i in items])
@app.route('/categories/<int:category_id>/items/<int:item_id>/JSON')
def itemJSON(category_id, item_id):
item = session.query(Item).filter_by(id=item_id).one()
return jsonify(Item=Item.serialize)
@app.route('/categories/JSON')
def categoryJSON():
categories = session.query(Category).all()
return jsonify(categories=[r.serialize for r in categories])
##### Social media routes #####
@app.route('/fbconnect', methods=['POST'])
def fbconnect():
if request.args.get('state') != login_session['state']:
response = make_response(json.dumps('Invalid state parameter.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
access_token = request.data
print "access token received %s " % access_token
app_id = json.loads(open('fb_client_secrets.json', 'r').read())[
'web']['app_id']
app_secret = json.loads(
open('fb_client_secret.json', 'r').read())['web']['app_secret']
url = 'https://graph.facebook.com/oauth/access_token?grant_type=fb_exchange_token&client_id=%s&client_secret=%s&fb_exchange_token=%s' % (app_id, app_secret, access_token)
h = httplib2.Http()
result = h.request(url, 'GET')[1]
# Use token to get user info from API
userinfo_url = "https://graph.facebook.com/v2.4/me"
# strip expire tag from access token
token = result.split("&")[0]
url = 'https://graph.facebook.com/v2.4/me?%s&fields=name,id,email' % token
h = httplib2.Http()
result = h.request(url, 'GET')[1]
# print "url sent for API access:%s"% url
# print "API JSON result: %s" % result
data = json.loads(result)
login_session['provider'] = 'facebook'
login_session['username'] = data["name"]
login_session['email'] = data["email"]
login_session['facebook_id'] = data["id"]
# The token must be stored in the login_session
# in order to properly logout, let's strip out
# the information before the equals sign in our token
stored_token = token.split("=")[1]
login_session['access_token'] = stored_token
# Get user picture
url = 'https://graph.facebook.com/v2.4/me/picture?%s&redirect=0&height=200&width=200' % token
h = httplib2.Http()
result = h.request(url, 'GET')[1]
data = json.loads(result)
login_session['picture'] = data["data"]["url"]
# see if user exists
user_id = getUserID(login_session['email'])
if not user_id:
user_id = createUser(login_session)
login_session['user_id'] = user_id
output = ''
output += '<h1>Welcome, '
output += login_session['username']
output += '!</h1>'
output += '<img src="'
output += login_session['picture']
output += ''' " style = "width: 300px;
height: 300px;
border-radius: 150px;
-webkit-border-radius: 150px;
-moz-border-radius: 150px;"> '''
flash("Now logged in as %s" % login_session['username'])
return output
@app.route('/fbdisconnect')
def fbdisconnect():
facebook_id = login_session['facebook_id']
# The access token must me included to successfully logout
access_token = login_session['access_token']
url = 'https://graph.facebook.com/%s/permissions?' \
'access_token=%s' % (facebook_id, access_token)
h = httplib2.Http()
result = h.request(url, 'DELETE')[1]
return "you have been logged out"
@app.route('/gconnect', methods=['POST'])
def gconnect():
# Validate state token
if request.args.get('state') != login_session['state']:
response = make_response(json.dumps('Invalid state parameter.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Obtain authorization code
code = request.data
try:
# Upgrade the authorization code into a credentials object
oauth_flow = flow_from_clientsecrets('client_secret.json', scope='')
oauth_flow.redirect_uri = 'postmessage'
credentials = oauth_flow.step2_exchange(code)
except FlowExchangeError:
response = make_response(
json.dumps('Failed to upgrade the authorization code.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Check that the access token is valid.
access_token = credentials.access_token
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
% access_token)
h = httplib2.Http()
result = json.loads(h.request(url, 'GET')[1])
# If there was an error in the access token info, abort.
if result.get('error') is not None:
response = make_response(json.dumps(result.get('error')), 500)
response.headers['Content-Type'] = 'application/json'
return response
# Verify that the access token is used for the intended user.
gplus_id = credentials.id_token['sub']
if result['user_id'] != gplus_id:
response = make_response(
json.dumps("Token's user ID doesn't match given user ID."), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Verify that the access token is valid for this app.
if result['issued_to'] != CLIENT_ID:
response = make_response(
json.dumps("Token's client ID does not match app's."), 401)
print "Token's client ID does not match app's."
response.headers['Content-Type'] = 'application/json'
return response
stored_credentials = login_session.get('credentials')
stored_gplus_id = login_session.get('gplus_id')
if stored_credentials is not None and gplus_id == stored_gplus_id:
response = make_response(json.dumps(
'Current user is already connected.'), 200)
response.headers['Content-Type'] = 'application/json'
return response
# Store the access token in the session for later use.
login_session['access_token'] = credentials.access_token
login_session['gplus_id'] = gplus_id
# Get user info
userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
params = {'access_token': credentials.access_token, 'alt': 'json'}
answer = requests.get(userinfo_url, params=params)
data = answer.json()
login_session['username'] = data['name']
login_session['picture'] = data['picture']
login_session['email'] = data['email']
# ADD PROVIDER TO LOGIN SESSION
login_session['provider'] = 'google'
# see if user exists, if it doesn't make a new one
user_id = getUserID(data["email"])
if not user_id:
user_id = createUser(login_session)
login_session['user_id'] = user_id
output = ''
output += '<h1>Welcome, '
output += login_session['username']
output += '!</h1>'
output += '<img src="'
output += login_session['picture']
output += ''' "style = "width: 300px;
height: 300px;
border-radius: 150px;
-webkit-border-radius: 150px;
-moz-border-radius: 150px;"> '''
flash("you are now logged in as %s" % login_session['username'])
print "done!"
return output
# DISCONNECT - Revoke a current user's token and reset their login_session
@app.route('/gdisconnect')
def gdisconnect():
# Only disconnect a connected user.
credentials = login_session.get('credentials')
if credentials is None:
response = make_response(
json.dumps('Current user not connected.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
access_token = credentials.access_token
url = 'https://accounts.google.com/' \
'o/oauth2/revoke?token=%s' % access_token
h = httplib2.Http()
result = h.request(url, 'GET')[0]
if result['status'] != '200':
# For whatever reason, the given token was invalid.
response = make_response(
json.dumps('Failed to revoke token for given user.'), 400)
response.headers['Content-Type'] = 'application/json'
return response
if __name__ == '__main__':
app.secret_key = "lRYRXEimZGfbt3Q2TpD_6_Kj"
app.debug = True
app.run(host='0.0.0.0', port=8002)
| 35.867031 | 174 | 0.632218 |
95a163ba2b23c18ae5bb7535ab4caa4e069308b6 | 144 | py | Python | bolt/core/exceptions.py | ph7vc/CL4M-B0T | e992cf63b1215ea7c241cab94edc251653dbaed7 | [
"MIT"
] | 9 | 2019-02-17T06:33:14.000Z | 2021-10-05T02:19:00.000Z | bolt/core/exceptions.py | ns-phennessy/Bolt | e992cf63b1215ea7c241cab94edc251653dbaed7 | [
"MIT"
] | 28 | 2019-02-10T07:48:05.000Z | 2021-12-20T00:15:37.000Z | bolt/core/exceptions.py | ph7vc/CL4M-B0T | e992cf63b1215ea7c241cab94edc251653dbaed7 | [
"MIT"
] | 4 | 2015-03-13T03:58:55.000Z | 2015-05-27T08:29:46.000Z | class InvalidConfigurationError(Exception):
pass
class InvalidBotToken(Exception):
pass
class InvalidBotPlugin(Exception):
pass
| 13.090909 | 43 | 0.763889 |
95a2f6f31ddcda8bf982507b3035c6d82bfe1d80 | 723 | py | Python | selfdrive/visiond/tensorflow_autodetect.py | jeroenbbb/openpilot | 4a2ff784f85ac87a4aa9ba8a345c2403102f960a | [
"MIT"
] | 4 | 2019-05-29T19:44:56.000Z | 2021-09-10T18:36:57.000Z | selfdrive/visiond/tensorflow_autodetect.py | jeroenbbb/openpilot | 4a2ff784f85ac87a4aa9ba8a345c2403102f960a | [
"MIT"
] | null | null | null | selfdrive/visiond/tensorflow_autodetect.py | jeroenbbb/openpilot | 4a2ff784f85ac87a4aa9ba8a345c2403102f960a | [
"MIT"
] | 5 | 2019-08-09T07:49:28.000Z | 2020-10-11T03:19:04.000Z | import os
from setuptools import setup
version = os.getenv('VERSION', '1.10.1')
setup(
name='tensorflow-autodetect',
version=version,
url='https://github.com/commaai/tensorflow-autodetect',
author='comma.ai',
author_email='',
license='MIT',
long_description='Auto-detect tensorflow or tensorflow-gpu package based on nvidia driver being installed',
keywords='tensorflow tensorflow-gpu',
install_requires=[
('tensorflow-gpu' if os.path.exists('/proc/driver/nvidia/version') else 'tensorflow') + '==' + version,
],
classifiers=[
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
| 30.125 | 111 | 0.656985 |
95a308d03af24087015385e9c1aa146e859dc63c | 1,639 | py | Python | intask_api/projects/permissions.py | KirovVerst/intask | 4bdec6f49fa2873cca1354d7d3967973f5bcadc3 | [
"MIT"
] | null | null | null | intask_api/projects/permissions.py | KirovVerst/intask | 4bdec6f49fa2873cca1354d7d3967973f5bcadc3 | [
"MIT"
] | 7 | 2016-08-17T23:08:31.000Z | 2022-03-02T02:23:08.000Z | intask_api/projects/permissions.py | KirovVerst/intask | 4bdec6f49fa2873cca1354d7d3967973f5bcadc3 | [
"MIT"
] | null | null | null | from rest_framework import permissions
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from intask_api.projects.models import Project
class IsProjectHeader(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
return obj.header == request.user
class CanUpdateProject(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
return obj.header == request.user
class CanDeleteProject(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
return obj.header == request.user
class IsParticipant(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
return request.user in obj.users.all()
class CanRetrieveProjectUser(permissions.BasePermission):
def has_permission(self, request, view):
project = get_object_or_404(Project, id=view.kwargs['project_id'])
return request.user in project.users.all()
class CanDeleteProjectUser(permissions.BasePermission):
def has_permission(self, request, view):
project = get_object_or_404(Project, id=view.kwargs['project_id'])
user = get_object_or_404(User, id=view.kwargs['pk'])
is_project_header = project.header == request.user
is_current_user = request.user == user
return is_project_header | is_current_user
class CanAddProjectUser(permissions.BasePermission):
def has_permission(self, request, view):
project = get_object_or_404(Project, id=view.kwargs['project_id'])
return request.user == project.header
| 34.87234 | 74 | 0.748627 |
95a3853b501cce7a1c286e558ccff9a6692b3e3f | 171 | py | Python | Ekeopara_Praise/Phase 2/LIST/Day43 Tasks/Task3.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 6 | 2020-05-23T19:53:25.000Z | 2021-05-08T20:21:30.000Z | Ekeopara_Praise/Phase 2/LIST/Day43 Tasks/Task3.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 8 | 2020-05-14T18:53:12.000Z | 2020-07-03T00:06:20.000Z | Ekeopara_Praise/Phase 2/LIST/Day43 Tasks/Task3.py | CodedLadiesInnovateTech/-python-challenge-solutions | 430cd3eb84a2905a286819eef384ee484d8eb9e7 | [
"MIT"
] | 39 | 2020-05-10T20:55:02.000Z | 2020-09-12T17:40:59.000Z | '''3. Write a Python program to split a list into different variables. '''
universalList = [(1, 2, 3), ('w', 'e', 's')]
lst1, lst2 = universalList
print(lst1)
print(lst2) | 28.5 | 74 | 0.654971 |
95a3fd394b5e1d1a390370d7caef0aefa5912c98 | 576 | py | Python | Codefights/arcade/python-arcade/level-9/62.Check-Participants/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codefights/arcade/python-arcade/level-9/62.Check-Participants/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codefights/arcade/python-arcade/level-9/62.Check-Participants/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python3
from solution1 import checkParticipants as f
qa = [
([0, 1, 1, 5, 4, 8],
[2]),
([0, 1, 2, 3, 4, 5],
[]),
([6],
[]),
([3, 3, 3, 3, 3, 3, 3, 3],
[4, 5, 6, 7]),
([0, 0, 1, 5, 5, 4, 5, 4, 10, 8],
[1, 2, 5, 6, 7, 9])
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
print(' [ok]')
print(' output:', ans)
print()
| 19.2 | 47 | 0.378472 |
95a45f4832007319ba41671ba4a21dd2a62ab0fc | 202 | py | Python | models/__init__.py | mikuh/bert-tf2-keras | e361a0e7dc9fa0d64c48ac41320d302599dba025 | [
"MIT"
] | 4 | 2020-06-21T15:48:40.000Z | 2022-01-24T05:10:59.000Z | models/__init__.py | mikuh/bert-tf2-keras | e361a0e7dc9fa0d64c48ac41320d302599dba025 | [
"MIT"
] | null | null | null | models/__init__.py | mikuh/bert-tf2-keras | e361a0e7dc9fa0d64c48ac41320d302599dba025 | [
"MIT"
] | 3 | 2020-07-20T07:11:27.000Z | 2022-01-24T05:11:21.000Z | from models.base_model import BaseModel
from models.classifier import BertClassifier
from models.sequence_labeling import BertSequenceLabeling
from models.sequence_embedding import BertSequenceEmbedding | 50.5 | 59 | 0.905941 |
95a49255a761f17a3cc35cbf97bc73b1442eaf32 | 7,563 | py | Python | plex_import_watched_history.py | chazlarson/plex-watched-tools | ef3e34e733ec9555353d695ced582395bdc73480 | [
"MIT"
] | null | null | null | plex_import_watched_history.py | chazlarson/plex-watched-tools | ef3e34e733ec9555353d695ced582395bdc73480 | [
"MIT"
] | null | null | null | plex_import_watched_history.py | chazlarson/plex-watched-tools | ef3e34e733ec9555353d695ced582395bdc73480 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# python3 -m pip install --force -U --user PlexAPI
import json
import time
import logging
import plexapi
import plexapi.video
import plexapi.myplex
import plexapi.server
import plexapi.library
import plexapi.exceptions
PLEX_URL = ""
PLEX_TOKEN = ""
WATCHED_HISTORY = ""
LOG_FILE = ""
BATCH_SIZE = 10000
PLEX_REQUESTS_SLEEP = 0
CHECK_USERS = [
]
LOG_FORMAT = \
"[%(name)s][%(process)05d][%(asctime)s][%(levelname)-8s][%(funcName)-15s]" \
" %(message)s"
LOG_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
LOG_LEVEL = logging.INFO
plexapi.server.TIMEOUT = 3600
plexapi.server.X_PLEX_CONTAINER_SIZE = 2500
_SHOW_GUID_RATING_KEY_MAPPING = {}
_MOVIE_GUID_RATING_KEY_MAPPING = {}
_EPISODE_GUID_RATING_KEY_MAPPING = {}
logger = logging.getLogger("PlexWatchedHistoryImporter")
def _get_config_str(key):
return plexapi.CONFIG.get(key, default="", cast=str).strip("'").strip('"').strip()
def _load_config():
global PLEX_URL, PLEX_TOKEN, WATCHED_HISTORY, CHECK_USERS, LOG_FILE, LOG_LEVEL
if PLEX_URL == "":
PLEX_URL = _get_config_str("sync.dst_url")
if PLEX_TOKEN == "":
PLEX_TOKEN = _get_config_str("sync.dst_token")
if WATCHED_HISTORY == "":
WATCHED_HISTORY = _get_config_str("sync.watched_history")
if len(CHECK_USERS) == 0:
config_check_users = _get_config_str("sync.check_users").split(",")
CHECK_USERS = [user.strip() for user in config_check_users if user]
if LOG_FILE == "":
LOG_FILE = _get_config_str("sync.import_log_file")
debug = plexapi.utils.cast(bool, _get_config_str("sync.debug").lower())
if debug:
LOG_LEVEL = logging.DEBUG
def _setup_logger():
logging.Formatter.converter = time.gmtime
logging.raiseExceptions = False
logger.setLevel(logging.DEBUG)
logger.handlers = []
logger.propagate = False
detailed_formatter = logging.Formatter(fmt=LOG_FORMAT,
datefmt=LOG_DATE_FORMAT)
file_handler = logging.FileHandler(filename=LOG_FILE, mode="a+")
file_handler.setFormatter(detailed_formatter)
file_handler.setLevel(LOG_LEVEL)
logger.addHandler(file_handler)
def _get_rating_keys(server, rating_key_guid_mapping, guid):
if guid not in rating_key_guid_mapping:
items = server.library.search(guid=guid)
rating_key_guid_mapping[guid] = [item.ratingKey for item in items]
return rating_key_guid_mapping[guid]
def _set_movie_section_watched_history(server, movie_history):
for movie_guid, movie_item_history in movie_history.items():
rating_keys = _get_rating_keys(server, _MOVIE_GUID_RATING_KEY_MAPPING, movie_guid)
for rating_key in rating_keys:
item = server.fetchItem(rating_key)
if movie_item_history['watched'] and not item.isWatched:
logger.debug(f"Watching Movie: {item.title}")
item.markWatched()
if movie_item_history['viewCount'] > item.viewCount:
for _ in range(movie_item_history['viewCount'] - item.viewCount):
logger.debug(f"Watching Movie: {item.title}")
item.markWatched()
if movie_item_history['viewOffset'] != 0:
logger.debug(f"Updating Movie Timeline: {item.title}: {movie_item_history['viewOffset']}")
item.updateTimeline(movie_item_history['viewOffset'])
if movie_item_history['userRating'] != "":
logger.debug(f"Rating Movie: {item.title}: {movie_item_history['userRating']}")
item.rate(movie_item_history['userRating'])
def _set_show_section_watched_history(server, show_history):
for show_guid, show_item_history in show_history.items():
rating_keys = _get_rating_keys(server, _SHOW_GUID_RATING_KEY_MAPPING, show_guid)
for rating_key in rating_keys:
item = server.fetchItem(rating_key)
if show_item_history['watched'] and not item.isWatched:
logger.debug(f"Watching Show: {item.title}")
item.markWatched()
if show_item_history['userRating'] != "":
logger.debug(f"Rating Show: {item.title}: {show_item_history['userRating']}")
item.rate(show_item_history['userRating'])
for episode_guid, episode_item_history in show_item_history['episodes'].items():
rating_keys = _get_rating_keys(server, _EPISODE_GUID_RATING_KEY_MAPPING, episode_guid)
for rating_key in rating_keys:
item = server.fetchItem(rating_key)
if episode_item_history['watched'] and not item.isWatched:
logger.debug(f"Watching Episode: {item.title}")
item.markWatched()
if episode_item_history['viewCount'] > item.viewCount:
for _ in range(episode_item_history['viewCount'] - item.viewCount):
logger.debug(f"Watching Episode: {item.title}")
item.markWatched()
if episode_item_history['viewOffset'] != 0:
logger.debug(f"Updating Episode Timeline: {item.title}: {episode_item_history['viewOffset']}")
item.updateTimeline(episode_item_history['viewOffset'])
if episode_item_history['userRating'] != "":
logger.debug(f"Rating Episode: {item.title}: {episode_item_history['userRating']}")
item.rate(episode_item_history['userRating'])
def _set_user_server_watched_history(server, watched_history):
_set_movie_section_watched_history(server, watched_history['movie'])
_set_show_section_watched_history(server, watched_history['show'])
def main():
_load_config()
_setup_logger()
plex_server = plexapi.server.PlexServer(PLEX_URL, PLEX_TOKEN, timeout=300)
plex_account = plex_server.myPlexAccount()
with open(WATCHED_HISTORY, "r") as watched_history_file:
watched_history = json.load(watched_history_file)
logger.info(f"Starting Import")
plex_users = plex_account.users()
# Owner will be processed separately
logger.info(f"Total Users: {len(plex_users) + 1}")
if not (len(CHECK_USERS) > 0 and plex_account.username not in CHECK_USERS and
plex_account.email not in CHECK_USERS):
logger.info(f"Processing Owner: {plex_account.username}")
user_history = watched_history[plex_account.username]
_set_user_server_watched_history(plex_server, user_history)
for user_index, user in enumerate(plex_users):
if (len(CHECK_USERS) > 0 and user.username not in CHECK_USERS and
user.email not in CHECK_USERS):
continue
if user.username not in watched_history:
logger.warning(f"Missing User from Watched History: {user.username}")
continue
logger.info(f"Processing User: {user.username}")
user_server_token = user.get_token(plex_server.machineIdentifier)
try:
user_server = plexapi.server.PlexServer(PLEX_URL, user_server_token, timeout=300)
except plexapi.exceptions.Unauthorized:
# This should only happen when no libraries are shared
logger.warning(f"Skipped User with No Libraries Shared: {user.username}")
continue
user_history = watched_history[user.username]
_set_user_server_watched_history(user_server, user_history)
logger.info(f"Completed Import")
if __name__ == "__main__":
main()
| 36.713592 | 114 | 0.672352 |
95a5e5403994144db82f320da6b9ae78fdfacc78 | 3,556 | py | Python | django_thermostat/pypelib/Rule.py | jpardobl/django-thermostat | 184e398134f289eb0337ec2af33c650f9ee26a13 | [
"BSD-3-Clause"
] | null | null | null | django_thermostat/pypelib/Rule.py | jpardobl/django-thermostat | 184e398134f289eb0337ec2af33c650f9ee26a13 | [
"BSD-3-Clause"
] | null | null | null | django_thermostat/pypelib/Rule.py | jpardobl/django-thermostat | 184e398134f289eb0337ec2af33c650f9ee26a13 | [
"BSD-3-Clause"
] | null | null | null | import os
import sys
import time
import exceptions
import uuid
import logging
'''
@author: msune,lbergesio,omoya,CarolinaFernandez
@organization: i2CAT, OFELIA FP7
PolicyEngine Rule class
Encapsulates logic of a simple Rule
'''
from django_thermostat.pypelib.Condition import Condition
from django_thermostat.pypelib.persistence.PersistenceEngine import PersistenceEngine
from django_thermostat.pypelib.utils.Logger import Logger
class TerminalMatch(exceptions.Exception):
value = None
desc = None
def __init__(self,rType,desc):
if isinstance(rType['value'],bool):
self.value = rType['value']
else:
raise Exception("Unknown rule type")
self.desc = desc
def __str__(self):
return "%s "%self.desc
class Rule():
logger = Logger.getLogger()
#Class Attributes
_condition = None
_description = None
_errorMsg = None
_uuid = None #uuid.uuid4().hex
_defaultParser = "RegexParser"
_defaultPersistence = "Django"
#Types of rule
POSITIVE_TERMINAL={'value':True,'terminal':True}
POSITIVE_NONTERMINAL={'value':True,'terminal':False}
NEGATIVE_TERMINAL={'value':False,'terminal':True}
NEGATIVE_NONTERMINAL={'value':False,'terminal':False}
_types = [POSITIVE_TERMINAL,POSITIVE_NONTERMINAL,NEGATIVE_TERMINAL, NEGATIVE_NONTERMINAL]
#Rule type
_type = None
#Rule match Action
_matchAction=None
#Getters
def getCondition(self):
return self._condition
def getDescription(self):
return self._description
def getType(self):
return self._type
def getErrorMsg(self):
return self._errorMsg
def getMatchAction(self):
return self._matchAction
def getUUID(self):
return self._uuid
#setters
def setUUID(self,UUID):
self._uuid = UUID
#Constructor
def __init__(self,condition,description,errorMsg,ruleType=POSITIVE_TERMINAL,action=None,uuid=None):
if not isinstance(condition,Condition):
raise Exception("Object must be an instance of Condition")
if ruleType not in self._types:
raise Exception("Unknown rule type")
if action == None and (ruleType == self.NEGATIVE_NONTERMINAL or ruleType == self.POSITIVE_NONTERMINAL):
raise Exception("You cannot create non-terminal actionless rules")
self._condition = condition
self._matchAction = action
self._type = ruleType
self._description = description
self._errorMsg = errorMsg
self._uuid = uuid
def dump(self):
#Debug dump
toReturn = self._condition.dump()
toReturn+="=> %s "%str(self._type['value'])
if self._matchAction != None:
toReturn += "(%s) "%str(self._matchAction)
if self._type['terminal']:
toReturn += "[TERM] "
if self._description:
toReturn+=" #"+self._description
return toReturn
#Resolver is passed at evaluation time to be able to dynamically redirect actions
def evaluate(self,metaObj,resolver):
try:
Rule.logger.setLevel(logging.DEBUG)
result = self._condition.evaluate(metaObj,resolver)
Rule.logger.debug('Result was: %s rule: [%s]' % (str(result), self.dump()))
except Exception as e:
Rule.logger.error('Error on rule: %s',self.dump())
Rule.logger.error('Exception: %s', str(e))
Rule.logger.error('Rule will be skiped!')
result = False
if result:
if self._matchAction != None:
resolver.resolve(self._matchAction,metaObj)
#If is terminal raise TerminalMatch
if self._type['terminal']:
raise TerminalMatch(self._type,self._errorMsg)
#return whatever
return
def getConditionDump(self):
return self.getCondition().dump()
| 27.353846 | 105 | 0.719629 |
95aa250e5dd1b191d40f33aa77ed3ab2c0ae9d74 | 64 | py | Python | tests/test_tag.py | veoco/PyTypecho | 59280c4770f0b66acccea7a5eb62495be30977b4 | [
"MIT"
] | 5 | 2019-11-25T13:39:01.000Z | 2021-11-03T07:12:33.000Z | tests/test_tag.py | veoco/PyTypecho | 59280c4770f0b66acccea7a5eb62495be30977b4 | [
"MIT"
] | 4 | 2021-01-24T14:03:02.000Z | 2021-07-19T04:43:59.000Z | tests/test_tag.py | veoco/PyTypecho | 59280c4770f0b66acccea7a5eb62495be30977b4 | [
"MIT"
] | 2 | 2020-12-03T12:47:31.000Z | 2021-07-19T02:45:36.000Z | def test_get_tags(te):
r = te.get_tags()
assert r == []
| 16 | 22 | 0.5625 |
95aa9b2ab7c302c981b157247e84659b7c3d8105 | 709 | py | Python | test/test_integration.py | gaborfodor/wave-bird-recognition | 6feafdbae82746e3e7b0f6588a9158aa8336309a | [
"MIT"
] | 17 | 2021-06-02T12:26:30.000Z | 2022-03-27T18:35:02.000Z | test/test_integration.py | gaborfodor/wave-bird-recognition | 6feafdbae82746e3e7b0f6588a9158aa8336309a | [
"MIT"
] | null | null | null | test/test_integration.py | gaborfodor/wave-bird-recognition | 6feafdbae82746e3e7b0f6588a9158aa8336309a | [
"MIT"
] | 3 | 2021-06-02T12:26:51.000Z | 2021-06-06T05:56:45.000Z | from birds.display_utils import geo_plot
from birds.pann import load_pretrained_model, read_audio_fast, get_model_predictions_for_clip, BIRDS
def test_prediction_works():
test_bird = 'comrav'
model = load_pretrained_model()
y = read_audio_fast(f'./data/audio/{test_bird}.mp3')
predictions = get_model_predictions_for_clip(y, model)
class_probs = predictions[BIRDS].sum().reset_index()
class_probs.columns = ['ebird', 'p']
class_probs = class_probs.sort_values(by='p')
top_ebird = class_probs.ebird.values[-1]
assert top_ebird == test_bird
def test_map():
html = geo_plot('norcar', 10, 10)
with open('./temp/test_map.html', 'w') as f:
f.write(html)
| 27.269231 | 100 | 0.71086 |
95abecff3908d6331f655cf91a24b321277dc4f4 | 12,306 | py | Python | For_Cluster/letshpc_folder_backtracking_2/main_script_without_perf.py | yatin2410/HPC_N_QUEENS | df629ac4ebc678815953370c8ae97c6d276819ff | [
"MIT"
] | 2 | 2019-05-10T09:09:07.000Z | 2022-02-07T05:46:57.000Z | For_Cluster/letshpc_folder_bitmasking/main_script_without_perf.py | yatin2410/HPC_N_QUEENS | df629ac4ebc678815953370c8ae97c6d276819ff | [
"MIT"
] | null | null | null | For_Cluster/letshpc_folder_bitmasking/main_script_without_perf.py | yatin2410/HPC_N_QUEENS | df629ac4ebc678815953370c8ae97c6d276819ff | [
"MIT"
] | null | null | null | #!/bin/python
import subprocess
import os
import sys
import maps
import time
import logging
def line(n):
print('-'*n)
logging.basicConfig(filename = "LetsHPC_Team_CodeRunner.log", level = logging.INFO)
logger = logging.getLogger(__name__)
########################################################################################################
USAGE = """
Usage:
run.py problem_name approach_name serial_executable parallel_executable runs log_directory output_directory input_directory base_directory
'problem_name' is the name of the problem assigned to you.
'approach_name' is the name of the appraoch assigned to you.
'serial_executable' must be the name of the compiled executable file for the serial code.
'parallel_executable' must be the name of the compiled executable file for the parallel code.
'runs' is the number of times to run the codes. Run at least thrice and ideally 10 times.
'log_directory' is the directory where you want to store the log files
'output_directory' is the directory where you want to store the output files
'input_directory' is the directory where you take the input from
"""
def foobar(l):
if len(l) < 10:
print USAGE
return
problem_name = l[1]
approach_name = l[2]
serial_executable = l[3]
parallel_executable = l[4]
runs = int(l[5])
compiler_to_use = l[-1]
logger.info("-"*80)
logger.info("Problem Name : %s" % (problem_name))
logger.info("Approach Name : %s" % (approach_name))
logger.info("Serial Executable : %s" % (serial_executable))
logger.info("Parallel Executable : %s" % (parallel_executable))
logger.info("Number of runs : %s" % (str(runs)))
if problem_name not in maps.problem_list:
print problem_name, 'not in', maps.problem_list
logger.error("%s not in problem list" % (problem_name))
exit(0)
if approach_name not in maps.approaches[problem_name]:
print approach_name, 'not a valid approach for', problem_name
print 'Choose from:'
print maps.approaches[problem_name]
logger.error("%s is not a valid approach" % (approach_name))
exit(0)
log_directory = l[6]
output_directory = l[7]
input_directory = l[8]
line(80)
logger.info("Log Directory : %s" %(log_directory))
logger.info("Output Directory : %s" % (output_directory))
logger.info("Input Directory : %s" % (input_directory))
print 'Assuming that input has been created for:', problem_name
subprocess.call('lscpu > '
+ log_directory
+ "lscpu.txt", shell=True)
subprocess.call('cat /proc/cpuinfo > '
+ log_directory
+ "cpuinfo.txt", shell=True)
for run in range(runs):
os.chdir(l[9])
print 'Run:', str(run+1)
print('Running Serial')
logger.info("Started running the serial code for run_id = %d" %(run))
for n in maps.problem_size[problem_name]:
print('Problem Size:', n)
input_file = input_directory+problem_name+'_'+str(n)+'_input.txt'
if compiler_to_use == 'openmp':
logger.info("Running the Command : " + serial_executable
+ " " + str(n)
+ " " + str(0) # p=0 for serial code.
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs")
subprocess.call(serial_executable
+ " " + str(n)
+ " " + str(0) # p=0 for serial code.
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs",
shell=True)
elif compiler_to_use == 'mpi':
logger.info("Running the Command : mpirun -np 1 " + serial_executable
+ " " + str(n)
+ " " + str(0) # p=0 for serial code.
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs")
subprocess.call("mpirun -np 1 " + serial_executable
+ " " + str(n)
+ " " + str(0) # p=0 for serial code.
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs",
shell=True)
line(80)
print('Running Parallel')
for p in maps.processor_range:
print('Number of Processors:', p)
logger.info("Running the parallel code with %d processors" % (p))
for n in maps.problem_size[problem_name]:
os.chdir(l[9])
input_file = input_directory+problem_name+'_'+str(n)+'_input.txt'
print('Problem Size:', n)
if compiler_to_use == 'openmp':
logger.info("Running the Command : " + parallel_executable
+ " " + str(n)
+ " " + str(p)
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs")
subprocess.call(parallel_executable
+ " " + str(n)
+ " " + str(p)
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs",
shell=True)
elif compiler_to_use == 'mpi':
logger.info("Running the Command : mpirun -np " + str(p) + " " + parallel_executable
+ " " + str(n)
+ " " + str(p)
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs")
subprocess.call("mpirun -np " + str(p) + " " + parallel_executable
+ " " + str(n)
+ " " + str(p)
+ " " + input_file
+ " >> " + log_directory
+ problem_name + "_" + approach_name
+ ".logs",
shell=True)
line(80)
print(os.getcwd())
#######################################################################
base = os.getcwd()
all_files = os.listdir(base)
inp = None
while True:
if 'codes_run_file' in all_files:
inp = raw_input("Do you want to reuse the results of previous run? (y/n): ").lower()
if inp == 'y':
break
elif inp == 'n':
os.remove(base + '/codes_run_file')
break
else:
print "Invalid input. Try again."
else:
break
while True:
compiler_to_use = raw_input("Which parallel framework would you be using? (openmp/mpi): ").lower()
if compiler_to_use == 'mpi' or compiler_to_use == 'openmp':
break
else:
print("Incorrect input. Try again.")
while True:
try:
runs = int(raw_input("Enter the number of times you want the code to run (recommended: at least 10 runs): "))
if runs <= 0: # if not a positive int print message and ask for input again
print("Input must be a positive integer, try again!")
continue
except ValueError as ve:
print("That's not an int! Try again!")
continue
else:
print('the number of runs is ' + str(runs))
break
all_inputs = os.getcwd() + '/all_input/'
base = os.getcwd() + '/all_codes/'
starting_point = os.getcwd()
all_codes = os.listdir(base)
count = 0
try:
os.remove(base + "progress.txt")
except Exception as e:
print "File already deleted"
print(all_codes)
code_to_run = None
codes_already_run = None
try:
uber = open(os.getcwd() + "/codes_run_file", "r")
codes_already_run = uber.readlines()
uber.close()
except Exception as e:
command = "touch %s" % (starting_point + "/codes_run_file")
subprocess.call(command, shell = True)
if codes_already_run is None:
code_to_run = all_codes[0]
else:
for each in all_codes:
if each+"\n" not in codes_already_run:
code_to_run = each
break
print "The following code will be run now", code_to_run
if code_to_run is None:
print "All the codes have already been executed."# + " You can run the collect data script now"
sys.exit(1)
for each_code in [code_to_run]:
if each_code == "progress.txt" or "log" in each_code:
continue
subprocess.call("rm -rf "
+ base + each_code + "/output"
, shell=True)
subprocess.call("rm -rf "
+ base + each_code + "/logs"
, shell=True)
division = each_code.split("-")
problem = division[2]
approach = division[3]
print "-"*80
print problem, approach
all_files = os.listdir(base+each_code+"/")
serial = None
parallel = None
for each_file in all_files:
if 'clean' not in each_file.lower() and 'logs'!=each_file.lower() and 'output'!=each_file.lower():
if 'par' not in each_file.lower() and each_file!="ser":
serial = each_file
elif 'parallel' in each_file.lower():
parallel = each_file
if compiler_to_use == 'mpi':
compiler = "mpicc "
elif compiler_to_use == 'openmp':
compiler = "gcc "
if ".cpp" in parallel:
if compiler_to_use == "mpi":
compiler = "mpiCC "
elif compiler_to_use == "openmp":
compiler = "g++ "
print serial, parallel
if 'logs' not in all_files:
os.mkdir(base + each_code + "/logs")
os.mkdir(base + each_code + "/output")
if compiler_to_use == 'openmp':
subprocess.call(compiler
+ base + each_code + "/" + parallel
+ " -fopenmp -lm -w -o "
+ base + each_code + "/parr", shell=True)
subprocess.call(compiler
+ base + each_code + "/" + serial
+ " -fopenmp -lm -w -o "
+ base + each_code + "/ser", shell=True)
elif compiler_to_use == 'mpi':
subprocess.call(compiler
+ base + each_code + "/" + parallel
+ " -lm -w -o "
+ base + each_code + "/parr", shell=True)
subprocess.call(compiler
+ base + each_code + "/" + serial
+ " -lm -w -o "
+ base + each_code + "/ser", shell=True)
print serial,parallel
#raw_input()
foobar(['run.py', problem, approach, base + each_code + "/ser", base + each_code + "/parr", int(runs), base + each_code + "/logs/", \
base + each_code + "/output/", all_inputs, base + each_code + "/", compiler_to_use])
f = open(base + "progress.txt", "a")
f.write(str(time.time()) + " " + str(count) + " " + str(each_code)+"\n")
f.close()
count +=1
print "Reached Here:", code_to_run, type(code_to_run)
w2f = open(starting_point + "/codes_run_file", "a")
string_to_write = code_to_run + "\n"
w2f.write(string_to_write)
w2f.close()
print "Written To file"
| 34.664789 | 138 | 0.491549 |
95ae2e3a04b5bb9553c2d275221aaaba3d17f40e | 1,236 | py | Python | 0205.Isomorphic Strings/solution.py | zhlinh/leetcode | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | [
"Apache-2.0"
] | null | null | null | 0205.Isomorphic Strings/solution.py | zhlinh/leetcode | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | [
"Apache-2.0"
] | null | null | null | 0205.Isomorphic Strings/solution.py | zhlinh/leetcode | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: zhlinhng@gmail.com
Version: 0.0.1
Created Time: 2016-03-24
Last_modify: 2016-03-24
******************************************
'''
'''
Given two strings s and t, determine if they are isomorphic.
Two strings are isomorphic if the characters in s can be replaced to get t.
All occurrences of a character must be replaced with another character
while preserving the order of characters.
No two characters may map to the same character
but a character may map to itself.
For example,
Given "egg", "add", return true.
Given "foo", "bar", return false.
Given "paper", "title", return true.
Note:
You may assume both s and t have the same length.
'''
class Solution(object):
    def isIsomorphic(self, s, t):
        """Return True iff s and t are isomorphic.

        Two strings are isomorphic when a one-to-one character mapping turns
        s into t (order preserved, no two characters map to the same one).

        :type s: str
        :type t: str
        :rtype: bool
        """
        if len(s) != len(t):
            return False
        # Map each character to the (1-based) index of its latest occurrence.
        # Dicts instead of fixed 256-slot arrays: the original indexed
        # ord(c) into [0]*256 and raised IndexError for any code point > 255.
        last_s = {}
        last_t = {}
        for i, (a, b) in enumerate(zip(s, t)):
            # If the two characters were last seen at different positions,
            # the mapping is inconsistent.
            if last_s.get(a, 0) != last_t.get(b, 0):
                return False
            last_s[a] = i + 1
            last_t[b] = i + 1
        return True
| 24.235294 | 75 | 0.536408 |
95b11eb96aa3d734016e0fceb804be347a3066c5 | 1,860 | py | Python | testModules/migration.py | mannamman/newsCrawl | 8779c1ee06ef51d2affbd9b8a80e688c6ed056e7 | [
"MIT"
] | null | null | null | testModules/migration.py | mannamman/newsCrawl | 8779c1ee06ef51d2affbd9b8a80e688c6ed056e7 | [
"MIT"
] | 14 | 2021-12-20T03:44:08.000Z | 2022-02-24T06:04:06.000Z | testModules/migration.py | mannamman/newsCrawl | 8779c1ee06ef51d2affbd9b8a80e688c6ed056e7 | [
"MIT"
] | null | null | null | import pymongo
## local 테스트 ##
from dotenv import load_dotenv
import os
import pytz
import datetime
import itertools
from uuid import uuid4
from collections import defaultdict
# ObjectId로 쿼리할때 필요
from bson.objectid import ObjectId
"""
RDBMS Mongo DB
Database Database
Table Collection
Row Document
Index Index
DB server Mongod
DB client mongo
"""
class BaseWorker:
    """Shared base: loads Mongo credentials from cred/.mongopasswd and opens a client."""
    def __init__(self):
        # Resolve the credentials file relative to this module (local test setup).
        module_dir = os.path.dirname(os.path.abspath(__file__))
        load_dotenv(dotenv_path=f"{module_dir}/../cred/.mongopasswd", verbose=True)
        ip = os.getenv("ip")
        port = os.getenv("port")
        user = os.getenv("user")
        passwd = os.getenv("passwd")
        self.client = pymongo.MongoClient(f"mongodb://{user}:{passwd}@{ip}:{port}/")
class newWorker(BaseWorker):
    """Worker bound to the stock.en collection."""
    def __init__(self):
        super().__init__()
        self.db = self.client["stock"]
        self.collection = self.db["en"]
    def insert_regacy(self, results, subject):
        """Tag each result dict with `subject` and insert the documents one by one."""
        for document in results:
            document["subject"] = subject
            self.collection.insert_one(document)
    def migration_for_mistyping(self):
        """Rename the misspelled field 'subejct' to 'subject' on every document that has it."""
        selector = {"subejct": {"$exists": True}}
        mutation = {"$rename": {"subejct": "subject"}}
        self.collection.update_many(selector, mutation)
class regacyWorker(BaseWorker):
    """Worker over an arbitrary legacy database/collection pair."""
    def __init__(self, dbname: str, collection: str):
        super().__init__()
        self.db = self.client[dbname]
        self.collection = self.db[collection]
    def get_all_data(self, subejct):
        """Return every document whose first sentiment entry carries a 'url' key.

        NOTE(review): the parameter name 'subejct' is a typo, kept for
        backward compatibility with keyword callers; it is only printed.
        """
        res = self.collection.find()
        filterd_res = []
        for r in res:
            if "url" in r["sentiment"][0]:
                filterd_res.append(r)
        # bug fix: the original printed `subject`, an undefined name, which
        # raised NameError on every call.
        print(subejct, len(filterd_res))
        return filterd_res
# Entry point: run the one-off field-rename migration.
if(__name__ == "__main__"):
    new_db_worker = newWorker()
new_db_worker.migration_for_mistyping() | 28.181818 | 111 | 0.638172 |
95b233e62bad224b765ef9f8b1c2e67cce2b24ad | 1,659 | py | Python | YOLOv2.py | scain40/OpenCVCVImageComparisson | 368d901233111606fb2f0ecbce4447dd9c149fd0 | [
"MIT"
] | null | null | null | YOLOv2.py | scain40/OpenCVCVImageComparisson | 368d901233111606fb2f0ecbce4447dd9c149fd0 | [
"MIT"
] | null | null | null | YOLOv2.py | scain40/OpenCVCVImageComparisson | 368d901233111606fb2f0ecbce4447dd9c149fd0 | [
"MIT"
] | null | null | null | import numpy as np
import cv2 as cv
import os
import sys
class ObjectDetector:
    """
    Object Detector is the class model for using YOLO and gathering results.
    NOTE(review): the class name says YOLOv2, but the loaded config/weights
    are yolov3.* — confirm which model is intended.
    """
    def __init__(self):
        self.network_loading()
    def network_loading(self):
        # Load the class labels and the trained darknet network.
        # Paths use Windows-style separators and assume the files sit in cwd.
        # bug fix: the labels file handle was never closed.
        with open(os.getcwd() + "\coco.names") as label_file:
            self.LABELS = label_file.read().strip().split("\n")
        self.readingNetwork = cv.dnn.readNetFromDarknet(os.getcwd() + "\yolov3.cfg", os.getcwd() + "\yolov3.weights")
    def read_image(self, image_name):
        # Run a forward pass of the network on one image file and return the
        # names of confidently detected classes.
        working_image = cv.imread(image_name)
        self.labelNames = self.readingNetwork.getLayerNames()
        self.labelNames = [self.labelNames[i[0] - 1] for i in self.readingNetwork.getUnconnectedOutLayers()]
        imageInputBlob = cv.dnn.blobFromImage(working_image, 1 / 255.0, (416, 416), swapRB=True, crop=False)
        self.readingNetwork.setInput(imageInputBlob)
        layerOutputs = self.readingNetwork.forward(self.labelNames)
        return self.processReading(layerOutputs)
    def processReading(self, processingResults, confidence_threshold=0.9):
        """Collect the label of every detection whose best class score exceeds
        `confidence_threshold` (generalized: was a hard-coded 0.9; the default
        preserves the old behaviour)."""
        classIDs = []
        for objects in processingResults:
            # loop over each of the detections
            for detection in objects:
                scores = detection[5:]  # class scores follow the 5 box values
                classID = np.argmax(scores)
                confidence = scores[classID]
                if confidence > confidence_threshold:
                    # Only class names are kept; boxes are not needed downstream.
                    classIDs.append(self.LABELS[classID])
        return classIDs
| 36.866667 | 112 | 0.722122 |
95b3747c398cbe76bc2e8c76655c81e2a5cd82bc | 115 | py | Python | closuredag/apps.py | farmlab/django-closuredag | 19bacabea5e922613a18d21048866dceb44d0afe | [
"MIT"
] | null | null | null | closuredag/apps.py | farmlab/django-closuredag | 19bacabea5e922613a18d21048866dceb44d0afe | [
"MIT"
] | 93 | 2017-11-16T13:58:45.000Z | 2022-03-27T22:01:19.000Z | closuredag/apps.py | farmlab/django-closuredag | 19bacabea5e922613a18d21048866dceb44d0afe | [
"MIT"
] | null | null | null | # -*- coding: utf-8
from django.apps import AppConfig
class ClosuredagConfig(AppConfig):
name = 'closuredag'
| 16.428571 | 34 | 0.721739 |
95b3dfb14ba48f34faa00abbd1780bd7ac43862d | 499 | py | Python | experiments/reversed_string_stack.py | shruti-bt/data-structure-python | 0729f486f516ce05acdd92b28b108f43b67f656f | [
"MIT"
] | 1 | 2022-01-10T17:17:35.000Z | 2022-01-10T17:17:35.000Z | experiments/reversed_string_stack.py | shruti-bt/data-structure-python | 0729f486f516ce05acdd92b28b108f43b67f656f | [
"MIT"
] | null | null | null | experiments/reversed_string_stack.py | shruti-bt/data-structure-python | 0729f486f516ce05acdd92b28b108f43b67f656f | [
"MIT"
] | null | null | null | class Stack():
    def __init__(self):
        # Backing store: the top of the stack is the END of the list.
        self.stack = []
    def push(self, value):
        # Amortized O(1) push onto the top of the stack.
        self.stack.append(value)
    def pop(self):
        # Remove and return the top element; raises IndexError when empty.
        return self.stack.pop()
    def __len__(self):
        # Number of items currently on the stack.
        return len(self.stack)
    def print(self):
        # Debug helper (shadows the builtin only as a method name); returns None.
        return print(self.stack)
if __name__ == '__main__':
    # Reverse one line of input: push every character, then pop them all.
    str_ = input()
    stack = Stack()
    for i in str_:
        stack.push(i)
    for j in range(len(stack)):
        print(stack.pop(), end='')
    print()
| 19.192308 | 35 | 0.519038 |
95b40e4094e935db9b4e39bc3de9c67b55114bbe | 484 | py | Python | app/run.py | dudikbender/geocoder | af8c0839d3d73c7825a0488763d053b5e6bc8257 | [
"Unlicense"
] | null | null | null | app/run.py | dudikbender/geocoder | af8c0839d3d73c7825a0488763d053b5e6bc8257 | [
"Unlicense"
] | null | null | null | app/run.py | dudikbender/geocoder | af8c0839d3d73c7825a0488763d053b5e6bc8257 | [
"Unlicense"
] | null | null | null | from utils.db import connection, print_version
import pandas as pd
def add_table(csv_file, table_name, engine):
    """Load `csv_file` into the table `table_name`, replacing any existing table.

    bug fix: df.drop('Unnamed: 0') drops a ROW labelled 'Unnamed: 0'
    (axis=0 is the default), which raises KeyError on a default RangeIndex.
    The intent is to discard the stray index COLUMN pandas writes when a
    CSV was saved with index=True.
    """
    df = pd.read_csv(csv_file)
    if 'Unnamed: 0' in df.columns:
        df = df.drop(columns=['Unnamed: 0'])
    df.to_sql(name=table_name, con=engine, index=False, if_exists='replace')
table = 'data/tables/postcode_coordinates.csv'
# Load the postcode CSV into the DB, then read a few rows back as a sanity check.
add_table(table, 'Postcode_coordinates', connection)
cur = connection.cursor()
cur.execute('''SELECT *
FROM Postcode_coordinates''')
data = cur.fetchmany(5)
print(data) | 25.473684 | 76 | 0.727273 |
95b525d705b0f34eba83af30d5fc61bd4affc2f0 | 48 | pyw | Python | seemee.pyw | gaming32/SeeMee | a99655efdd9e1aea218474bcdbd1370954a366d2 | [
"MIT"
] | null | null | null | seemee.pyw | gaming32/SeeMee | a99655efdd9e1aea218474bcdbd1370954a366d2 | [
"MIT"
] | null | null | null | seemee.pyw | gaming32/SeeMee | a99655efdd9e1aea218474bcdbd1370954a366d2 | [
"MIT"
] | null | null | null | import runpy
runpy._run_module_as_main('SeeMee') | 24 | 35 | 0.854167 |
95b591115eff8da9eaed281f3f62bddae8faefca | 755 | py | Python | model/param_const.py | tototo617/Biomodel-Raia2011 | a06d531e3d9f18ddee1d85a19d8c57363be3da8e | [
"MIT"
] | null | null | null | model/param_const.py | tototo617/Biomodel-Raia2011 | a06d531e3d9f18ddee1d85a19d8c57363be3da8e | [
"MIT"
] | null | null | null | model/param_const.py | tototo617/Biomodel-Raia2011 | a06d531e3d9f18ddee1d85a19d8c57363be3da8e | [
"MIT"
] | null | null | null | from .name2idx import parameters as C
def f_params():
    """Return the constant kinetic-parameter vector for the model.

    Indices come from name2idx.parameters (imported as C); values are the
    fitted rate constants (Raia et al. 2011 IL-13 signalling model —
    presumably; confirm against the paper's supplementary tables).
    """
    x = [0]*C.len_f_params
    x[C.Kon_IL13Rec] = 0.00341992
    x[C.Rec_phosphorylation] = 999.631
    x[C.pRec_intern] = 0.15254
    x[C.pRec_degradation] = 0.172928
    x[C.Rec_intern] = 0.103346
    x[C.Rec_recycle] = 0.00135598
    x[C.JAK2_phosphorylation] = 0.157057
    x[C.pJAK2_dephosphorylation] = 6.21906E-4
    x[C.STAT5_phosphorylation] = 0.0382596
    x[C.pSTAT5_dephosphorylation] = 3.43392E-4
    x[C.SOCS3mRNA_production] = 0.00215826
    x[C.DecoyR_binding] = 1.24391E-4
    x[C.JAK2_p_inhibition] = 0.0168268
    x[C.SOCS3_translation] = 11.9086
    x[C.SOCS3_accumulation] = 3.70803
    x[C.SOCS3_degradation] = 0.0429186
    x[C.CD274mRNA_production] = 8.21752E-5
return x | 31.458333 | 46 | 0.682119 |
95b6aab732ea16915f09231a8049e60f6f242ea6 | 593 | py | Python | flaskr/commands.py | aicioara-old/flask_tutorial2 | acb5c6fa2743f2f060ad6a3a26cc7eef56b6490b | [
"MIT"
] | null | null | null | flaskr/commands.py | aicioara-old/flask_tutorial2 | acb5c6fa2743f2f060ad6a3a26cc7eef56b6490b | [
"MIT"
] | null | null | null | flaskr/commands.py | aicioara-old/flask_tutorial2 | acb5c6fa2743f2f060ad6a3a26cc7eef56b6490b | [
"MIT"
] | null | null | null | import os
import datetime
import click
from flask.cli import with_appcontext
from werkzeug.security import generate_password_hash
def init_app(app):
    # Register this module's CLI commands on the Flask application instance.
    app.cli.add_command(init_db_command)
# Flask CLI entry point: `flask init-db` (runs inside the application context).
@click.command('init-db')
@with_appcontext
def init_db_command():
    """Clear the existing data and create new tables."""
    init_db()
    click.echo('Initialized the database.')
def init_db():
    # Imported lazily to avoid a circular import at module load time.
    from . import models
    models.db.create_all()
    # NOTE(review): seeds a hard-coded admin/admin account — acceptable for a
    # tutorial app, but must not ship to production.
    user = models.User(username='admin', password=generate_password_hash('admin'))
    models.db.session.add(user)
    models.db.session.commit()
| 20.448276 | 82 | 0.735245 |
95b6e78900559f4f960f26e452c446bb79f637e4 | 191 | py | Python | intel_bot_sentenca_rj_civel/test.py | slarda/Web-Scrapping-Bots-For-Crawling-Docs | aa8ce3c72bfbe2111d16655ffc3a6759a825946e | [
"Apache-2.0"
] | 1 | 2020-12-17T11:21:01.000Z | 2020-12-17T11:21:01.000Z | intel_bot_sentenca_rj_civel/test.py | soft-super/Web-Scrapping-Bots-For-Crawling-Docs | aa8ce3c72bfbe2111d16655ffc3a6759a825946e | [
"Apache-2.0"
] | 5 | 2021-03-19T01:48:07.000Z | 2021-06-09T18:26:31.000Z | intel_bot_sentenca_rj_civel/test.py | tiny-1996/Web-Scrapping-Bots-For-Crawling-Docs | aa8ce3c72bfbe2111d16655ffc3a6759a825946e | [
"Apache-2.0"
] | null | null | null | with open('./logs/test.log', 'r') as f1:
data = f1.readlines()
formatted = [x.replace('.pdf', '') for x in data]
with open('./logs/test2.log', 'r') as f1:
f1.writelines(formatted)
| 21.222222 | 49 | 0.602094 |
95b771302ac3436f68366f36390ccc4ddba021fd | 2,206 | py | Python | validator_rewards/validator_rewards.py | harmony-one/monitor-ops | 0a379655ff26bff5821cd7cb6f619a15a308441b | [
"MIT"
] | 1 | 2020-04-11T16:46:56.000Z | 2020-04-11T16:46:56.000Z | validator_rewards/validator_rewards.py | harmony-one/monitor-ops | 0a379655ff26bff5821cd7cb6f619a15a308441b | [
"MIT"
] | 3 | 2020-04-13T10:42:59.000Z | 2020-07-10T06:26:23.000Z | validator_rewards/validator_rewards.py | harmony-one/monitor-ops | 0a379655ff26bff5821cd7cb6f619a15a308441b | [
"MIT"
] | 2 | 2020-04-22T10:36:25.000Z | 2020-05-20T15:58:02.000Z | import argparse
import json
from pyhmy import (
get_all_validator_addresses,
get_validator_information
)
def get_block_by_num(block_num, endpoint):
    """POST a hmy_getBlockByNumber JSON-RPC request to `endpoint`.

    Returns the parsed reply dict, or None on any failure.
    """
    # stdlib replacement: the original called `requests`, which is never
    # imported anywhere in this file, so every call raised NameError.
    import urllib.request
    params = [
        str(hex(block_num)),
        False,
    ]
    payload = {
        "id": "1",
        "jsonrpc": "2.0",
        "method": "hmy_getBlockByNumber",
        "params": params
    }
    headers = {
        'Content-Type': 'application/json'
    }
    timeout = 5
    try:
        request = urllib.request.Request(endpoint, data=json.dumps(payload).encode('utf-8'),
                                         headers=headers, method='POST')
        with urllib.request.urlopen(request, timeout=timeout) as resp:
            return json.loads(resp.read().decode('utf-8'))
    except Exception as e:
        # NOTE(review): v_print is only defined when the file runs as a
        # script (__main__ block) — confirm library callers provide it.
        v_print(f'{e.__class__}: {e}')
        return None
if __name__ == '__main__':
    # bug fix: Counter is used below but was never imported in this file.
    from collections import Counter
    parser = argparse.ArgumentParser()
    parser.add_argument("--start", required=True, type=int, help="First block")
    parser.add_argument("--end", required=True, type=int, help="Last block")
    parser.add_argument("--endpoint", default="http://localhost:9500", help="Endpoint to query")
    parser.add_argument("--verbose", action='store_true', help="Verbose print for debug")
    args = parser.parse_args()
    if args.verbose:
        def v_print(*args, **kwargs):
            print(*args, **kwargs)
    else:
        def v_print(*args, **kwargs):
            return
    block_timestamps = []
    block_tx = []
    block_stx = []
    for block_num in range(args.start, args.end):
        v_print(f'Block {block_num}/{args.end}', end="\r")
        reply = get_block_by_num(block_num, args.endpoint)
        try:
            block_timestamps.append(int(reply['result']['timestamp'], 0))
            block_tx.append(len(reply['result']['transactions']))
            block_stx.append(len(reply['result']['stakingTransactions']))
        except Exception as e:
            # reply can be None or lack fields; skip the block.
            v_print(f'{e.__class__}: {e}')
    # Inter-block deltas from consecutive timestamps.
    block_times = [y - x for x, y in zip(block_timestamps, block_timestamps[1:])]
    if block_times:  # guard: the original divided by zero with < 2 answered blocks
        avg = sum(block_times) / len(block_times)
        print(f'Average Block Time: {avg}')
        unique_times = Counter(block_times)
        print(f'Unique block times: {unique_times.most_common()}')
    else:
        print('Not enough data to compute block times.')
| 31.514286 | 96 | 0.609248 |
95b980c29bfb10b077998e38727075e9d4e823a6 | 2,271 | py | Python | day4/day4.py | UncleTed/adventOfCode2020 | 382560f7aee89f6b04b2ee60882d3801425ea46c | [
"MIT"
] | null | null | null | day4/day4.py | UncleTed/adventOfCode2020 | 382560f7aee89f6b04b2ee60882d3801425ea46c | [
"MIT"
] | null | null | null | day4/day4.py | UncleTed/adventOfCode2020 | 382560f7aee89f6b04b2ee60882d3801425ea46c | [
"MIT"
] | null | null | null | import re
valid = ['hcl', 'iyr', 'pid', 'ecl', 'hgt','eyr', 'byr' ]
def check_passport_part1(buffer):
    """Return True iff every required field key appears in `buffer`.

    bug fix: the original tested membership against a `map` ITERATOR, which
    is partially consumed by each `in` check, so later required fields were
    falsely reported missing whenever the field order did not cooperate.
    The required keys are inlined here (same set as the module-level `valid`).
    """
    required = ('hcl', 'iyr', 'pid', 'ecl', 'hgt', 'eyr', 'byr')
    present = {token.split(':')[0] for token in buffer.split(' ') if token}
    return all(field in present for field in required)
def split_passport(buffer):
    """Parse space-separated 'key:value' tokens into a dict (last key wins)."""
    return {token.split(':')[0]: token.split(':')[1]
            for token in buffer.split(' ') if token != ''}
def check_value(field, value):
    """Validate one passport field per Advent of Code 2020 day 4, part 2.

    Returns a bool; unknown fields return False (the original fell through
    and implicitly returned None).
    """
    if field == 'byr':
        return 1920 <= int(value) <= 2002
    if field == 'iyr':
        return 2010 <= int(value) <= 2020
    if field == 'eyr':
        return 2020 <= int(value) <= 2030
    if field == 'hgt':
        if value.endswith('cm'):
            return 150 <= int(value[:-2]) <= 193
        if value.endswith('in'):
            return 59 <= int(value[:-2]) <= 76
        return False
    if field == 'hcl':
        # bug fix: anchored with \Z — re.match only anchors the START, so the
        # original accepted colours with 7+ hex digits.
        return re.match(r"#[0-9a-f]{6}\Z", value) is not None
    if field == 'ecl':
        return value in ('amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth')
    if field == 'pid':
        # bug fix: pid must be EXACTLY nine digits; the unanchored \d{9}
        # matched any longer digit string too.
        return re.match(r"\d{9}\Z", value) is not None
    return False
def check_passport_part2(passport):
    """Return True iff `passport` (a field->value dict) has every required
    field AND each value passes check_value.

    bug fix: dict.has_key() was removed in Python 3; the `in` operator is
    the idiom in both Python 2 and 3.
    """
    for v in valid:
        if v not in passport:
            return False
        if not check_value(v, passport.get(v)):
            return False
    return True
def part2():
    """Count passports in input.txt whose fields are all present AND valid."""
    total = 0
    with open("input.txt", "r") as f:
        buffer = ''
        for line in f:
            if line != '\n':
                buffer = line.rstrip() + ' ' + buffer
            else:
                passport = split_passport(buffer)
                if check_passport_part2(passport):
                    total = total + 1
                buffer = ''
        # bug fix: the final passport is not followed by a blank line, so the
        # original never validated it.
        if buffer:
            if check_passport_part2(split_passport(buffer)):
                total = total + 1
    print (total)
def part1():
    """Count passports in input.txt that contain all required fields."""
    total = 0
    with open("input.txt", "r") as f:
        buffer = ''
        for line in f:
            if line != '\n':
                buffer = line.rstrip() + ' ' + buffer
            else:
                if check_passport_part1(buffer):
                    total = total + 1
                buffer = ''
        # bug fix: the final passport is not followed by a blank line, so the
        # original never validated it.
        if buffer:
            if check_passport_part1(buffer):
                total = total + 1
    print (total)
#part1()
part2() | 27.695122 | 66 | 0.483928 |
95bb338ca37179ca6d20e80795bb6cc5417559db | 535 | py | Python | app/shared/models.py | prapeller/blackemployer_api | ae9232773e6e164b22ffccf0b39dd9a4c2a036cf | [
"MIT"
] | null | null | null | app/shared/models.py | prapeller/blackemployer_api | ae9232773e6e164b22ffccf0b39dd9a4c2a036cf | [
"MIT"
] | null | null | null | app/shared/models.py | prapeller/blackemployer_api | ae9232773e6e164b22ffccf0b39dd9a4c2a036cf | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth import get_user_model
from django.contrib.postgres.fields import ArrayField
from utils.model_utils import default_1d_array_of_strings
class SeoModel(models.Model):
    # Abstract mixin: adds the standard SEO meta fields to any concrete model.
    seo_title = models.CharField("SEO title", max_length=100, blank=True, null=True)
    seo_description = models.TextField("SEO description", max_length=400, blank=True, null=True)
    seo_keywords = models.CharField("SEO keywords", max_length=200, blank=True, null=True)
    class Meta:
        abstract = True  # no table is created for this mixin itself
| 35.666667 | 96 | 0.773832 |
95bbb3583a2750d5735e9244fe93a6a446fb803f | 8,314 | py | Python | dataset/data_load.py | clovaai/symmetrical-synthesis | 207953b1ae3d2e0a96fb676db3669bdc88cc18e8 | [
"MIT"
] | 76 | 2020-02-08T03:15:54.000Z | 2022-03-04T16:14:52.000Z | dataset/data_load.py | clovaai/symmetrical-synthesis | 207953b1ae3d2e0a96fb676db3669bdc88cc18e8 | [
"MIT"
] | 5 | 2020-02-07T14:00:58.000Z | 2021-05-31T01:37:55.000Z | dataset/data_load.py | clovaai/symmetrical-synthesis | 207953b1ae3d2e0a96fb676db3669bdc88cc18e8 | [
"MIT"
] | 13 | 2020-02-10T02:56:51.000Z | 2021-05-28T06:56:30.000Z | '''
symmetrical-synthesis
Copyright (c) 2020-present NAVER Corp.
MIT license
'''
import os
import time
import glob
import cv2
import random
import numpy as np
import tensorflow as tf
import random
try:
import data_util
except ImportError:
from dataset import data_util
tf.app.flags.DEFINE_boolean('random_resize', False, 'True or False')
tf.app.flags.DEFINE_boolean('past_dataset', False, 'True or False')
tf.app.flags.DEFINE_string('google_path', None, '')
tf.app.flags.DEFINE_integer('min_train3', 2, '')
tf.app.flags.DEFINE_string('match_info', None, '')
tf.app.flags.DEFINE_float('match_prob', 0.0, '')
tf.app.flags.DEFINE_boolean('mnist_mode', False, '')
FLAGS = tf.app.flags.FLAGS
'''
image_path = '/where/your/images/*.jpg'
'''
def load_image(im_fn, input_size=224):
    """Read one image and return an augmented (input_size, input_size, 3) RGB crop.

    Augmentation: a random center-crop margin (skipped in mnist_mode), resize
    to 1.125 * input_size, a random input_size crop, and a random horizontal
    flip. NOTE(review): FLAGS.random_resize is defined but unused here — the
    feature's dead code block was removed.
    """
    org_image = cv2.imread(im_fn, cv2.IMREAD_IGNORE_ORIENTATION | cv2.IMREAD_COLOR)[:,:,::-1] # rgb converted
    h, w, _ = org_image.shape
    min_len = np.min([h, w])
    # center crop margin, we follow the method, which was introduced in DELF paper.
    if FLAGS.mnist_mode:
        crop_image = org_image.copy()
    else:
        try:
            cc_margin = np.random.randint(low=1, high=int(min_len * 0.05), size=1)[0]
            crop_image = org_image[cc_margin:-cc_margin, cc_margin:-cc_margin, :].copy()
        except ValueError:
            # randint raises ValueError when high <= low, i.e. the image is too
            # small for a 5% margin; fall back to the full image. (The original
            # used a bare `except:`, which also hid unrelated errors.)
            crop_image = org_image.copy()
    new_input_size = int(input_size * 1.125)
    crop_image = cv2.resize(crop_image, (new_input_size, new_input_size), interpolation=cv2.INTER_AREA)
    # random crop range
    h_edge = new_input_size - input_size
    w_edge = new_input_size - input_size
    h_start = np.random.randint(low=0, high=h_edge, size=1)[0]
    w_start = np.random.randint(low=0, high=w_edge, size=1)[0]
    return_image = crop_image[h_start:h_start+input_size, w_start:w_start+input_size,:]
    # flip lr with probability 0.5
    if random.randint(0, 1):
        return_image = return_image[:,::-1,:]
    return return_image
def get_images_dict(image_folder):
    '''
    image_folder = '/data/IR/DB/sid_images'
    folder structure
    sid_images - sid0 - image00.png, image01.png, ...
               - sid1 - ...
               - sid2 - ...
    Returns (images_dict, stat_db, images_list) where images_dict maps a
    dense class index to that class's image paths.
    '''
    # NOTE(review): match_dict is built but never read afterwards — kept for
    # parity with the original behaviour.
    if FLAGS.match_info is not None:
        match_dict = {}
        # bug fix: the file handle was opened and never closed.
        with open(FLAGS.match_info, 'r') as f_match:
            match_lines = f_match.readlines()
        for match_line in match_lines:
            ver1_cls, ver2_cls, prob = match_line.split()
            prob = float(prob)
            if prob >= FLAGS.match_prob:
                match_dict[ver2_cls] = 1
    possible_image_type = ['jpg', 'JPG', 'png', 'JPEG', 'jpeg']
    sid_list = glob.glob(os.path.join(image_folder, '*'))
    images_dict = {}
    images_list = []
    images_cnt = 0
    sid_idx = 0
    for sid_folder in sid_list:
        ext_folder = sid_folder
        images_path = [image_path for image_paths in [glob.glob(os.path.join(ext_folder, '*.%s' % ext)) for ext in possible_image_type] for image_path in image_paths]
        n_instance = 2
        if len(images_path) < n_instance:
            continue  # need at least one anchor and one positive per class
        for image_path in images_path:
            images_list.append([image_path, sid_idx])
        images_dict[sid_idx] = images_path
        images_cnt += len(images_path)
        sid_idx += 1
    stat_db = {}
    stat_db['num_sid'] = len(images_dict)
    stat_db['images_cnt'] = images_cnt
    return images_dict, stat_db, images_list
def get_record(image_folder, input_size, batch_size):
    """Infinite generator of (anchor_images, pos_images, gt_labels) batches.

    Each batch samples batch_size // 2 distinct classes; for every class one
    anchor and one positive image are loaded from disk.
    """
    images_dict, stat_db, images_list = get_images_dict(image_folder)
    print('place total sids: %d, total images: %d' % (stat_db['num_sid'], stat_db['images_cnt']))
    if FLAGS.google_path is not None:
        # NOTE(review): the google split is only counted and printed here; it is
        # not mixed into the sampling below — confirm this is intended.
        images_dict_google, stat_db_google, images_list_google = get_images_dict(FLAGS.google_path)
        print('google total sids: %d, total images: %d' % (stat_db_google['num_sid'], stat_db_google['images_cnt']))
    #time.sleep(3)
    n_instance = 2
    b_replace = False
    real_batch_size = batch_size // n_instance
    while True:
        try:
            # Sample class labels without replacement for this batch.
            gt_labels = np.random.choice(len(images_dict), real_batch_size, replace=b_replace)
            anchor_images = []
            pos_images = []
            for n in range(n_instance - 1):
                pos_images.append([])
            for label in gt_labels:
                tmp_image_list = images_dict[label]
                # n_instance distinct images of the class: one anchor + positives.
                image_index = np.random.choice(len(tmp_image_list), n_instance, replace=False)
                anchor_image = load_image(tmp_image_list[image_index[0]], input_size)
                anchor_images.append(anchor_image)
                for n, ind in enumerate(image_index[1:]):
                    pos_image = load_image(tmp_image_list[ind], input_size)
                    pos_images[n].append(pos_image)
            #print(len(gt_labels))
            if n_instance == 2:
                pos_images = pos_images[0]
            elif n_instance == 1:
                pos_images = pos_images
            else:
                pos_images = np.concatenate(pos_images, axis=0)
            yield anchor_images, pos_images, gt_labels #im_fn, gt_label
        except Exception as e:
            # Best-effort: skip any batch that fails (e.g. unreadable image).
            print(e)
            continue
def generator(image_folder, input_size=224, batch_size=32):
    """Thin pass-through over get_record with default sizes."""
    yield from get_record(image_folder, input_size, batch_size)
def get_generator(image_folder, **kwargs):
    # Thin indirection kept for API symmetry with get_batch().
    return generator(image_folder, **kwargs)
## image_path = '/where/is/your/images/'
def get_batch(image_path, num_workers, **kwargs):
    """Yield batches produced by generator(), pre-fetched by a worker pool.

    kwargs are forwarded to get_generator (input_size, batch_size).
    """
    enqueuer = None  # bound before try: `finally` must not NameError if startup fails
    try:
        batch_generator = get_generator(image_path, **kwargs)
        enqueuer = data_util.GeneratorEnqueuer(batch_generator, use_multiprocessing=True)
        enqueuer.start(max_queue_size=24, workers=num_workers)
        while True:
            # bug fix: the original initialised `generator_ouptut` (typo), so
            # the very first yield could reference an unbound name.
            generator_output = None
            while enqueuer.is_running():
                if not enqueuer.queue.empty():
                    generator_output = enqueuer.queue.get()
                    break
                else:
                    time.sleep(0.001)
            yield generator_output
    finally:
        if enqueuer is not None:
            enqueuer.stop()
if __name__ == '__main__':
    # Manual smoke test: pull batches forever and report shapes and timing.
    image_path = '/data/IR/DB/data_refinement/place_exterior'
    data_generator = get_batch(image_path=image_path, num_workers=4,
                               batch_size=128, input_size=224)
    iteration = 0
    while True:
        iteration += 1
        start_time = time.time()
        batch = next(data_generator)
        anchor_images = np.asarray(batch[0])
        pos_images = np.asarray(batch[1])
        gts = np.asarray(batch[2])
        print('%d done!!! %f' % (iteration, time.time() - start_time), anchor_images.shape, pos_images.shape, gts.shape)
| 35.228814 | 170 | 0.615227 |
95bc1cbdca2faf1169e04427ea20b03a36f4f201 | 1,678 | py | Python | python_parikshith21/Day39.py | 01coders/50-Days-Of-Code | 98928cf0e186ee295bc90a4da0aa9554e2918659 | [
"MIT"
] | null | null | null | python_parikshith21/Day39.py | 01coders/50-Days-Of-Code | 98928cf0e186ee295bc90a4da0aa9554e2918659 | [
"MIT"
] | null | null | null | python_parikshith21/Day39.py | 01coders/50-Days-Of-Code | 98928cf0e186ee295bc90a4da0aa9554e2918659 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 17 20:55:53 2019
@author: Parikshith.H
"""
import sqlite3
conn=sqlite3.connect('music.sqlite')
cur=conn.cursor()
cur.execute('DROP TABLE IF EXISTS Tracks')
cur.execute('CREATE TABLE Tracks(title TEXT,plays INTEGER)')
cur.execute('''INSERT INTO Tracks(title,plays) VALUES ('Thunder2',100)''')
cur.execute('''INSERT INTO Tracks VALUES ('Thunder3',100)''')
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Thunderstuck',200))
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Dangerous',20))
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Myway',150))
cur.execute('INSERT INTO Tracks(title,plays) VALUES (?,?)',('Newway',30))
cur.execute('SELECT * FROM Tracks')
for row in cur:
print(row)
print('****************************')
cur.execute('''UPDATE Tracks SET plays=50 WHERE title='Myway' ''')
cur.execute('SELECT * FROM Tracks')
for row in cur:
print(row)
print('****************************')
cur.execute('''DELETE FROM Tracks WHERE plays<100 ''')
cur.execute('SELECT * FROM Tracks')
for row in cur:
print(row)
cur.close()
conn.close()
# =============================================================================
# #output:
# ('Thunder2', 100)
# ('Thunder3', 100)
# ('Thunderstuck', 200)
# ('Dangerous', 20)
# ('Myway', 150)
# ('Newway', 30)
# ****************************
# ('Thunder2', 100)
# ('Thunder3', 100)
# ('Thunderstuck', 200)
# ('Dangerous', 20)
# ('Myway', 50)
# ('Newway', 30)
# ****************************
# ('Thunder2', 100)
# ('Thunder3', 100)
# ('Thunderstuck', 200)
# ============================================================================= | 28.440678 | 80 | 0.544696 |
95bd0c7bd55d7d49e38f428fd858ef62fbc90459 | 269 | py | Python | tests/ansible/lib/modules/custom_python_external_pkg.py | webcoast-dk/mitogen | a5fe4a9fac5561511b676fe61ed127b732be5b12 | [
"BSD-3-Clause"
] | 1,526 | 2017-09-15T18:49:40.000Z | 2021-01-17T16:04:12.000Z | tests/ansible/lib/modules/custom_python_external_pkg.py | webcoast-dk/mitogen | a5fe4a9fac5561511b676fe61ed127b732be5b12 | [
"BSD-3-Clause"
] | 682 | 2017-09-11T17:43:12.000Z | 2021-01-17T05:26:26.000Z | tests/ansible/lib/modules/custom_python_external_pkg.py | webcoast-dk/mitogen | a5fe4a9fac5561511b676fe61ed127b732be5b12 | [
"BSD-3-Clause"
] | 111 | 2017-09-15T23:21:37.000Z | 2021-01-01T14:45:35.000Z | #!/usr/bin/python
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.externalpkg import extmod
def main():
    """Report the filesystem path of the external package's module."""
    ansible_module = AnsibleModule(argument_spec={})
    ansible_module.exit_json(extmod_path=extmod.path())


if __name__ == '__main__':
    main()
| 22.416667 | 52 | 0.750929 |
95bd18d246cfb63e62a2a8d0384166889102ed92 | 1,869 | py | Python | mlangpy/metalanguages/EBNF.py | rium9/mlangpy | 75821306b15d72278220d2a1a403daa36f60cc4a | [
"MIT"
] | 1 | 2020-04-20T20:23:31.000Z | 2020-04-20T20:23:31.000Z | mlangpy/metalanguages/EBNF.py | rium9/mlangpy | 75821306b15d72278220d2a1a403daa36f60cc4a | [
"MIT"
] | null | null | null | mlangpy/metalanguages/EBNF.py | rium9/mlangpy | 75821306b15d72278220d2a1a403daa36f60cc4a | [
"MIT"
] | null | null | null | from ..grammar import *
from .Metalanguage import Metalanguage
class EBNFTerminal(Terminal):
    """Terminal symbol in EBNF notation, delimited by double quotes, e.g. "x"."""
    def __init__(self, subject, left_bound='"', right_bound='"'):
        super().__init__(subject, left_bound=left_bound, right_bound=right_bound)
class EBNFDefinitionList(DefinitionList):
    """List of alternative definitions, separated by EBNF's '|' alternation."""
    def __init__(self, definitions, alternation='|'):
        super().__init__(definitions, alternation=alternation)
class EBNFNonTerminal(NonTerminal):
    """Non-terminal symbol in EBNF notation; written bare (no delimiters)."""
    def __init__(self, subject, left_bound='', right_bound=''):
        super().__init__(subject, left_bound=left_bound, right_bound=right_bound)
class EBNFConcat(Sequence):
    """Concatenation of terms, comma-separated as in EBNF (a, b, c)."""
    def __init__(self, terms, separator=', '):
        super().__init__(terms, separator=separator)
class EBNFRule(Rule):
    """Production rule in EBNF form: ``lhs = rhs ;``."""
    def __init__(self, left, right, production='=', terminator=';'):
        super().__init__(left, right, production=production, terminator=terminator)
class EBNFRepetition(Repetition):
    """Repetition (zero or more) written with EBNF's curly braces: {x}."""
    def __init__(self, subject, left_bound='{', right_bound='}'):
        super().__init__(subject, left_bound=left_bound, right_bound=right_bound)
class EBNFSpecialSequence(Bracket):
    """EBNF special sequence, delimited by question marks: ? ... ?."""
    def __init__(self, subject, left_bound='?', right_bound='?'):
        super().__init__(subject, left_bound=left_bound, right_bound=right_bound)
class EBNF(Metalanguage):
    """The EBNF metalanguage: maps generic grammar constructs to EBNF forms."""
    def __init__(self, ruleset: Ruleset, normalise=False):
        super().__init__(ruleset, syntax_dict={
            # Core
            # NOTE(review): maps to plain Concat, not EBNFConcat defined above —
            # confirm whether EBNFConcat was meant to be used here.
            Sequence: Concat,
            DefinitionList: EBNFDefinitionList,
            Rule: EBNFRule,
            Terminal: EBNFTerminal,
            NonTerminal: EBNFNonTerminal,
            # Auxiliary
            Optional: Optional,
            Group: Group,
            Repetition: EBNFRepetition,
            # Additional
            Except: Except
        }, normalise=normalise) | 29.203125 | 83 | 0.652755 |
95bd8914d357d073cde74eb4ec195a84ebfe2b04 | 560 | py | Python | app/tests/test_db/test_jobs_crud.py | JvitorS23/jobboard_fastAPI | 5abcc69f19417ad99352c0434db96407e2d7da76 | [
"MIT"
] | 1 | 2021-10-01T16:40:33.000Z | 2021-10-01T16:40:33.000Z | app/tests/test_db/test_jobs_crud.py | JvitorS23/jobboard_fastAPI | 5abcc69f19417ad99352c0434db96407e2d7da76 | [
"MIT"
] | null | null | null | app/tests/test_db/test_jobs_crud.py | JvitorS23/jobboard_fastAPI | 5abcc69f19417ad99352c0434db96407e2d7da76 | [
"MIT"
] | null | null | null | from sqlalchemy.orm import Session
from db.crud.jobs import create_new_job, retrieve_job
from schemas.jobs import JobCreate
from tests.utils.users import create_random_owner
from tests.utils.jobs import create_sample_job
def test_retrieve_job_by_id(db_session: Session):
"""Test retrieving job from db"""
owner = create_random_owner(session=db_session)
job = create_sample_job(owner, db_session)
retrieved_job = retrieve_job(job_id=job.id, session=db_session)
assert retrieved_job.id == job.id
assert retrieved_job.title == job.title
| 37.333333 | 67 | 0.792857 |
95c0ec3bbf5dfcbc14218087f1c41fdd10c1b36f | 5,135 | py | Python | spacy/tests/website/test_home.py | moyogo/spacy | ddf5c5bb61864320189ebc70dac3bc10e4ecde82 | [
"MIT"
] | null | null | null | spacy/tests/website/test_home.py | moyogo/spacy | ddf5c5bb61864320189ebc70dac3bc10e4ecde82 | [
"MIT"
] | null | null | null | spacy/tests/website/test_home.py | moyogo/spacy | ddf5c5bb61864320189ebc70dac3bc10e4ecde82 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import pytest
import spacy
import os
# Python 3 removed ``xrange``; alias it to ``range`` so the snippets below
# run on both Python 2 and Python 3.
try:
    xrange
except NameError:
    xrange = range
@pytest.fixture()
def token(doc):
    """First token of the shared ``doc`` fixture."""
    return doc[0]
@pytest.mark.models
def test_load_resources_and_process_text():
    """Smoke test: load the English pipeline and process a short text."""
    from spacy.en import English
    nlp = English()
    doc = nlp(u'Hello, world. Here are two sentences.')
@pytest.mark.models
def test_get_tokens_and_sentences(doc):
    """The doc's first token is the first token of its first sentence."""
    token = doc[0]
    sentence = next(doc.sents)
    assert token is sentence[0]
    assert sentence.text == 'Hello, world.'
@pytest.mark.models
def test_use_integer_ids_for_any_strings(nlp, token):
    """Strings round-trip through the StringStore's integer IDs."""
    hello_id = nlp.vocab.strings['Hello']
    hello_str = nlp.vocab.strings[hello_id]
    # NOTE(review): 3125 is specific to the loaded model's string store — confirm.
    assert token.orth == hello_id == 3125
    assert token.orth_ == hello_str == 'Hello'
def test_get_and_set_string_views_and_flags(nlp, token):
    """Shape strings are both readable and writable on vocabulary entries."""
    assert token.shape_ == 'Xxxxx'
    for lexeme in nlp.vocab:
        # Collapse every lexeme's shape to a single class letter:
        # W(ord), D(igit), P(unct) or M(isc).
        if lexeme.is_alpha:
            shape_class = 'W'
        elif lexeme.is_digit:
            shape_class = 'D'
        elif lexeme.is_punct:
            shape_class = 'P'
        else:
            shape_class = 'M'
        lexeme.shape_ = shape_class
    assert token.shape_ == 'W'
def test_export_to_numpy_arrays(nlp, doc):
    """Doc.to_array yields one row per token, one column per attribute."""
    from spacy.attrs import ORTH, LIKE_URL, IS_OOV
    attr_ids = [ORTH, LIKE_URL, IS_OOV]
    doc_array = doc.to_array(attr_ids)
    assert doc_array.shape == (len(doc), len(attr_ids))
    assert doc[0].orth == doc_array[0, 0]
    assert doc[1].orth == doc_array[1, 0]
    assert doc[0].like_url == doc_array[0, 1]
    assert list(doc_array[:, 1]) == [t.like_url for t in doc]
@pytest.mark.models
def test_word_vectors(nlp):
    """Semantically related tokens should have higher vector similarity."""
    doc = nlp("Apples and oranges are similar. Boots and hippos aren't.")
    apples = doc[0]
    oranges = doc[2]
    boots = doc[6]
    hippos = doc[8]
    assert apples.similarity(oranges) > boots.similarity(hippos)
@pytest.mark.models
def test_part_of_speech_tags(nlp):
    """Demonstrate coarse (``pos``) vs fine-grained (``tag``) POS access."""
    from spacy.parts_of_speech import ADV
    def is_adverb(token):
        return token.pos == spacy.parts_of_speech.ADV
    # These are data-specific, so no constants are provided. You have to look
    # up the IDs from the StringStore.
    NNS = nlp.vocab.strings['NNS']
    NNPS = nlp.vocab.strings['NNPS']
    def is_plural_noun(token):
        return token.tag == NNS or token.tag == NNPS
    def print_coarse_pos(token):
        print(token.pos_)
    def print_fine_pos(token):
        print(token.tag_)
@pytest.mark.models
def test_syntactic_dependencies():
    """Walking the dependency tree from a token up to the sentence root."""
    def dependency_labels_to_root(token):
        '''Walk up the syntactic tree, collecting the arc labels.'''
        dep_labels = []
        # The root token is its own head, so the walk terminates there.
        while token.head is not token:
            dep_labels.append(token.dep)
            token = token.head
        return dep_labels
@pytest.mark.models
def test_named_entities():
    """Documentation snippets for iterating and counting named entities."""
    def iter_products(docs):
        # Yield every span labelled PRODUCT across the given docs.
        for doc in docs:
            for ent in doc.ents:
                if ent.label_ == 'PRODUCT':
                    yield ent
    def word_is_in_entity(word):
        # Tokens outside any entity have ent_type == 0.
        return word.ent_type != 0
    def count_parent_verb_by_person(docs):
        # Count, per PERSON entity, the lemmas of the verbs governing it.
        from collections import defaultdict
        from spacy.parts_of_speech import VERB
        # Fixed: defaultdict(defaultdict(int)) raises TypeError because the
        # default_factory must be a callable; use a lambda producing the
        # inner defaultdict instead. (Imports added: neither defaultdict nor
        # VERB was in scope in this module.)
        counts = defaultdict(lambda: defaultdict(int))
        for doc in docs:
            for ent in doc.ents:
                if ent.label_ == 'PERSON' and ent.root.head.pos == VERB:
                    counts[ent.orth_][ent.root.head.lemma_] += 1
        return counts
def test_calculate_inline_mark_up_on_original_string():
    """Document wrapping each token in a <span> while preserving whitespace."""
    def put_spans_around_tokens(doc, get_classes):
        '''Given some function to compute class names, put each token in a
        span element, with the appropriate classes computed.

        All whitespace is preserved, outside of the spans. (Yes, I know HTML
        won't display it. But the point is no information is lost, so you can
        calculate what you need, e.g. <br /> tags, <p> tags, etc.)
        '''
        template = '<span classes="{classes}">{word}</span>{space}'
        pieces = [
            token.orth_
            if token.is_space
            else template.format(
                classes=' '.join(get_classes(token)),
                word=token.orth_,
                space=token.whitespace_,
            )
            for token in doc
        ]
        markup = ''.join(pieces)
        markup = markup.replace('\n', '')
        markup = markup.replace('\t', ' ')
        return markup
@pytest.mark.models
def test_efficient_binary_serialization(doc):
    """Round-trip a Doc through its binary representation via a file."""
    from spacy.tokens.doc import Doc
    byte_string = doc.to_bytes()
    # Fixed: use context managers so the file handles are always closed
    # (the originals were opened and never closed).
    with open('moby_dick.bin', 'wb') as out_file:
        out_file.write(byte_string)
    nlp = spacy.en.English()
    with open('moby_dick.bin', 'rb') as in_file:
        for byte_string in Doc.read_bytes(in_file):
            doc = Doc(nlp.vocab)
            doc.from_bytes(byte_string)
@pytest.mark.models
def test_multithreading(nlp):
    """``nlp.pipe`` streams input and produces streaming output in batches."""
    texts = [u'One document.', u'...', u'Lots of documents']
    # .pipe streams input, and produces streaming output
    iter_texts = (texts[i % 3] for i in xrange(100000000))
    for i, doc in enumerate(nlp.pipe(iter_texts, batch_size=50, n_threads=4)):
        assert doc.is_parsed
        if i == 100:
            break
95c1052429e03206d9d42e4ca673e5f48a3f3906 | 35,774 | py | Python | bridge_sim/internal/make/ps_question.py | jerbaroo/bridge-sim | c4ec1c18a07a78462ccf3b970a99a1bd7efcc2af | [
"MIT"
] | 2 | 2020-05-12T11:41:49.000Z | 2020-08-10T15:00:58.000Z | bridge_sim/internal/make/ps_question.py | barischrooneyj/bridge-sim | c4ec1c18a07a78462ccf3b970a99a1bd7efcc2af | [
"MIT"
] | 48 | 2020-05-11T23:58:22.000Z | 2020-09-18T20:28:52.000Z | bridge_sim/internal/make/ps_question.py | jerbaroo/bridge-sim | c4ec1c18a07a78462ccf3b970a99a1bd7efcc2af | [
"MIT"
] | 1 | 2020-05-27T12:43:37.000Z | 2020-05-27T12:43:37.000Z | import os
from copy import deepcopy
import matplotlib.pyplot as plt
import numpy as np
from bridge_sim import model, sim, temperature, traffic, plot, util
from bridge_sim.model import Config, Point, Bridge
from bridge_sim.plot.util import equal_lims
from bridge_sim.sim.responses import without
from bridge_sim.util import print_i, print_w
from bridge_sim.internal.plot import axis_cmap_r
def plot_year_effects(config: Config, x: float, z: float, num_years: int):
    """Plot all effects at a point, over one year and over ``num_years`` years.

    Left subplot: traffic, temperature, shrinkage and creep Y-translation at
    (x, z) over one year. Right subplot: the same over ``num_years`` years.
    """
    install_day = 37
    year = 2018
    weather = temperature.load("holly-springs-18")
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), 60 * 10
    )
    (
        ll_responses,
        ps_responses,
        temp_responses,
        shrinkage_responses,
        creep_responses,
    ) = np.repeat(None, 5)
    start_day, end_day = None, None
    def set_responses(n):
        # Recompute all response components for n years of repeated weather,
        # mutating the enclosing variables.
        nonlocal weather, start_day, end_day
        weather["temp"] = temperature.resize(weather["temp"], year=year)
        weather = temperature.repeat(config, "holly-springs-18", weather, n)
        start_date, end_date = (
            weather["datetime"].iloc[0].strftime(temperature.f_string),
            weather["datetime"].iloc[-1].strftime(temperature.f_string),
        )
        start_day, end_day = install_day, 365 * n
        nonlocal ll_responses, ps_responses, temp_responses, shrinkage_responses, creep_responses
        (
            ll_responses,
            ps_responses,
            temp_responses,
            shrinkage_responses,
            creep_responses,
        ) = sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=model.RT.YTrans,
            with_creep=True,
            weather=weather,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
            ret_all=True,
        )
    # from sklearn.decomposition import FastICA, PCA
    # ica = FastICA(n_components=3)
    # try_ = ica.fit_transform((ll_responses + temp_responses + creep_responses + shrinkage_responses).T)
    # plt.plot(try_)
    # plt.show()
    plt.landscape()
    lw = 2
    def legend():
        # Shared legend styling; thicken the legend line samples.
        leg = plt.legend(
            facecolor="white",
            loc="upper right",
            framealpha=1,
            fancybox=False,
            borderaxespad=0,
        )
        for legobj in leg.legendHandles:
            legobj.set_linewidth(lw)
    plt.subplot(1, 2, 1)
    set_responses(1)
    # X axis in days for the single-year plot.
    xax = np.interp(
        np.arange(len(traffic_array)), [0, len(traffic_array) - 1], [start_day, end_day]
    )
    plt.plot(xax, ll_responses[0] * 1e3, c="green", label="traffic", lw=lw)
    plt.plot(xax, temp_responses[0] * 1e3, c="red", label="temperature")
    plt.plot(xax, shrinkage_responses[0] * 1e3, c="blue", label="shrinkage", lw=lw)
    plt.plot(xax, creep_responses[0] * 1e3, c="black", label="creep", lw=lw)
    legend()
    plt.ylabel("Y translation (mm)")
    plt.xlabel("Time (days)")
    plt.subplot(1, 2, 2)
    end_day = 365 * num_years
    set_responses(num_years)
    # X axis in years for the multi-year plot.
    xax = (
        np.interp(
            np.arange(len(traffic_array)),
            [0, len(traffic_array) - 1],
            [start_day, end_day],
        )
        / 365
    )
    plt.plot(xax, ll_responses[0] * 1e3, c="green", label="traffic", lw=lw)
    plt.plot(xax, temp_responses[0] * 1e3, c="red", label="temperature")
    plt.plot(xax, shrinkage_responses[0] * 1e3, c="blue", label="shrinkage", lw=lw)
    plt.plot(xax, creep_responses[0] * 1e3, c="black", label="creep", lw=lw)
    legend()
    plt.ylabel("Y translation (mm)")
    plt.xlabel("Time (years)")
    equal_lims("y", 1, 2)
    plt.suptitle(f"Y translation at X = {x} m, Z = {z} m")
    plt.tight_layout(rect=[0, 0.03, 1, 0.95])
    plt.savefig(config.get_image_path("classify/ps", f"year-effect-{x}-{z}.png"))
def plot_sensor_placement(config: Config, num_years: int):
    """Contour plots comparing long-term effects vs. pier settlement response.

    For each considered pier, plots (1) long-term (temperature + shrinkage +
    creep) responses, (2) pier-settlement responses, and (3) their difference,
    near the pier — to inform where a sensor best separates the two.
    """
    all_points = [
        model.Point(x=x, z=z)
        for x in np.linspace(config.bridge.x_min, config.bridge.x_max, 300)
        for z in np.linspace(config.bridge.z_min, config.bridge.z_max, 100)
    ]
    response_type = model.ResponseType.YTrans
    install_day = 37
    year = 2018
    weather = temperature.load("holly-springs-18")
    config.sensor_freq = 1
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), 10
    )
    weather["temp"] = temperature.resize(weather["temp"], year=year)
    weather = temperature.repeat(config, "holly-springs-18", weather, num_years)
    start_date, end_date = (
        weather["datetime"].iloc[0].strftime(temperature.f_string),
        weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    start_day, end_day = install_day, 365 * num_years
    for pier in [9]:
        pier_centre = model.Point(
            x=config.bridge.supports[pier].x, z=config.bridge.supports[pier].z,
        )
        # Only consider points within 7 m of the pier centre.
        points = [p for p in all_points if pier_centre.distance(p) < 7]
        ps = model.PierSettlement(pier=pier, settlement=5 / 1e3)
        (
            _0,
            _1,
            temp_responses,
            shrinkage_responses,
            creep_responses,
        ) = sim.responses.to(
            config=config,
            points=points,
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            weather=weather,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
            ret_all=True,
        )
        # Pier settlement response plus its creep, at the final time step.
        ps_responses = sim.responses.to_pier_settlement(
            config=config,
            points=points,
            responses_array=_0,
            response_type=response_type,
            pier_settlement=[(ps, ps)],
        ).T[-1]
        ps_responses += sim.responses.to_creep(
            config=config,
            points=points,
            responses_array=_0,
            response_type=response_type,
            pier_settlement=[(ps, ps)],
            install_pier_settlement=[ps],
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
        ).T[-1]
        long_term_responses = (
            temp_responses.T[-1] + shrinkage_responses.T[-1] + creep_responses.T[-1]
        )
        ############
        # Plotting #
        ############
        plt.landscape()
        plt.subplot(3, 1, 1)
        responses = sim.model.Responses(
            response_type=response_type,
            responses=list(zip(abs(long_term_responses) * 1e3, points)),
        )
        plot.contour_responses(config, responses, levels=30, interp=(200, 60))
        plot.top_view_bridge(config.bridge, piers=True)
        plt.subplot(3, 1, 2)
        responses = sim.model.Responses(
            response_type=response_type,
            responses=list(zip(abs(ps_responses) * 1e3, points)),
        )
        plot.contour_responses(config, responses, levels=30, interp=(200, 60))
        plot.top_view_bridge(config.bridge, piers=True)
        plt.subplot(3, 1, 3)
        responses = sim.model.Responses(
            response_type=response_type,
            responses=list(
                zip((abs(ps_responses) - abs(long_term_responses)) * 1e3, points)
            ),
        )
        plot.contour_responses(config, responses, levels=30, interp=(200, 60))
        plot.top_view_bridge(config.bridge, piers=True)
        plt.savefig(config.get_image_path("classify/ps", "placement.pdf"))
def plot_removal(config: Config, x: float, z: float):
    """Fit a linear temperature model on 2018 data and predict 2019 responses.

    Four subplots: 2018 temperature, 2018 Y-translation at (x, z), the linear
    temperature→response fit from hourly 2018 samples, and the 2019 responses
    compared against the model's prediction.
    """
    response_type = model.RT.YTrans
    weather = temperature.load("holly-springs-18")
    weather["temp"] = temperature.resize(weather["temp"], year=2018)
    start_date, end_date = (
        weather["datetime"].iloc[0].strftime(temperature.f_string),
        weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    install_day = 37
    start_day, end_day = install_day, install_day + 365
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), time=60
    )
    responses = (
        sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            weather=weather,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
            # ret_all=True,
        )[0]
        * 1e3
    )
    def legend():
        # Shared legend styling for this figure.
        return plt.legend(
            facecolor="white",
            loc="upper right",
            framealpha=1,
            fancybox=False,
            borderaxespad=0,
        )
    plt.landscape()
    plt.subplot(2, 2, 1)
    xax = np.interp(
        np.arange(len(weather)), [0, len(weather) - 1], [start_day, end_day]
    )
    plt.plot(xax, weather["temp"], c="red")
    plt.ylabel("Temperature °C")
    plt.xlabel("Days since T_0")
    plt.title("Temperature in 2018")
    plt.subplot(2, 2, 2)
    xax = np.interp(
        np.arange(len(responses)), [0, len(responses) - 1], [start_day, end_day]
    )
    plt.plot(xax, responses)
    plt.ylabel("Y translation (mm)")
    plt.xlabel("Days since T_0")
    plt.title("Y translation in 2018")
    plt.subplot(2, 2, 3)
    # Sample hourly over the year, then regress response on temperature.
    num_samples = 365 * 24
    temps = util.apply(weather["temp"], np.arange(num_samples))
    rs = util.apply(responses, np.arange(num_samples))
    lr, _ = temperature.regress_and_errors(temps, rs)
    lr_x = np.linspace(min(temps), max(temps), 100)
    y = lr.predict(lr_x.reshape((-1, 1)))
    plt.plot(lr_x, y, lw=2, c="red", label="linear fit")
    plt.scatter(temps, rs, s=2, alpha=0.5, label="hourly samples")
    leg = legend()
    leg.legendHandles[1]._sizes = [30]
    plt.ylabel("Y translation (mm)")
    plt.xlabel("Temperature °C")
    plt.title("Linear model from 2018 data")
    #############
    # 2019 data #
    #############
    weather_2019 = temperature.load("holly-springs")
    weather_2019["temp"] = temperature.resize(weather_2019["temp"], year=2019)
    start_date, end_date = (
        weather_2019["datetime"].iloc[0].strftime(temperature.f_string),
        weather_2019["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    start_day, end_day = install_day + 365, install_day + (2 * 365)
    responses_2019 = (
        sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            weather=weather_2019,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
        )[0]
        * 1e3
    )
    plt.subplot(2, 2, 4)
    xax_responses = np.interp(
        np.arange(len(responses_2019)),
        [0, len(responses_2019) - 1],
        [start_day, end_day],
    )
    plt.plot(xax_responses, responses_2019, label="2019 responses")
    temps_2019 = util.apply(weather_2019["temp"], xax_responses)
    y = lr.predict(temps_2019.reshape((-1, 1)))
    plt.plot(xax_responses, y, label="prediction")
    plt.ylabel("Y translation (mm)")
    plt.xlabel("Days since T_0")
    plt.title("Y translation in 2019")
    for legobj in legend().legendHandles:
        legobj.set_linewidth(2.0)
    plt.suptitle(f"Predicting long-term effect at X = {x} m, Z = {z} m")
    plt.tight_layout(rect=[0, 0.03, 1, 0.95])
    plt.savefig(config.get_image_path("classify/ps", "regress.pdf"))
def plot_removal_2(config: Config, x: float, z: float):
    """Predict long-term responses at (x, z) in 2019/2024/2039 from a 2018 fit.

    Fits a linear temperature→response model on hourly 2018 samples, then for
    each selected year plots the simulated responses, the daily prediction, a
    two-week sliding-window smoothing, and error bands from the regression
    residuals.
    """
    response_type = model.RT.YTrans
    weather_2018 = temperature.load("holly-springs-18")
    weather_2018["temp"] = temperature.resize(weather_2018["temp"], year=2018)
    start_date, end_date = (
        weather_2018["datetime"].iloc[0].strftime(temperature.f_string),
        weather_2018["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    install_day = 37
    start_day, end_day = install_day, install_day + 365
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), time=60
    )
    responses_2018 = (
        sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            weather=weather_2018,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
        )[0]
        * 1e3
    )
    # Hourly samples over 2018 for the regression.
    num_samples = 365 * 24
    temps = util.apply(weather_2018["temp"], np.arange(num_samples))
    rs = util.apply(responses_2018, np.arange(num_samples))
    lr, err = temperature.regress_and_errors(temps, rs)
    def legend():
        plt.legend(
            facecolor="white",
            loc="lower left",
            framealpha=1,
            fancybox=False,
            borderaxespad=0,
            labelspacing=0.02,
        )
    ##############################
    # Iterate through each year. #
    ##############################
    plt.landscape()
    weather_2019 = temperature.load("holly-springs")
    weather_2019["temp"] = temperature.resize(weather_2019["temp"], year=2019)
    start_date, end_date = (
        weather_2019["datetime"].iloc[0].strftime(temperature.f_string),
        weather_2019["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    for y_i, year in enumerate([2019, 2024, 2039]):
        plt.subplot(3, 1, y_i + 1)
        start_day = install_day + ((year - 2018) * 365)
        end_day = start_day + 365
        responses_2019 = (
            sim.responses.to(
                config=config,
                points=[model.Point(x=x, z=z)],
                traffic_array=traffic_array,
                response_type=response_type,
                with_creep=True,
                weather=weather_2019,
                start_date=start_date,
                end_date=end_date,
                install_day=install_day,
                start_day=start_day,
                end_day=end_day,
            )[0]
            * 1e3
        )
        # Plot actual values.
        xax = np.interp(
            np.arange(len(responses_2019)), [0, len(responses_2019) - 1], [0, 364]
        )
        plt.plot(xax, responses_2019, label="responses in year", lw=2)
        # Daily prediction.
        xax_responses = np.arange(365)
        temps_2019 = util.apply(weather_2019["temp"], xax_responses)
        y_daily = lr.predict(temps_2019.reshape((-1, 1)))
        y_2_week = [
            np.mean(y_daily[max(0, i - 14) : min(i + 14, len(y_daily))])
            for i in range(len(y_daily))
        ]
        for percentile, alpha in [(100, 20), (75, 40), (50, 60), (25, 100)]:
            # Fixed: the original re-assigned ``err = np.percentile(err, ...)``,
            # collapsing the residual array to a scalar on the first iteration
            # so later bands no longer used the true percentiles.
            err_pc = np.percentile(err, percentile)
            p = percentile / 100
            plt.fill_between(
                xax_responses,
                y_2_week + (err_pc * p),
                y_2_week - (err_pc * p),
                color="orange",
                alpha=alpha / 100,
                label=f"{percentile}% of regression error",
            )
        plt.plot(xax_responses, y_daily, color="black", lw=2, label="daily prediction")
        plt.plot(
            xax_responses, y_2_week, color="red", lw=2, label="2 week sliding window"
        )
        plt.ylabel("Y. trans (mm)")
        plt.title(f"Year {year}")
        if y_i == 0:
            legend()
        if y_i == 2:
            plt.xlabel("Days in year")
        else:
            plt.tick_params("x", bottom=False, labelbottom=False)
    equal_lims("y", 3, 1)
    plt.suptitle(f"Predicting long-term effects at X = {x} m, Z = {z} m")
    plt.tight_layout(rect=[0, 0.03, 1, 0.95])
    plt.savefig(config.get_image_path("classify/ps", "regress-2.pdf"))
def plot_removal_3(config: Config, x: float, z: float):
    """Classification accuracy of a threshold detector for pier settlement.

    Builds confusion-matrix counts (TP/FP/TN/FN) and accuracy for each
    (settlement amount, threshold) pair, comparing simulated healthy responses
    against responses with a settling pier, and saves the matrices as images.
    """
    # First calculate the linear model.
    response_type = model.RT.YTrans
    weather_2018 = temperature.load("holly-springs-18")
    weather_2018["temp"] = temperature.resize(weather_2018["temp"], year=2018)
    start_date, end_date = (
        weather_2018["datetime"].iloc[0].strftime(temperature.f_string),
        weather_2018["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    install_day = 37
    start_day, end_day = install_day, install_day + 365
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), time=60
    )
    responses_2018 = (
        sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            weather=weather_2018,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
        )[0]
        * 1e3
    )
    num_samples = 365 * 24
    temps = util.apply(weather_2018["temp"], np.arange(num_samples))
    rs = util.apply(responses_2018, np.arange(num_samples))
    # NOTE(review): the fitted model ``lr`` is not used below — confirm intended.
    lr, _ = temperature.regress_and_errors(temps, rs)
    # Calculate long-term weather.
    NUM_YEARS = 5
    PIER = 5
    long_weather = deepcopy(weather_2018)
    long_weather["temp"] = temperature.resize(long_weather["temp"], year=2019)
    print_i(f"Repeating {NUM_YEARS} of weather data")
    long_weather = temperature.repeat(
        config, "holly-springs-18", long_weather, NUM_YEARS
    )
    print_i(f"Repeated {NUM_YEARS} of weather data")
    start_date, end_date = (
        long_weather["datetime"].iloc[0].strftime(temperature.f_string),
        long_weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    start_day = install_day + 365
    end_day = start_day + 365 * NUM_YEARS
    MAX_PS = 20
    THRESHES = np.arange(0, MAX_PS, 1)
    acc_mat = np.zeros((MAX_PS, len(THRESHES)))
    fp_mat = np.zeros(acc_mat.shape)
    fn_mat = np.zeros(acc_mat.shape)
    tp_mat = np.zeros(acc_mat.shape)
    tn_mat = np.zeros(acc_mat.shape)
    for p_i, ps in enumerate(range(MAX_PS)):
        print_i(f"Using pier settlement = {ps} mm")
        long_responses = sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            pier_settlement=[
                (
                    model.PierSettlement(pier=PIER, settlement=0.00001),
                    model.PierSettlement(pier=PIER, settlement=ps / 1e3),
                )
            ],
            install_pier_settlement=[],
            weather=long_weather,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
            ret_all=False,
            ignore_pier_creep=True,
        )
        healthy_responses = sim.responses.to(
            config=config,
            points=[model.Point(x=x, z=z)],
            traffic_array=traffic_array,
            response_type=response_type,
            with_creep=True,
            pier_settlement=[],
            install_pier_settlement=None,
            weather=long_weather,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
            ret_all=False,
            ignore_pier_creep=True,
        )
        plt.plot(healthy_responses[0] * 1e3, label="healthy")
        plt.plot(long_responses[0] * 1e3, label="pier settlement")
        plt.legend()
        plt.savefig(config.get_image_path("hello", f"q3-{p_i}.png"))
        plt.close()
        for t_i, thresh in enumerate(THRESHES):
            # Thresholds are negated: the detector triggers on downward
            # (negative) translations at or below the threshold.
            thresh *= -1
            print(thresh)
            print(max(healthy_responses[0]))
            print(min(healthy_responses[0]))
            print(max(long_responses[0]))
            print(min(long_responses[0]))
            fp = len([x for x in healthy_responses[0] * 1e3 if x <= thresh])
            tp = len([x for x in long_responses[0] * 1e3 if x <= thresh])
            tn = len([x for x in healthy_responses[0] * 1e3 if x > thresh])
            fn = len([x for x in long_responses[0] * 1e3 if x > thresh])
            acc_mat[p_i][t_i] = (tp + tn) / (tp + tn + fp + fn)
            fp_mat[p_i][t_i] = fp
            tp_mat[p_i][t_i] = tp
            fn_mat[p_i][t_i] = fn
            tn_mat[p_i][t_i] = tn
    ##################
    # Save matrices. #
    ##################
    plt.imshow(acc_mat, cmap=axis_cmap_r)
    plt.savefig(config.get_image_path("hello", f"mat.png"))
    plt.close()
    plt.imshow(fp_mat, cmap=axis_cmap_r)
    plt.savefig(config.get_image_path("hello", f"mat-fp.png"))
    plt.close()
    plt.imshow(fn_mat, cmap=axis_cmap_r)
    plt.savefig(config.get_image_path("hello", f"mat-fn.png"))
    plt.close()
    plt.imshow(tp_mat, cmap=axis_cmap_r)
    plt.savefig(config.get_image_path("hello", f"mat-tp.png"))
    plt.close()
    plt.imshow(tn_mat, cmap=axis_cmap_r)
    plt.savefig(config.get_image_path("hello", f"mat-tn.png"))
    plt.close()
def support_with_points(bridge: Bridge, delta_x: float):
    """Attach a sensor point and opposite support to each bridge support.

    For each support a sensor point is placed ``delta_x`` beyond the support's
    edge, on the side away from mid-span. Each support is also paired with the
    support mirrored about mid-span at the same Z (``opposite_support``).

    Mutates the supports in place and returns ``bridge.supports``.

    Raises:
        ValueError: if a support has no mirrored counterpart.
    """
    for support in bridge.supports:
        # Offset the sensor point away from mid-span, past the support edge.
        if support.x < bridge.length / 2:
            s_x = support.x - ((support.length / 2) + delta_x)
        else:
            s_x = support.x + ((support.length / 2) + delta_x)
        support.point = Point(x=s_x, z=support.z)
        for support_2 in bridge.supports:
            if support_2.z == support.z and np.isclose(
                support_2.x, bridge.length - support.x
            ):
                support.opposite_support = support_2
        print_w(f"Support sensor at X = {support.point.x}, Z = {support.point.z}")
        if not hasattr(support, "opposite_support"):
            raise ValueError("No opposite support")
    return bridge.supports
def plot_min_diff(config: Config, num_years: int, delta_x: float = 0.5):
    """Plot the maximum traffic-response difference between symmetric sensors.

    For each support's sensor (placed via ``support_with_points``) the maximum
    difference to its mirrored counterpart is computed, logged to
    classify/q1/min-thresh.txt, and annotated on a top view of the bridge.
    """
    plt.landscape()
    log_path = config.get_image_path("classify/q1", "min-thresh.txt")
    if os.path.exists(log_path):
        os.remove(log_path)
    install_day = 37
    start_day, end_day = install_day, 365 * num_years
    year = 2018
    weather = temperature.load("holly-springs-18")
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), 60 * 10
    )
    weather["temp"] = temperature.resize(weather["temp"], year=year)
    # weather = temperature.repeat(config, "holly-springs-18", weather, num_years)
    start_date, end_date = (
        weather["datetime"].iloc[0].strftime(temperature.f_string),
        weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    # For each support load the responses to traffic and assign to "Support".
    for s_i, support in enumerate(support_with_points(config.bridge, delta_x=delta_x)):
        support.responses = (
            sim.responses.to_traffic_array(
                config=config,
                points=[support.point],
                traffic_array=traffic_array,
                response_type=model.RT.YTrans,
                # with_creep=True,
                # weather=weather,
                # start_date=start_date,
                # end_date=end_date,
                # install_day=install_day,
                # start_day=start_day,
                # end_day=end_day,
            )[0]
            * 1e3
        )
    # Determine max difference for each sensor pair.
    for s_i, support in enumerate(config.bridge.supports):
        min1, max1 = min(support.responses), max(support.responses)
        min2, max2 = (
            min(support.opposite_support.responses),
            max(support.opposite_support.responses),
        )
        # Worst-case difference between the two sensors' extremes.
        delta_1, delta_2 = abs(min1 - max2), abs(min2 - max1)
        # max_delta = max(abs(support.responses - support.opposite_support.responses))
        support.max_delta = max(delta_1, delta_2)
        to_write = f"Max delta {support.max_delta} for support {s_i}, sensor at X = {support.point.x}, Z = {support.point.z}"
        with open(log_path, "a") as f:
            # Fixed: append a newline so each support's entry is its own
            # line instead of one long fused string.
            f.write(to_write + "\n")
    # Bridge supports.
    plot.top_view_bridge(config.bridge, lanes=True, piers=True, units="m")
    for s_i, support in enumerate(config.bridge.supports):
        # Merge each support's max delta with its partner in the group of 4,
        # so annotations are symmetric.
        if s_i % 4 == 0:
            support.max_delta = max(
                support.max_delta, config.bridge.supports[s_i + 3].max_delta
            )
        elif s_i % 4 == 1:
            support.max_delta = max(
                support.max_delta, config.bridge.supports[s_i + 1].max_delta
            )
        elif s_i % 4 == 2:
            support.max_delta = max(
                support.max_delta, config.bridge.supports[s_i - 1].max_delta
            )
        elif s_i % 4 == 3:
            support.max_delta = max(
                support.max_delta, config.bridge.supports[s_i - 3].max_delta
            )
        plt.scatter([support.point.x], [support.point.z], c="red")
        plt.annotate(
            f"{np.around(support.max_delta, 2)} mm",
            xy=(support.point.x - 3, support.point.z + 2),
            color="b",
            size="large",
        )
    plt.title("Maximum difference between symmetric sensors")
    plt.tight_layout()
    plt.savefig(config.get_image_path("classify/q1", "min-thresh.pdf"))
def plot_contour_q2(config: Config, num_years: int, delta_x: float = 0.5):
    """Plot a contour of the minimum Y translation over the bridge deck (Question 2).

    Simulates responses from traffic, temperature, shrinkage & creep over
    ``num_years`` years, takes the minimum (most negative) Y translation per
    point, symmetrizes the per-sensor values, and saves a contour plot with
    annotated sensor minima to ``classify/q2/q2-contour.pdf``.

    Args:
        config: simulation configuration (bridge geometry, output paths).
        num_years: number of years of responses to simulate.
        delta_x: offset used when placing sensors next to supports.
    """
    # Select points: over the deck and the sensors!
    points = [
        Point(x=x, z=z)
        for x in np.linspace(config.bridge.x_min, config.bridge.x_max, 100)
        for z in np.linspace(config.bridge.z_min, config.bridge.z_max, 30)
    ]
    sensor_points = [
        s.point for s in support_with_points(config.bridge, delta_x=delta_x)
    ]
    # Sensor points are appended last so they can be sliced back out below.
    points += sensor_points
    install_day = 37
    start_day, end_day = install_day, 365 * num_years
    year = 2018
    weather = temperature.load("holly-springs-18")
    # Responses aren't much from traffic, and we are getting the maximum from 4
    # sensors, so short traffic data doesn't really matter.
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), 10
    )
    weather["temp"] = temperature.resize(weather["temp"], year=year)
    # weather = temperature.repeat(config, "holly-springs-18", weather, num_years)
    start_date, end_date = (
        weather["datetime"].iloc[0].strftime(temperature.f_string),
        weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    # Generate the data!
    responses = (
        sim.responses.to(
            config=config,
            points=points,
            traffic_array=traffic_array,
            response_type=model.RT.YTrans,
            with_creep=True,
            weather=weather,
            start_date=start_date,
            end_date=end_date,
            install_day=install_day,
            start_day=start_day,
            end_day=end_day,
        )
        * 1e3
    )
    # Reduce each point's time series to its minimum (most negative) response.
    # NOTE(review): despite the variable name "max_responses", min() is used --
    # consistent with the "Minimum Y translation" plot title below.
    max_responses = [min(rs) for rs in responses]
    sensor_responses = max_responses[-len(sensor_points) :]
    responses = sim.model.Responses(
        response_type=model.RT.YTrans,
        responses=[(r, p) for r, p in zip(max_responses, points)],
        units="mm",
    ).without(without.edges(config, 2))
    # Adjust maximum responses per sensor so they are symmetric!
    # NOTE(review): this assumes support_with_points returns the same (cached)
    # Support objects on each call, so attributes set here persist -- verify.
    for s_i, support in enumerate(support_with_points(config.bridge, delta_x=delta_x)):
        support.max_response = sensor_responses[s_i]
    for support in support_with_points(config.bridge, delta_x=delta_x):
        support.max_response = min(
            support.max_response, support.opposite_support.max_response
        )
    # Take the maximum within each group of 4 supports (indices i, i+1, i+2,
    # i+3 form a symmetric group); assumes support order matches
    # config.bridge.supports -- verify against the bridge definition.
    for s_i, support in enumerate(support_with_points(config.bridge, delta_x=delta_x)):
        if s_i % 4 == 0:
            support.max_response = max(
                support.max_response, config.bridge.supports[s_i + 3].max_response
            )
        elif s_i % 4 == 1:
            support.max_response = max(
                support.max_response, config.bridge.supports[s_i + 1].max_response
            )
        elif s_i % 4 == 2:
            support.max_response = max(
                support.max_response, config.bridge.supports[s_i - 1].max_response
            )
        elif s_i % 4 == 3:
            support.max_response = max(
                support.max_response, config.bridge.supports[s_i - 3].max_response
            )
    # Contour of minima plus per-sensor annotations on a top view.
    plt.landscape()
    plot.contour_responses(config, responses, interp=(200, 60), levels=20)
    plot.top_view_bridge(config.bridge, lanes=True, piers=True, units="m")
    for s_i, support in enumerate(support_with_points(config.bridge, delta_x=delta_x)):
        plt.scatter([support.point.x], [support.point.z], c="black")
        plt.annotate(
            f"{np.around(support.max_response, 2)}",
            xy=(support.point.x - 3, support.point.z + 2),
            color="black",
            size="large",
        )
    plt.title(
        f"Minimum Y translation over {num_years} years \n from traffic, temperature, shrinkage & creep"
    )
    plt.tight_layout()
    plt.savefig(config.get_image_path("classify/q2", "q2-contour.pdf"))
    plt.close()
def plot_min_ps_1(config: Config, num_years: int, delta_x: float = 0.5):
    """Find, per support, the minimum pier settlement detectable via Question 1.

    For each support the settlement of its pier is increased in 0.1 mm steps
    until the maximum absolute difference between the sensor and its opposite
    sensor exceeds THRESH (mm). The minimum detected settlement is annotated
    on a top view of the bridge (saved to classify/q1b/q1b-min-ps.pdf) and
    appended to a text logfile.

    Args:
        config: simulation configuration (bridge geometry, output paths).
        num_years: number of years of responses to simulate.
        delta_x: offset used when placing sensors next to supports.
    """
    THRESH = 2  # Detection threshold (mm), i.e. pier settlement from question 1.
    plt.landscape()
    log_path = config.get_image_path("classify/q1b", "min-ps.txt")
    if os.path.exists(log_path):  # Start with fresh logfile.
        os.remove(log_path)
    install_day = 37
    start_day, end_day = install_day, 365 * num_years
    year = 2018
    weather = temperature.load("holly-springs-18")
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), 60 * 10
    )
    weather["temp"] = temperature.resize(weather["temp"], year=year)
    # weather = temperature.repeat(config, "holly-springs-18", weather, num_years)
    start_date, end_date = (
        weather["datetime"].iloc[0].strftime(temperature.f_string),
        weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    # For each support..
    # NOTE(review): assumes the order of support_with_points matches pier
    # numbering (pier=s_i below) -- verify against the bridge definition.
    for s_i, support in enumerate(support_with_points(config.bridge, delta_x=delta_x)):
        # ..increase pier settlement until threshold triggered.
        for settlement in np.arange(0, 10, 0.1):
            responses = (
                sim.responses.to(
                    config=config,
                    points=[support.point, support.opposite_support.point],
                    traffic_array=traffic_array,
                    response_type=model.RT.YTrans,
                    with_creep=True,
                    weather=weather,
                    start_date=start_date,
                    end_date=end_date,
                    install_day=install_day,
                    start_day=start_day,
                    end_day=end_day,
                    pier_settlement=[
                        (
                            model.PierSettlement(pier=s_i, settlement=0),
                            model.PierSettlement(pier=s_i, settlement=settlement / 1e3),
                        )
                    ],
                    skip_weather_interp=True,
                )
                * 1e3
            )
            delta = max(abs(responses[0] - responses[1]))
            to_write = f"Max delta {delta} for settlement {settlement} mm for support {s_i}, sensor at X = {support.point.x}, Z = {support.point.z}"
            print_w(to_write)
            # Because of "abs", "delta" will be positive.
            if delta > THRESH:
                break
        # Write the minimum settlement value for this support to a file.
        # Fix: append a newline so successive log entries don't run together.
        with open(log_path, "a") as f:
            f.write(to_write + "\n")
        # Annotate the support with the minimum settlement value. If the
        # threshold was never reached, this is the largest tested settlement.
        plt.scatter([support.point.x], [support.point.z], c="red")
        plt.annotate(
            f"{np.around(settlement, 2)} mm",
            xy=(support.point.x - 3, support.point.z + 2),
            color="b",
            size="large",
        )
    # Plot the results.
    plot.top_view_bridge(config.bridge, lanes=True, piers=True, units="m")
    plt.title("Minimum pier settlement detected (Question 1B)")
    plt.tight_layout()
    plt.savefig(config.get_image_path("classify/q1b", "q1b-min-ps.pdf"))
    plt.close()
def plot_min_ps_2(config: Config, num_years: int, delta_x: float = 0.5):
    """Find, per support, the minimum pier settlement detectable via Question 2.

    For each support the settlement of its pier is increased in 0.1 mm steps
    until the minimum (most negative) sensor response exceeds -THRESH (mm).
    The minimum detected settlement is annotated on a top view of the bridge
    (saved to classify/q2b/q2b-min-ps.pdf) and appended to a text logfile.

    Args:
        config: simulation configuration (bridge geometry, output paths).
        num_years: number of years of responses to simulate.
        delta_x: offset used when placing sensors next to supports.
    """
    THRESH = 6  # Detection threshold (mm), i.e. pier settlement from question 1.
    plt.landscape()
    log_path = config.get_image_path("classify/q2b", "2b-min-ps.txt")
    if os.path.exists(log_path):  # Start with fresh logfile.
        os.remove(log_path)
    install_day = 37
    start_day, end_day = install_day, 365 * num_years
    year = 2018
    weather = temperature.load("holly-springs-18")
    _0, _1, traffic_array = traffic.load_traffic(
        config, traffic.normal_traffic(config), 60 * 10
    )
    weather["temp"] = temperature.resize(weather["temp"], year=year)
    # weather = temperature.repeat(config, "holly-springs-18", weather, num_years)
    start_date, end_date = (
        weather["datetime"].iloc[0].strftime(temperature.f_string),
        weather["datetime"].iloc[-1].strftime(temperature.f_string),
    )
    # NOTE(review): assumes the order of support_with_points matches pier
    # numbering (pier=s_i below) -- verify against the bridge definition.
    for s_i, support in enumerate(support_with_points(config.bridge, delta_x=delta_x)):
        # Increase pier settlement until threshold triggered.
        for settlement in np.arange(0, 10, 0.1):
            responses = (
                sim.responses.to(
                    config=config,
                    points=[support.point],
                    traffic_array=traffic_array,
                    response_type=model.RT.YTrans,
                    with_creep=True,
                    weather=weather,
                    start_date=start_date,
                    end_date=end_date,
                    install_day=install_day,
                    start_day=start_day,
                    end_day=end_day,
                    pier_settlement=[
                        (
                            model.PierSettlement(pier=s_i, settlement=0),
                            model.PierSettlement(pier=s_i, settlement=settlement / 1e3),
                        )
                    ],
                    skip_weather_interp=True,
                )
                * 1e3
            )
            # Determine the minimum response for this level of settlement.
            max_r = min(responses[0])
            to_write = f"Min {max_r} for settlement {settlement} mm for support {s_i}, sensor at X = {support.point.x}, Z = {support.point.z}"
            print_w(to_write)
            if max_r < -THRESH:
                break
        # Write the minimum response and settlement for this support to a file.
        # Fix: append a newline so successive log entries don't run together.
        with open(log_path, "a") as f:
            f.write(to_write + "\n")
        # Annotate the support with the minimum settlement value. If the
        # threshold was never reached, this is the largest tested settlement.
        plt.scatter([support.point.x], [support.point.z], c="red")
        plt.annotate(
            f"{np.around(settlement, 2)} mm",
            xy=(support.point.x - 3, support.point.z + 2),
            color="b",
            size="large",
        )
    plot.top_view_bridge(config.bridge, lanes=True, piers=True, units="m")
    plt.title("Minimum pier settlement detected (Question 2B)")
    plt.tight_layout()
    plt.savefig(config.get_image_path("classify/q2b", "q2b-min-ps.pdf"))
    # Close the figure for consistency with the sibling plot_* functions.
    plt.close()
| 38.138593 | 148 | 0.587494 |
95c1db49e8979342f440e2ee5e1a48186d51308c | 936 | py | Python | parsers/download_data.py | bioinf-mcb/polish-microbiome-project | 0fc15b1a5afe4edf63b6be6b945ac4053e3a24f9 | [
"BSD-3-Clause"
] | null | null | null | parsers/download_data.py | bioinf-mcb/polish-microbiome-project | 0fc15b1a5afe4edf63b6be6b945ac4053e3a24f9 | [
"BSD-3-Clause"
] | null | null | null | parsers/download_data.py | bioinf-mcb/polish-microbiome-project | 0fc15b1a5afe4edf63b6be6b945ac4053e3a24f9 | [
"BSD-3-Clause"
] | null | null | null | #%%
# Download patient records from a REDCap database and save them as a CSV file.
import json
import requests
from io import StringIO
import pandas as pd
# %%
# Read the API token from a local credentials file (kept out of the repo).
with open("../db_pass", "r") as f:
    token = json.load(f)['token']
# %%
# REDCap "Export Records" API payload (flat CSV export of selected fields).
data = {
    'token': token,
    'content': 'record',
    'format': 'csv',
    'type': 'flat',
    'csvDelimiter': '',
    'rawOrLabel': 'raw',
    'rawOrLabelHeaders': 'raw',
    'exportCheckboxLabel': 'false',
    'exportSurveyFields': 'false',
    'exportDataAccessGroups': 'false',
    'returnFormat': 'csv',
    'fields': 'patient_id,age,bmi,covid_test_date,date_of_test,weight,height,admission_date,final_date,death,sex'
}
r = requests.post('http://192.168.45.244/api/',data=data)
print('HTTP Status: ' + str(r.status_code))
data = StringIO(r.text)
# %%
df = pd.read_csv(data)
# Keep only records that have a height value (idiomatic notna() replaces the
# previous element-wise `apply(lambda x: not pd.isna(x))`).
df = df[df["height"].notna()]
# Drop columns that contain no data at all.
df = df.dropna(axis=1, how='all')
# Round BMI to one decimal place.
df["bmi"] = df["bmi"].apply(lambda x: round(x, 1))
df.to_csv("metadata.csv", index=False)
print(df)
# %%
| 23.4 | 113 | 0.63141 |
95c256321ed64a1e2f22ab370936dbb097ea26b8 | 2,622 | py | Python | preprocess/sequence_stats.py | ashish-roopan/fsgan | 1582e112d0f59cd32920ac5953baec783e088cad | [
"CC0-1.0"
] | 599 | 2020-04-14T19:28:58.000Z | 2022-03-26T11:29:37.000Z | preprocess/sequence_stats.py | ashish-roopan/fsgan | 1582e112d0f59cd32920ac5953baec783e088cad | [
"CC0-1.0"
] | 157 | 2020-04-14T21:13:43.000Z | 2022-02-07T06:30:16.000Z | preprocess/sequence_stats.py | ashish-roopan/fsgan | 1582e112d0f59cd32920ac5953baec783e088cad | [
"CC0-1.0"
] | 150 | 2020-04-14T20:40:41.000Z | 2022-03-30T10:50:21.000Z | """
Sequence statistics: Count, length, bounding boxes size.
"""
import os
from glob import glob
import pickle
from tqdm import tqdm
def extract_stats(cache_path):
    """Return (count, mean length, mean bounding-box size) of pickled sequences.

    Args:
        cache_path: path to a pickle file holding a list of sequence objects,
            each supporting len() and exposing a ``size_avg`` attribute.

    Returns:
        Tuple of (number of sequences, average sequence length, average
        sequence size); (0, 0., 0.) when the file holds an empty list.
    """
    with open(cache_path, "rb") as handle:
        sequences = pickle.load(handle)
    count = len(sequences)
    if not count:
        return 0, 0., 0.
    mean_len = sum(len(seq) for seq in sequences) / count
    mean_size = sum(seq.size_avg for seq in sequences) / count
    return count, mean_len, mean_size
def main(in_dir, out_path=None, postfix='_dsfd_seq.pkl'):
    """Aggregate sequence statistics over all sequence pickles in a directory.

    Scans ``in_dir`` for files ending in ``postfix``, averages the per-file
    statistics returned by extract_stats, and writes a summary text file.

    Args:
        in_dir: directory containing the sequence pickle files.
        out_path: summary file path; defaults to <in_dir>/sequence_stats.txt.
        postfix: filename suffix used to select input files.
    """
    if out_path is None:
        out_path = os.path.join(in_dir, 'sequence_stats.txt')
    # Validate the input directory before doing any work.
    if not os.path.isdir(in_dir):
        raise RuntimeError('Input directory not exist: ' + in_dir)
    # Collect matching pickle files in deterministic order.
    pkl_paths = sorted(glob(os.path.join(in_dir, '*' + postfix)))
    progress = tqdm(pkl_paths, unit='files')
    total_count, total_len, total_size = 0., 0., 0.
    valid = 0
    for idx, pkl_path in enumerate(progress):
        seq_count, mean_len, mean_size = extract_stats(pkl_path)
        # Files without sequences do not contribute to the averages.
        if seq_count == 0:
            continue
        total_count += seq_count
        total_len += mean_len
        total_size += mean_size
        valid += 1
        progress.set_description('mean_count = %.1f, mean_len = %.1f, mean_size = %.1f, valid_vids = %d / %d' %
                                 (total_count / valid, total_len / valid, total_size / valid, valid, idx + 1))
    # Write the aggregated statistics to the summary file.
    if out_path is not None:
        with open(out_path, "w") as f:
            f.write('mean_count = %.1f\n' % (total_count / valid))
            f.write('mean_len = %.1f\n' % (total_len / valid))
            f.write('mean_size = %.1f\n' % (total_size / valid))
            f.write('valid videos = %d / %d\n' % (valid, len(pkl_paths)))
if __name__ == "__main__":
    # Parse program arguments
    # NOTE(review): the program name 'detections2sequences' does not match this
    # script's purpose (sequence statistics) -- possibly copied from a sibling
    # script; confirm before relying on it in --help output.
    import argparse
    parser = argparse.ArgumentParser('detections2sequences')
    parser.add_argument('input', metavar='DIR',
                        help='input directory')
    # NOTE(review): despite the help text, main() uses this as an output *file*
    # path (default: <input>/sequence_stats.txt).
    parser.add_argument('-o', '--output', default=None, metavar='PATH',
                        help='output directory')
    parser.add_argument('-p', '--postfix', metavar='POSTFIX', default='_dsfd_seq.pkl',
                        help='the files postfix to search the input directory for')
    args = parser.parse_args()
    main(args.input, args.output, args.postfix)
| 35.432432 | 114 | 0.622426 |
95c285b58cd596c463e5846360384f8f0b80a4d5 | 352 | py | Python | app/migrations/0004_auto_20200704_0405.py | duorah/GRanDpa-Family-Tree | 613df3fb61a8dd5eba7416ad6f8fda80e350bbe1 | [
"MIT"
] | 1 | 2020-07-13T21:03:17.000Z | 2020-07-13T21:03:17.000Z | app/migrations/0004_auto_20200704_0405.py | duorah/grandpa-family-tree | 613df3fb61a8dd5eba7416ad6f8fda80e350bbe1 | [
"MIT"
] | null | null | null | app/migrations/0004_auto_20200704_0405.py | duorah/grandpa-family-tree | 613df3fb61a8dd5eba7416ad6f8fda80e350bbe1 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.7 on 2020-07-04 04:05
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: set the Person model's plural name to 'People'."""

    # Must run after the previous migration in the 'app' application.
    dependencies = [
        ('app', '0003_auto_20200704_0400'),
    ]

    # Metadata-only change (verbose_name_plural); it alters model options,
    # not the database schema.
    operations = [
        migrations.AlterModelOptions(
            name='person',
            options={'verbose_name_plural': 'People'},
        ),
    ]
| 19.555556 | 54 | 0.602273 |
95c5e262b4da5f7adb2dec6d61c74e3194680b9a | 7,735 | py | Python | tests/test_dossier.py | openkamer/tk-api-python | 907b98ccc7602ad7e3e74f1e06f9544fbe66aba3 | [
"MIT"
] | 9 | 2017-11-16T12:39:11.000Z | 2021-10-16T19:30:52.000Z | tests/test_dossier.py | openkamer/tk-api-python | 907b98ccc7602ad7e3e74f1e06f9544fbe66aba3 | [
"MIT"
] | 1 | 2017-11-16T14:20:20.000Z | 2017-11-20T18:49:14.000Z | tests/test_dossier.py | openkamer/tk-api-python | 907b98ccc7602ad7e3e74f1e06f9544fbe66aba3 | [
"MIT"
] | 3 | 2018-09-10T18:57:39.000Z | 2020-06-09T14:13:10.000Z | import datetime
from tkapi.util import queries
from tkapi.zaak import Zaak, ZaakSoort
from tkapi.dossier import Dossier, DossierWetsvoorstel
from tkapi.document import Document
from .core import TKApiTestCase
class TestDossier(TKApiTestCase):
    """Integration tests for fetching Dossier items from the live TK API."""

    def test_get_dossiers(self):
        # An unfiltered request should honour max_items.
        dossiers = self.api.get_dossiers(filter=None, max_items=10)
        self.assertEqual(10, len(dossiers))

    def test_get_dossier_by_nummer(self):
        # Filtering on a known dossier number should yield exactly one dossier.
        nummer = 34435
        filter = Dossier.create_filter()
        filter.filter_nummer(nummer)
        dossiers = self.api.get_dossiers(filter=filter)
        self.assertEqual(len(dossiers), 1)
        dossiers[0].print_json()

    def test_dossier_filter(self):
        # Known zaak-number -> dossier-number pairs.
        self.check_dossier_filter('2016Z16486', 34537)
        self.check_dossier_filter('2016Z24906', 34640)

    def check_dossier_filter(self, zaak_nr, expected_dossier_nummer):
        """Assert that filtering dossiers by zaak number yields the expected dossier."""
        dossier_filter = Dossier.create_filter()
        dossier_filter.filter_zaak(zaak_nr)
        dossiers = self.api.get_dossiers(filter=dossier_filter)
        # for dossier in dossiers:
        #     dossier.print_json()
        self.assertEqual(len(dossiers), 1)
        # print(dossiers[0].nummer)
        self.assertEqual(dossiers[0].nummer, expected_dossier_nummer)
class TestDossiersForZaken(TKApiTestCase):
    """Check that every wetgeving-zaak in a date range appears in some dossier."""

    # Two-week window used to keep the number of zaken small.
    start_datetime = datetime.datetime(year=2016, month=1, day=1)
    end_datetime = datetime.datetime(year=2016, month=1, day=14)

    def test_get_dossiers(self):
        # Fetch all 'Wetgeving' zaken in the window.
        zaak_filter = Zaak.create_filter()
        zaak_filter.filter_date_range(
            TestDossiersForZaken.start_datetime,
            TestDossiersForZaken.end_datetime
        )
        zaak_filter.filter_soort(ZaakSoort.WETGEVING)
        zaken = self.api.get_zaken(zaak_filter)
        print('Wetgeving zaken found: ' + str(len(zaken)))
        # Fetch the dossiers that reference those zaken.
        dossier_filter = Dossier.create_filter()
        zaak_nummers = [zaak.nummer for zaak in zaken]
        print(zaak_nummers)
        dossier_filter.filter_zaken(zaak_nummers)
        dossiers = self.api.get_dossiers(filter=dossier_filter)
        # Collect every zaak number referenced by the returned dossiers.
        dossier_zaak_nummers = set()
        for dossier in dossiers:
            print('dossier.nummer: ', str(dossier.nummer))
            for zaak in dossier.zaken:
                dossier_zaak_nummers.add(zaak.nummer)
        print('dossier_zaak_nummers', dossier_zaak_nummers)
        # Log zaken that would fail the assertion below (debug aid).
        for zaak in zaken:
            if zaak.nummer not in dossier_zaak_nummers:
                print(zaak.nummer)
                # zaak.print_json()
                # self.assertTrue(zaak_nr in dossier_zaak_nummers)
        # print(zaken)
        # Every requested zaak must be covered by a dossier.
        for zaak_nummer in zaak_nummers:
            self.assertTrue(zaak_nummer in dossier_zaak_nummers)
class TestDossierAfgesloten(TKApiTestCase):
    """Verify the 'afgesloten' (closed) filter on dossiers."""

    start_datetime = datetime.datetime(year=2015, month=1, day=1)
    end_datetime = datetime.datetime.now()

    def test_filter_afgesloten(self):
        """Filtering on afgesloten=True should currently return no dossiers."""
        afgesloten_filter = Dossier.create_filter()
        afgesloten_filter.filter_afgesloten(True)
        results = self.api.get_dossiers(filter=afgesloten_filter)
        # There are currently no afgesloten dossiers, this will hopefully change in the future
        self.assertEqual(len(results), 0)
class TestDossierFilter(TKApiTestCase):
    """Integration tests for the queries.get_dossier helper."""

    def test_filter_dossier_nummer(self):
        # Fetching by number should return the dossier with that number.
        nummer = 33885
        dossier = queries.get_dossier(nummer)
        self.assertEqual(nummer, dossier.nummer)

    def test_filter_dossier_nummer_toevoeging(self):
        # Fetching by number plus 'toevoeging' (suffix) should match both fields.
        nummer = 35300
        toevoeging = 'XVI'
        dossier = queries.get_dossier(nummer, toevoeging=toevoeging)
        self.assertEqual(nummer, dossier.nummer)
        self.assertEqual(toevoeging, dossier.toevoeging)

    def test_get_document_actors(self):
        # Smoke test: walk zaken, documenten and their actors, printing each.
        # No assertions -- passes as long as no attribute access raises.
        # nummer = 35234
        nummer = 33885
        dossier = queries.get_dossier(nummer)
        for zaak in dossier.zaken:
            print('==========')
            print(zaak.soort, zaak.onderwerp, zaak.volgnummer)
            for actor in zaak.actors:
                print(actor.naam, actor.persoon.achternaam if actor.persoon else None, actor.fractie, actor.commissie)
            for doc in zaak.documenten:
                print(doc.soort, doc.onderwerp, doc.titel, doc.volgnummer)
                for actor in doc.actors:
                    print(actor.naam)
class TestWetsvoorstelDossier(TKApiTestCase):
    """Integration tests around wetsvoorstel (bill) dossiers."""

    def test_get_wetsvoorstellen_dossiers(self):
        # Fetching DossierWetsvoorstel items should honour max_items.
        max_items = 200
        wetsvoorstellen = self.api.get_items(DossierWetsvoorstel, max_items=max_items)
        self.assertEqual(max_items, len(wetsvoorstellen))

    def test_get_begroting_dossiers(self):
        # Smoke test: list dossier numbers of recent 'Begroting' zaken.
        # No assertions -- passes as long as no attribute access raises.
        filter = Zaak.create_filter()
        filter.filter_date_range(datetime.date(year=2019, month=6, day=1), datetime.date.today())
        filter.filter_soort(ZaakSoort.BEGROTING, is_or=True)
        zaken = self.api.get_zaken(filter=filter)
        for zaak in zaken:
            dossier_id = str(zaak.dossier.nummer)
            print(dossier_id)

    def test_get_dossiers_via_documenten(self):
        # Collect dossier numbers from 'Voorstel van wet' documents; also logs
        # documents that carry no dossier number at all.
        pd_filter = Document.create_filter()
        # NOTE: this date filter does not seem to work in combination with the soort filter.
        # start_datetime = datetime.datetime(year=2016, month=1, day=1)
        # end_datetime = datetime.datetime(year=2016, month=2, day=1)
        # pd_filter.filter_date_range(start_datetime, end_datetime)
        pd_filter.filter_soort('Voorstel van wet', is_or=True)
        pd_filter.filter_soort('Voorstel van wet (initiatiefvoorstel)', is_or=True)
        pds = self.api.get_documenten(pd_filter)
        dossier_nrs = []
        pds_no_dossier_nr = []
        # Only the first 10 documents are inspected to keep the test fast.
        for pd in pds[:10]:
            print(pd.dossier_nummers)
            if pd.dossier_nummers:
                dossier_nrs += pd.dossier_nummers
            else:
                pds_no_dossier_nr.append(pd)
        for pd in pds_no_dossier_nr:
            print(pd.dossier_nummers)
            print(pd.onderwerp)
        # De-duplicate and sort the collected dossier numbers.
        dossier_nrs = sorted(set(dossier_nrs))
        print(dossier_nrs)
        for dossier_nr in dossier_nrs:
            print(dossier_nr)
        print(len(dossier_nrs))

    # def test_get_dossiers(self):
    #     zaak_filter = Zaak.create_filter()
    #     start_datetime = datetime.datetime(year=2005, month=1, day=1)
    #     end_datetime = datetime.datetime.now()
    #     zaak_filter.filter_date_range(start_datetime, end_datetime)
    #     zaak_filter.filter_soort('Wetgeving')
    #     zaken = self.api.get_zaken(zaak_filter)
    #     print('Wetgeving zaken found: ' + str(len(zaken)))
    #     zaak_nummers = [zaak.nummer for zaak in zaken]
    #     print(zaak_nummers)
    #     dossiers = []
    #     nrs_batch = set()
    #     for zaak_nr in zaak_nummers:
    #         nrs_batch.add(zaak_nr)
    #         if len(nrs_batch) < 10:
    #             continue
    #         dossier_filter = Dossier.create_filter()
    #         dossier_filter.filter_zaken(nrs_batch)
    #         nrs_batch = set()
    #         dossiers_for_zaak = self.api.get_dossiers(filter=dossier_filter)
    #         if dossiers_for_zaak:
    #             dossiers += dossiers_for_zaak
    #             print('Dossier found for zaak: ' + str(zaak_nr))
    #         else:
    #             print('WARNING: No dossier found for zaak: ' + str(zaak_nr))
    #     dossier_nummers = []
    #     for dossier in dossiers:
    #         print('\n=======')
    #         print(dossier.nummer)
    #         print(dossier.afgesloten)
    #         print(dossier.organisatie)
    #         print(dossier.titel)
    #         dossier_nummers.append(dossier.nummer)
    #         # dossier.print_json()
    #     dossier_nrs = sorted(set(dossier_nummers))
    #     print(dossier_nrs)
    #     print(len(dossier_nrs))
| 39.065657 | 118 | 0.648869 |
95c7b536f4cc90da867d02e9f53e889cad554b21 | 27,649 | py | Python | Manuscript files/modflow_reference/auxfile_hexaplot.py | MaxRamgraber/Simple-AEM-Toolbox | 27751103f5e504dd675ba6225f2aee9f85d7c85d | [
"MIT"
] | 3 | 2021-06-16T12:27:22.000Z | 2022-01-04T11:21:35.000Z | Manuscript files/modflow_reference/auxfile_hexaplot.py | MaxRamgraber/Simple-AEM-Toolbox | 27751103f5e504dd675ba6225f2aee9f85d7c85d | [
"MIT"
] | null | null | null | Manuscript files/modflow_reference/auxfile_hexaplot.py | MaxRamgraber/Simple-AEM-Toolbox | 27751103f5e504dd675ba6225f2aee9f85d7c85d | [
"MIT"
] | 3 | 2021-06-17T11:20:20.000Z | 2022-01-12T09:56:56.000Z | """
This library contains several functions designed to help with the illustration of hexagonal grids
Functions:
    plot_hexagons : plots a specified data vector over a 2-D hexagon grid.
create_alpha_mask : creates an alpha shape (a concave hull), which is required for plotting contours; without it, the contour function extrapolates outside of the model area.
plot_scattered_contour : plots contour lines over an irregular grid, such as a hexagonal one.
plot_hexagons_3d : plots a 2-dimensional hexagon grid with specified z-dimensions
"""
def plot_hexagons (data, hexagon_grid_cores, hexagon_radius, hexagon_orientation = 0, colormap = 'steel', color = None, vmin = None, vmax = None, vincr = None, xlabel = None, ylabel = None, clabel = None, hide_colorbar = False, **kwargs):
    """
    Call to plot a specified vector (positions relative to node IDs) in a hexagonal grid
    @params:
        data                - Required  : vector of values for hexagonal plot, positions corresponding to cell IDs (counting from zero)
        hexagon_grid_cores  - Required  : tessellated polygons over area of interest, array of shape [n, 2]
        hexagon_radius      - Required  : radius of hexagons used for tessellation (distance to side normal)
        hexagon_orientation - Optional  : orientation of hexagon in clock-wise degrees [0 = flat top]
        colormap            - Optional  : specify a colormap as string ('steel' builds a custom blue map)
        color               - Optional  : fixed facecolor for all hexagons; overrides the colormap
        vmin                - Optional  : externally specified min value for colorbar
        vmax                - Optional  : externally specified max value for colorbar
        vincr               - Optional  : specified value increment for colorbar
        xlabel              - Optional  : string for xlabel
        ylabel              - Optional  : string for ylabel
        clabel              - Optional  : string for colorbar label
        hide_colorbar       - Optional  : suppress the colorbar if True
        **kwargs            - Optional  : keyword arguments for matplotlib.patches.RegularPolygon

    Fixes vs. the previous version: 'vrange' and 'vincr' are now computed for
    every vmin/vmax combination (previously a NameError/TypeError occurred
    when the caller supplied both limits), and constant data no longer causes
    a division by zero.
    """
    import matplotlib
    import numpy as np
    import math
    #--------------------------------------------------------------------------
    # Prepare data for plotting
    #--------------------------------------------------------------------------
    # Fill in missing colour-scale limits from the data.
    if vmin is None:
        vmin = np.min(data)
    if vmax is None:
        vmax = np.max(data)
    # The colour normalization uses the pre-snap range.
    vrange = vmax - vmin
    if vincr is None:
        vincr = vrange / 100 if vrange != 0 else 1.
    if vrange == 0:
        vrange = 1.  # avoid division by zero for constant data
    # Snap value range to multiples of the increment
    vmin = int(vmin / vincr) * vincr        # minimum value for colorbar
    vmax = (int(vmax / vincr) + 1) * vincr  # maximum value for colorbar
    if color is None:
        # Retrieve colormap
        if colormap == 'steel':
            # Create custom colormap 'steel' (dark to light blue)
            from matplotlib.colors import LinearSegmentedColormap
            cmap_steel = [(0.007843137, 0.305882353, 0.443137255),
                          (0.301960784, 0.592156863, 0.784313725),
                          (0.623529412, 0.776470588, 0.882352941)]
            cmaps = LinearSegmentedColormap.from_list('steel', cmap_steel, N=100)
        else:
            cmaps = colormap
        # Retrieve colormap object
        cmap = matplotlib.cm.get_cmap(cmaps)
    # Convert clockwise degrees to the radians convention of RegularPolygon
    orientation = math.radians(-hexagon_orientation + 30)
    # Hexagon radius only goes to normal of sides; convert to corner distance
    edgepoint_distance = hexagon_radius / np.cos(np.deg2rad(30))
    #--------------------------------------------------------------------------
    # Start plotting
    #--------------------------------------------------------------------------
    ax1 = matplotlib.pyplot.gca()
    # Plot one patch per hexagon ('idx' avoids shadowing the builtin 'hex')
    for idx in range(len(hexagon_grid_cores[:, 0])):
        if color is None:
            # Map the cell value onto the colormap
            rgba = cmap((data[idx] - vmin) / vrange)
            rgba = matplotlib.colors.rgb2hex(rgba)
        else:
            rgba = color
        ax1.add_patch(
            matplotlib.patches.RegularPolygon(
                (hexagon_grid_cores[idx, 0], hexagon_grid_cores[idx, 1]),  # x and y
                6,  # number of vertices
                edgepoint_distance,
                orientation=orientation,
                facecolor=rgba,
                **kwargs)
        )
    # Recompute the ax.dataLim, then update ax.viewLim using the new dataLim
    ax1.relim()
    ax1.autoscale_view()
    # Create colorbar (uses the snapped limits)
    if not hide_colorbar and color is None:
        norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax)
        sm = matplotlib.pyplot.cm.ScalarMappable(cmap=cmap, norm=norm)
        sm.set_array([])
        cbar = matplotlib.pyplot.colorbar(sm)
    # Label plot
    if xlabel is not None:
        matplotlib.pyplot.xlabel(xlabel)
    if ylabel is not None:
        matplotlib.pyplot.ylabel(ylabel)
    if clabel is not None and not hide_colorbar and color is None:
        cbar.set_label(clabel, rotation=270, labelpad=20)
def create_alpha_mask(points, distance_limit, resolution_x = 1000, resolution_y = 1000, visualization = True):
    """
    Creates interpolation grid, then masks over the alpha shape spanned up by points and defined by distance_limit.
    @params:
        points          - Required  : points spanning up alpha shape, array of shape [n, 2]
        distance_limit  - Required  : distance threshold for removing Delaunay simplices
        resolution_x    - Optional  : resolution for grid in x, default is 1000
        resolution_y    - Optional  : resolution for grid in y, default is 1000
        visualization   - Optional  : boolean for visualizing result, default is True
    Returns:
        grid_mask : array of shape [resolution_y, resolution_x] containing 1 for
        cells inside the alpha shape and 0 for cells outside. For backwards
        compatibility the result is also stored in the module-level global
        'grid_mask'.

    Fixes vs. the previous version: the deprecated/removed 'np.int' dtype is
    replaced by plain 'int' (required for NumPy >= 1.24), and the per-point
    containment loop is vectorized via Path.contains_points.
    """
    import numpy as np
    from scipy.spatial import Delaunay
    from matplotlib.collections import LineCollection
    import matplotlib.path as mplPath
    #----------------------------------------------------------------------
    # Create Grid
    #----------------------------------------------------------------------
    # Create meshgrid spanning the bounding box of the input points
    xi = np.transpose(np.linspace(min(points[:,0]), max(points[:,0]), resolution_x))
    yi = np.transpose(np.linspace(min(points[:,1]), max(points[:,1]), resolution_y))
    X, Y = np.meshgrid(xi, yi)
    # Reshape into vectors and combine into a [resolution_x*resolution_y, 2] array
    gridpoints_x = np.reshape(X, resolution_x*resolution_y)
    gridpoints_y = np.reshape(Y, resolution_x*resolution_y)
    gridpoints = np.transpose(np.asarray((gridpoints_x, gridpoints_y)))
    #----------------------------------------------------------------------
    # Create Alpha Shape
    #----------------------------------------------------------------------
    # Start Delaunay triangulation
    tri = Delaunay(points)
    # Auxiliary function for plotting, if required
    if visualization == True:
        import matplotlib.pyplot as plt
        edges = set()
        edge_points = []
        def add_edge(i, j):
            """Add a line between the i-th and j-th points, if not in the list already"""
            if (i, j) in edges or (j, i) in edges:
                # already added
                return
            edges.add( (i, j) )
            edge_points.append(points[ [i, j] ])
    # Remove simplices with any side longer than distance_limit
    simplex_flag = np.zeros(len(tri.simplices[:,0]))    # Flags bad simplices
    counter = 0
    for ia, ib, ic in tri.vertices:
        # ia, ib, ic = indices of corner points of the triangle
        if np.sqrt((points[ia,0]-points[ib,0])**2+(points[ia,1]-points[ib,1])**2) < distance_limit and \
           np.sqrt((points[ia,0]-points[ic,0])**2+(points[ia,1]-points[ic,1])**2) < distance_limit and \
           np.sqrt((points[ib,0]-points[ic,0])**2+(points[ib,1]-points[ic,1])**2) < distance_limit:
            # all sides within threshold, keep it
            simplex_flag[counter] = 0
        else:
            # simplex has at least one side larger than threshold, flag it
            simplex_flag[counter] = 1
        counter += 1
    tri.simplices = tri.simplices[simplex_flag == 0,:]  # Remove bad simplices
    tri.vertices = tri.vertices[simplex_flag == 0,:]    # Remove bad simplices
    # Visualize, if requested
    if visualization == True:
        # Mark all remaining simplices
        for ia, ib, ic in tri.vertices:
            add_edge(ia, ib)
            add_edge(ib, ic)
            add_edge(ic, ia)
        # Draw them
        lines = LineCollection(edge_points)
        plt.figure()
        plt.gca().add_collection(lines)
        plt.plot(points[:,0], points[:,1], 'o')
    #----------------------------------------------------------------------
    # Mask over Alpha Shape
    #----------------------------------------------------------------------
    # Prepare point flag ('int' instead of the removed 'np.int')
    flag_gridpoints = np.zeros(len(gridpoints[:,0]), dtype = int)
    # Evaluate gridpoints: a point is inside if ANY remaining simplex contains it
    for sim in range(len(tri.simplices[:,0])):
        # Print progress bar
        cv = sim
        mv = len(tri.simplices[:,0])-1
        print('\r%s |%s| %s%% %s' % ('Masking: ', '\033[33m'+'█' * int(50 * cv // mv) + '-' * (50 - int(50 * cv // mv))+'\033[0m', ("{0:." + str(1) + "f}").format(100 * (cv / float(mv))), ' Complete'), end = '\r')
        # Create simplex path
        bbPath = mplPath.Path(np.array([points[tri.simplices[sim,0],:],
                                        points[tri.simplices[sim,1],:],
                                        points[tri.simplices[sim,2],:],
                                        points[tri.simplices[sim,0],:]]))
        # Vectorized containment test (replaces the per-point Python loop)
        flag_gridpoints[bbPath.contains_points(gridpoints)] = 1
    # Plot, if required
    if visualization == True:
        plt.scatter(gridpoints[flag_gridpoints == 1,0], gridpoints[flag_gridpoints == 1,1],color = 'g')
        plt.scatter(gridpoints[flag_gridpoints == 0,0], gridpoints[flag_gridpoints == 0,1],color = 'r')
    # Reshape flag_gridpoints into a 2D array; the global is kept for
    # backwards compatibility with callers reading it from the module.
    global grid_mask
    grid_mask = np.reshape(flag_gridpoints,(resolution_y,resolution_x))
    # Return result
    return grid_mask
def plot_scattered_contour(x, y, data, resolution_x=1000, resolution_y=1000,
                           grid_mask = None, vmin = None, vmax = None, vincr = None, suppress_clabel = False,
                           **kwargs):
    """
    Call to plot contour of scattered data
    @params:
        x               - Required  : x-coordinate
        y               - Required  : y-coordinate
        data            - Required  : data for the contours
        resolution_x    - Optional  : resolution of auxiliary grid in x
        resolution_y    - Optional  : resolution of auxiliary grid in y
        grid_mask       - Optional  : mask array of dimension [resolution_y, resolution_x];
                                      None (or a length-1 placeholder) disables masking
        vmin            - Optional  : min value for contour
        vmax            - Optional  : max value for contour
        vincr           - Optional  : increment for contour
        suppress_clabel - Optional  : flag whether contours should be labeled, False by default
        **kwargs        - Optional  : keyword arguments for matplotlib.pyplot.contour

    Fixes vs. the previous version: omitting 'grid_mask' or 'vmin'/'vmax' no
    longer raises a TypeError (len(None) and None < -1000 respectively), and
    'scipy.interpolate' is imported explicitly rather than relying on
    'import scipy' exposing the submodule.
    """
    import numpy as np
    import matplotlib
    import scipy.interpolate
    #--------------------------------------------------------------------------
    # Integrity checks
    #--------------------------------------------------------------------------
    # A mask counts as provided unless it is None or the length-1 placeholder.
    have_mask = grid_mask is not None and len(grid_mask) != 1
    # Check if grid_mask matches meshgrid dimensions
    if have_mask:
        if len(grid_mask[:, 0]) != resolution_y or len(grid_mask[0, :]) != resolution_x:
            raise Exception('Grid mask dimensions must match resolution in x and y!')
    # Extract vmin and vmax from the data if not specified -- this must happen
    # BEFORE the dried-cell check below, which compares vmin numerically.
    if vmin is None or vmax is None:
        vmin = np.min(data)
        vmax = np.max(data)
    # Check if one of the cells has dried; this algorithm can't handle that yet
    if vmin < -1000:
        print('\033[31m'+'WARNING:'+'\033[0m'+' Dried cells detected. Contour not printed.')
        return
    # Set vincr, if not specified
    if vincr is None:
        vincr = (vmax - vmin) / 10
    # Snap value range to multiples of the increment
    vmin = int(vmin / vincr) * vincr        # minimum contour level
    vmax = (int(vmax / vincr) + 1) * vincr  # maximum contour level
    #--------------------------------------------------------------------------
    # Prepare data for plotting
    #--------------------------------------------------------------------------
    # Convert source material into required format
    source = np.transpose(np.asarray([x, y]))
    # Create and convert target grid
    xi = np.transpose(np.linspace(min(x), max(x), resolution_x))
    yi = np.transpose(np.linspace(min(y), max(y), resolution_y))
    X, Y = np.meshgrid(xi, yi)
    target = np.transpose(np.asarray([X, Y]))
    # Interpolate the scattered data onto the regular grid and transpose
    Z = scipy.interpolate.griddata(source, data, target)
    Z = np.transpose(Z)
    # Mask values outside the model area, if a mask was specified
    if have_mask:
        Z[grid_mask == 0] = float('NaN')
    # Contour levels
    levels = np.arange(vmin, vmax, vincr)
    #--------------------------------------------------------------------------
    # Plot the contours
    #--------------------------------------------------------------------------
    CS = matplotlib.pyplot.contour(xi, yi, Z, levels=levels, **kwargs)
    if not suppress_clabel:
        matplotlib.pyplot.clabel(CS, inline=1, inline_spacing = 0)
    return
def plot_hexagons_3d(grid, zdim, hexagon_radius, hexagon_orientation = 0, xlabel = 'x', ylabel = 'y', zlabel = 'z', clabel = 'depth', depth_colormap = 'steel', alpha = 1, **kwargs):
    """
    Call to plot a hexagonal grid in 3-D as extruded prisms, colored by cell depth
    @params:
        grid                - Required  : x-y-coordinates of center of hexagons, array of form [nx2]
        zdim                - Required  : bottom and top elevation of hexagon cells, array of form [nx2]
        hexagon_radius      - Required  : radius of hexagons used for tessellation
        hexagon_orientation - Required  : orientation of hexagon in clock-wise degrees [0 = flat top]
        xlabel              - Optional  : label for x-axis
        ylabel              - Optional  : label for y-axis
        zlabel              - Optional  : label for z-axis
        clabel              - Optional  : label for colorbar
        depth_colormap      - Optional  : string of colormap, or 'None' for a neutral face color
        alpha               - Optional  : alpha value for transparency of polygons, default is 1
        **kwargs            - Optional  : keyword arguments for Poly3DCollection
    """
    import math
    import numpy as np
    import matplotlib
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    from mpl_toolkits.mplot3d.art3d import Poly3DCollection
    # Resolve the requested colormap; 'steel' is a custom three-color ramp
    if depth_colormap == 'steel':
        from matplotlib.colors import LinearSegmentedColormap
        cmap_steel = [(0.007843137,0.305882353,0.443137255), (0.301960784,0.592156863,0.784313725),(0.623529412,0.776470588,0.882352941)]
        cmaps = LinearSegmentedColormap.from_list('steel', cmap_steel, N=100)
    else:
        cmaps = depth_colormap
    # Initialize figure
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # Hexagon radius only goes to normal of sides; corners are further out
    edgepoint_distance = hexagon_radius/np.cos(np.deg2rad(30))
    # Determine depth range used to scale cell colors
    vmin = np.min(zdim[:,1]-zdim[:,0])
    vmax = np.max(zdim[:,1]-zdim[:,0])
    c_range = vmax-vmin
    # Retrieve the colormap once, before the loop (the original fetched it per
    # cell and left 'rgba' undefined whenever depth_colormap == 'None',
    # raising a NameError at the first set_facecolor call)
    cmap = None
    if depth_colormap != 'None':
        cmap = matplotlib.cm.get_cmap(cmaps)
    # Plot one prism per hexagon cell
    for cell in range(len(grid[:,0])):
        # Read top and bottom elevation
        zbot = zdim[cell,0]
        ztop = zdim[cell,1]
        # Pre-allocate coordinate matrix: rows 0-5 are the top hexagon,
        # rows 6-11 the bottom hexagon
        Z = np.zeros((12,3))
        # Determine cell color: depth-mapped, or a neutral grey fallback
        if cmap is not None:
            rgba = list(cmap((ztop-zbot-vmin)/c_range))
            rgba[3] = alpha
        else:
            rgba = [0.5, 0.5, 0.5, alpha]
        # Compute the six corner points of the hexagon
        counter = 0
        for angle in range(0-hexagon_orientation, 420-hexagon_orientation, 60):
            if counter < 6:
                px = grid[cell,0]+math.cos(math.radians(angle)) * edgepoint_distance
                py = grid[cell,1]+math.sin(math.radians(angle)) * edgepoint_distance
                Z[counter,:] = [px, py, ztop]
                Z[6+counter,:] = [px, py, zbot]
            counter += 1
        # Vertices of the six sides, the top, and the bottom of the prism
        # (the original duplicated identical if/else plotting branches here)
        face_groups = [
            [[Z[0],Z[1],Z[7],Z[6]],
             [Z[1],Z[2],Z[8],Z[7]],
             [Z[2],Z[3],Z[9],Z[8]],
             [Z[3],Z[4],Z[10],Z[9]],
             [Z[4],Z[5],Z[11],Z[10]],
             [Z[5],Z[0],Z[6],Z[11]]],
            [[Z[0],Z[1],Z[2],Z[3],Z[4],Z[5]]],
            [[Z[6],Z[7],Z[8],Z[9],Z[10],Z[11]]]]
        for verts in face_groups:
            face = Poly3DCollection(verts, **kwargs)
            face.set_facecolor(rgba)
            ax.add_collection3d(face)
    # Create colorbar, if a colormap was requested
    if cmap is not None:
        norm = matplotlib.colors.Normalize(vmin=vmin,vmax=vmax)
        sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
        sm.set_array([])
        cbar = plt.colorbar(sm)
        cbar.set_label(clabel, rotation=270, labelpad=20)
    # Label axes
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    ax.set_zlabel(zlabel)
    # Equal aspect scaling doesn't work yet, manual workaround:
    # pad every axis out to the largest coordinate range
    xyzlims = np.zeros((3,2))
    xyzlims[0,0] = np.min(grid[:,0])
    xyzlims[0,1] = np.max(grid[:,0])
    xyzlims[1,0] = np.min(grid[:,1])
    xyzlims[1,1] = np.max(grid[:,1])
    xyzlims[2,0] = np.min(zdim)
    xyzlims[2,1] = np.max(zdim)
    # Determine maximal range and the per-axis shortfall against it
    maxrange = np.max([xyzlims[0,1]-xyzlims[0,0],xyzlims[1,1]-xyzlims[1,0],xyzlims[2,1]-xyzlims[2,0]])
    xdif = maxrange - (xyzlims[0,1]-xyzlims[0,0])
    ydif = maxrange - (xyzlims[1,1]-xyzlims[1,0])
    zdif = maxrange - (xyzlims[2,1]-xyzlims[2,0])
    # Set axis limits -> equal aspect
    ax.set_xlim3d(xyzlims[0,0]-xdif/2,xyzlims[0,1]+xdif/2)
    ax.set_ylim3d(xyzlims[1,0]-ydif/2,xyzlims[1,1]+ydif/2)
    ax.set_zlim3d(xyzlims[2,0]-zdif/2,xyzlims[2,1]+zdif/2)
    # Show result
    plt.show()
def vulture_plot(incr = 1, elev = 40., fps = 50):
    """
    Creates a short animated .gif providing a flight around the 3-D model, requiring an open, compatible 3D figure
    @params:
        incr  - Optional  : degree increment for rotation frames; defines temporal resolution of .gif (default = 1)
        elev  - Optional  : elevation angle for camera (default = 40)
        fps   - Optional  : frames per second for resulting .gif; defines speed of .gif display (default 50)
    """
    # Import libraries
    import imageio
    import os
    import matplotlib.pyplot as plt
    # Retrieve axis
    ax = plt.gca()
    # Rotate, save and compile vulture plot
    images = []
    # Last azimuth produced by the loop; used as the progress-bar maximum
    # (the original hard-coded 359, which only matched incr == 1)
    mv = 360 - incr
    for cv in range(0,360,incr):
        # Rotate image; honor the requested elevation
        # (the original ignored 'elev' and hard-coded elev=40.)
        ax.view_init(elev=elev, azim=cv)
        plt.show()
        # Save it as temporary file
        plt.savefig("dummy.png")
        # Append it to saved movie
        images.append(imageio.imread("dummy.png"))
        # Remove temporary file
        os.remove("dummy.png")
        # Print progress bar
        print('\r%s |%s| %s%% %s' % ('Printing: ', '\033[33m'+'█' * int(50 * cv // mv) + '-' * (50 - int(50 * cv // mv))+'\033[0m', ("{0:." + str(1) + "f}").format(100 * (cv / float(mv))), ' Complete'), end = '\r')
    # Compile .gif
    imageio.mimsave('output_quick.gif', images,fps=fps)
def visualize_genealogy(genealogy,weights = None, rejuvenation = None,colormap = 'jet'):
    """
    Creates an inline figure visualizing the particle genealogy over one resampling step.
    @params:
        genealogy    - Required  : vector describing genealogy of resampled particles, referring to indices
        weights      - Optional  : weight of particles prior to resampling
        rejuvenation - Optional  : vector of booleans describing whether particles were rejuvenated
                                   (currently unused; kept for interface compatibility)
        colormap     - Optional  : colormap string for visualization
    """
    import numpy as np
    from IPython import get_ipython
    import matplotlib
    import matplotlib.pyplot as plt
    # Determine number of particles
    n_particles = len(genealogy)
    # Default to uniform weights if none were provided. The original tested
    # 'weights is None == True', which Python chains into
    # '(weights is None) and (None == True)' -- always False, so the default
    # was never applied and a missing 'weights' crashed below.
    if weights is None:
        weights = np.ones(n_particles)
    # Switch to inline printing
    get_ipython().run_line_magic('matplotlib', 'inline')
    # Create dummy features for the legend
    full_line = plt.Line2D([], [], color='black',label='inherited')
    dashed_line = plt.Line2D([], [], linestyle = '--', color='black',label='rejuvenated')
    particle = plt.Line2D([], [], linestyle = 'None', marker ='.', color='black',label='particle')
    # Plot legend
    plt.legend(handles=[dashed_line,full_line,particle],bbox_to_anchor=(0., -0.05, 1., .102), loc=3,
           ncol=3, mode="expand", borderaxespad=0.)
    # Determine colormap for particles
    cmap = matplotlib.cm.get_cmap(colormap)
    # Extract particle colors; guard the denominator so a single particle
    # does not divide by zero
    color_scale = max(n_particles-1, 1)
    rgba = [None] * n_particles
    for n in range(n_particles):
        rgba[n] = matplotlib.colors.rgb2hex(cmap(n/color_scale))
    # Create plot
    for n in range(n_particles):
        # Draw genealogy of current particle
        plt.plot([genealogy[n],n],[1,2],'--',c=rgba[genealogy[n]])
        # Scatter previous and current particle index
        if weights[n] == 0: # Particle weight is zero - print as greyscale
            plt.scatter(n,1,s = weights[n]/np.max(weights)*55+5,c='xkcd:medium grey')
        else:
            plt.scatter(n,1,s = weights[n]/np.max(weights)*55+5,c=rgba[n])
        plt.scatter(n,2,s=20,c=rgba[n])
    # Deactivate axes
    plt.axis('off')
    # Show, and revert to automatic printing
    plt.show()
    get_ipython().run_line_magic('matplotlib', 'qt5')
95c8f1ad4e81caf4b83710c865b7efb620f7466e | 58,889 | py | Python | tests/python/self_concepts_test.py | JulianAL-01/self-concepts | d4a5ebfdadc472535777349602c775a67aaa3823 | [
"MIT"
] | 14 | 2020-07-21T21:09:25.000Z | 2022-01-30T11:00:35.000Z | tests/python/self_concepts_test.py | JulianAL-01/self-concepts | d4a5ebfdadc472535777349602c775a67aaa3823 | [
"MIT"
] | 2 | 2020-07-28T14:46:11.000Z | 2020-07-28T14:52:23.000Z | tests/python/self_concepts_test.py | JulianAL-01/self-concepts | d4a5ebfdadc472535777349602c775a67aaa3823 | [
"MIT"
] | 5 | 2020-07-28T13:50:20.000Z | 2021-07-12T22:56:11.000Z | '''
self_concepts_test
This module serves as the unit test for self_concepts
'''
import argparse, sys
sys.path.append('../../source/python')
from self_concepts import Concept
from self_concepts import Property
from self_concepts import Relationship
from self_concepts import Ontology
from self_concepts import Blackboard
from self_concepts import Agent
from self_concepts import SelfException
# Helper functions in support of concise and verbose reporting
def parseArguments():
    '''Build the command-line parser and return the parsed arguments.'''
    argument_parser = argparse.ArgumentParser(description='Test ')
    argument_parser.add_argument(
        '-c', '--concise',
        action='store_true',
        help='test self_concept with concise results')
    return argument_parser.parse_args()
def reportHeader(message):
    '''Emit a report header: the message in verbose mode, a '#' marker in concise mode.'''
    if arguments.concise == True:
        print('#', end='')
    else:
        print(message)
def reportSection(message):
    '''Emit a section header: an indented message in verbose mode, a '*' marker in concise mode.'''
    if arguments.concise == True:
        print('*', end='')
    else:
        print('    ' + message)
def reportDetail(message):
    '''Emit a detail line: an indented message in verbose mode, a '.' marker in concise mode.'''
    if arguments.concise == True:
        print('.', end='')
    else:
        print('        ' + message)
def reportDetailFailure(message):
    '''Emit a failure marker and terminate the test run immediately.'''
    if arguments.concise == True:
        print('!')
    else:
        print('!!!!!!! ' + message)
    exit()
def reportConceptName(concept: 'Concept'):
    '''Report the class and instance name of the concept an iteration visited.'''
    class_name = concept.__class__.__name__
    reportDetail('        Function applied to ' + class_name + ' (' + concept.name + ')')
# Various functions, classes, and instances used for testing
# Concept subclass used to exercise class-filtered queries
class AnotherConcept(Concept): pass
# Note: names 1, 2, and 4 are deliberately identical to test name-based lookups
CONCEPT_NAME_1 = 'A well-formed concept'
CONCEPT_NAME_2 = 'A well-formed concept'
CONCEPT_NAME_3 = 'Another well-formed concept'
CONCEPT_NAME_4 = 'A well-formed concept'
c1 = Concept(CONCEPT_NAME_1)
c2 = Concept(CONCEPT_NAME_2)
c3 = AnotherConcept(CONCEPT_NAME_3)
c4 = Concept(CONCEPT_NAME_4)
# Property subclasses used to exercise class-filtered iteration
class AnotherProperty(Property): pass
class YetAnotherProperty(AnotherProperty): pass
PROPERTY_NAME_1 = 'A well-formed property'
PROPERTY_NAME_2 = 'A well-formed property'
PROPERTY_NAME_3 = 'Another well-formed property'
PROPERTY_NAME_4 = 'A well-formed property'
# Values cover an int, strings, and a Concept reference
PROPERTY_VALUE_1 = 42
PROPERTY_VALUE_2 = 'A value'
PROPERTY_VALUE_3 = c1
PROPERTY_VALUE_4 = 'A value'
p1 = Property(PROPERTY_NAME_1, PROPERTY_VALUE_1)
p2 = Property(PROPERTY_NAME_2, PROPERTY_VALUE_2)
p3 = AnotherProperty(PROPERTY_NAME_3, PROPERTY_VALUE_3)
p4 = Property(PROPERTY_NAME_4, PROPERTY_VALUE_4)
# Relationship subclass used to exercise class-filtered queries
class AnotherRelationship(Relationship): pass
RELATIONSHIP_NAME_1 = 'A well-formed relationship'
RELATIONSHIP_NAME_2 = 'A well-formed relationship'
RELATIONSHIP_NAME_3 = 'Another well-formed relationship'
RELATIONSHIP_NAME_4 = 'A well-formed relationship'
# Relationships wire the concept fixtures together pairwise
r1 = Relationship(RELATIONSHIP_NAME_1, c1, c2)
r2 = Relationship(RELATIONSHIP_NAME_2, c2, c3)
r3 = AnotherRelationship(RELATIONSHIP_NAME_3, c3, c1)
r4 = Relationship(RELATIONSHIP_NAME_4, c1, c4)
ONTOLOGY_NAME_1 = 'A well-formed ontology'
o1 = Ontology(ONTOLOGY_NAME_1)
BLACKBOARD_NAME_1 = 'A well-formed blackboard'
b1 = Blackboard(BLACKBOARD_NAME_1)
class AnotherAgent(Agent):
    '''Agent subclass that delegates to Agent and reports every lifecycle call, for test tracing.'''
    def activity(self,
                 parameters: 'Concept' = None):
        '''Delegate to Agent.activity, then report the call and any parameters.'''
        super().activity(parameters)
        if parameters == None:
            reportDetail('            Activity ('
                         + self.name
                         + ')')
        else:
            reportDetail('            Activity ('
                         + self.name
                         + ') with parameters ('
                         + parameters.name
                         + ')')
    def start(self,
              parameters: 'Concept' = None):
        '''Delegate to Agent.start, then report the call and any parameters.'''
        super().start(parameters)
        if parameters == None:
            reportDetail('            Start ('
                         + self.name
                         + ')')
        else:
            reportDetail('            Start ('
                         + self.name
                         + ') with parameters ('
                         + parameters.name
                         + ')')
    def stop(self,
             parameters: 'Concept' = None):
        '''Delegate to Agent.stop, then report the call and any parameters.'''
        super().stop(parameters)
        if parameters == None:
            reportDetail('            Stop ('
                         + self.name
                         + ')')
        else:
            reportDetail('            Stop ('
                         + self.name
                         + ') with parameters ('
                         + parameters.name
                         + ')')
    def pause(self,
              parameters: 'Concept' = None):
        '''Delegate to Agent.pause, then report the call and any parameters.'''
        super().pause(parameters)
        if parameters == None:
            reportDetail('            Pause ('
                         + self.name
                         + ')')
        else:
            reportDetail('            Pause ('
                         + self.name
                         + ') with parameters ('
                         + parameters.name
                         + ')')
    def isAlive(self) -> bool:
        '''Delegate to Agent.isAlive and report the call; always answers True for the tests.'''
        state = super().isAlive()
        reportDetail('            isAlive ('
                     + self.name
                     + ')')
        return True
    def status(self) -> Concept:
        '''Delegate to Agent.status and report the call; returns a fresh status Concept.'''
        state = super().status()
        reportDetail('            Status ('
                     + self.name
                     + ')')
        return Concept('Status')
    def signal(self,
               source: 'Concept',
               message: 'Concept',
               parameters: 'Concept' = None):
        '''Delegate to Agent.signal, then report the source and message of the signal.'''
        super().signal(source, message, parameters)
        reportDetail('            Signal to '
                     + self.__class__.__name__
                     + ' ('
                     + self.name
                     + ') by '
                     + source.__class__.__name__
                     + ' ('
                     + source.name
                     + ') regarding '
                     + message.__class__.__name__
                     + ' ('
                     + message.name
                     + ')')
    def connect(self,
                channel: 'Relationship',
                parameters: 'Concept' = None):
        '''Delegate to Agent.connect, then report the channel and any parameters.'''
        super().connect(channel, parameters)
        if parameters == None:
            reportDetail('            Connect ('
                         + self.name
                         + ') to a channel ('
                         + channel.name
                         + ')')
        else:
            reportDetail('            Connect ('
                         + self.name
                         + ') with parameters ('
                         + parameters.name
                         + ') to a channel ('
                         + channel.name
                         + ')')
# Agent fixtures shared by the agent-related unit tests
AGENT_NAME_1 = 'A well-formed agent'
AGENT_NAME_2 = 'Another well-formed agent'
AGENT_NAME_3 = 'Yet another well-formed agent'
a1 = AnotherAgent(AGENT_NAME_1)
a2 = AnotherAgent(AGENT_NAME_2)
a3 = AnotherAgent(AGENT_NAME_3)
# Concept unit test
def testConcept():
    '''Unit test for Concept: attribute protection, property management, and iteration.'''
    reportHeader('Concept')
    reportSection('attributes')
    if c1.name == CONCEPT_NAME_1:
        reportDetail('Correctly set and retrived name')
    else:
        reportDetailFailure('Name was not set or retrived')
    try:
        s = c1.properties
        reportDetailFailure('Properties were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to properties')
    try:
        c1.properties = set()
        reportDetailFailure('Properties were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to properties')
    reportSection('addProperty')
    c1.addProperty(p1)
    if c1.propertyExists(p1):
        reportDetail('Correctly added property')
    else:
        # Fixed: was 'reportFailure', an undefined name
        reportDetailFailure('Property was not added')
    try:
        c1.addProperty(p1)
        reportDetailFailure('Property already exists')
    except SelfException:
        reportDetail('Correctly denied adding property that already exists')
    try:
        c1.addProperty('An ill-formed property')
        reportDetailFailure('Property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied adding ill-formed property')
    reportSection('removeProperty')
    c1.removeProperty(p1)
    if not c1.propertyExists(p1):
        reportDetail('Correctly removed property')
    else:
        # Fixed: was 'reportFailure', an undefined name
        reportDetailFailure('Property was not removed')
    try:
        c1.removeProperty(p2)
        reportDetailFailure('Property exists')
    except SelfException:
        reportDetail('Correctly denied removing property that does not exist')
    try:
        c1.removeProperty('An ill-formed property')
        reportDetailFailure('Property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied removing ill-formed property')
    reportSection('removeAllProperties')
    c1.addProperty(p1)
    c1.addProperty(p2)
    c1.removeAllProperties()
    if c1.numberOfProperties() == 0:
        reportDetail('Correctly removed all properties')
    else:
        reportDetailFailure('Properties were not removed')
    reportSection('propertyExists')
    c1.addProperty(p1)
    if c1.propertyExists(p1):
        reportDetail('Correctly checked that property exists')
    else:
        reportDetailFailure('Property does not exist')
    if not c1.propertyExists(p2):
        reportDetail('Correctly checked that property does not exist')
    else:
        reportDetailFailure('Property exists')
    try:
        c1.propertyExists('An ill-formed property')
        reportDetailFailure('Property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking existence of ill-formed property')
    reportSection('numberOfProperties')
    c1.addProperty(p2)
    if c1.numberOfProperties() == 2:
        reportDetail('Correctly reported number of properties')
    else:
        reportDetailFailure('Number of properties is wrong')
    reportSection('iterateOverProperties')
    c1.iterateOverProperties(reportConceptName)
    reportDetail('Correctly iterated over properties')
    c1.iterateOverProperties(reportConceptName, PROPERTY_NAME_1)
    reportDetail('Correctly iterated over properties with given name')
    c1.iterateOverProperties(reportConceptName, None, AnotherProperty)
    reportDetail('Correctly iterated over properties with given property class')
    c1.iterateOverProperties(reportConceptName, PROPERTY_NAME_2, Property)
    reportDetail('Correctly iterated over properties with given name and property class')
    try:
        c1.iterateOverProperties(reportConceptName, None, SelfException)
        reportDetailFailure('Property class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed property class')
    try:
        c1.iterateOverProperties(reportConceptName, None, 'An ill-formed property class')
        reportDetailFailure('Property class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed property class')
# Property unit test
def testProperty():
    '''Unit test for Property: name and value attributes.'''
    reportHeader('Property')
    reportSection('attributes')
    if p3.name != PROPERTY_NAME_3:
        reportDetailFailure('Name was not set or retrived')
    else:
        reportDetail('Correctly set and retrived name')
    if p3.value != c1:
        reportDetailFailure('Value was not set or retrieved')
    else:
        reportDetail('Correctly set and retrieved value')
# Relationship unit test
def testRelationship():
    '''Unit test for Relationship: construction, edge attributes, and edge-property management.'''
    reportHeader('Relationship')
    reportSection('constructor')
    try:
        r0 = Relationship('A well-formed relationship', c1, c2)
        reportDetail('Correctly constructed relationship')
    except SelfException:
        reportDetailFailure('Relationship was not constructed')
    try:
        r0 = Relationship('A well-formed relationship', Concept, Concept)
        reportDetail('Correctly constructed relationship')
    except SelfException:
        reportDetailFailure('Relationship was not constructed')
    try:
        r0 = Relationship('An ill-formed relationship', 'An ill-formed edge', c2)
        reportDetailFailure('Edge is ill-formed')
    except SelfException:
        reportDetail('Correctly denied constructing relationship with ill-formed edge')
    try:
        r0 = Relationship('An ill-formed relationship', c1, 'An ill-formed edge')
        reportDetailFailure('Edge is ill-formed')
    except SelfException:
        reportDetail('Correctly denied constructing relationship with ill-formed edge')
    reportSection('attributes')
    r1.name = RELATIONSHIP_NAME_1
    if r1.name == RELATIONSHIP_NAME_1:
        reportDetail('Correctly set and retrived name')
    else:
        reportDetailFailure('Name was not set or retrieved')
    r1.edge1 = c1
    if r1.edge1 == c1:
        reportDetail('Correctly set and retrieved edge')
    else:
        reportDetailFailure('Edge was not set or retrieved')
    try:
        r1.edge1 = 'An ill-formed edge'
        reportDetailFailure('Edge is ill-formed')
    except SelfException:
        reportDetail('Correctly denied assigning ill-formed edge')
    try:
        r1.edge2 = 'An ill-formed edge'
        reportDetailFailure('Edge is ill-formed')
    except SelfException:
        reportDetail('Correctly denied assigning ill-formed edge')
    try:
        s = r1.edge1Properties
        reportDetailFailure('Edge properties were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to edge properties')
    try:
        r1.edge1Properties = set()
        reportDetailFailure('Edge properties were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to edge properties')
    try:
        s = r1.edge2Properties
        reportDetailFailure('Edge properties were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to edge properties')
    try:
        r1.edge2Properties = set()
        reportDetailFailure('Edge properties were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to edge properties')
    reportSection('addEdgeProperty')
    r1.addEdgeProperty(Relationship.EDGE1, p1)
    if r1.edgePropertyExists(Relationship.EDGE1, p1):
        reportDetail('Correctly added edge property')
    else:
        # Fixed: was 'reportFailure', an undefined name
        reportDetailFailure('Edge property was not added')
    try:
        r1.addEdgeProperty(Relationship.EDGE1, p1)
        reportDetailFailure('Edge property already exists')
    except SelfException:
        reportDetail('Correctly denied adding edge property that already exists')
    try:
        r1.addEdgeProperty(Relationship.EDGE1, 'An ill-formed property')
        reportDetailFailure('Edge property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied adding ill-formed edge property')
    r1.addEdgeProperty(Relationship.EDGE2, p1)
    if r1.edgePropertyExists(Relationship.EDGE2, p1):
        reportDetail('Correctly added edge property')
    else:
        # Fixed: was 'reportFailure', an undefined name
        reportDetailFailure('Edge property was not added')
    try:
        r1.addEdgeProperty(Relationship.EDGE2, p1)
        reportDetailFailure('Edge property already exists')
    except SelfException:
        reportDetail('Correctly denied adding edge property that already exists')
    try:
        r1.addEdgeProperty(Relationship.EDGE2, 'An ill-formed property')
        reportDetailFailure('Edge property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied adding ill-formed edge property')
    reportSection('removeEdgeProperty')
    r1.removeEdgeProperty(Relationship.EDGE1, p1)
    if not r1.edgePropertyExists(Relationship.EDGE1, p1):
        reportDetail('Correctly removed edge property')
    else:
        # Fixed: was 'reportFailure', an undefined name
        reportDetailFailure('Edge property was not removed')
    try:
        r1.removeEdgeProperty(Relationship.EDGE1, p2)
        # Fixed: was 'reportDetailProperty', an undefined name
        reportDetailFailure('Edge property exists')
    except SelfException:
        reportDetail('Correctly denied removing edge property that does not exist')
    try:
        r1.removeEdgeProperty(Relationship.EDGE1, 'An ill-formed property')
        reportDetailFailure('Edge property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied removing ill-formed edge property')
    r1.removeEdgeProperty(Relationship.EDGE2, p1)
    if not r1.edgePropertyExists(Relationship.EDGE2, p1):
        reportDetail('Correctly removed edge property')
    else:
        # Fixed: was 'reportFailure', an undefined name
        reportDetailFailure('Edge property was not removed')
    try:
        r1.removeEdgeProperty(Relationship.EDGE2, p2)
        reportDetailFailure('Edge property exists')
    except SelfException:
        reportDetail('Correctly denied removing edge property that does not exist')
    try:
        r1.removeEdgeProperty(Relationship.EDGE2, 'An ill-formed property')
        reportDetailFailure('Edge property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied removing ill-formed edge property')
    reportSection('removeAllEdgeProperties')
    r1.addEdgeProperty(Relationship.EDGE1, p1)
    r1.addEdgeProperty(Relationship.EDGE1, p2)
    r1.removeAllEdgeProperties(Relationship.EDGE1)
    if r1.numberOfEdgeProperties(Relationship.EDGE1) == 0:
        reportDetail('Correctly removed all edge properties')
    else:
        reportDetailFailure('Edge properties were not removed')
    r1.addEdgeProperty(Relationship.EDGE2, p1)
    r1.addEdgeProperty(Relationship.EDGE2, p2)
    r1.removeAllEdgeProperties(Relationship.EDGE2)
    if r1.numberOfEdgeProperties(Relationship.EDGE2) == 0:
        reportDetail('Correctly removed all edge properties')
    else:
        reportDetailFailure('Edge properties were not removed')
    reportSection('edgePropertyExists')
    r1.addEdgeProperty(Relationship.EDGE1, p1)
    r1.addEdgeProperty(Relationship.EDGE2, p1)
    if r1.edgePropertyExists(Relationship.EDGE1, p1):
        reportDetail('Correctly checked that edge property exists')
    else:
        reportDetailFailure('Edge property does not exist')
    if not r1.edgePropertyExists(Relationship.EDGE1, p2):
        reportDetail('Correctly checked that edge property does not exist')
    else:
        reportDetailFailure('Edge property exists')
    try:
        r1.edgePropertyExists(Relationship.EDGE1, 'An ill-formed property')
        reportDetailFailure('Edge property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking existence of ill-formed edge property')
    if r1.edgePropertyExists(Relationship.EDGE2, p1):
        reportDetail('Correctly checked that edge property exists')
    else:
        reportDetailFailure('Edge property does not exist')
    if not r1.edgePropertyExists(Relationship.EDGE2, p2):
        reportDetail('Correctly checked that edge property does not exist')
    else:
        reportDetailFailure('Edge property exists')
    try:
        r1.edgePropertyExists(Relationship.EDGE2, 'An ill-formed property')
        reportDetailFailure('Edge property is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking existence of ill-formed edge property')
    reportSection('numberOfEdgeProperties')
    r1.addEdgeProperty(Relationship.EDGE1, p2)
    r1.addEdgeProperty(Relationship.EDGE2, p2)
    if r1.numberOfEdgeProperties(Relationship.EDGE1) == 2:
        reportDetail('Correctly reported number of edge properties')
    else:
        reportDetailFailure('Number of edge properties is wrong')
    if r1.numberOfEdgeProperties(Relationship.EDGE2) == 2:
        reportDetail('Correctly reported number of edge properties')
    else:
        reportDetailFailure('Number of edge properties is wrong')
    reportSection('iterateOverEdgeProperties')
    r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName)
    reportDetail('Correctly iterated over edge properties')
    r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, PROPERTY_NAME_1)
    reportDetail('Correctly iterated over edge properties with given name')
    r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, None, AnotherProperty)
    reportDetail('Correctly iterated over edge properties with given property class')
    r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, PROPERTY_NAME_2, Property)
    reportDetail('Correctly iterated over edge properties with given name and property class')
    try:
        r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, None, SelfException)
        reportDetailFailure('Property class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed property class')
    try:
        r1.iterateOverEdgeProperties(Relationship.EDGE1, reportConceptName, None, 'An ill-formed property class')
        reportDetailFailure('Edge property class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed edge property class')
    r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName)
    reportDetail('Correctly iterated over edge properties')
    r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, PROPERTY_NAME_1)
    reportDetail('Correctly iterated over edge properties with given name')
    r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, None, AnotherProperty)
    reportDetail('Correctly iterated over edge properties with given property class')
    r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, PROPERTY_NAME_2, Property)
    reportDetail('Correctly iterated over edge properties with given name and property class')
    try:
        r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, None, SelfException)
        reportDetailFailure('Property class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed property class')
    try:
        r1.iterateOverEdgeProperties(Relationship.EDGE2, reportConceptName, None, 'An ill-formed property class')
        reportDetailFailure('Edge property class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed edge property class')
# Ontology unit test
def testOntology():
    """Exercise the Ontology class: attribute encapsulation, concept and
    relationship add/remove/lookup, bound/unbound concept queries, and the
    various iteration methods, reporting each result via the report helpers.

    Relies on the module-level fixtures o1, c1-c4, r1-r4, the report*
    helpers, and SelfException.
    """
    reportHeader('Ontology')
    reportSection('attributes')
    if o1.name == ONTOLOGY_NAME_1:
        reportDetail('Correctly set and retrived name')
    else:
        reportDetailFailure('Name was not set or retrived')
    try:
        s = o1.concepts
        reportDetailFailure('Concepts were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to concepts')
    try:
        o1.concepts = set()
        reportDetailFailure('Concepts were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to concepts')
    try:
        s = o1.relationships
        reportDetailFailure('Relationships were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to relationships')
    try:
        o1.relationships = set()
        reportDetailFailure('Relationships were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to relationships')
    reportSection('addConcept')
    o1.addConcept(c1)
    if o1.conceptExists(c1):
        reportDetail('Correctly added concept')
    else:
        reportFailure('Concept was not added')
    try:
        o1.addConcept(c1)
        reportDetailFailure('Concept already exists')
    except SelfException:
        reportDetail('Correctly denied adding concept that already exists')
    try:
        o1.addConcept('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied adding ill-formed concept')
    reportSection('removeConcept')
    o1.removeConcept(c1)
    if not o1.conceptExists(c1):
        reportDetail('Correctly removed concept')
    else:
        reportFailure('Concept was not removed')
    try:
        o1.removeConcept(c2)
        reportDetailFailure('Concept exists')
    except SelfException:
        reportDetail('Correctly denied removing concept that does not exist')
    try:
        o1.removeConcept('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied removing an ill-formed concept')
    o1.addConcept(c1)
    o1.addConcept(c2)
    o1.addRelationship(r1)
    try:
        o1.removeConcept(c1)
        reportDetailFailure('Concept is bound')
    except SelfException:
        reportDetail('Correctly denied removing concept that is bound')
    reportSection('removeAllConcepts')
    o1.removeRelationship(r1)
    o1.removeAllConcepts()
    if o1.numberOfConcepts() == 0:
        reportDetail('Correctly removed all concepts')
    else:
        reportDetailFailure('Concepts were not removed')
    o1.addConcept(c1)
    o1.addConcept(c2)
    o1.addRelationship(r1)
    try:
        o1.removeAllConcepts()
        reportDetailFailure('Concepts are bound')
    except SelfException:
        reportDetail('Correctly denied removing concepts that are bound')
    o1.removeRelationship(r1)
    o1.removeConcept(c2)
    o1.removeConcept(c1)
    reportSection('conceptExists')
    o1.addConcept(c1)
    if o1.conceptExists(c1):
        reportDetail('Correctly checked that concept exists')
    else:
        reportDetailFailure('Concept does not exist')
    if not o1.conceptExists(c2):
        reportDetail('Correctly checked that concept does not exist')
    else:
        reportDetailFailure('Concept exists')
    try:
        o1.conceptExists('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking existence of ill-formed concept')
    reportSection('numberOfConcepts')
    o1.addConcept(c2)
    if o1.numberOfConcepts() == 2:
        reportDetail('Correctly reported number of concepts')
    else:
        reportDetailFailure('Number of concepts is wrong')
    reportSection('iterateOverConcepts')
    o1.addConcept(c3)
    o1.iterateOverConcepts(reportConceptName)
    reportDetail('Correctly iterated over concepts')
    o1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_1)
    reportDetail('Correctly iterated over concepts with given name')
    o1.iterateOverConcepts(reportConceptName, None, AnotherConcept)
    reportDetail('Correctly iterated over concepts with given concept class')
    o1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
    reportDetail('Correctly iterated over concepts with given name and concept class')
    try:
        o1.iterateOverConcepts(reportConceptName, None, SelfException)
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    try:
        o1.iterateOverConcepts(reportConceptName, None, 'An ill-formed concept class')
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    reportSection('addRelationship')
    o1.addRelationship(r1)
    o1.addRelationship(r2)
    o1.addRelationship(r3)
    if o1.numberOfRelationships() == 3:
        reportDetail('Correctly added relationship')
    else:
        reportDetailFailure('Relationship was not added')
    try:
        o1.addRelationship(r1)
        reportDetailFailure('Relationship already exists')
    except SelfException:
        reportDetail('Correctly denied addding relationship that already exists')
    try:
        o1.addRelationship('An ill-formed relationship')
        reportDetailFailure('Relationship is ill-formed')
    except SelfException:
        reportDetail('Correctly denied adding ill-formed relationship')
    try:
        o1.addRelationship(r4)
        # FIX: was reportDetailFalure (undefined name) -- the failure path
        # would have raised NameError instead of recording the failure.
        reportDetailFailure('Relationship is not closed')
    except SelfException:
        reportDetail('Correctly denied adding relationship that is not closed')
    reportSection('removeRelationship')
    o1.removeRelationship(r3)
    if not o1.relationshipExists(r3):
        reportDetail('Correctly remove relationship')
    else:
        reportDetailFailure('Relationship was not removed')
    try:
        o1.removeRelationship(r3)
        reportDetailFailure('Relationship exists')
    except SelfException:
        reportDetail('Corectly denied removing relationship that does not exist')
    try:
        o1.removeRelationship('An ill-formed relationship')
        reportDetailFailure('Relationship is ill-formed')
    except SelfException:
        reportDetail('Correctly denied removing ill-formed relationship')
    reportSection('removeAllRelationships')
    o1.removeAllRelationships()
    if o1.numberOfRelationships() == 0:
        reportDetail('Correctly removed all relationships')
    else:
        reportDetailFailure('Relationships were not removed')
    reportSection('relationshipExists')
    o1.addRelationship(r1)
    if o1.relationshipExists(r1):
        reportDetail('Correctly checked that relationship exists')
    else:
        reportDetailFailure('Relationship does not exist')
    if not o1.relationshipExists(r3):
        reportDetail('Correctly checked that relationship does not exist')
    else:
        reportDetailFailure('Relationship exists')
    try:
        o1.relationshipExists('An ill-formed relationship')
        reportDetailFailure('Relationship is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking existance of ill-formed relationship')
    reportSection('numberOfRelationship')
    o1.addRelationship(r2)
    if o1.numberOfRelationships() == 2:
        reportDetail('Correctly reported number of relationships')
    else:
        reportDetailFailure('Number of relationships is wrong')
    reportSection('iterateOverRelationships')
    o1.addRelationship(r3)
    o1.iterateOverRelationships(reportConceptName)
    reportDetail('Correctly iterated over relationships')
    o1.iterateOverRelationships(reportConceptName, RELATIONSHIP_NAME_1)
    reportDetail('Correctly iterated over relationships with given name')
    o1.iterateOverRelationships(reportConceptName, None, AnotherRelationship)
    reportDetail('Correctly iterated over relationships with given relationship class')
    o1.iterateOverRelationships(reportConceptName, RELATIONSHIP_NAME_2, Relationship)
    reportDetail('Correctly iterated over relationshps with given name and concept class')
    try:
        o1.iterateOverRelationships(reportConceptName, None, SelfException)
        reportDetailFailure('Relationship class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed relationship class')
    try:
        o1.iterateOverRelationships(reportConceptName, None, 'An ill-formed relationship class')
        reportDetailFailure('Relationship class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed relationship class')
    reportSection('conceptIsBound')
    if o1.conceptIsBound(c1):
        reportDetail('Correctly checked that concept is bound')
    else:
        reportDetailFailure('Concept is not bound')
    if not o1.conceptIsBound(c4):
        reportDetail('Correctly checked that concept is not bound')
    else:
        reportDetailFailure('Concept is bound')
    try:
        o1.conceptIsBound('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking if an ill-formed concept is bound')
    reportSection('numberOfUnboundConcepts')
    o1.addConcept(c4)
    if o1.numberOfUnboundConcepts() == 1:
        reportDetail('Correctly reported number of unbound concepts')
    else:
        reportDetailFailure('Number of unbound concepts is wrong')
    reportSection('numberOfBoundConcepts')
    if o1.numberOfBoundConcepts() == 3:
        reportDetail('Correctly reported number of bound concepts')
    else:
        reportDetailFailure('Number of bound concepts is wrong')
    reportSection('iterateOverUnboundConcepts')
    o1.iterateOverUnboundConcepts(reportConceptName)
    reportDetail('Correctly iterated over unbound concepts')
    o1.iterateOverUnboundConcepts(reportConceptName, CONCEPT_NAME_1)
    reportDetail('Correctly iterated over unbound concepts with given name')
    o1.iterateOverUnboundConcepts(reportConceptName, None, AnotherConcept)
    reportDetail('Correctly iterated over unbound concepts with given concept class')
    o1.iterateOverUnboundConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
    reportDetail('Correctly iterated over unbound concepts with given name and concept class')
    try:
        o1.iterateOverUnboundConcepts(reportConceptName, None, SelfException)
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    try:
        o1.iterateOverUnboundConcepts(reportConceptName, None, 'An ill-formed concept class')
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    reportSection('iterateOverBoundConcepts')
    o1.iterateOverBoundConcepts(reportConceptName)
    reportDetail('Correctly iterated over bound concepts')
    o1.iterateOverBoundConcepts(reportConceptName, CONCEPT_NAME_1)
    reportDetail('Correctly iterated over bound concepts with given name')
    o1.iterateOverBoundConcepts(reportConceptName, None, AnotherConcept)
    reportDetail('Correctly iterated over bound concepts with given concept class')
    o1.iterateOverBoundConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
    reportDetail('Correctly iterated over bound concepts with given name and concept class')
    try:
        o1.iterateOverBoundConcepts(reportConceptName, None, SelfException)
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    try:
        o1.iterateOverBoundConcepts(reportConceptName, None, 'An ill-formed concept class')
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
# Blackboard unit test
def testBlackboard():
    """Exercise the Blackboard class: attribute encapsulation, concept
    publish/unpublish, publisher lookup and signaling, per-concept and
    per-class subscriptions, and subscriber signaling, reporting each
    result via the report helpers.

    Relies on the module-level fixtures b1, a1-a3, c1-c4, the report*
    helpers, Concept/AnotherConcept, and SelfException.
    """
    reportHeader('Blackboard')
    reportSection('attributes')
    if b1.name == BLACKBOARD_NAME_1:
        reportDetail('Correctly set and retrieved name')
    else:
        reportDetailFailure('Name was not set or retrieved')
    try:
        s = b1.concepts
        reportDetailFailure('Concepts were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to concepts')
    try:
        b1.concepts = set()
        reportDetailFailure('Concepts were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to concepts')
    try:
        s = b1.conceptClasses
        reportDetailFailure('Concepts classes were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to concept classes')
    try:
        b1.conceptClasses = set()
        reportDetailFailure('Concept classes were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to concept classes')
    try:
        s = b1.publications
        reportDetailFailure('Publications were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to publications')
    try:
        b1.publications = set()
        reportDetailFailure('Publications were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to publications')
    try:
        s = b1.conceptSubscriptions
        reportDetailFailure('Subscriptions were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to subsubscriptions')
    try:
        b1.conceptSubscriptions = set()
        reportDetailFailure('Subscriptions were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to subscriptions')
    try:
        s = b1.classSubscriptions
        reportDetailFailure('Class subscriptions were directly accessed')
    except SelfException:
        reportDetail('Correctly denied direct access to class subscriptions')
    try:
        b1.classSubscriptions = set()
        reportDetailFailure('Class subscriptions were directly assigned')
    except SelfException:
        reportDetail('Correctly denied direct assignment to class subscriptions')
    reportSection('publishConcept')
    b1.publishConcept(a1, c1)
    if b1.conceptExists(c1):
        reportDetail('Correctly published concept')
    else:
        reportDetailFailure('Concept was not published')
    b1.subscribeToConceptClass(a2, AnotherConcept)
    b1.publishConcept(a1, c3)
    if len(b1.subscribers(c3)) == 1:
        reportDetail('Correctly subscribed to concept class instance')
    else:
        reportDetailFailure('Subscription failed')
    try:
        b1.publishConcept(a1, c1)
        reportDetailFailure('Concept already exists')
    except SelfException:
        reportDetail('Correctly denied adding concept that already exists')
    try:
        b1.publishConcept('An ill-formed agent', c1)
        reportDetailFailure('Agent is ill-formed')
    except SelfException:
        reportDetail('Correctly denied publishing ill-formed agent')
    try:
        b1.publishConcept(a1, 'An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied publishing ill-formed concept')
    reportSection('unpublishConcept')
    b1.unpublishConcept(c1)
    b1.unpublishConcept(c3)
    if not b1.conceptExists(c3):
        reportDetail('Correctly unpublished concept')
    else:
        reportDetailFailure('Concept was not unpublished')
    b1.publishConcept(a1, c1)
    b1.publishConcept(a2, c2)
    b1.publishConcept(a1, c3)
    b1.unpublishConcept()
    if b1.numberOfConcepts() == 0:
        reportDetail('Correctly unpublished all concepts')
    else:
        reportDetailFailure('Concepts were not unpublished')
    try:
        b1.unpublishConcept(c3)
        reportDetailFailure('Concept exists')
    except SelfException:
        reportDetail('Correctly denied unpublishing concept that does not exist')
    try:
        b1.unpublishConcept('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied unpublishing ill-formed concept')
    reportSection('publisher')
    b1.publishConcept(a1, c1)
    if b1.publisher(c1) == a1:
        reportDetail('Correctly returned publisher')
    else:
        reportDetailFailure('Publisher was not returned')
    try:
        b1.publisher(c2)
        reportDetailFailure('Concept does not exist')
    except SelfException:
        reportDetail('Correctly denied returning publisher of concept that does not exist')
    try:
        b1.publisher('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied returning publisher of ill-formed concept')
    reportSection('signalPublisher')
    b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'), c1)
    reportDetail('Correctly signaled publisher')
    b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'))
    reportDetail('Correctly signaled publishers')
    try:
        b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'), c2)
        reportDetailFailure('Concept does not exist')
    except SelfException:
        reportDetail('Correctly denied signaling a publisher of concept that does not exist')
    try:
        b1.signalPublisher(Concept('A well-formed source'), Concept('A well-formed message'), 'An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling publisher of ill-formed concept')
    try:
        b1.signalPublisher('An ill-formed source', Concept('A well-formed message'), c1)
        # FIX: was reportDetail -- reaching this line means the expected
        # exception was NOT raised, so it must be recorded as a failure.
        reportDetailFailure('Source is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling publisher of ill-formed source')
    try:
        b1.signalPublisher(Concept('A well-formed source'), 'An ill-formed message', c1)
        reportDetailFailure('Message is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling publisher of ill-formed message')
    reportSection('conceptExists')
    if b1.conceptExists(c1):
        reportDetail('Correctly checked that concept exists')
    else:
        reportDetailFailure('Concept does not exist')
    if not b1.conceptExists(c2):
        reportDetail('Correctly checked that concept does not exist')
    else:
        reportDetailFailure('Concept exists')
    try:
        b1.conceptExists('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied checking of ill-formed concept')
    reportSection('numberOfConcepts')
    b1.publishConcept(a2, c3)
    if b1.numberOfConcepts() == 2:
        reportDetail('Correctly reported number of concepts')
    else:
        reportDetailFailure('Number of concepts is wrong')
    reportSection('iterateOverConcepts')
    b1.iterateOverConcepts(reportConceptName)
    reportDetail('Correctly iterated over concepts')
    b1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_1)
    reportDetail('Correctly iterated over concepts with given name')
    b1.iterateOverConcepts(reportConceptName, None, AnotherConcept)
    reportDetail('Correctly iterated over concepts with given concept class')
    b1.iterateOverConcepts(reportConceptName, CONCEPT_NAME_2, Concept)
    reportDetail('Correctly iterated over concepts with given name and concept class')
    try:
        b1.iterateOverConcepts(reportConceptName, None, SelfException)
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    try:
        b1.iterateOverConcepts(reportConceptName, None, 'An ill-formed concept class')
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied iterating over ill-formed concept class')
    reportSection('subscribeToConcept')
    b1.subscribeToConcept(a3, c3)
    if len(b1.subscribers(c3)) == 2:
        reportDetail('Correctly subscribed to concept')
    else:
        reportDetailFailure('Concept was not subscribed')
    try:
        b1.subscribeToConcept(a3, c3)
        reportDetailFailure('Concept is already subscribed')
    except SelfException:
        reportDetail('Correctly denied subscribing to concept more than once')
    try:
        b1.subscribeToConcept(a3, c4)
        reportDetailFailure('Concept exists')
    except SelfException:
        reportDetail('Correctly denied subscribing to concept that does not exist')
    try:
        b1.subscribeToConcept('An ill-formed agent', c3)
        reportDetailFailure('Agent is ill-formed')
    except SelfException:
        reportDetail('Correctly denied subscribing by ill-formed agent')
    try:
        b1.subscribeToConcept(a2, 'An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied subscribing to ill-formed concept')
    reportSection('unsubscribeFromConcept')
    b1.unsubscribeFromConcept()
    if len(b1.subscribers()) == 0:
        reportDetail('Correctly unsubscribed by from all concepts by all agents')
    else:
        reportDetailFailure('Concepts were not unsubscribed')
    b1.subscribeToConcept(a1, c1)
    b1.subscribeToConcept(a1, c3)
    b1.subscribeToConcept(a2, c1)
    b1.subscribeToConcept(a2, c3)
    b1.unsubscribeFromConcept(a1)
    if (len(b1.subscribers(c1)) == 1 and len(b1.subscribers(c3)) == 1):
        reportDetail('Correctly unsubscribed from all concepts by agent')
    else:
        reportDetailFailure('Concepts were not unsubscribed')
    b1.subscribeToConcept(a1, c1)
    b1.unsubscribeFromConcept(None, c1)
    if (len(b1.subscribers(c1)) == 0 and len(b1.subscribers(c3)) == 1):
        reportDetail('Correctly unsubscribied from concept by all agents')
    else:
        reportDetailFailure('Concepts were not unsubscribed')
    b1.unsubscribeFromConcept(a2, c3)
    if len(b1.subscribers(c3)) == 0:
        reportDetail('Correctly unsubscribed from concept by agent')
    else:
        reportDetailFailure('Concept was not unsubscribed')
    try:
        b1.unsubscribeFromConcept(None, c2)
        reportDetailFailure('Concept does not exist')
    except SelfException:
        reportDetail('Correctly denied unsubscribing from concept that does not exist')
    try:
        b1.unsubscribeFromConcept('An ill-formed agent', c1)
        reportDetailFailure('Agent is ill-formed')
    except SelfException:
        reportDetail('Correctly denied unsubscibing from ill-formed agent')
    try:
        b1.unsubscribeFromConcept(a1, 'An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied unsubscrbing from ill-formed concept')
    # FIX: was reportDetail -- every other section header uses reportSection.
    reportSection('subscribers')
    b1.subscribeToConcept(a1, c1)
    b1.subscribeToConcept(a2, c1)
    if len(b1.subscribers(c1)) == 2:
        reportDetail('Correctly return subscribers')
    else:
        reportDetailFailure('Subscribers were not returned')
    if len(b1.subscribers(c3)) == 0:
        reportDetail('Correctly returned subscribers')
    else:
        reportDetailFailure('Subscribers were not returned')
    if len(b1.subscribers()) == 2:
        reportDetail('Correctly returned subscribers')
    else:
        reportDetailFailure('Subscribers were not returned')
    try:
        b1.subscribers(c2)
        reportDetailFailure('Concept exists')
    except SelfException:
        reportDetail('Correctly denied returning subscribers from concept that does not exist')
    try:
        b1.subscribers('An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    # FIX: was a bare "except:" which would also swallow genuine errors
    # (NameError, KeyboardInterrupt, ...); narrowed to match every
    # sibling handler in this file.
    except SelfException:
        reportDetail('Correctly denied returning subscribers from ill-formed concept')
    # FIX: was reportDetail -- every other section header uses reportSection.
    reportSection('signalSubscribers')
    b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), c1)
    reportDetail('Correctly signaled subscribers')
    b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'))
    reportDetail('Correctly signaled subscribers')
    try:
        b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), c2)
        reportDetailFailure('Concept does not exist')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of concept that does not exist')
    try:
        b1.signalSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), 'An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of ill-formed concept')
    try:
        b1.signalSubscribers('An ill-formed source', Concept('A well-formed message'), c1)
        # FIX: was reportDetail -- no exception here means the test failed.
        reportDetailFailure('Source is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of ill-formed source')
    try:
        b1.signalSubscribers(Concept('A well-formed source'), 'An ill-formed message', c1)
        reportDetailFailure('Message is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of ill-formed message')
    reportSection('subscribeToConceptClass')
    b1.unsubscribeFromConceptClass()
    b1.subscribeToConceptClass(a1, Concept)
    b1.subscribeToConceptClass(a2, Concept)
    b1.subscribeToConceptClass(a3, AnotherConcept)
    if len(b1.classSubscribers()) == 3:
        reportDetail('Correctly subscribed to concept class')
    else:
        reportDetailFailure('Concept class was not subscribed')
    if len(b1.classSubscribers(Concept)) == 2:
        reportDetail('Correctly subscribed to concept class')
    else:
        reportDetail('Concept class was not subscribed')
    if len(b1.classSubscribers(AnotherConcept)) == 1:
        reportDetail('Correctly subscribed to concept class')
    else:
        reportDetailFailure('CConcept class was not subscribed')
    try:
        b1.subscribeToConceptClass(a1, Concept)
        reportDetailFailure('Concept class is already subscribed')
    except SelfException:
        reportDetail('Correctly denied subscribing to concept class more than once')
    try:
        b1.subscribeToConceptClass('An ill-formed agent', c3)
        reportDetailFailure('Agent is ill-formed')
    except SelfException:
        reportDetail('Correctly denied subscribing by ill-formed agent')
    try:
        b1.subscribeToConceptClass(a2, 'An ill-formed concept')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied subscribing to ill-formed concept')
    reportSection('unsubscribeFromConceptClass')
    b1.unsubscribeFromConceptClass()
    if len(b1.classSubscribers()) == 0:
        reportDetail('Correctly unsubscribed by from all concept classes by all agents')
    else:
        reportDetailFailure('Concept classes were not unsubscribed')
    b1.subscribeToConceptClass(a1, Concept)
    b1.subscribeToConceptClass(a1, AnotherConcept)
    b1.subscribeToConceptClass(a2, Concept)
    b1.subscribeToConceptClass(a3, AnotherConcept)
    b1.unsubscribeFromConceptClass(a1)
    if (len(b1.classSubscribers(Concept)) == 1 and len(b1.classSubscribers(AnotherConcept)) == 1):
        reportDetail('Correctly unsubcribed from all concept classes by agent')
    else:
        reportDetailFailure('Concept classes were not unsubscribed')
    b1.subscribeToConceptClass(a1, Concept)
    b1.unsubscribeFromConceptClass(None, Concept)
    if len(b1.classSubscribers(AnotherConcept)) == 1:
        reportDetail('Correctly unsubscribied from concept class by all agents')
    else:
        reportDetailFailure('Concept class was not unsubscribed')
    b1.unsubscribeFromConceptClass(a3, AnotherConcept)
    if len(b1.classSubscribers()) == 0:
        reportDetail('Correctly unsubscribed from concept class by agent')
    else:
        reportDetailFailure('Concept class was not unsubscribed')
    try:
        b1.unsubscribeFromConceptClass(None, c2)
        reportDetailFailure('Concept class does not exist')
    except SelfException:
        reportDetail('Correctly denied unsubscribing from concept class that does not exist')
    try:
        b1.unsubscribeFromConceptClass('An ill-formed agent', c1)
        reportDetailFailure('Agent is ill-formed')
    except SelfException:
        reportDetail('Correctly denied unsubscibing from ill-formed agent')
    try:
        b1.unsubscribeFromConceptClass(a1, 'An ill-formed concept class')
        reportDetailFailure('Concept is ill-formed')
    except SelfException:
        reportDetail('Correctly denied unsubscrbing from ill-formed concept class')
    reportSection('classSubscribers')
    b1.subscribeToConceptClass(a1, Concept)
    b1.subscribeToConceptClass(a2, Concept)
    if len(b1.classSubscribers(Concept)) == 2:
        reportDetail('Correctly return subscribers')
    else:
        reportDetailFailure('Subscribers were not returned')
    if len(b1.classSubscribers()) == 2:
        reportDetail('Correctly returned subscribers')
    else:
        reportDetailFailure('Subscribers were not returned')
    try:
        b1.classSubscribers(AnotherConcept)
        reportDetailFailure('Concept class exists')
    except SelfException:
        reportDetail('Correctly denied returning subscribers from concept class that does not exist')
    try:
        b1.classSubscribers('An ill-formed concept class')
        reportDetailFailure('Concept class is ill-formed')
    # FIX: was a bare "except:"; narrowed to SelfException like every
    # other handler so unrelated errors are not silently swallowed.
    except SelfException:
        reportDetail('Correctly denied returning subscribers from ill-formed concept class')
    reportSection('signalConceptClassSubscribers')
    b1.subscribeToConceptClass(a3, AnotherConcept)
    b1.signalClassSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), Concept)
    reportDetail('Correctly signaled subscribers')
    b1.signalClassSubscribers(Concept('A well-formed source'), Concept('A well-formed message'))
    reportDetail('Correctly signaled subscribers')
    try:
        b1.signalClassSubscribers(Concept('A well-formed source'), Concept('A well-formed message'), SelfException)
        reportDetailFailure('Concept class does not exist')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of concept class that does not exist')
    try:
        b1.signalClassSubscribers(Concept('A well-formed source'),
                                  Concept('A well-formed message'),
                                  'An ill-formed concept class')
        reportDetailFailure('Concept class is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of ill-formed concept class')
    try:
        b1.signalClassSubscribers('An ill-formed source', Concept('A well-formed message'), Concept)
        # FIX: was reportDetail -- no exception here means the test failed.
        reportDetailFailure('Source is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of ill-formed source')
    try:
        b1.signalClassSubscribers(Concept('A well-formed source'), 'An ill-formed message', Concept)
        reportDetailFailure('Message is ill-formed')
    except SelfException:
        reportDetail('Correctly denied signaling subscribers of ill-formed message')
# Agent unit test
def testAgent():
    """Exercise the Agent class: activity, start/stop/pause lifecycle,
    liveness and status queries, signaling, and connecting, reporting each
    result via the report helpers.

    Relies on the module-level fixtures a1, a2, the report* helpers,
    Concept, Relationship, and SelfException.
    """
    reportHeader('Agent')
    reportSection('activity')
    a1.activity()
    reportDetail('Correctly carried out the activity')
    a1.activity(Concept('A well-formed parameter'))
    reportDetail('Correctly carried out the activity')
    try:
        a1.activity('An ill-formed parameter')
        reportDetailFailure('Parameters are ill-formed')
    except SelfException:
        reportDetail('Correctly denied carrying out activity with ill-formed parameters')
    reportSection('start')
    a1.start()
    reportDetail('Correctly started the agent activity')
    a1.start(Concept('A well-formed parameter'))
    reportDetail('Correctly started the agent activity')
    try:
        a1.start('An ill-formed parameter')
        reportDetailFailure('Parameters are ill-formed')
    except SelfException:
        reportDetail('Correctly denied starting activity with ill-formed parameters')
    reportSection('stop')
    a1.stop()
    reportDetail('Correctly stopped the agent activity')
    a1.stop(Concept('A well-formed parameter'))
    reportDetail('Correctly stopped the agent activity')
    try:
        # FIX: copy-paste bug -- this section must probe stop(), not start().
        a1.stop('An ill-formed parameter')
        reportDetailFailure('Parameters are ill-formed')
    except SelfException:
        reportDetail('Correctly denied stopping activity with ill-formed parameters')
    reportSection('pause')
    a1.pause()
    reportDetail('Correctly paused the agent activity')
    a1.pause(Concept('A well-formed parameter'))
    reportDetail('Correctly paused the agent activity')
    try:
        # FIX: copy-paste bug -- this section must probe pause(), not start().
        a1.pause('An ill-formed parameter')
        reportDetailFailure('Parameters are ill-formed')
    except SelfException:
        reportDetail('Correctly denied pausing activity with ill-formed parameters')
    reportSection('isAlive')
    if a1.isAlive():
        reportDetail('Correctly checked that agent is alive')
    else:
        reportDetailFailure('Agent is not alive')
    reportSection('status')
    if a1.status().name == 'Status':
        reportDetail('Correctly checked agent status')
    else:
        reportDetailFailure('Agent status is wrong')
    reportSection('signal')
    a1.signal(Concept('A well-defined source'), Concept('A well-defined message'))
    reportDetail('Correctly signaled the agent')
    a1.signal(Concept('A well-defined source'), Concept('A well-defined message'), Concept('A well-defined parameter'))
    reportDetail('Correctly signaled the agent')
    try:
        a1.signal('An ill-defined source', Concept('A well-defined message'), Concept('A well-defined parameter'))
        reportDetailFailure('Source is ill-defined')
    except SelfException:
        reportDetail('Correctly denied connecting with ill-defined source')
    try:
        a1.signal(Concept('A well-defined source'), 'An ill-defined message', Concept('A well-defined parameter'))
        reportDetailFailure('Message is ill-defined')
    except SelfException:
        reportDetail('Correctly denied connecting with ill-defined message')
    try:
        a1.signal(Concept('A well-defined source'), Concept('A well-defined message'), 'An ill-defined parameter')
        reportDetailFailure('Parameters are ill-defined')
    except SelfException:
        reportDetail('Correctly denied connecting with ill-defined parameters')
    reportSection('connect')
    a1.connect(Relationship('A well-defined relationship', a1, a2))
    reportDetail('Correctly connected the agent')
    a1.connect(Relationship('A well-defined relationship', a1, a2), Concept('A well-formed parameter'))
    reportDetail('Correctly connected the agent')
    try:
        a1.connect('An ill-formed relationship', Concept('A well-formed parameter'))
        reportDetailFailure('Channel is ill-formed')
    except SelfException:
        reportDetail('Correctly denied connecting with ill-formed channel')
    try:
        a1.connect(Relationship('A well-formed relationship', a1, a2), 'An ill-formed parameter')
        reportDetailFailure('Parameters are ill-defined')
    except SelfException:
        reportDetail('Correctly denied connecting wiht ill-formed parameters')
# Test all of Self's foundational classes
arguments = parseArguments()
testConcept()
testProperty()
testRelationship()
testOntology()
testBlackboard()
testAgent()
# Clean up the output stream if reporting concisely
# (truthiness test instead of "== True"; concise is presumably a
# store_true boolean flag from parseArguments -- TODO confirm)
if arguments.concise:
    print()
| 40.252221 | 120 | 0.69558 |
95c9bf8a576fcba5f592caf1b205652fbf6c6df7 | 1,042 | py | Python | 100-200q/123.py | rampup01/Leetcode | 8450a95a966ef83b24ffe6450f06ce8de92b3efb | [
"MIT"
] | 990 | 2018-06-05T11:49:22.000Z | 2022-03-31T08:59:17.000Z | 100-200q/123.py | rampup01/Leetcode | 8450a95a966ef83b24ffe6450f06ce8de92b3efb | [
"MIT"
] | 1 | 2021-11-01T01:29:38.000Z | 2021-11-01T01:29:38.000Z | 100-200q/123.py | rampup01/Leetcode | 8450a95a966ef83b24ffe6450f06ce8de92b3efb | [
"MIT"
] | 482 | 2018-06-12T22:16:53.000Z | 2022-03-29T00:23:29.000Z | '''
Say you have an array for which the ith element is the price of a given stock on day i.
Design an algorithm to find the maximum profit. You may complete at most two transactions.
Note: You may not engage in multiple transactions at the same time (i.e., you must sell the stock before you buy again).
Example 1:
Input: [3,3,5,0,0,3,1,4]
Output: 6
Explanation: Buy on day 4 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3.
Then buy on day 7 (price = 1) and sell on day 8 (price = 4), profit = 4-1 = 3.
'''
class Solution(object):
    def maxProfit(self, prices):
        """Maximum profit from at most two buy/sell transactions.

        :type prices: List[int]
        :rtype: int
        """
        if len(prices) < 2:
            return 0
        num_days = len(prices)
        # profit[t][d]: best profit using at most t transactions up to day d.
        profit = [[0] * num_days for _ in range(3)]
        for txn in range(1, 3):
            # Best value of (profit from txn-1 transactions) - (buy price) so far.
            best_buy = -prices[0]
            for day in range(1, num_days):
                profit[txn][day] = max(profit[txn][day - 1], prices[day] + best_buy)
                best_buy = max(best_buy, profit[txn - 1][day] - prices[day])
        return profit[2][num_days - 1]
| 30.647059 | 121 | 0.579655 |
95ca4ff47bbf69d356929cfddbfe83070e5ea793 | 2,077 | py | Python | lambdas/verify_admin.py | charvi-a/320-S20-Track1 | ac97504fc1fdedb1c311773b015570eeea8a8663 | [
"BSD-3-Clause"
] | 9 | 2019-12-30T16:32:22.000Z | 2020-03-03T20:14:47.000Z | lambdas/verify_admin.py | charvi-a/320-S20-Track1 | ac97504fc1fdedb1c311773b015570eeea8a8663 | [
"BSD-3-Clause"
] | 283 | 2020-02-03T15:16:03.000Z | 2020-05-05T03:18:59.000Z | lambdas/verify_admin.py | charvi-a/320-S20-Track1 | ac97504fc1fdedb1c311773b015570eeea8a8663 | [
"BSD-3-Clause"
] | 3 | 2020-04-16T15:23:29.000Z | 2020-05-12T00:38:41.000Z | import json
from package.query_db import query
from package.dictionary_to_list import dictionary_to_list
from package.lambda_exception import LambdaException
from boto3 import client as boto3_client
def verify_admin(event, context):
    """Promote an existing user to administrator and notify them by email.

    :param event: Lambda event; must carry a 'user_id' entry.
    :param context: Lambda context object (unused).
    :raises LambdaException: 400 when user_id is missing, 404 when the user
        does not exist or the approval email cannot be sent, 405 when the
        user is already an admin.
    :return: {"statusCode": 200} on success.
    """
    # Bug fix: validate presence BEFORE int() — int(None) raises TypeError,
    # so the original 400 error path was unreachable.
    raw_user_id = event.get('user_id')
    if raw_user_id is None:
        raise LambdaException("400: user_id was not given")
    user_id = int(raw_user_id)

    sql_parameters = dictionary_to_list({'user_id': user_id})

    # Ensure the user exists.
    sql_select = """SELECT users.id FROM users WHERE users.id = :user_id"""
    response = query(sql_select, sql_parameters)
    if response['records'] == []:
        raise LambdaException("404: user does not exist")

    # Ensure the user is not already an admin.
    sql_select = """SELECT users.id FROM users WHERE users.id = :user_id and is_admin = true"""
    response = query(sql_select, sql_parameters)
    if response['records'] != []:
        raise LambdaException("405: user is already an admin")

    # Flag the user as admin and add the matching admins row.
    sql_update = """UPDATE users SET is_admin = true WHERE users.id = :user_id"""
    query(sql_update, sql_parameters)
    sql_insert = """INSERT INTO admins(admin_id, user_id, is_pending) VALUES(:user_id, :user_id, false)
    """
    query(sql_insert, sql_parameters)

    # Send the approval email via the approval_email Lambda (async invoke).
    lambda_client = boto3_client('lambda')
    email_event = {
        "user_id": user_id,
        "approved_role": "admin"
    }
    try:
        lambda_client.invoke(FunctionName="approval_email",
                             InvocationType='Event',
                             Payload=json.dumps(email_event))
    except Exception as e:
        raise LambdaException("404: Unable to send approval email " + str(e))

    return {
        "statusCode": 200
    }
| 42.387755 | 148 | 0.639384 |
95cadfb3b8d6c3a18abd5334655fd77acc7c9759 | 4,821 | py | Python | run.py | Galaxy-SynBioCAD/rp2paths | f87ea0f64556be44af1ae717cd4246159253d029 | [
"MIT"
] | null | null | null | run.py | Galaxy-SynBioCAD/rp2paths | f87ea0f64556be44af1ae717cd4246159253d029 | [
"MIT"
] | null | null | null | run.py | Galaxy-SynBioCAD/rp2paths | f87ea0f64556be44af1ae717cd4246159253d029 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
Created on September 21 2019
@author: Melchior du Lac
@description: Wrap rp2paths into a docker
"""
import argparse
import tempfile
import os
import logging
import shutil
import docker
import glob
def main(rp_pathways, rp2paths_pathways, rp2paths_compounds, timeout=30, max_steps=0, max_paths=150, unfold_compounds=False):
    """Run rp2paths inside its Docker container and collect the output files.

    :param rp_pathways: The path to the results RetroPath2.0 scope file
    :param rp2paths_pathways: The path where the rp2paths out_paths file is written
    :param rp2paths_compounds: The path where the rp2paths compounds file is written
    :param timeout: Timeout passed to the containerized tool, in minutes (Default: 30)
    :param max_steps: The maximal number of steps (Default: 0, ie. infinite)
    :param max_paths: The maximal number of pathways to return (Default: 150)
    :param unfold_compounds: Unfold-compounds flag, forwarded as a string (Default: False)

    :type rp_pathways: str
    :type rp2paths_pathways: str
    :type rp2paths_compounds: str
    :type timeout: int
    :type max_steps: int
    :type max_paths: int
    :type unfold_compounds: bool

    :rtype: None
    :return: None
    """
    docker_client = docker.from_env()
    image_str = 'brsynth/rp2paths-standalone'
    # Make sure the image is available locally, pulling it once if missing.
    try:
        image = docker_client.images.get(image_str)
    except docker.errors.ImageNotFound:
        logging.warning('Could not find the image, trying to pull it')
        try:
            docker_client.images.pull(image_str)
            image = docker_client.images.get(image_str)
        except docker.errors.ImageNotFound:
            logging.error('Cannot pull image: '+str(image_str))
            exit(1)
    # Exchange files with the container through a temporary host folder
    # bind-mounted at /home/tmp_output inside the container.
    with tempfile.TemporaryDirectory() as tmpOutputFolder:
        if os.path.exists(rp_pathways):
            shutil.copy(rp_pathways, tmpOutputFolder+'/rp_pathways.csv')
            command = ['python',
                       '/home/tool_rp2paths.py',
                       '-rp_pathways',
                       '/home/tmp_output/rp_pathways.csv',
                       '-rp2paths_compounds',
                       '/home/tmp_output/rp2paths_compounds.csv',
                       '-rp2paths_pathways',
                       '/home/tmp_output/rp2paths_pathways.csv',
                       '-timeout',
                       str(timeout),
                       '-max_steps',
                       str(max_steps),
                       '-max_paths',
                       str(max_paths),
                       '-unfold_compounds',
                       str(unfold_compounds)]
            container = docker_client.containers.run(image_str,
                                                     command,
                                                     detach=True,
                                                     stderr=True,
                                                     volumes={tmpOutputFolder+'/': {'bind': '/home/tmp_output', 'mode': 'rw'}})
            # Block until the tool finishes, then surface its stderr output.
            container.wait()
            err = container.logs(stdout=False, stderr=True)
            err_str = err.decode('utf-8')
            if 'ERROR' in err_str:
                print(err_str)
            elif 'WARNING' in err_str:
                print(err_str)
            # Copy the results back out of the shared folder if both exist.
            if not os.path.exists(tmpOutputFolder+'/rp2paths_compounds.csv') or not os.path.exists(tmpOutputFolder+'/rp2paths_pathways.csv'):
                print('ERROR: Cannot find the output file: '+str(tmpOutputFolder+'/rp2paths_compounds.csv'))
                print('ERROR: Cannot find the output file: '+str(tmpOutputFolder+'/rp2paths_pathways.csv'))
            else:
                shutil.copy(tmpOutputFolder+'/rp2paths_pathways.csv', rp2paths_pathways)
                shutil.copy(tmpOutputFolder+'/rp2paths_compounds.csv', rp2paths_compounds)
            container.remove()
        else:
            logging.error('Cannot find one or more of the input files: '+str(rp_pathways))
            exit(1)
if __name__ == "__main__":
    # Command-line front end: each option mirrors a parameter of main().
    arg_parser = argparse.ArgumentParser('Enumerate the individual pathways from the results of Retropath2')
    arg_parser.add_argument('-rp_pathways', type=str)
    arg_parser.add_argument('-rp2paths_pathways', type=str)
    arg_parser.add_argument('-rp2paths_compounds', type=str)
    arg_parser.add_argument('-max_steps', type=int, default=0)
    arg_parser.add_argument('-timeout', type=int, default=30)
    arg_parser.add_argument('-max_paths', type=int, default=150)
    arg_parser.add_argument('-unfold_compounds', type=str, default='False')
    args = arg_parser.parse_args()
    # Reject negative timeouts before handing off to the docker wrapper.
    if args.timeout < 0:
        logging.error('Timeout cannot be <0 :'+str(args.timeout))
        exit(1)
    main(args.rp_pathways, args.rp2paths_pathways, args.rp2paths_compounds,
         args.timeout, args.max_steps, args.max_paths, args.unfold_compounds)
| 43.827273 | 158 | 0.611077 |
95cae2c1de14d040a592e9ed57f23f978ae86e71 | 150 | py | Python | test_cases/conftest.py | majdukovic/pybooker | b9a373d556be0481c93a528f731407ca7a47b11f | [
"MIT"
] | null | null | null | test_cases/conftest.py | majdukovic/pybooker | b9a373d556be0481c93a528f731407ca7a47b11f | [
"MIT"
] | null | null | null | test_cases/conftest.py | majdukovic/pybooker | b9a373d556be0481c93a528f731407ca7a47b11f | [
"MIT"
] | null | null | null | import pytest
from framework.services.booker_client import BookerClient
# Shared API client, constructed once when pytest imports this conftest.
booker_client = BookerClient()
@pytest.fixture()
def clear_env():
    """No-op placeholder fixture.

    Presumably intended to reset Booker environment state between tests —
    TODO confirm the intent, then implement or remove.
    """
    pass
| 15 | 57 | 0.786667 |
95cb8a34cde724ada03c12bdaeb21669317ed997 | 402 | py | Python | verilator/scripts/concat_up5k.py | micro-FPGA/engine-V | 00a8f924e10fc69874d9c179f788bf037fe9c407 | [
"Apache-2.0"
] | 44 | 2018-11-19T16:49:10.000Z | 2021-12-05T10:16:24.000Z | verilator/scripts/concat_up5k.py | micro-FPGA/engine-V | 00a8f924e10fc69874d9c179f788bf037fe9c407 | [
"Apache-2.0"
] | null | null | null | verilator/scripts/concat_up5k.py | micro-FPGA/engine-V | 00a8f924e10fc69874d9c179f788bf037fe9c407 | [
"Apache-2.0"
] | 5 | 2018-12-05T23:43:21.000Z | 2020-09-03T04:36:34.000Z |
# Build the SPI flash image: the first 128 KiB (0x20000) is reserved for the
# FPGA bitstream, followed by 32 KiB of RISC-V firmware at offset 0x20000.
# Bug fix: the original called `f.close` without parentheses (a no-op), so no
# file handle was ever closed; `with` blocks guarantee closing/flushing.
with open('../bitstream/mf8a18_rv32i.bin', 'rb') as bitFile:
    bitData = bitFile.read(0x20000)

with open('riscv.bin', 'rb') as riscvFile:
    riscvData = riscvFile.read(32768)

with open('spiflash.bin', 'wb') as spiFile:
    spiFile.write(bitData)
    spiFile.seek(0x20000)
    spiFile.write(riscvData)
    # Write a zero byte at the last firmware address (0x27fff), as before.
    nullData = bytearray([0])
    spiFile.seek(0x27fff)
    spiFile.write(nullData)
| 17.478261 | 52 | 0.748756 |
95cda288d497faae566e114db4bdc1e1b83b2b52 | 753 | py | Python | pyvista_gui/options.py | akaszynski/pyvista-gui | 4ed7e3a52026dfeab4e82a300b92a92f43060dda | [
"MIT"
] | 6 | 2019-11-20T20:08:42.000Z | 2022-02-24T12:24:20.000Z | pyvista_gui/options.py | akaszynski/pyvista-gui | 4ed7e3a52026dfeab4e82a300b92a92f43060dda | [
"MIT"
] | 6 | 2020-01-27T16:15:11.000Z | 2021-04-12T11:42:11.000Z | pyvista_gui/options.py | akaszynski/pyvista-gui | 4ed7e3a52026dfeab4e82a300b92a92f43060dda | [
"MIT"
] | null | null | null | """Options for saving user prefences, etc.
"""
import json
import os
import pyvista
class RcParams(dict):
    """Internally used dict of GUI preferences, persisted on every change."""

    filename = os.path.join(pyvista.USER_DATA_PATH, 'rcParams.json')

    def save(self):
        """Write the current preferences to the JSON settings file."""
        with open(self.filename, 'w') as settings_file:
            json.dump(self, settings_file)

    def load(self):
        """Populate this mapping from the JSON settings file."""
        with open(self.filename, 'r') as settings_file:
            self.update(json.load(settings_file))

    def __setitem__(self, key, value):
        """Set a preference and immediately persist all preferences."""
        dict.__setitem__(self, key, value)
        self.save()
# The options
rcParams = RcParams(
    dark_mode=False,
)
# Load user preferences from the last session; if the settings file is
# missing or unreadable/corrupt, persist the defaults instead.
# (Narrowed from a bare `except:` that also swallowed SystemExit/KeyboardInterrupt.)
try:
    rcParams.load()
except (OSError, ValueError):
    rcParams.save()
| 19.815789 | 68 | 0.629482 |
95cdaf4dfa1b6e4f1d482661c80dff3aa859d8b1 | 11,978 | py | Python | validatearcgisenterprisedeployment.py | pheede/ArcGIS-Server-Stuff | 9b491d2f4edebec3f613182981f4e50dcc7641a3 | [
"Apache-2.0"
] | 6 | 2017-05-31T10:44:09.000Z | 2020-12-18T18:12:15.000Z | validatearcgisenterprisedeployment.py | pheede/ArcGIS-Server-Stuff | 9b491d2f4edebec3f613182981f4e50dcc7641a3 | [
"Apache-2.0"
] | 1 | 2021-09-30T21:20:59.000Z | 2021-09-30T23:55:48.000Z | validatearcgisenterprisedeployment.py | pheede/ArcGIS-Server-Stuff | 9b491d2f4edebec3f613182981f4e50dcc7641a3 | [
"Apache-2.0"
] | 2 | 2017-12-28T19:30:23.000Z | 2019-10-04T20:34:27.000Z | """This script validates an ArcGIS Enterprise deployment to ensure it is
configured properly with all the required components such as Portal for ArcGIS,
ArcGIS Server, ArcGIS Data Store and the associated configuration.
Designed for ArcGIS Enterprise 10.5 and higher."""
# Author: Philip Heede <pheede@esri.com>
# Last modified: 2017-02-18
import os
import sys
import ssl
import socket
import urllib.request
import getopt
import getpass
import json
import traceback
def main(argv):
    """Validate the ArcGIS Enterprise deployment and print a readiness report.

    Resolves the portal URL and token (generating one from admin credentials
    when no token is supplied), inspects the portal self-description and the
    federated servers, and prints one characteristic per line with WARNING /
    ERROR annotations for missing pieces.
    """
    parameters = parseInputParameters(argv)
    portalUrl = parameters['portalUrl']
    token = parameters['token']
    # No token supplied on the command line: generate one from credentials.
    if token == '':
        adminUsername = parameters['adminUsername']
        adminPassword = parameters['adminPassword']
        token = generateToken(adminUsername, adminPassword, portalUrl)
        if token == 'Failed':
            print('Invalid administrator username or password.')
            sys.exit(1)
    portalSelf = getPortalSelf(portalUrl, token)
    supportsHostedServices = portalSelf['supportsHostedServices']
    supportsSceneServices = portalSelf['supportsSceneServices']
    # check analysis tools helper service registration and valid url
    helperServices = portalSelf['helperServices']
    analysisHelperServiceRegistered = False
    if 'analysis' in helperServices:
        if 'url' in helperServices['analysis']:
            analysisHelperService = portalSelf['helperServices']['analysis']['url']
            analysisHelperServiceRegistered = analysisHelperService != ''
    geoanalyticsHelperServiceRegistered = False
    if 'geoanalytics' in portalSelf['helperServices']:
        if 'url' in helperServices['geoanalytics']:
            geoanalyticsHelperService = helperServices['geoanalytics']['url']
            geoanalyticsHelperServiceRegistered = geoanalyticsHelperService != ''
    rasterAnalyticsHelperServiceRegistered = False
    if 'rasterAnalytics' in portalSelf['helperServices']:
        if 'url' in helperServices['rasterAnalytics']:
            rasterAnalyticsHelperService = helperServices['rasterAnalytics']['url']
            rasterAnalyticsHelperServiceRegistered = rasterAnalyticsHelperService != ''
    # enumerate federated servers and find hosting server
    federatedServers = getFederatedServers(portalUrl, token)
    hostingServer = None
    for server in federatedServers:
        if 'serverRole' in server:
            serverRole = server['serverRole']
            if serverRole == 'HOSTING_SERVER': hostingServer = server
    # Print the report. Hosting-server-specific checks are only run when a
    # hosting server exists and validates successfully.
    print()
    print("ArcGIS Enterprise deployment characteristics")
    print("- Hosting server configured: %s" % (hostingServer is not None))
    if hostingServer is None: print("-- WARNING: lack of a hosting server will prevent many functions from working")
    else:
        hostingServerValid, validationMsgs = validateHostingServer(portalUrl, hostingServer['id'], token)
        if not hostingServerValid: print('-- ERROR: unable to validate hosting server')
        for msg in validationMsgs: print('-- ' + msg)
        if hostingServerValid:
            hasRelationalDataStore = checkArcGISDataStoreRelational(hostingServer['adminUrl'], hostingServer['url'], token)
            print("- ArcGIS Data Store (relational) configured with hosting server: %s" % hasRelationalDataStore)
            if not hasRelationalDataStore: print("-- WARNING: you must use ArcGIS Data Store to configure a relational database")
            print('- Analysis Tools helper service is configured: %s' % analysisHelperServiceRegistered)
            if not analysisHelperServiceRegistered: print('-- WARNING: analysis tools helper service not configured')
            analysisServiceStarted = checkAnalysisServices(hostingServer['url'], token)
            print("- Hosting server's spatial analysis service is started and available: %s" % analysisServiceStarted)
            if not analysisServiceStarted: print("-- WARNING: analysis service not started or unreachable")
    print("- Hosted feature services are supported: %s" % supportsHostedServices)
    if not supportsHostedServices: print("-- WARNING: this indicates a lack of ArcGIS Data Store configured with the relational data store type")
    print("- Scene services are supported: %s" % supportsSceneServices)
    if not supportsSceneServices: print("-- WARNING: this indicates a lack of ArcGIS Data Store (tile cache)")
    print('- GeoAnalytics configured: %s' % geoanalyticsHelperServiceRegistered)
    print('- Raster Analytics configured: %s' % rasterAnalyticsHelperServiceRegistered)
def parseInputParameters(argv):
    """Parse command-line options, prompting interactively for anything missing.

    :param argv: list of CLI arguments (without the program name).
    :return: dict with keys 'adminPassword', 'adminUsername', 'portalUrl'
        and 'token'. Username/password are left empty when a token is given.

    Exits with status 2 on invalid arguments and 0 after printing usage.
    """
    currentHost = socket.getfqdn().lower()
    portalHost = ''
    context = ''
    adminUsername = ''
    adminPassword = ''
    token = ''
    # Bug fix: previously tested `len(sys.argv) > 0` (always true) instead of
    # the `argv` parameter actually being parsed.
    if argv:
        try:
            opts, args = getopt.getopt(argv, "?hn:c:u:p:t:", ("help", "portalurl=", "context=", "user=", "password=", "token=", "ignoressl"))
        except getopt.GetoptError:  # narrowed from a bare `except:`
            print('One or more invalid arguments')
            print('validatebasedeployment.py [-n <portal hostname>] [-c <portal context>] [-u <admin username>] [-p <admin password>] [-t <token>]')
            sys.exit(2)
        for opt, arg in opts:
            if opt in ('-n', '--portalurl'):
                portalHost = arg
            elif opt in ('-c', '--context'):
                context = arg
            elif opt in ('-u', '--user'):
                adminUsername = arg
            elif opt in ('-p', '--password'):
                adminPassword = arg
            elif opt == '--ignoressl':
                # disable SSL certificate checking to avoid errors with self-signed certs
                # this is NOT a generally recommended practice
                _create_unverified_https_context = ssl._create_unverified_context
                ssl._create_default_https_context = _create_unverified_https_context
            elif opt in ('-t', '--token'):
                token = arg
            elif opt in ('-h', '-?', '--help'):
                print('validatebasedeployment.py [-n <portal hostname>] [-c <portal context>] [-u <admin username>] [-p <admin password>] [-t <token>]')
                sys.exit(0)
    # Prompt for portal hostname (default: this machine's FQDN)
    if portalHost == '':
        portalHost = input('Enter ArcGIS Enterprise FQDN [' + currentHost + ']: ')
        if portalHost == '': portalHost = currentHost
    # Prompt for portal context
    if context == '':
        context = input('Enter context of the portal instance [\'arcgis\']: ')
        if context == '': context = 'arcgis'
    # Prompt for admin credentials only when no token was supplied
    if adminUsername == '' and token == '':
        while adminUsername == '':
            adminUsername = input('Enter administrator username: ')
    if adminPassword == '' and token == '':
        while adminPassword == '':
            adminPassword = getpass.getpass(prompt='Enter administrator password: ')
    portalUrl = 'https://' + portalHost + '/' + context
    parameters = {'adminPassword':adminPassword, 'adminUsername':adminUsername, 'portalUrl':portalUrl, 'token':token}
    return parameters
def validateHostingServer(portalUrl, hostingServerID, token):
    """Run the portal's federation validation check for the hosting server.

    :return: (bool, list) — success flag and any validation messages.
    """
    params = {'token':token, 'f':'pjson', 'types':'egdb'}
    request = urllib.request.Request(portalUrl + '/portaladmin/federation/servers/' + hostingServerID + '/validate?' + urllib.parse.urlencode(params))
    # Bug fix: msgs must be initialized BEFORE the try block; previously a
    # failed urlopen() left it unbound and the `return False, msgs` below
    # raised UnboundLocalError instead of reporting failure.
    msgs = []
    try:
        response = urllib.request.urlopen(request)
        result = json.loads(response.read().decode('utf-8'))
        if 'messages' in result: msgs = result['messages']
        if 'status' in result and result['status'] == 'success': return True, msgs
    except Exception:
        pass
    return False, msgs
def checkArcGISDataStoreRelational(serverAdminUrl, serverUrl, portalToken):
    """Return True when the hosting server's managed database is provided by
    ArcGIS Data Store (relational)."""
    params = {'token':portalToken, 'f':'pjson', 'types':'egdb'}
    request = urllib.request.Request(serverAdminUrl + '/admin/data/findItems', urllib.parse.urlencode(params).encode('ascii'))
    try:
        response = urllib.request.urlopen(request)
    except Exception:
        # Admin endpoint unreachable: fall back to the public federated URL.
        request = urllib.request.Request(serverUrl + '/admin/data/findItems', urllib.parse.urlencode(params).encode('ascii'))
        try:
            response = urllib.request.urlopen(request)
            print('-- WARNING: hosting server administrative endpoint not')
            print('          accessible from this machine; this may cause')
            print('          publishing issues from ArcGIS Pro')
        except Exception:
            print('-- ERROR: unable to reach hosting server administrative endpoint')
            print('          maybe the administrative endpoint is only accessible internally?')
            # Bug fix: previously fell through without a response object and
            # crashed with NameError on the json.loads() line below.
            return False
    egdbs = json.loads(response.read().decode('utf-8'))
    if 'error' in egdbs:
        return False
    # Find the managed enterprise geodatabase among the registered data items.
    managedegdb = None
    for egdb in egdbs['items']:
        if egdb['info']['isManaged']: managedegdb = egdb
    if managedegdb is None:
        return False
    return managedegdb['provider'] == 'ArcGIS Data Store'
def checkAnalysisServices(serverUrl, portalToken):
    """Return True when the hosting server's SpatialAnalysisTools GP service
    responds without an error payload."""
    params = {'token':portalToken, 'f':'json'}
    request = urllib.request.Request(serverUrl + '/rest/services/System/SpatialAnalysisTools/GPServer?%s' % urllib.parse.urlencode(params))
    try:
        response = urllib.request.urlopen(request)
        serviceInfo = json.loads(response.read().decode('utf-8'))
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
        return False
    return 'error' not in serviceInfo
def getFederatedServers(portalUrl, token):
    """Return the list of servers federated with the portal; exit on failure."""
    query_string = urllib.parse.urlencode({'token': token, 'f': 'json'})
    request = urllib.request.Request(portalUrl + '/portaladmin/federation/servers?' + query_string)
    response = urllib.request.urlopen(request)
    payload = json.loads(response.read().decode('utf-8'))
    if 'servers' not in payload:
        print('Unable to enumerate federated servers. Not an administrator login?')
        sys.exit(1)
    return payload['servers']
def getPortalSelf(portalUrl, token):
    """Fetch the portal self-description for the authenticated session."""
    body = urllib.parse.urlencode({'token': token, 'f': 'json'}).encode('ascii')
    request = urllib.request.Request(portalUrl + '/sharing/portals/self', body)
    response = urllib.request.urlopen(request)
    return json.loads(response.read().decode('utf-8'))
def generateToken(username, password, portalUrl):
    """Request a portal token for the given credentials.

    Returns the token string on success, or the literal string 'Failed' when
    the credentials are rejected. Exits the process on connection errors.
    """
    params = {'username':username,
              'password':password,
              'referer':portalUrl,
              'f':'json'}
    try:
        request = urllib.request.Request(portalUrl + '/sharing/rest/generateToken',
                                         urllib.parse.urlencode(params).encode('ascii'))
        response = urllib.request.urlopen(request)
        genToken = json.loads(response.read().decode('utf-8'))
        if 'token' in genToken.keys():
            return genToken.get('token')
        else:
            # The portal answered but produced no token: bad credentials.
            return 'Failed'
    except urllib.error.URLError as urlError:
        print('Unable to access ArcGIS Enterprise deployment at ' + portalUrl)
        # Distinguish certificate problems (common with self-signed certs).
        if isinstance(urlError.reason, ssl.SSLError):
            print("SSL certificate validation error. Maybe you're using a self-signed certificate?")
            print("Pass the --ignoressl parameter to disable certificate validation")
        else:
            print(urlError.reason)
        sys.exit(1)
    except Exception as ex:
        print('Unable to access ArcGIS Enterprise deployment at ' + portalUrl)
        exc_type, exc_value, exc_traceback = sys.exc_info()
        print("*** print_exception:")
        traceback.print_exception(exc_type, exc_value, exc_traceback,
                                  limit=2, file=sys.stdout)
        # NOTE(review): exits with status 0 on unexpected errors, unlike the
        # URLError branch above (status 1) — confirm whether this is intended.
        sys.exit(0)
# Warn — but deliberately do not exit — when the interpreter is older than 3.4.
if not sys.version_info >= (3, 4):
    print('This script requires Python 3.4 or higher: found Python %s.%s' % sys.version_info[:2])
# Entry point: pass the CLI arguments (without the program name) to main().
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
| 46.972549 | 153 | 0.655034 |
95ce4cab43e2034234aed87a60cc3f00447f9524 | 4,445 | py | Python | 2020/aoc/__init__.py | amochtar/adventofcode | 292e7f00a1e19d2149d00246b0a77fedfcd3bd08 | [
"MIT"
] | 1 | 2019-12-27T22:36:30.000Z | 2019-12-27T22:36:30.000Z | 2020/aoc/__init__.py | amochtar/adventofcode | 292e7f00a1e19d2149d00246b0a77fedfcd3bd08 | [
"MIT"
] | null | null | null | 2020/aoc/__init__.py | amochtar/adventofcode | 292e7f00a1e19d2149d00246b0a77fedfcd3bd08 | [
"MIT"
] | null | null | null | import itertools
import re
import math
from typing import List, Tuple
def ints(text: str) -> Tuple[int, ...]:
    "Return a tuple of all ints in a string"
    return tuple(int(number) for number in re.findall(r'-?\b\d+\b', text))
def powerset(iterable):
    "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
    pool = list(iterable)
    subsets_by_size = (itertools.combinations(pool, size) for size in range(len(pool) + 1))
    return itertools.chain.from_iterable(subsets_by_size)
def manhattan(p: Tuple[int, ...], q=itertools.repeat(0)) -> int:
    "Return the manhattan distance between 2 (multi-dimensional) points"
    # Fixed return annotation (was Tuple[int, ...]; the result is a scalar)
    # and dropped the intermediate list inside sum().
    return sum(abs(a - b) for a, b in zip(p, q))
def king_distance(p: Tuple[int, ...], q=itertools.repeat(0)) -> int:
    "Return the number of chess King moves between two points"
    # Fixed return annotation (was Tuple[int, ...]; the result is a scalar)
    # and the docstring typo "thenNumber".
    return max(abs(a - b) for a, b in zip(p, q))
def neighbors4(p: Tuple[int, int]) -> List[Tuple[int, int]]:
    "Return the 4 neighboring cells for a given position"
    x, y = p
    return [(x, y - 1), (x, y + 1), (x - 1, y), (x + 1, y)]
def neighbors8(p: Tuple[int, int]) -> List[Tuple[int, int]]:
    "Return the 8 neighboring cells for a given position"
    x, y = p
    # Row-major order (top row, middle row, bottom row), origin excluded.
    return [(x + dx, y + dy)
            for dy in (-1, 0, 1)
            for dx in (-1, 0, 1)
            if (dx, dy) != (0, 0)]
def neighbors_cube(p: Tuple[int, int, int]) -> List[Tuple[int, int, int]]:
    "Return the 26 neighboring cells for a given position in a 3d cube"
    x, y, z = p
    return [(x + i, y + j, z + k)
            for i in range(-1, 2)
            for j in range(-1, 2)
            for k in range(-1, 2)
            if (i, j, k) != (0, 0, 0)]
def neighbors_cube4(p: Tuple[int, int, int, int]) -> List[Tuple[int, int, int, int]]:
    "Return the 80 neighboring cells for a given position in a 4-d cube"
    x, y, z, w = p
    # itertools.product yields offsets in the same order as the nested loops.
    return [(x + i, y + j, z + k, w + l)
            for i, j, k, l in itertools.product((-1, 0, 1), repeat=4)
            if (i, j, k, l) != (0, 0, 0, 0)]
# Cardinal moves on a screen-style grid: x grows east and y grows south,
# so 'n' decrements y (consistent with facing_dir below).
moves = {
  'n': lambda p: (p[0], p[1]-1),
  's': lambda p: (p[0], p[1]+1),
  'e': lambda p: (p[0]+1, p[1]),
  'w': lambda p: (p[0]-1, p[1]),
}
# 90-degree turn tables keyed by the current heading.
left_turn = {
  'n': 'w',
  's': 'e',
  'e': 'n',
  'w': 's',
}
right_turn = {
  'n': 'e',
  's': 'w',
  'e': 's',
  'w': 'n',
}
# 180-degree reversal of a heading.
opposite = {
  'n': 's',
  's': 'n',
  'e': 'w',
  'w': 'e',
}
# Unit (dx, dy) step vector for each heading; matches `moves`.
facing_dir = {
  'n': (0, -1),
  's': (0, 1),
  'e': (1, 0),
  'w': (-1, 0),
}
origin = (0, 0)
hex_origin = (0, 0, 0)
# Hex-grid moves in cube coordinates: every offset sums to zero, so
# x + y + z remains 0 for any position reached from hex_origin.
hex_moves = {
  'ne': lambda p: (p[0]+1, p[1], p[2]-1),
  'nw': lambda p: (p[0], p[1]+1, p[2]-1),
  'se': lambda p: (p[0], p[1]-1, p[2]+1),
  'sw': lambda p: (p[0]-1, p[1], p[2]+1),
  'w': lambda p: (p[0]-1, p[1]+1, p[2]),
  'e': lambda p: (p[0]+1, p[1]-1, p[2]),
}
def hex_neighbors(p: Tuple[int, int, int]) -> List[Tuple[int, int, int]]:
    "Return all six neighboring cells of a hex-grid (cube-coordinate) position"
    return [step(p) for step in hex_moves.values()]
def add_pos(a: Tuple[int, int], b: Tuple[int, int], factor: int = 1) -> Tuple[int, int]:
    "Adds two position tuples, scaling the second by `factor`"
    x = a[0] + factor * b[0]
    y = a[1] + factor * b[1]
    return (x, y)
def sub_pos(a: Tuple[int, int], b: Tuple[int, int]) -> Tuple[int, int]:
    "Subtracts the position tuple b from a"
    dx = a[0] - b[0]
    dy = a[1] - b[1]
    return (dx, dy)
def mult_pos(a: Tuple[int, int], factor: int) -> Tuple[int, int]:
    "Multiplies a position tuple with a given factor"
    scaled_x = a[0] * factor
    scaled_y = a[1] * factor
    return (scaled_x, scaled_y)
def rot_left(pos: Tuple[int, int], rel: Tuple[int, int] = origin) -> Tuple[int, int]:
    "Rotates a position 90 degrees left (counter clock-wise) relative to the given location (default origin)"
    dx, dy = sub_pos(pos, rel)
    # (dx, dy) -> (dy, -dx), then translate back to `rel`.
    return add_pos((dy, -dx), rel)
def rot_right(pos: Tuple[int, int], rel: Tuple[int, int] = origin) -> Tuple[int, int]:
    "Rotates a position 90 degrees right (clock-wise) relative to the given location (default origin)"
    dx, dy = sub_pos(pos, rel)
    # (dx, dy) -> (-dy, dx), then translate back to `rel`.
    return add_pos((-dy, dx), rel)
def min_max(lst: List[Tuple[int, ...]]) -> Tuple[int, ...]:
    "Returns the min and max values for every index in the given list of tuples"
    bounds = []
    for column in zip(*lst):
        bounds.append((min(column), max(column)))
    return tuple(bounds)
def mod1(a: int, b: int) -> int:
    "Returns 1-based modulo"
    return (a - 1) % b + 1
| 26.939394 | 109 | 0.526659 |
95ce971f5a305cd3a19578c204fef92020757f3c | 4,431 | py | Python | pi_source_code.py | cjkuhlmann/CCHack2019 | fb6eb505ac350c2dda0c36e1f33254fbeef049bf | [
"MIT"
] | null | null | null | pi_source_code.py | cjkuhlmann/CCHack2019 | fb6eb505ac350c2dda0c36e1f33254fbeef049bf | [
"MIT"
] | null | null | null | pi_source_code.py | cjkuhlmann/CCHack2019 | fb6eb505ac350c2dda0c36e1f33254fbeef049bf | [
"MIT"
] | null | null | null | import math
import time
from max30105 import MAX30105, HeartRate
import smbus
from bme280 import BME280
import socket
#from matplotlib import pyplot as plt
class DataPoint():
    """A single sensor reading paired with the moment it was taken."""
    def __init__(self, value, time):
        # NOTE: the `time` parameter shadows the stdlib module of the same
        # name inside this method only.
        self.value = value
        self.time_stamp = time
class Device():
    """Sensor node: averages bursts of smoke / temperature / humidity /
    pressure readings and streams the latest values to a server over TCP."""
    def __init__(self):
        # Rolling histories of averaged readings (lists of DataPoint).
        self.humidity = []
        self.temperature = []
        self.smoke_level = []
        # Number of raw samples averaged into one DataPoint.
        self.mean_size = 100
        # Node identifier appended to every upload — presumably a grid
        # position like "row,col"; TODO confirm against the server side.
        self.identifier = "0,0"
    def setup_network(self):
        """Connect to the collection server, retrying until it is reachable."""
        self.network = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        connected = False
        while not connected:
            try:
                self.network.connect(("192.168.88.167", 25565))
                connected = True
            except OSError:
                # Server not reachable yet; keep retrying. (Narrowed from a
                # bare `except:` that also swallowed KeyboardInterrupt.)
                pass
    def upload_data(self):
        """Send the newest readings as a comma-separated ASCII record:
        humidity,temperature,smoke,pressure,identifier"""
        fields = [
            str(round(self.humidity[-1].value, 5)),
            str(round(self.temperature[-1].value, 5)),
            str(round(self.smoke_level[-1].value, 5)),
            str(round(self.pressure.value, 5)),
            str(self.identifier),
        ]
        self.network.sendall(",".join(fields).encode())
    def update(self):
        """Take one fresh averaged sample from every sensor."""
        # Bug fix: previously called through the module-level `dev` instance
        # instead of `self`, which broke any other Device instance.
        self.get_smoke_data()
        self.get_humi_temp_data()
    def setup_particle_sensor(self):
        """Configure the MAX30105 particle sensor (green LED slot active)."""
        self.MAX30105 = MAX30105()
        self.MAX30105.setup(leds_enable=3)
        self.MAX30105.set_led_pulse_amplitude(1, 0.0)
        self.MAX30105.set_led_pulse_amplitude(2, 0.0)
        self.MAX30105.set_led_pulse_amplitude(3, 12.5)
        self.MAX30105.set_slot_mode(1, "red")
        self.MAX30105.set_slot_mode(2, "ir")
        self.MAX30105.set_slot_mode(3, "green")
        self.MAX30105.set_slot_mode(4, "off")
        self.hr = HeartRate(self.MAX30105)
    def setup_temp_humi_sensor(self):
        """Attach the BME280 environment sensor on I2C bus 1."""
        bus = smbus.SMBus(1)
        self.bme280 = BME280(i2c_dev=bus)
    def setup_sensors(self):
        """Initialise all attached sensors."""
        self.setup_particle_sensor()
        self.setup_temp_humi_sensor()
    def get_smoke_data(self):
        """Average a burst of filtered particle readings into one DataPoint."""
        data = []
        for i in range(self.mean_size * 3 + 1):
            samples = self.MAX30105.get_samples()
            if samples is not None:
                # NOTE(review): always reads samples[2] regardless of the loop
                # variable — looks deliberate for the green-LED slot, but verify.
                for sample in samples:
                    r = samples[2] & 0xff
                    d = self.hr.low_pass_fir(r)
                    data.append(d)
        mean = sum(data) / (self.mean_size * 3)
        # Bug fix: time.time was appended without calling it, storing the
        # function object instead of a timestamp (get_humi_temp_data below
        # already calls time.time()).
        self.smoke_level.append(DataPoint(mean, time.time()))
    def get_humi_temp_data(self):
        """Average a burst of BME280 readings into humidity, temperature
        and pressure DataPoints."""
        temp_data = []
        humi_data = []
        pres_data = []
        for i in range(self.mean_size):
            temp_data.append(self.bme280.get_temperature())
            humi_data.append(self.bme280.get_humidity())
            pres_data.append(self.bme280.get_pressure())
        mean_temp = sum(temp_data) / self.mean_size
        mean_humi = sum(humi_data) / self.mean_size
        mean_pres = sum(pres_data) / self.mean_size
        self.humidity.append(DataPoint(mean_humi, time.time()))
        self.temperature.append(DataPoint(mean_temp, time.time()))
        # Only the most recent pressure reading is kept (no history).
        self.pressure = DataPoint(mean_pres, time.time())
# Boot sequence: initialise sensors, then connect to the collection server.
dev = Device()
dev.setup_sensors()
dev.setup_network()
# Prime the rolling readings so upload_data() has samples to send.
for i in range(2):
    dev.update()
# Main loop: sample, upload, and reconnect on any failure.
while True:
    try:
        dev.update()
        dev.upload_data()
        print("sending_data")
    except:
        # Any error (presumably a dropped connection — TODO confirm) triggers
        # a blocking reconnect before the loop continues.
        dev.setup_network()
| 28.403846 | 83 | 0.558565 |
95cead6bce011703374b48a18d5379f241d0c282 | 1,417 | py | Python | butter/mas/clients/client_factory.py | bennymeg/Butter.MAS.PythonAPI | 9641293436d989ae9c5324c2b8129f232822b248 | [
"Apache-2.0"
] | 2 | 2019-08-22T08:57:42.000Z | 2019-11-28T14:01:49.000Z | butter/mas/clients/client_factory.py | bennymeg/Butter.MAS.PythonAPI | 9641293436d989ae9c5324c2b8129f232822b248 | [
"Apache-2.0"
] | null | null | null | butter/mas/clients/client_factory.py | bennymeg/Butter.MAS.PythonAPI | 9641293436d989ae9c5324c2b8129f232822b248 | [
"Apache-2.0"
] | null | null | null | from .client_http import HttpClient
from .client_tcp import TcpClient
from .client_udp import UdpClient
from .client import Client
class ClientFactory:
    """ Client factory for different types of protocols """
    def getClient(self, ip, port=None, protocol="http") -> Client:
        """Creates new client

        Args:
            ip (str): robot IP
            port (int, optional): robot port. Defaults to None.
            protocol (str, optional): communication protocol. Defaults to "http".

        Returns:
            Client: requested client, or None for an unknown protocol
        """
        client_class = self.getClientClass(protocol)
        if client_class is None:
            return None
        return client_class(ip) if port is None else client_class(ip, port)
    def getClientClass(self, protocol="http"):
        """Get client class

        Args:
            protocol (str, optional): communication protocol. Defaults to "http".

        Returns:
            Client: client class, or None for an unknown protocol
        """
        protocol_map = {"http": HttpClient, "tcp": TcpClient, "udp": UdpClient}
        return protocol_map.get(protocol)
| 30.148936 | 81 | 0.56669 |
95ceaebae16674be2fef2960c47326152d1eb461 | 1,569 | py | Python | scrapytest/spiders/ScrapyDemo5.py | liang1024/Scrapy | bfa7ea5b2174bf91c49f4da9dadc5471acc43092 | [
"Apache-2.0"
] | null | null | null | scrapytest/spiders/ScrapyDemo5.py | liang1024/Scrapy | bfa7ea5b2174bf91c49f4da9dadc5471acc43092 | [
"Apache-2.0"
] | null | null | null | scrapytest/spiders/ScrapyDemo5.py | liang1024/Scrapy | bfa7ea5b2174bf91c49f4da9dadc5471acc43092 | [
"Apache-2.0"
] | null | null | null | import scrapy
'''
现在您已经知道如何从页面中提取数据,我们来看看如何跟踪它们的链接。
首先是提取我们想要跟踪的页面的链接。检查我们的页面,我们可以看到有一个链接到下一个页面与以下标记:
<ul class="pager">
<li class="next">
<a href="/page/2/">Next <span aria-hidden="true">→</span></a>
</li>
</ul>
我们可以尝试在shell中提取它:
>>> response.css('li.next a').extract_first()
'<a href="/page/2/">Next <span aria-hidden="true">→</span></a>'
这得到了锚点元素,但是我们需要该属性href。为此,Scrapy支持CSS扩展,您可以选择属性内容,如下所示:
>>> response.css('li.next a::attr(href)').extract_first()
'/page/2/'
让我们看看现在我们的蜘蛛修改为递归地跟随链接到下一页,从中提取数据:
'''
import scrapy
class QuotesSpider(scrapy.Spider):
    """Spider that scrapes all quotes from quotes.toscrape.com, following
    the pagination links until the last page."""
    name = "demo5"
    start_urls = [
        'http://quotes.toscrape.com/page/1/',
    ]
    def parse(self, response):
        """Yield one item per quote on the page, then follow the next page."""
        for quote_node in response.css('div.quote'):
            yield {
                'text': quote_node.css('span.text::text').extract_first(),
                'author': quote_node.css('small.author::text').extract_first(),
                'tags': quote_node.css('div.tags a.tag::text').extract(),
            }
        next_href = response.css('li.next a::attr(href)').extract_first()
        if next_href is not None:
            yield scrapy.Request(response.urljoin(next_href), callback=self.parse)
'''
现在,在提取数据之后,该parse()方法会查找到下一页的链接,使用该urljoin()方法构建完整的绝对URL (由于链接可以是相对的),并且向下一页产生一个新的请求,将其注册为回调以处理下一页的数据提取,并保持爬行遍历所有页面。
您在这里看到的是Scrapy的以下链接机制:当您以回调方式生成请求时,Scrapy将安排该请求发送,并注册一个回调方法,以在该请求完成时执行。
使用它,您可以根据您定义的规则构建复杂的跟踪链接,并根据访问页面提取不同类型的数据。
在我们的示例中,它创建一个循环,跟随到所有到下一页的链接,直到它找不到一个方便的抓取博客,论坛和其他站点分页。
'''
'''
启动项目
scrapy crawl demo5
''' | 24.138462 | 116 | 0.66348 |
95cf45edd5e367889b2e72c5aaae8636bfca5ddc | 909 | py | Python | tests/test_objectives.py | theislab/AutoGeneS | 22bde0d5eba013e90edb85341e0bd9c28b82e7fd | [
"MIT"
] | 46 | 2020-02-25T14:09:21.000Z | 2022-01-20T16:42:40.000Z | tests/test_objectives.py | theislab/AutoGeneS | 22bde0d5eba013e90edb85341e0bd9c28b82e7fd | [
"MIT"
] | 16 | 2020-03-18T15:08:42.000Z | 2022-01-29T20:00:10.000Z | tests/test_objectives.py | theislab/AutoGeneS | 22bde0d5eba013e90edb85341e0bd9c28b82e7fd | [
"MIT"
] | 6 | 2020-02-13T14:23:46.000Z | 2021-12-28T16:50:50.000Z | import pytest
import numpy as np
import pandas as pd
from scipy.special import binom
import os
import sys
sys.path.insert(0, "..")
from autogenes import objectives as ga_objectives
def test_distance():
arr = np.ones((3,3))
assert ga_objectives.distance(arr) == 0
arr = np.identity(3)
assert np.isclose(ga_objectives.distance(arr), 3 * np.sqrt(2))
def test_correlation():
arr = np.ones((3,3))
# Should'nt throw a warning
assert ga_objectives.correlation(arr) == 0
arr = np.zeros((3,3))
assert ga_objectives.correlation(arr) == 0
arr = np.identity(5)
# Let i!=j
# cov(e_i, e_j) = 1/4 * ( 2 * (4/5) * (-1/5) + 3 * (-1/5) * (-1/5)) = -1/20
# var(e_i) = 1/4 * ( (4/5)^2 + 4 * (-1/5)^2) = 1/5
# (i,j)th entry in the corrcoef matrix is (-1/20)/sqrt(var(e_i) * var(e_j)) = -1/4
# Result is 1/4 * (5 over 2)
assert np.isclose(ga_objectives.correlation(arr), 1/4*binom(5,2))
| 23.307692 | 84 | 0.630363 |
95cf9c3a1a9e3db6fb75803b4f3891c4c503d528 | 15,563 | py | Python | digits/model/forms.py | Linda-liugongzi/DIGITS-digits-py3 | 6df5eb6972574a628b9544934518ec8dfa9c7439 | [
"BSD-3-Clause"
] | null | null | null | digits/model/forms.py | Linda-liugongzi/DIGITS-digits-py3 | 6df5eb6972574a628b9544934518ec8dfa9c7439 | [
"BSD-3-Clause"
] | null | null | null | digits/model/forms.py | Linda-liugongzi/DIGITS-digits-py3 | 6df5eb6972574a628b9544934518ec8dfa9c7439 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2017, NVIDIA CORPORATION. All rights reserved.
import os
import flask
from flask_wtf import FlaskForm
import wtforms
from wtforms import validators
from digits.config import config_value
from digits.device_query import get_device, get_nvml_info
from digits import utils
from digits.utils import sizeof_fmt
from digits.utils.forms import validate_required_iff
from digits import frameworks
from flask_babel import lazy_gettext as _
class ModelForm(FlaskForm):
# Methods
def selection_exists_in_choices(form, field):
found = False
for choice in field.choices:
if choice[0] == field.data:
found = True
if not found:
raise validators.ValidationError(_("Selected job doesn't exist. Maybe it was deleted by another user."))
def validate_NetParameter(form, field):
fw = frameworks.get_framework_by_id(form['framework'].data)
try:
# below function raises a BadNetworkException in case of validation error
fw.validate_network(field.data)
except frameworks.errors.BadNetworkError as e:
raise validators.ValidationError(_('Bad network: %(message)s', message=e.message))
def validate_file_exists(form, field):
from_client = bool(form.python_layer_from_client.data)
filename = ''
if not from_client and field.type == 'StringField':
filename = field.data
if filename == '':
return
if not os.path.isfile(filename):
raise validators.ValidationError(_('Server side file, %(filename)s, does not exist.', filename=filename))
def validate_py_ext(form, field):
from_client = bool(form.python_layer_from_client.data)
filename = ''
if from_client and field.type == 'FileField':
filename = flask.request.files[field.name].filename
elif not from_client and field.type == 'StringField':
filename = field.data
if filename == '':
return
(root, ext) = os.path.splitext(filename)
if ext != '.py' and ext != '.pyc':
raise validators.ValidationError(_('Python file, %(filename)s, needs .py or .pyc extension.',
filename=filename))
# Fields
# The options for this get set in the view (since they are dynamic)
dataset = utils.forms.SelectField(
_('Select Dataset'),
choices=[],
tooltip=_("Choose the dataset to use for this model.")
)
python_layer_from_client = utils.forms.BooleanField(
_('Use client-side file'),
default=False,
)
python_layer_client_file = utils.forms.FileField(
_('Client-side file'),
validators=[
validate_py_ext
],
tooltip=_("Choose a Python file on the client containing layer definitions.")
)
python_layer_server_file = utils.forms.StringField(
_('Server-side file'),
validators=[
validate_file_exists,
validate_py_ext
],
tooltip=_("Choose a Python file on the server containing layer definitions.")
)
train_epochs = utils.forms.IntegerField(
_('Training epochs'),
validators=[
validators.NumberRange(min=1)
],
default=30,
tooltip=_("How many passes through the training data?")
)
snapshot_interval = utils.forms.FloatField(
_('Snapshot interval (in epochs)'),
default=1,
validators=[
validators.NumberRange(min=0),
],
tooltip=_("How many epochs of training between taking a snapshot?")
)
val_interval = utils.forms.FloatField(
_('Validation interval (in epochs)'),
default=1,
validators=[
validators.NumberRange(min=0)
],
tooltip=_("How many epochs of training between running through one pass of the validation data?")
)
traces_interval = utils.forms.IntegerField(
_('Tracing Interval (in steps)'),
validators=[
validators.NumberRange(min=0)
],
default=0,
tooltip=_("Generation of a timeline trace every few steps")
)
random_seed = utils.forms.IntegerField(
_('Random seed'),
validators=[
validators.NumberRange(min=0),
validators.Optional(),
],
tooltip=_('If you provide a random seed, then back-to-back runs with '
'the same model and dataset should give identical results.')
)
batch_size = utils.forms.MultiIntegerField(
_('Batch size'),
default=100,
validators=[
utils.forms.MultiNumberRange(min=1),
utils.forms.MultiOptional(),
],
tooltip=_("How many images to process at once. If blank, values are used from the network definition.")
)
batch_accumulation = utils.forms.IntegerField(
_('Batch Accumulation'),
validators=[
validators.NumberRange(min=1),
validators.Optional(),
],
tooltip=_("Accumulate gradients over multiple batches (useful when you "
"need a bigger batch size for training but it doesn't fit in memory).")
)
# Solver types
solver_type = utils.forms.SelectField(
_('Solver type'),
choices=[
('SGD', _('SGD (Stochastic Gradient Descent)')),
('MOMENTUM', _('Momentum')),
('NESTEROV', _("NAG (Nesterov's accelerated gradient)")),
('ADAGRAD', _('AdaGrad (Adaptive Gradient)')),
('ADAGRADDA', _('AdaGradDA (AdaGrad Dual Averaging)')),
('ADADELTA', _('AdaDelta')),
('ADAM', _('Adam (Adaptive Moment Estimation)')),
('RMSPROP', _('RMSprop')),
('FTRL', _('FTRL (Follow-The-Regularized-Leader)')),
],
default='SGD',
tooltip=_("What type of solver will be used?"),
)
def validate_solver_type(form, field):
fw = frameworks.get_framework_by_id(form.framework)
if fw is not None:
if not fw.supports_solver_type(field.data):
raise validators.ValidationError(
_('Solver type not supported by this framework'))
# Additional settings specific to selected solver
rms_decay = utils.forms.FloatField(
_('RMS decay value'),
default=0.99,
validators=[
validators.NumberRange(min=0),
],
tooltip=_("If the gradient updates results in oscillations the gradient is reduced "
"by times 1-rms_decay. Otherwise it will be increased by rms_decay.")
)
# Learning rate
learning_rate = utils.forms.MultiFloatField(
_('Base Learning Rate'),
default=0.01,
validators=[
utils.forms.MultiNumberRange(min=0),
],
tooltip=_("Affects how quickly the network learns. If you are getting "
"NaN for your loss, you probably need to lower this value.")
)
lr_policy = wtforms.SelectField(
_('Policy'),
choices=[
('fixed', _('Fixed')),
('step', _('Step Down')),
('multistep', _('Step Down (arbitrary steps)')),
('exp', _('Exponential Decay')),
('inv', _('Inverse Decay')),
('poly', _('Polynomial Decay')),
('sigmoid', _('Sigmoid Decay')),
],
default='step'
)
lr_step_size = wtforms.FloatField(_('Step Size'), default=33)
lr_step_gamma = wtforms.FloatField(_('Gamma'), default=0.1)
lr_multistep_values = wtforms.StringField(_('Step Values'), default="50,85")
def validate_lr_multistep_values(form, field):
if form.lr_policy.data == 'multistep':
for value in field.data.split(','):
try:
float(value)
except ValueError:
raise validators.ValidationError(_('invalid value'))
lr_multistep_gamma = wtforms.FloatField(_('Gamma'), default=0.5)
lr_exp_gamma = wtforms.FloatField(_('Gamma'), default=0.95)
lr_inv_gamma = wtforms.FloatField(_('Gamma'), default=0.1)
lr_inv_power = wtforms.FloatField(_('Power'), default=0.5)
lr_poly_power = wtforms.FloatField(_('Power'), default=3)
lr_sigmoid_step = wtforms.FloatField(_('Step'), default=50)
lr_sigmoid_gamma = wtforms.FloatField(_('Gamma'), default=0.1)
# Network
# Use a SelectField instead of a HiddenField so that the default value
# is used when nothing is provided (through the REST API)
method = wtforms.SelectField(
_('Network type'),
choices=[
('standard', _('Standard network')),
('previous', _('Previous network')),
('pretrained', _('Pretrained network')),
('custom', _('Custom network')),
],
default='standard',
)
# framework - hidden field, set by Javascript to the selected framework ID
framework = wtforms.HiddenField(
_('framework'),
validators=[
validators.AnyOf(
[fw.get_id() for fw in frameworks.get_frameworks()],
message=_('The framework you choose is not currently supported.')
)
],
default=frameworks.get_frameworks()[0].get_id()
)
# The options for this get set in the view (since they are dependent on the data type)
standard_networks = wtforms.RadioField(
_('Standard Networks'),
validators=[
validate_required_iff(method='standard'),
],
)
previous_networks = wtforms.RadioField(
_('Previous Networks'),
choices=[],
validators=[
validate_required_iff(method='previous'),
selection_exists_in_choices,
],
)
pretrained_networks = wtforms.RadioField(
_('Pretrained Networks'),
choices=[],
validators=[
validate_required_iff(method='pretrained'),
selection_exists_in_choices,
],
)
custom_network = utils.forms.TextAreaField(
_('Custom Network'),
validators=[
validate_required_iff(method='custom'),
validate_NetParameter,
],
)
custom_network_snapshot = utils.forms.TextField(
_('Pretrained model(s)'),
tooltip=_("Paths to pretrained model files, separated by '%(pathsep)s'. "
"Only edit this field if you understand how fine-tuning "
"works in caffe or torch.", pathsep=os.path.pathsep)
)
def validate_custom_network_snapshot(form, field):
pass
# if form.method.data == 'custom':
# for filename in field.data.strip().split(os.path.pathsep):
# if filename and not os.path.lexists(filename):
# raise validators.ValidationError('File "%s" does not exist' % filename)
# Select one of several GPUs
select_gpu = wtforms.RadioField(
_('Select which GPU you would like to use'),
choices=[('next', 'Next available')] + [(
index,
'#%s - %s (%s memory)' % (
index,
get_device(index).name,
sizeof_fmt(
get_nvml_info(index)['memory']['total']
if get_nvml_info(index) and 'memory' in get_nvml_info(index)
else get_device(index).totalGlobalMem)
),
) for index in config_value('gpu_list').split(',') if index],
default='next',
)
# Select N of several GPUs
select_gpus = utils.forms.SelectMultipleField(
_('Select which GPU[s] you would like to use'),
choices=[(
index,
'#%s - %s (%s memory)' % (
index,
get_device(index).name,
sizeof_fmt(
get_nvml_info(index)['memory']['total']
if get_nvml_info(index) and 'memory' in get_nvml_info(index)
else get_device(index).totalGlobalMem)
),
) for index in config_value('gpu_list').split(',') if index],
tooltip=_("The job won't start until all of the chosen GPUs are available.")
)
# XXX For testing
# The Flask test framework can't handle SelectMultipleFields correctly
select_gpus_list = wtforms.StringField(_('Select which GPU[s] you would like to use (comma separated)'))
    def validate_select_gpus(form, field):
        # Testing hook: the Flask test framework cannot submit
        # SelectMultipleField values, so a comma-separated string field
        # overrides the multi-select when present.
        if form.select_gpus_list.data:
            field.data = form.select_gpus_list.data.split(',')
# Use next available N GPUs
select_gpu_count = wtforms.IntegerField(_('Use this many GPUs (next available)'),
validators=[
validators.NumberRange(min=1, max=len(
config_value('gpu_list').split(',')))
],
default=1,
)
    def validate_select_gpu_count(form, field):
        # If no count was given but specific GPUs were chosen, the count is
        # optional: clear any accumulated errors and stop this field's
        # validator chain.
        if field.data is None:
            if form.select_gpus.data:
                # Make this field optional
                field.errors[:] = []
                raise validators.StopValidation()
model_name = utils.forms.StringField(_('Model Name'),
validators=[
validators.DataRequired()
],
tooltip=_("An identifier, later used to refer to this model in the Application.")
)
group_name = utils.forms.StringField(_('Group Name'),
tooltip=_("An optional group name for organization on the main page.")
)
# allows shuffling data during training (for frameworks that support this, as indicated by
# their Framework.can_shuffle_data() method)
shuffle = utils.forms.BooleanField(_('Shuffle Train Data'),
default=True,
tooltip=_('For every epoch, shuffle the data before training.')
)
steps = utils.forms.IntegerField("训练总步长",
default=4000,
validators=[
validators.NumberRange(min=1)
],
tooltip="本次训练总步长数(迭代次数)")
iter_store_step = utils.forms.IntegerField("步长间隔",
default=1000,
validators=[
validators.NumberRange(min=1)
],
tooltip="要间隔多少个步长来进行快照保存")
train_batch_size = utils.forms.IntegerField("批处理大小",
default=100,
validators=[
validators.NumberRange(min=1)
],
tooltip="一次处理多少图片,默认为100")
# bottleneck_dir = utils.forms.StringField("瓶颈值目录",
# tooltip="计算出每个图片的瓶颈值并存储于此目录下")
| 36.791962 | 122 | 0.556512 |
95d02019dda244ece2c09a15f8673c55536ad4de | 1,155 | py | Python | 004 Sons/afinacao.py | yamadathamine/300ideiasparaprogramarPython | 331a063bbf8bcd117ae5a34324b8176a6014fc98 | [
"MIT"
] | null | null | null | 004 Sons/afinacao.py | yamadathamine/300ideiasparaprogramarPython | 331a063bbf8bcd117ae5a34324b8176a6014fc98 | [
"MIT"
] | 4 | 2020-06-09T19:10:04.000Z | 2020-06-17T18:23:47.000Z | 004 Sons/afinacao.py | yamadathamine/300ideiasparaprogramarPython | 331a063bbf8bcd117ae5a34324b8176a6014fc98 | [
"MIT"
] | null | null | null | # encoding: utf-8
# usando python 3
# Afinação - Alberto toca violão e é programador.
# Precisando afinar o violão e sem diapasão por perto,
# resolveu fazer um programa para ajudá-lo.
# O que ele queria era a nota Lá soando sem parar até que ele conseguisse afinar a
# respectiva corda do violão; as demais cordas ele poderia afinar com base na primeira.
# Escreva um programa que faz soar no alto-falante do computador a nota Lá (440 Hz)
# e só para quando for pressionada alguma tecla.
import numpy as np
import simpleaudio as sa
# Tone parameters for the tuning reference note.
frequency = 440  # Our played note will be 440 Hz (the A4 tuning reference)
fs = 44100  # 44100 samples per second
seconds = 3  # Note duration of 3 seconds
# NOTE(review): the exercise statement above asks for the tone to sound
# until a key is pressed, but this plays one fixed-length note -- confirm.
# Generate array with seconds*sample_rate steps, ranging between 0 and seconds
t = np.linspace(0, seconds, seconds * fs, False)
# Generate a 440 Hz sine wave
note = np.sin(frequency * t * 2 * np.pi)
# Scale so the highest value fits the signed 16-bit sample range
audio = note * (2**15 - 1) / np.max(np.abs(note))
# Convert to 16-bit data
audio = audio.astype(np.int16)
# Start playback: 1 channel, 2 bytes per sample, fs samples per second
play_obj = sa.play_buffer(audio, 1, 2, fs)
# Wait for playback to finish before exiting
play_obj.wait_done()
95d0529ff78fe4e15217221008da8dabb874d847 | 138 | py | Python | python/flask-app/data.py | zkan/100DaysOfCode | 3c713ead94a9928e2d0f8d794e49ec202dc64ba3 | [
"MIT"
] | 2 | 2019-05-01T00:32:30.000Z | 2019-11-20T05:23:05.000Z | python/flask-app/data.py | zkan/100DaysOfCode | 3c713ead94a9928e2d0f8d794e49ec202dc64ba3 | [
"MIT"
] | 15 | 2020-09-05T18:35:04.000Z | 2022-03-11T23:44:47.000Z | python/flask-app/data.py | zkan/100DaysOfCode | 3c713ead94a9928e2d0f8d794e49ec202dc64ba3 | [
"MIT"
] | null | null | null | fav_beer = {'Julian': 'White Rabbit Dark Ale',
'Bob': 'Some sort of light beer I assume',
'Mike': 'Oregano Beer'}
| 34.5 | 54 | 0.550725 |
95d185b829b29c3736cdbb9908672dc12ffef154 | 548 | py | Python | appengine/chrome_infra_packages/apps.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | 1 | 2018-01-02T05:47:07.000Z | 2018-01-02T05:47:07.000Z | appengine/chrome_infra_packages/apps.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | appengine/chrome_infra_packages/apps.py | mithro/chromium-infra | d27ac0b230bedae4bc968515b02927cf9e17c2b7 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Actual WSGI app instantiations used from app.yaml.
Extracted to a separate module to avoid calling 'initialize' in unit tests
during module loading time.
"""
import gae_ts_mon
import main
# Build the WSGI apps once at module load (app.yaml refers to these
# module-level names) and instrument them with ts_mon metrics.
endpoints_app, frontend_app, backend_app = main.initialize()
gae_ts_mon.initialize()
gae_ts_mon.instrument_wsgi_application(frontend_app)
gae_ts_mon.instrument_wsgi_application(backend_app)
| 28.842105 | 74 | 0.810219 |
95d4bf219897990197feea13feb7cf1258d214c8 | 6,298 | py | Python | yadlt/core/layers.py | Perfect-SoftwareEngineer/Deep-Learning-Tensorflow | b191cd2c8ff9d8cb6e2c6dedcac4483fa7548366 | [
"MIT"
] | null | null | null | yadlt/core/layers.py | Perfect-SoftwareEngineer/Deep-Learning-Tensorflow | b191cd2c8ff9d8cb6e2c6dedcac4483fa7548366 | [
"MIT"
] | null | null | null | yadlt/core/layers.py | Perfect-SoftwareEngineer/Deep-Learning-Tensorflow | b191cd2c8ff9d8cb6e2c6dedcac4483fa7548366 | [
"MIT"
] | null | null | null | """Layer classes."""
from __future__ import absolute_import
import abc
import six
import tensorflow as tf
@six.add_metaclass(abc.ABCMeta)
class BaseLayer(object):
    """Abstract interface that every layer implementation must satisfy."""
    @abc.abstractmethod
    def forward(self):
        """Layer forward propagation."""
        pass
    @abc.abstractmethod
    def backward(self):
        """Layer backward propagation."""
        pass
    @abc.abstractmethod
    def get_variables(self):
        """Get layer's tf variables."""
        pass
    @abc.abstractmethod
    def get_parameters(self):
        """Get the layer parameters."""
        pass
class Linear(BaseLayer):
    """Fully-Connected layer."""

    def __init__(self, shape, name="linear", vnames=["W", "b"]):
        """Create a new linear layer instance.

        :param shape: (n_in, n_out) shape of the weight matrix
        :param name: tf name scope wrapping the layer's ops
        :param vnames: names for the weight and bias variables
        """
        self.name = name
        self.vnames = vnames
        with tf.name_scope(self.name):
            self.W = tf.Variable(
                tf.truncated_normal(shape=shape, stddev=0.1), name=vnames[0])
            self.b = tf.Variable(
                tf.constant(0.1, shape=[shape[1]]), name=vnames[1])

    def forward(self, X):
        """Forward propagate X through the fc layer (X @ W + b)."""
        with tf.name_scope(self.name):
            return tf.add(tf.matmul(X, self.W), self.b)

    def backward(self, H):
        """Backward propagate H through the fc layer (not implemented)."""
        pass

    def get_variables(self):
        """Get layer's variables."""
        return [self.W, self.b]

    def get_parameters(self):
        """Return all the parameters of this layer, keyed by variable name."""
        with tf.Session() as sess:
            # Fixed: the attribute is self.vnames; the previous self.names
            # never existed, so this method always raised AttributeError.
            return {
                self.vnames[0]: sess.run(self.W),
                self.vnames[1]: sess.run(self.b)
            }
class Activation(BaseLayer):
    """Element-wise activation layer (identity when no function is given)."""

    def __init__(self, func, name="act_func"):
        """Create a new Activation layer instance."""
        self.name = name
        # Fall back to the identity op when no function is supplied.
        self.func = tf.identity if func is None else func

    def forward(self, X):
        """Apply the activation function to X."""
        return self.func(X)

    def backward(self, H):
        """Backward propagation is not implemented for this layer."""
        pass

    def get_variables(self):
        """This layer has no trainable variables."""
        pass

    def get_parameters(self):
        """This layer has no parameters."""
        pass
class SoftMax(BaseLayer):
    """SoftMax layer."""

    def __init__(self, prev_layer, n_classes, name="softmax"):
        """Create a new SoftMax layer instance.

        :param prev_layer: tensor produced by the previous layer
        :param n_classes: number of output classes
        :param name: tf name scope wrapping the layer's ops
        """
        self.prev_layer = prev_layer
        self.shape = (prev_layer.get_shape()[1].value, n_classes)
        self.n_classes = n_classes
        self.name = name
        self.vs = ['softmax_W', 'softmax_b']
        with tf.name_scope(self.name):
            self.W = tf.Variable(
                tf.truncated_normal(self.shape, stddev=0.1), name=self.vs[0])
            # Fixed: the bias was registered under the weight's name
            # (self.vs[0]); it must use its own name, self.vs[1].
            self.b = tf.Variable(
                tf.constant(0.1, shape=[n_classes]), name=self.vs[1])

    def forward(self, X):
        """Compute the layer's logits.

        NOTE(review): the input comes from self.prev_layer; the X argument
        is accepted for interface compatibility but is unused here.
        """
        with tf.name_scope(self.name):
            return tf.add(tf.matmul(self.prev_layer, self.W), self.b)

    def backward(self, H):
        """Backward propagate H through the fc layer (not implemented)."""
        pass

    def get_variables(self):
        """Return the layer's variables."""
        return (self.W, self.b)

    def get_parameters(self):
        """Return all the parameters of this layer, keyed by variable name."""
        with tf.Session() as sess:
            return {
                self.vs[0]: sess.run(self.W),
                self.vs[1]: sess.run(self.b)
            }
class Regularization(BaseLayer):
    """L1/L2 penalty over a list of variables, scaled by the constant C."""

    def __init__(self, variables, C, regtype="l2", name="act_func"):
        """Create a new Regularization layer instance."""
        assert regtype in ["l1", "l2"]
        self.variables = variables
        self.C = C
        self.regtype = regtype
        self.name = name

    def forward(self, X):
        """Return C times the accumulated penalty (the X argument is unused)."""
        penalty = tf.constant(0.0)
        for var in self.variables:
            # regtype is guaranteed by the constructor to be "l1" or "l2".
            if self.regtype == "l1":
                term = tf.reduce_sum(tf.abs(var))
            else:
                term = tf.nn.l2_loss(var)
            penalty = tf.add(penalty, term)
        return tf.mul(self.C, penalty)

    def backward(self, H):
        """Backward propagation is not implemented for this layer."""
        pass

    def get_variables(self):
        """This layer introduces no variables of its own."""
        pass

    def get_parameters(self):
        """This layer has no parameters."""
        pass
class Loss(BaseLayer):
    """Loss function layer.

    Builds the training loss node for model output ``mod_y`` against the
    reference labels ``ref_y``, optionally adding a regularization term
    and a TensorBoard scalar summary.
    """
    def __init__(self, mod_y, ref_y, loss_type, regterm=None,
                 summary=True, name="loss_func"):
        """Create a new Loss layer instance.

        :param mod_y: model output tensor
        :param ref_y: reference (label) tensor
        :param loss_type: "cross_entropy", "softmax_cross_entropy"
            or "mean_squared"
        :param regterm: optional regularization tensor added to the loss
        :param summary: if True, record the loss as a tf summary scalar
        :param name: name of the summary tag for the loss
        """
        assert loss_type in ["cross_entropy", "softmax_cross_entropy",
                             "mean_squared"]
        self.mod_y = mod_y
        self.ref_y = ref_y
        self.loss_type = loss_type
        self.regterm = regterm
        self.name = name
        if loss_type == "cross_entropy":
            # Clip predictions away from 0/1 to avoid log(0).
            clip_inf = tf.clip_by_value(self.mod_y, 1e-10, float('inf'))
            clip_sup = tf.clip_by_value(1 - self.mod_y, 1e-10, float('inf'))
            loss = - tf.reduce_mean(tf.add(
                tf.mul(self.ref_y, tf.log(clip_inf)),
                tf.mul(tf.sub(1.0, self.ref_y), tf.log(clip_sup))))
        elif loss_type == "softmax_cross_entropy":
            loss = tf.contrib.losses.softmax_cross_entropy(
                self.mod_y, self.ref_y)
        elif loss_type == "mean_squared":
            # Root mean squared error between reference and model output.
            loss = tf.sqrt(tf.reduce_mean(
                tf.square(tf.sub(self.ref_y, self.mod_y))))
        self.loss = loss + regterm if regterm is not None else loss
        if summary:
            tf.summary.scalar(self.name, self.loss)
    def forward(self, X):
        """No-op: the loss node is fully built in __init__."""
        pass
    def backward(self, H):
        """Backward propagation is not implemented for this layer."""
        pass
    def get_variables(self):
        """This layer introduces no variables of its own."""
        pass
    def get_parameters(self):
        """This layer has no parameters."""
        pass
95d7f54672f221417081565b033268249f18412b | 835 | py | Python | tests/test_modules/test_builtin/test_grouppart.py | MattTaylorDLS/pymalcolm | 995a8e4729bd745f8f617969111cc5a34ce1ac14 | [
"Apache-2.0"
] | null | null | null | tests/test_modules/test_builtin/test_grouppart.py | MattTaylorDLS/pymalcolm | 995a8e4729bd745f8f617969111cc5a34ce1ac14 | [
"Apache-2.0"
] | null | null | null | tests/test_modules/test_builtin/test_grouppart.py | MattTaylorDLS/pymalcolm | 995a8e4729bd745f8f617969111cc5a34ce1ac14 | [
"Apache-2.0"
] | null | null | null | import unittest
from malcolm.core import call_with_params
from malcolm.modules.builtin.parts import GroupPart
class TestGroupPart(unittest.TestCase):
    """Unit tests for the builtin GroupPart widget."""

    def setUp(self):
        # Build a GroupPart and grab its attribute setter: the third
        # element of the first tuple yielded by create_attribute_models().
        self.o = call_with_params(
            GroupPart, name="things", description="A group of things")
        self.setter = list(self.o.create_attribute_models())[0][2]

    def test_init(self):
        # A freshly created group starts in the "expanded" state and is
        # tagged as a configurable group widget.
        assert self.o.name == "things"
        assert self.o.attr.value == "expanded"
        assert self.o.attr.meta.description == "A group of things"
        assert self.o.attr.meta.tags == ("widget:group", "config")

    def test_setter(self):
        # The setter accepts the known states and rejects anything else.
        assert self.o.attr.value == "expanded"
        self.setter("collapsed")
        assert self.o.attr.value == "collapsed"
        with self.assertRaises(ValueError):
            self.setter("anything else")
| 32.115385 | 70 | 0.653892 |
95d8eae1e421c5a5d85e31ca5953813a5295d371 | 512 | py | Python | ok2_backend/common/utils.py | Mipsters/ok2-backend | 50ddbb44262749d731f4e923add205541254223d | [
"MIT"
] | 1 | 2020-02-10T17:53:58.000Z | 2020-02-10T17:53:58.000Z | ok2_backend/common/utils.py | Mipsters/ok2-backend | 50ddbb44262749d731f4e923add205541254223d | [
"MIT"
] | 6 | 2020-01-06T19:37:12.000Z | 2021-09-22T18:03:31.000Z | ok2_backend/common/utils.py | Mipsters/ok2-backend | 50ddbb44262749d731f4e923add205541254223d | [
"MIT"
] | 5 | 2019-11-18T17:39:29.000Z | 2020-07-31T16:00:21.000Z | import os
from jose import jwt
from datetime import datetime, timedelta
JWT_SECRET = 'secret'
JWT_ALGORITHM = 'HS256'
JWT_EXP_DELTA_SECONDS = 31556952 # year
def get_token(request):
    """Decode and return the JWT carried in the request's Authorization header."""
    # NOTE(review): decoding reads the secret from os.environ['JWT_SECRET'],
    # while create_token signs with the module constant JWT_SECRET --
    # confirm both resolve to the same value in deployment.
    return jwt.decode(request.headers.get('Authorization'), os.environ['JWT_SECRET'])
def create_token(user_id):
    """Create a signed JWT for user_id, expiring JWT_EXP_DELTA_SECONDS from now."""
    expires_at = datetime.utcnow() + timedelta(seconds=JWT_EXP_DELTA_SECONDS)
    claims = {
        'user_id': user_id,
        'exp': expires_at,
    }
    return jwt.encode(claims, JWT_SECRET, JWT_ALGORITHM)
| 23.272727 | 85 | 0.722656 |
95da8c78112cb6f44e754d89ffd5c8e26c67e104 | 1,238 | py | Python | backend/ai4all_api/models.py | kevromster/ai4all | 39da1a95c4e06780f5712bb6e6ecb1f570e5d639 | [
"Apache-2.0"
] | null | null | null | backend/ai4all_api/models.py | kevromster/ai4all | 39da1a95c4e06780f5712bb6e6ecb1f570e5d639 | [
"Apache-2.0"
] | null | null | null | backend/ai4all_api/models.py | kevromster/ai4all | 39da1a95c4e06780f5712bb6e6ecb1f570e5d639 | [
"Apache-2.0"
] | null | null | null | import os
from django.db import models
from ai4all_api.detection_items import DETECTION_ITEMS
from ai4all_api.notification_types import NOTIFICATION_TYPES
class SubmitCameraItem(models.Model):
    """A camera registered for object detection, owned by a Telegram chat."""

    # Timestamp set once when the row is first saved.
    date_created = models.DateTimeField(auto_now_add=True)
    # Telegram chat that owns this camera.
    tg_chat_id = models.BigIntegerField()
    # Human-readable camera label; may be blank.
    name = models.CharField(max_length=255, blank=True, default='')
    # Camera URL used to fetch images.
    url = models.CharField(max_length=2048, blank=False)
    # Object class to detect, restricted to DETECTION_ITEMS.
    what_to_detect = models.CharField(choices=DETECTION_ITEMS, max_length=100)
    # Detection confidence threshold (presumably a percentage -- TODO confirm).
    detection_threshold = models.IntegerField()
    # Whether detection currently runs for this camera.
    detection_enabled = models.BooleanField(default=True)
    # How the owner is notified, restricted to NOTIFICATION_TYPES.
    notification_type = models.CharField(choices = NOTIFICATION_TYPES, max_length=100)
    # used for 'appearance/disappearance' notification type
    last_time_object_presented = models.BooleanField(default=False)
    # Region of image where the detection to be run.
    # Specified in percents from the image size.
    edge_left = models.IntegerField(default=0)
    edge_top = models.IntegerField(default=0)
    edge_right = models.IntegerField(default=100)
    edge_bottom = models.IntegerField(default=100)

    def __str__(self):
        """Display the camera by its user-assigned name."""
        return "{}".format(self.name)

    class Meta:
        # Oldest cameras first.
        ordering = ('date_created',)
| 35.371429 | 86 | 0.757674 |
95dba8c035cf85f4ef8f4bc3e7a7c14c268076f1 | 1,797 | py | Python | engine/src/valet/engine/search/filters/cpu_filter.py | onap/optf-fgps | 1494071d0329698297c5d78ee0799dbff0b57e43 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | engine/src/valet/engine/search/filters/cpu_filter.py | onap/optf-fgps | 1494071d0329698297c5d78ee0799dbff0b57e43 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | engine/src/valet/engine/search/filters/cpu_filter.py | onap/optf-fgps | 1494071d0329698297c5d78ee0799dbff0b57e43 | [
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2021-10-15T18:54:03.000Z | 2021-10-15T18:54:03.000Z | #
# -------------------------------------------------------------------------
# Copyright (c) 2019 AT&T Intellectual Property
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -------------------------------------------------------------------------
#
class CPUFilter(object):
    """Search filter that keeps only hosts with enough available vCPUs."""

    def __init__(self):
        self.name = "cpu"
        self.status = None

    def init_condition(self):
        """Reset filter state before a new search pass."""
        self.status = None

    def check_pre_condition(self, _level, _v, _avail_hosts, _avail_groups):
        """This filter has no pre-condition; it always applies."""
        return True

    def filter_candidates(self, _level, _v, _candidate_list):
        """Return the candidates able to satisfy _v's vCPU demand."""
        return [c for c in _candidate_list
                if self._check_candidate(_level, _v, c)]

    def _check_candidate(self, _level, _v, _candidate):
        """Return True if the candidate host has sufficient CPU cores."""
        # Only the available count is compared: an instance is allowed to
        # overcommit against itself, just not against other instances.
        return _candidate.get_vcpus(_level) >= _v.vCPUs
| 30.982759 | 76 | 0.606566 |
95e0d6973a04cf649a738acb651bea0fa6b7dfcd | 996 | py | Python | Inflearn_SungKim/3.MultiVariableLinearRegression/multi-variableLinearregression.py | shinhaha/tensorflow | 4647017a727985d64c5b0addee92f0ec516952c1 | [
"MIT"
] | null | null | null | Inflearn_SungKim/3.MultiVariableLinearRegression/multi-variableLinearregression.py | shinhaha/tensorflow | 4647017a727985d64c5b0addee92f0ec516952c1 | [
"MIT"
] | null | null | null | Inflearn_SungKim/3.MultiVariableLinearRegression/multi-variableLinearregression.py | shinhaha/tensorflow | 4647017a727985d64c5b0addee92f0ec516952c1 | [
"MIT"
] | null | null | null | import tensorflow as tf
# Training data: three input features (x1..x3) and one target per sample.
x1_data=[73.,93.,89.,96.,73.]
x2_data=[80.,88.,91.,98.,66.]
x3_data=[75.,93.,90.,100.,70.]
y_data=[152.,185.,180.,196.,142.]
# Graph inputs, fed each step through feed_dict.
x1=tf.placeholder(tf.float32)
x2=tf.placeholder(tf.float32)
x3=tf.placeholder(tf.float32)
Y=tf.placeholder(tf.float32)
# One scalar weight per feature plus a bias, randomly initialized.
w1=tf.Variable(tf.random_normal([1]),name='weight1')
w2=tf.Variable(tf.random_normal([1]),name='weight2')
# NOTE(review): this variable reuses the name 'weight1' (same as w1);
# presumably 'weight3' was intended -- confirm before renaming, since
# TF will silently uniquify the graph name.
w3=tf.Variable(tf.random_normal([1]),name='weight1')
b=tf.Variable(tf.random_normal([1]),name='bias')
# Linear hypothesis over the three features.
hypothesis=x1*w1+x2*w2+x3*w3+b
# Mean squared error cost.
cost=tf.reduce_mean(tf.square(hypothesis-Y))
# Minimize the cost with plain gradient descent.
optimizer=tf.train.GradientDescentOptimizer(learning_rate=1e-5)
train=optimizer.minimize(cost)
# Launch the graph.
sess=tf.Session()
# Initialize all variables before training.
sess.run(tf.global_variables_initializer())
# Run 2001 gradient steps, logging cost and predictions every 10 steps.
for step in range(2001):
    cost_val,hy_val,_=sess.run([cost,hypothesis,train],
                               feed_dict={x1:x1_data,x2:x2_data,x3:x3_data,Y:y_data})
    if step%10==0:
        print(step,"Cost:",cost_val,"\nPrediction:\n",hy_val)
95e18e6281085104769aa15c1a8ef9828b449526 | 1,759 | py | Python | train_model.py | sanjjayrj/Chatbot-NLTK | 2000a3c640d6624984ca4ad2457557e937d4ae05 | [
"MIT"
] | 3 | 2020-11-17T12:14:37.000Z | 2021-08-14T05:46:38.000Z | train_model.py | sanjjayrj/Chatbot-NLTK | 2000a3c640d6624984ca4ad2457557e937d4ae05 | [
"MIT"
] | null | null | null | train_model.py | sanjjayrj/Chatbot-NLTK | 2000a3c640d6624984ca4ad2457557e937d4ae05 | [
"MIT"
] | null | null | null | import pandas as pd
import nltk
import re
from nltk.stem import wordnet
from nltk import pos_tag
from nltk import word_tokenize
from datetime import datetime
data = pd.read_csv('traindata.csv', encoding='utf-8')
train_counter = 0
def text_normalize(text):
    """Lower-case *text*, strip non-letters, then lemmatize each token.

    POS tags from NLTK are mapped onto WordNet parts of speech so the
    lemmatizer gets the right category. Increments the module-level
    ``train_counter`` and prints progress every 10000 calls.
    """
    global train_counter
    if train_counter % 10000 == 0:
        print(str(train_counter) + " sets lemmatized..., "+"Time now: " + str(datetime.now()))
    train_counter += 1
    cleaned = re.sub(r'[^ a-z]', '', str(text).lower())
    tokens = nltk.word_tokenize(cleaned)
    lemmatizer = wordnet.WordNetLemmatizer()
    lemmas = []
    for word, tag in pos_tag(tokens, tagset=None):
        # Map the Penn Treebank tag prefix to a WordNet POS; default to noun.
        if tag.startswith('V'):
            wn_pos = 'v'
        elif tag.startswith('J'):
            wn_pos = 'a'
        elif tag.startswith('R'):
            wn_pos = 'r'
        else:
            wn_pos = 'n'
        lemmas.append(lemmatizer.lemmatize(word, wn_pos))
    return " ".join(lemmas)
if __name__ == '__main__':
    print("Time now: " + str(datetime.now()))
    # DataFrame.info() prints to stdout and returns None, so this also
    # emits a stray 'None' line — cosmetic only.
    print(data.info())
    print("\nData Imported...")
    print("----------------------------------------------------------------------------------------------------------")
    # Lemmatize every row of 'Content' and persist the result back into the
    # same CSV as a new 'lemmatized text' column.
    data['lemmatized text'] = data['Content'].apply(text_normalize)
    print("Training Data Lemmatized..., Time now: " + str(datetime.now()))
    data.to_csv('traindata.csv', encoding='utf-8', index = False)
    print(data['lemmatized text'])
    print(type(data['lemmatized text']))
print("----------------------------------------------------------------------------------------------------------") | 37.425532 | 119 | 0.557703 |
95e2b38a9c011b08bb379e05752137d534a0a8a9 | 1,848 | py | Python | tensor_twister/server.py | iamorphen/tensor_twister | d7936efa50cf0f7f3950ff4cbb0dd3fbac310ca9 | [
"MIT"
] | null | null | null | tensor_twister/server.py | iamorphen/tensor_twister | d7936efa50cf0f7f3950ff4cbb0dd3fbac310ca9 | [
"MIT"
] | null | null | null | tensor_twister/server.py | iamorphen/tensor_twister | d7936efa50cf0f7f3950ff4cbb0dd3fbac310ca9 | [
"MIT"
] | null | null | null | import io
import logging
import queue
from collections import namedtuple
import torch
import zmq
from tensor_twister.status_codes import StatusCode
UnpackedMessage = namedtuple("UnpackedMessage", ["tensor", "name", "ip"])
def serve(host: str, port: int):
    """
    Listen for incoming tensor data from clients. Print comparisons between
    pairs of tensor data.

    Args:
        host (str): The hostname to listen on; for example "localhost"
        port (int): The port to listen on; for example 5555
    """
    log = logging.getLogger(__name__)
    log.debug("libzmq version: %s", zmq.zmq_version())
    log.debug(" pyzmq version: %s", zmq.__version__)

    pending = queue.Queue()

    ctx = zmq.Context()
    sock = ctx.socket(zmq.REP)
    uri = f"tcp://{host}:{port}"
    log.info("Attempting to listen on %s.", uri)
    sock.bind(uri)
    log.info("Listening on %s.", uri)

    def _report(m1, m2):
        # Output format preserved verbatim from the original implementation.
        print(f"{m1.name}@{m1.ip}: tensor min: {m1.tensor.min()}; max: {m1.tensor.max()}; mean: {m1.tensor.mean()}")
        print(f"{m2.name}@{m2.ip}: tensor min: {m2.tensor.min()}; max: {m2.tensor.max()}; mean: {m2.tensor.mean()}")
        print(f"t1 and t2 are {'' if (m1.tensor == m2.tensor).all() else 'not'} equal")

    while True:
        # Blocking receive of the next client message.
        message = sock.recv_pyobj()
        try:
            tensor = torch.load(message.tensor)
        except Exception:
            sock.send_pyobj(StatusCode.TensorLoadFailure)
            continue
        pending.put(UnpackedMessage(tensor, message.name, message.ip))
        sock.send_pyobj(StatusCode.OK)
        # Once two tensors are queued, compare and print the first pair
        # (arguments evaluate left-to-right, preserving FIFO order).
        if pending.qsize() >= 2:
            _report(pending.get(), pending.get())
| 33 | 120 | 0.635281 |
95e39518b618f5551cfe1c882c8f307a7a86e276 | 6,744 | py | Python | optunity/solvers/CMAES.py | xrounder/optunity | 019182ca83fe2002083cc1ac938510cb967fd2c9 | [
"BSD-3-Clause"
] | 401 | 2015-01-08T00:56:20.000Z | 2022-03-19T09:07:12.000Z | optunity/solvers/CMAES.py | xrounder/optunity | 019182ca83fe2002083cc1ac938510cb967fd2c9 | [
"BSD-3-Clause"
] | 67 | 2015-01-08T09:13:20.000Z | 2022-01-05T23:26:36.000Z | optunity/solvers/CMAES.py | xrounder/optunity | 019182ca83fe2002083cc1ac938510cb967fd2c9 | [
"BSD-3-Clause"
] | 94 | 2015-02-04T08:35:56.000Z | 2021-10-03T12:40:35.000Z | #! /usr/bin/env python
# Copyright (c) 2014 KU Leuven, ESAT-STADIUS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import math
import functools
from .solver_registry import register_solver
from .util import Solver, _copydoc
from . import util
_numpy_available = True
try:
import numpy as np
except ImportError:
_numpy_available = False
_deap_available = True
try:
import deap
import deap.creator
import deap.base
import deap.tools
import deap.cma
import deap.algorithms
except ImportError:
_deap_available = False
except TypeError:
# this can happen because DEAP is in Python 2
# install needs to take proper care of converting
# 2 to 3 when necessary
_deap_available = False
class CMA_ES(Solver):
    """
    .. include:: /global.rst
    Please refer to |cmaes| for details about this algorithm.
    This solver uses an implementation available in the DEAP library [DEAP2012]_.
    .. warning:: This solver has dependencies on DEAP_ and NumPy_
        and will be unavailable if these are not met.
    .. _DEAP: https://code.google.com/p/deap/
    .. _NumPy: http://www.numpy.org
    """
    def __init__(self, num_generations, sigma=1.0, Lambda=None, **kwargs):
        """Initialize a CMA-ES solver.

        :param num_generations: number of generations to evolve
        :param sigma: initial step size of the search distribution,
            forwarded to DEAP's ``cma.Strategy``
        :param Lambda: forwarded to DEAP's ``cma.Strategy`` as ``lambda_``
            when given (DEAP picks its own default otherwise)
        :param kwargs: the starting point of the search — one keyword per
            hyperparameter; keys name the dimensions, values form the centroid

        .. warning:: |warning-unconstrained|
        """
        if not _deap_available:
            raise ImportError('This solver requires DEAP but it is missing.')
        if not _numpy_available:
            raise ImportError('This solver requires NumPy but it is missing.')
        self._num_generations = num_generations
        self._start = kwargs
        self._sigma = sigma
        self._lambda = Lambda
    @staticmethod
    def suggest_from_seed(num_evals, **kwargs):
        """Verify that we can effectively make a solver.
        The doctest has to be skipped from automated builds, because DEAP may not be available
        and yet we want documentation to be generated.
        >>> s = CMA_ES.suggest_from_seed(30, x=1.0, y=-1.0, z=2.0)
        >>> solver = CMA_ES(**s) #doctest:+SKIP
        """
        # 4 + 3*ln(d) is the canonical CMA-ES population-size heuristic.
        fertility = 4 + 3 * math.log(len(kwargs))
        d = dict(kwargs)
        d['num_generations'] = int(math.ceil(float(num_evals) / fertility))
        # num_gen is overestimated
        # this will require slightly more function evaluations than permitted by num_evals
        return d
    @property
    def num_generations(self):
        """Number of generations the solver will run."""
        return self._num_generations
    @property
    def start(self):
        """Returns the starting point for CMA-ES."""
        return self._start
    @property
    def lambda_(self):
        """Offspring count per generation (None lets DEAP choose)."""
        return self._lambda
    @property
    def sigma(self):
        """Initial step size of the search distribution."""
        return self._sigma
    @_copydoc(Solver.optimize)
    def optimize(self, f, maximize=True, pmap=map):
        toolbox = deap.base.Toolbox()
        # Encode the optimization direction in the sign of the fitness weight.
        if maximize:
            fit = 1.0
        else:
            fit = -1.0
        deap.creator.create("FitnessMax", deap.base.Fitness,
                            weights=(fit,))
        Fit = deap.creator.FitnessMax
        deap.creator.create("Individual", list,
                            fitness=Fit)
        Individual = deap.creator.Individual
        if self.lambda_:
            strategy = deap.cma.Strategy(centroid=list(self.start.values()),
                                         sigma=self.sigma, lambda_=self.lambda_)
        else:
            strategy = deap.cma.Strategy(centroid=list(self.start.values()),
                                         sigma=self.sigma)
        toolbox.register("generate", strategy.generate, Individual)
        toolbox.register("update", strategy.update)
        @functools.wraps(f)
        def evaluate(individual):
            # Map the flat individual back onto the named hyperparameters
            # (same key order as the starting point).
            return (util.score(f(**dict([(k, v)
                                         for k, v in zip(self.start.keys(),
                                                         individual)]))),)
        toolbox.register("evaluate", evaluate)
        toolbox.register("map", pmap)
        # Track the single best individual seen across all generations.
        hof = deap.tools.HallOfFame(1)
        deap.algorithms.eaGenerateUpdate(toolbox=toolbox,
                                         ngen=self._num_generations,
                                         halloffame=hof, verbose=False)
        return dict([(k, v)
                     for k, v in zip(self.start.keys(), hof[0])]), None
# CMA_ES solver requires deap > 1.0.1
# http://deap.readthedocs.org/en/latest/examples/cmaes.html
# Register the solver only when its optional dependencies imported cleanly;
# otherwise the class still exists but is absent from the solver registry.
if _deap_available and _numpy_available:
    CMA_ES = register_solver('cma-es', 'covariance matrix adaptation evolutionary strategy',
                             ['CMA-ES: covariance matrix adaptation evolutionary strategy',
                              ' ',
                              'This method requires the following parameters:',
                              '- num_generations :: number of generations to use',
                              '- sigma :: (optional) initial covariance, default 1',
                              '- Lambda :: (optional) measure of reproducibility',
                              '- starting point: through kwargs'
                              ' ',
                              'This method is described in detail in:',
                              'Hansen and Ostermeier, 2001. Completely Derandomized Self-Adaptation in Evolution Strategies. Evolutionary Computation'
                              ])(CMA_ES)
| 36.852459 | 144 | 0.631821 |
95e555ee7266bd7c5e0f103c5c42eba12b36c67d | 622 | py | Python | DailyCoding/11.py | jason71319jason/Interview-solved | 42ca93a68475952753d185c325cb55c79e2e55e1 | [
"MIT"
] | 46 | 2019-10-14T01:21:35.000Z | 2022-01-08T23:55:15.000Z | DailyCoding/11.py | jason71319jason/Interview-solved | 42ca93a68475952753d185c325cb55c79e2e55e1 | [
"MIT"
] | 53 | 2019-10-03T17:16:43.000Z | 2020-12-08T12:48:19.000Z | DailyCoding/11.py | jason71319jason/Interview-solved | 42ca93a68475952753d185c325cb55c79e2e55e1 | [
"MIT"
] | 96 | 2019-10-03T18:12:10.000Z | 2021-03-14T19:41:06.000Z | """
This problem was asked by Twitter.
Implement an autocomplete system. That is, given a query string s and a set of all possible query strings, return all strings in the set that have s as a prefix.
For example, given the query string de and the set of strings [dog, deer, deal], return [deer, deal].
Hint: Try preprocessing the dictionary into a more efficient data structure to speed up queries.
"""
def autocomplete_bruteforce(words, s):
    """Return all strings in *words* that have *s* as a prefix.

    FIX: the original used ``s in word`` (substring containment), which also
    matched words containing *s* in the middle; the problem statement asks
    for prefix matches only.
    """
    result = []
    for word in words:
        if word.startswith(s):
            result.append(word)
    return result
print(autocomplete_bruteforce(['dog','deer','deal'], 'de')) | 28.272727 | 161 | 0.705788 |
95e79ef92334e9854cdc295c02dc16e232f812ed | 4,974 | py | Python | pyblnet/blnet_parser.py | henfri/pyblnet | 0a3a59ea39ab569d4b59be5a918736dc238bcf13 | [
"MIT"
] | 3 | 2019-03-11T12:38:43.000Z | 2022-02-18T21:40:54.000Z | pyblnet/blnet_parser.py | henfri/pyblnet | 0a3a59ea39ab569d4b59be5a918736dc238bcf13 | [
"MIT"
] | 26 | 2018-10-15T10:57:21.000Z | 2021-03-23T18:35:06.000Z | pyblnet/blnet_parser.py | henfri/pyblnet | 0a3a59ea39ab569d4b59be5a918736dc238bcf13 | [
"MIT"
] | 7 | 2018-10-03T09:39:30.000Z | 2020-03-12T19:44:44.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on 09.08.2018
This is basically a python port of of a script by berwinter
https://github.com/berwinter/uvr1611/blob/master/lib/backend/blnet-connection.inc.php
author: Niels
"""
import struct
from datetime import datetime
# Parser constants for the BL-NET binary dataset format.
# 1 bit
DIGITAL_ON = 1
DIGITAL_OFF = 0
# 8 bit
SPEED_ACTIVE = 0x80
SPEED_MASK = 0x1F
# 16 bit
INT16_POSITIVE_MASK = 0xFFFF
SIGN_BIT = 0x8000
POSITIVE_VALUE_MASK = 0x0FFF
TYPE_MASK = 0x7000
TYPE_NONE = 0x0000
TYPE_DIGITAL = 0x1000
TYPE_TEMP = 0x2000
TYPE_VOLUME = 0x3000
TYPE_RADIATION = 0x4000
TYPE_RAS = 0x7000
RAS_POSITIVE_MASK = 0x01FF
# 32 bit
INT32_MASK = 0xFFFFFFFF
INT32_SIGN = 0x80000000


class BLNETParser:
    """Parse one binary BL-NET dataset into plain Python values.

    After construction the parsed channels are available as 1-based dicts:
    ``analog`` (16 channels), ``digital`` (16), ``speed`` (4), ``energy`` (2)
    and ``power`` (2). A 61-byte dataset additionally yields ``date``.
    """

    def __init__(self, data):
        """
        Parse a binary string containing a dataset.

        Provides access to the values of a dataset as object properties.

        @param data: byte string (55 bytes, or 61 bytes including timestamp)
        """
        # A 61-byte dataset carries a 6-byte timestamp trailer
        # (fetched from bootloader storage).
        if len(data) == 61:
            (_, seconds, minutes, hours, days, months, years) = struct.unpack(
                '<55sBBBBBB', data)
            # Years are stored as an offset from 2000.
            self.date = datetime(2000 + years, months, days, hours, minutes,
                                 seconds)
            # Only parse the preceding 55 data bytes below.
            data = data[:55]
        power = [0, 0]
        kWh = [0, 0]
        MWh = [0, 0]
        (_, digital, speed, active, power[0], kWh[0], MWh[0], power[1], kWh[1],
         MWh[1]) = struct.unpack('<32sH4sBLHHLHH', data)
        # The first 32 bytes are 16 little-endian uint16 analog channels.
        analog = struct.unpack(
            '<{}{}'.format('H' * 16, 'x' * (len(data) - 32)), data)
        self.analog = {}
        for channel in range(0, 16):
            self.analog[channel + 1] = round(
                self._convert_analog(analog[channel]), 3)
        self.digital = {}
        for channel in range(0, 16):
            self.digital[channel + 1] = self._convert_digital(digital, channel)
        self.speed = {}
        for channel in range(0, 4):
            # FIX: inactive channels yield None and round(None, 3) raised
            # TypeError in the original; keep None for inactive channels.
            value = self._convert_speed(speed[channel])
            self.speed[channel + 1] = None if value is None else round(value, 3)
        self.energy = {}
        for channel in range(0, 2):
            value = self._convert_energy(MWh[channel], kWh[channel], active,
                                         channel)
            self.energy[channel + 1] = None if value is None else round(value, 3)
        self.power = {}
        for channel in range(0, 2):
            value = self._convert_power(power[channel], active, channel)
            self.power[channel + 1] = None if value is None else round(value, 3)

    def to_dict(self):
        """
        Return the parsed data as a plain dict.

        @return dict of all parsed attributes
        """
        return self.__dict__

    def _convert_analog(self, value):
        """
        Convert a raw uint16 to the correctly scaled sensor value.

        The upper bits encode the sensor type; the scaling depends on it.

        @param value: short unsigned int as returned by BL-NET
        @return float/int with the decoded sensor value
        """
        mask = value & TYPE_MASK
        if mask == TYPE_TEMP:
            # Temperatures are stored in tenths of a degree.
            return self._calculate_value(value, 0.1)
        elif mask == TYPE_VOLUME:
            return self._calculate_value(value, 4)
        elif mask == TYPE_DIGITAL:
            return 1 if value & SIGN_BIT else 0
        elif mask == TYPE_RAS:
            return self._calculate_value(value, 0.1, RAS_POSITIVE_MASK)
        else:
            # TYPE_RADIATION, TYPE_NONE and anything unrecognized.
            return self._calculate_value(value)

    def _convert_digital(self, value, position):
        """Return DIGITAL_ON if the bit at *position* is set, else DIGITAL_OFF."""
        return DIGITAL_ON if value & (0x1 << position) else DIGITAL_OFF

    def _convert_speed(self, value):
        """Return the speed value, or None when the channel is flagged inactive."""
        if value & SPEED_ACTIVE:
            return None
        return value & SPEED_MASK

    def _convert_energy(self, mwh, kwh, active, position):
        """
        Return the energy (kWh) of the heat meter at *position*, or None if
        the meter is not activated.
        """
        # NOTE(review): `active & position` is always falsy for position 0,
        # so meter 1 can never report — this looks like it should be
        # `active & (1 << position)`; confirm against the device protocol
        # before changing (kept as-is to preserve observable behavior).
        if active & position:
            kwh = self._calculate_value(kwh, 0.1, INT16_POSITIVE_MASK)
            return mwh * 1000 + kwh
        return None

    def _convert_power(self, value, active, position):
        """
        Return the power of the heat meter at *position*, or None if the
        meter is not activated.
        """
        # Same activation test caveat as in _convert_energy above.
        if active & position:
            return self._calculate_value(value, 1 / 2560, INT32_MASK,
                                         INT32_SIGN)
        return None

    def _calculate_value(self,
                         value,
                         multiplier=1,
                         positive_mask=POSITIVE_VALUE_MASK,
                         signbit=SIGN_BIT):
        """Decode a sign-and-magnitude value (two's complement within the
        masked width) and apply *multiplier*."""
        result = value & positive_mask
        if value & signbit:
            result = -((result ^ positive_mask) + 1)
        return result * multiplier
| 29.784431 | 85 | 0.559912 |
95e8a73a4c141ad9d18c2ea514ffb13b8b700b03 | 3,568 | py | Python | app/routers/nodes.py | yamatteo/vue-fastapi-boilerplate | 5fa3de29a6e7ec4a8df9b3a4073f462307f62cb6 | [
"MIT"
] | 2 | 2020-03-11T02:58:44.000Z | 2020-03-27T16:00:25.000Z | app/routers/nodes.py | yamatteo/vue-fastapi-boilerplate | 5fa3de29a6e7ec4a8df9b3a4073f462307f62cb6 | [
"MIT"
] | 7 | 2021-03-10T07:59:29.000Z | 2022-02-26T23:46:17.000Z | app/routers/nodes.py | yamatteo/vue-fastapi-boilerplate | 5fa3de29a6e7ec4a8df9b3a4073f462307f62cb6 | [
"MIT"
] | 1 | 2020-03-11T02:58:48.000Z | 2020-03-11T02:58:48.000Z | from typing import Optional
from typing import List
from fastapi import APIRouter, Depends, Body
from models import User, Content, Node, Group, ExternalContent
from routers import get_current_user, admin_only
from schemas import NodeAdd, NodeEdit, NodeFind
#
router = APIRouter()
@router.post("/push_content")
async def push_content(node_id: str = Body(..., embed=True), content_id: str = Body(..., embed=True),
                       admin: User = Depends(admin_only)):
    """Attach a content item to a node (admin only); returns the updated node."""
    assert admin is not None
    content_ref = Content.ref(content_id)
    updated = await Node.find_one_and_add_to_set(find={"id": node_id}, data={"contents": content_ref})
    return updated.export()
@router.post("/pull_content")
async def pull_content(node_id: str = Body(..., embed=True), content_id: str = Body(..., embed=True),
                       admin: User = Depends(admin_only)):
    """Detach a content item from a node (admin only); returns the updated node."""
    assert admin is not None
    content_ref = Content.ref(content_id)
    updated = await Node.find_one_and_pull(find={"id": node_id}, data={"contents": content_ref})
    return updated.export()
@router.post("/push_external_content")
async def push_external_content(node_id: str = Body(..., embed=True), external_content_id: str = Body(..., embed=True),
                                admin: User = Depends(admin_only)):
    """Attach an external content item to a node (admin only); returns the updated node."""
    assert admin is not None
    external_ref = ExternalContent.ref(external_content_id)
    updated = await Node.find_one_and_add_to_set(find={"id": node_id}, data={"external_contents": external_ref})
    return updated.export()
@router.post("/pull_external_content")
async def pull_external_content(node_id: str = Body(..., embed=True), external_content_id: str = Body(..., embed=True),
                                admin: User = Depends(admin_only)):
    """Detach an external content item from a node (admin only); returns the updated node."""
    assert admin is not None
    external_ref = ExternalContent.ref(external_content_id)
    updated = await Node.find_one_and_pull(find={"id": node_id}, data={"external_contents": external_ref})
    return updated.export()
@router.get("/current")
async def current_nodes(current_user: User = Depends(get_current_user)):
    """Export every node reachable through the groups the caller belongs to."""
    member_groups = await Group.find({"members": current_user})
    visible_ids = [node.id for group in member_groups for node in group.nodes]
    visible = await Node.find({"id": {"$in": visible_ids}})
    return [node.export() for node in visible]
@router.post("/browse", dependencies=[Depends(admin_only)])
async def browse_nodes(find: NodeFind) -> List[Node]:
    """List all nodes matching the set fields of *find* (admin only)."""
    criteria = find.dict(exclude_unset=True)
    return await Node.find(find=criteria)
@router.post("/read", dependencies=[Depends(admin_only)])
async def read_node(find: NodeFind, with_contents: bool = Body(False), with_other_contents: bool = Body(False)):
    """Fetch one node; optionally expand its linked contents and/or all other contents."""
    node = await Node.find_one(find=find.dict(exclude_unset=True))
    export = node.dict()
    linked_ids = [content.id for content in node.contents]
    if with_contents:
        export["contents"] = await Content.find({"id": {"$in": linked_ids}})
    if with_other_contents:
        export["other_contents"] = await Content.find({"id": {"$nin": linked_ids}})
    return export
@router.post("/edit", dependencies=[Depends(admin_only)])
async def edit_node(find: NodeFind, data: NodeEdit):
    """Apply the fields set in *data* to the first node matching *find* (admin only)."""
    # FIX: removed leftover debug print() calls that leaked request payloads
    # to stdout on every edit.
    return await Node.find_one_and_set(find=find.dict(exclude_unset=True), data=data.dict(exclude_unset=True))
@router.post("/add", dependencies=[Depends(admin_only)])
async def add_node(data: NodeAdd):
    """Create a new node from the provided fields (admin only)."""
    payload = data.dict(exclude_unset=True)
    return await Node.insert_one(data=payload)
@router.post("/delete", dependencies=[Depends(admin_only)])
async def delete_node(find: NodeFind):
    """Delete the first node matching *find* (admin only)."""
    criteria = find.dict(exclude_unset=True)
    return await Node.delete_one(find=criteria)
| 37.166667 | 121 | 0.690583 |
95ea2d544465e77e80dcc38902724b81ddc4c5b9 | 2,427 | py | Python | Algebra/vector.py | jonasjungaker/VectorsAlgebra | 1b064b4328b7eb6a3c7a1c50b29e6df042309ca5 | [
"MIT"
] | null | null | null | Algebra/vector.py | jonasjungaker/VectorsAlgebra | 1b064b4328b7eb6a3c7a1c50b29e6df042309ca5 | [
"MIT"
] | null | null | null | Algebra/vector.py | jonasjungaker/VectorsAlgebra | 1b064b4328b7eb6a3c7a1c50b29e6df042309ca5 | [
"MIT"
] | null | null | null | class vector:
def __init__(self, *vals):
self.x = list(vals)
for val in vals:
float(val)
self.dimension = len(self.x)
def __getitem__(self, key):
return self.x[key]
def __setitem__(self, key, value):
self.x[key] = value
return self
def __add__(self, other):
if type(other) == type(int): # This also needs to support floating point types
for i in range(self.dimension):
self[i] += other
return self
self._checkDimension(other)
newx = []
for i in range(self.dimension):
newx.append(self[i] + other[i])
return vector(*newx)
def __eq__(self, other):
if self.dimension != other.dimension:
return False
for i in range(self.dimension):
if self[i] != other[i]:
return False
return True
def __mul__(self, other):
if type(other) == type(int):
x = []
for i in range(self.dimension):
x.append(self[i] * other)
return vector(*x)
self._checkDimension(other)
value = 0
for i in range(self.dimension):
value += self[i] * other[i]
return value
def __rmul__(self, other):
return self * other
def __matmul__(self, other):
if self.dimension != other.dimension != 3:
raise TypeError("Vector dimensions must be 3")
v = vector(0, 0, 0)
v[0] = (self[1] * other[2]) - (self[2] * other[1])
v[1] = (self[2] * other[0]) - (self[0] * other[2])
v[2] = (self[0] * other[1]) - (self[1] * other[0])
return v
def __sub__(self, other):
return self + ( - other)
def __neg__(self):
v = []
for i in range(self):
v.append( - self[i])
return vector(*v)
def __abs__(self):
value = self.magnitude()
return value**0.5
def _checkDimension(self, other):
if self.dimension != other.dimension:
raise TypeError("Vector dimensions must agree")
def magnitude(self):
# Returns the value of the sum of all values of the vector squared
powerMagnitude = 0
for a in self.x:
powerMagnitude += a*a
return powerMagnitude
| 29.240964 | 87 | 0.510507 |
95eaee2ff327784e0d2a6285027d63a294194fa5 | 283 | py | Python | Programming_Maester/Phoneketmon.py | Mayner0220/Programmers | 42e4783a526506fb7d8208841a76201909ed5c5c | [
"Apache-2.0"
] | 1 | 2021-04-01T06:19:02.000Z | 2021-04-01T06:19:02.000Z | Programming_Maester/Phoneketmon.py | Mayner0220/Programmers | 42e4783a526506fb7d8208841a76201909ed5c5c | [
"Apache-2.0"
] | null | null | null | Programming_Maester/Phoneketmon.py | Mayner0220/Programmers | 42e4783a526506fb7d8208841a76201909ed5c5c | [
"Apache-2.0"
] | null | null | null | # https://programmers.co.kr/learn/courses/30/lessons/1845
def solution(nums):
    """Max distinct phoneketmon kinds obtainable when taking half of *nums*.

    The answer is capped both by the number of distinct kinds available and
    by the half-of-the-list allowance.
    """
    return min(len(set(nums)), len(nums) // 2)
# Sample cases from the problem statement (expected output: 2, then 3).
print(solution([3,1,2,3]))
print(solution([3,3,3,2,2,4]))
print(solution([3,3,3,2,2,2])) | 25.727273 | 57 | 0.618375 |
95ec274e03ce16625cb08ace26548a81e6d7c252 | 3,903 | py | Python | ooobuild/lo/xml/crypto/sax/xsax_event_keeper.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/xml/crypto/sax/xsax_event_keeper.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/lo/xml/crypto/sax/xsax_event_keeper.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.xml.crypto.sax
import typing
from abc import abstractmethod
from ....uno.x_interface import XInterface as XInterface_8f010a43
if typing.TYPE_CHECKING:
from ...sax.x_document_handler import XDocumentHandler as XDocumentHandler_9b90e28
from ...wrapper.xxml_element_wrapper import XXMLElementWrapper as XXMLElementWrapper_66c0107c
class XSAXEventKeeper(XInterface_8f010a43):
    """
    Interface of SAX Event Keeper.
    This interface is used to manipulate element marks in a SAX event stream.
    There are two kinds of element mark, one is element collector, which is used to collect a particular element from the SAX event stream; the other is blocker, which is used to block the SAX event stream.
    See Also:
        `API XSAXEventKeeper <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1xml_1_1crypto_1_1sax_1_1XSAXEventKeeper.html>`_
    """
    # UNO type metadata emitted by the code generator — do not edit by hand.
    __ooo_ns__: str = 'com.sun.star.xml.crypto.sax'
    __ooo_full_ns__: str = 'com.sun.star.xml.crypto.sax.XSAXEventKeeper'
    __ooo_type_name__: str = 'interface'
    __pyunointerface__: str = 'com.sun.star.xml.crypto.sax.XSAXEventKeeper'
    @abstractmethod
    def addBlocker(self) -> int:
        """
        Adds a new blocker on the next element in the SAX event stream.
        No SAX event starting from the next element will be forwarded until this blocker is removed.
        (The returned int is presumably the blocker's identifier, as consumed
        by removeBlocker() — confirm against the UNO IDL.)
        """
    @abstractmethod
    def addElementCollector(self) -> int:
        """
        Adds a new element collector on the next element in the SAX event stream.
        (The returned int is presumably the collector's identifier, as consumed
        by removeElementCollector() — confirm against the UNO IDL.)
        """
    @abstractmethod
    def getCurrentBlockingNode(self) -> 'XXMLElementWrapper_66c0107c':
        """
        Gets the element which current blocking happens.
        This element is the working element of the first blocker in tree order.
        """
    @abstractmethod
    def getElement(self, id: int) -> 'XXMLElementWrapper_66c0107c':
        """
        Gets the element of an element mark.
        """
    @abstractmethod
    def isBlocking(self) -> bool:
        """
        Checks whether the SAX event stream is blocking.
        """
    @abstractmethod
    def printBufferNodeTree(self) -> str:
        """
        Prints information about all buffered elements.
        """
    @abstractmethod
    def removeBlocker(self, id: int) -> None:
        """
        Removes a blocker.
        """
    @abstractmethod
    def removeElementCollector(self, id: int) -> None:
        """
        Removes an element collector.
        """
    @abstractmethod
    def setElement(self, id: int, aElement: 'XXMLElementWrapper_66c0107c') -> None:
        """
        Sets the element of an element mark.
        When an element is replaced outside of this interface, then uses this method can restore the link between an element mark and its working element.
        """
    @abstractmethod
    def setNextHandler(self, nextHandler: 'XDocumentHandler_9b90e28') -> 'XDocumentHandler_9b90e28':
        """
        Sets the next document handler in the SAX chain.
        This handler will receive SAX events forwarded by the SAXEventKeeper.
        """
__all__ = ['XSAXEventKeeper']
| 37.171429 | 206 | 0.688189 |
95ed4a727fcf9707dcfd7fa3fc1e4e7848fbb44c | 992 | py | Python | neodroidagent/common/session_factory/vertical/procedures/training/sampling/rollout.py | gitter-badger/agent | 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11 | [
"Apache-2.0"
] | 8 | 2017-09-13T08:28:44.000Z | 2022-01-21T15:59:19.000Z | neodroidagent/common/session_factory/vertical/procedures/training/sampling/rollout.py | gitter-badger/agent | 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11 | [
"Apache-2.0"
] | 4 | 2019-03-22T13:49:16.000Z | 2019-03-25T13:49:39.000Z | neodroidagent/common/session_factory/vertical/procedures/training/sampling/rollout.py | gitter-badger/agent | 3f53eaa7ebdee3ab423c7b58785d584fe1a6ae11 | [
"Apache-2.0"
] | 3 | 2017-09-13T08:31:38.000Z | 2021-11-09T11:22:27.000Z | from itertools import count
from tqdm import tqdm
from neodroid.environments.droid_environment import VectorUnityEnvironment
def run(self, environment: VectorUnityEnvironment, render: bool = True) -> None:
    """Roll out the agent in *environment* indefinitely, resetting on episode end.

    Module-level function that expects an agent instance as ``self``
    (presumably bound onto a procedure class elsewhere — confirm at call site).
    """
    state = environment.reset().observables
    F = count(1)
    # tqdm wraps the infinite frame counter purely for progress display;
    # disabled when not rendering.
    F = tqdm(F, leave=False, disable=not render)
    for frame_i in F:
        F.set_description(f"Frame {frame_i}")
        action, *_ = self.sample(state, deterministic=True)
        state, signal, terminated, info = environment.react(action, render=render)
        # Vectorised environment: reset only once every instance is done.
        if terminated.all():
            state = environment.reset().observables
def infer(self, env, render=True):
    """Run sampling episodes in *env* indefinitely, printing episode numbers.

    Module-level function expecting an agent instance as ``self``. Each
    episode steps until the environment reports termination.
    """
    for episode_i in count(1):
        print(f"Episode {episode_i}")
        state = env.reset()
        for frame_i in count(1):
            action, *_ = self.sample(state)
            state, signal, terminated, info = env.act(action)
            if render:
                env.render()
            if terminated:
                break
| 26.810811 | 82 | 0.626008 |
95eef20a68a045c35b991c4b9eef565e70a03766 | 17,995 | py | Python | sysevr/slicer/mapping.py | Saleh-Ibtasham/VulScrape | 738d17e9dd7e5edc2341d106361651fd28f99c61 | [
"PostgreSQL",
"Unlicense",
"MIT"
] | 1 | 2021-04-12T12:59:33.000Z | 2021-04-12T12:59:33.000Z | sysevr/slicer/mapping.py | Jokers-grin/VulScrape | 738d17e9dd7e5edc2341d106361651fd28f99c61 | [
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null | sysevr/slicer/mapping.py | Jokers-grin/VulScrape | 738d17e9dd7e5edc2341d106361651fd28f99c61 | [
"PostgreSQL",
"Unlicense",
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import re
import copy
import os
import string
import xlrd
import pickle
from .get_tokens import *
keywords_0 = ('auto', 'typedf', 'const', 'extern', 'register', 'static', 'volatile', 'continue', 'break',
'default', 'return', 'goto', 'else', 'case')
keywords_1 = ('catch', 'sizeof', 'if', 'switch', 'while', 'for')
keywords_2 = ('memcpy', 'wmemcpy', '_memccpy', 'memmove', 'wmemmove', 'memset', 'wmemset', 'memcmp', 'wmemcmp', 'memchr',
'wmemchr', 'strncpy', 'lstrcpyn', 'wcsncpy', 'strncat', 'bcopy', 'cin', 'strcpy', 'lstrcpy', 'wcscpy', '_tcscpy',
'_mbscpy', 'CopyMemory', 'strcat', 'lstrcat', 'fgets', 'main', '_main', '_tmain', 'Winmain', 'AfxWinMain', 'getchar',
'getc', 'getch', 'getche', 'kbhit', 'stdin', 'm_lpCmdLine', 'getdlgtext', 'getpass', 'istream.get', 'istream.getline',
'istream.peek', 'istream.putback', 'streambuf.sbumpc', 'streambuf.sgetc', 'streambuf.sgetn', 'streambuf.snextc', 'streambuf.sputbackc',
'SendMessage', 'SendMessageCallback', 'SendNotifyMessage', 'PostMessage', 'PostThreadMessage', 'recv', 'recvfrom', 'Receive',
'ReceiveFrom', 'ReceiveFromEx', 'CEdit.GetLine', 'CHtmlEditCtrl.GetDHtmlDocument', 'CListBox.GetText', 'CListCtrl.GetItemText',
'CRichEditCtrl.GetLine', 'GetDlgItemText', 'CCheckListBox.GetCheck', 'DISP_FUNCTION', 'DISP_PROPERTY_EX', 'getenv', 'getenv_s', '_wgetenv',
'_wgetenv_s', 'snprintf', 'vsnprintf', 'scanf', 'sscanf', 'catgets', 'gets', 'fscanf', 'vscanf', 'vfscanf', 'printf', 'vprintf', 'CString.Format',
'CString.FormatV', 'CString.FormatMessage', 'CStringT.Format', 'CStringT.FormatV', 'CStringT.FormatMessage', 'CStringT.FormatMessageV',
'vsprintf', 'asprintf', 'vasprintf', 'fprintf', 'sprintf', 'syslog', 'swscanf', 'sscanf_s', 'swscanf_s', 'swprintf', 'malloc',
'readlink', 'lstrlen', 'strchr', 'strcmp', 'strcoll', 'strcspn', 'strerror', 'strlen', 'strpbrk', 'strrchr', 'strspn', 'strstr',
'strtok', 'strxfrm', 'kfree', '_alloca')
keywords_3 = ('_strncpy*', '_tcsncpy*', '_mbsnbcpy*', '_wcsncpy*', '_strncat*', '_mbsncat*', 'wcsncat*', 'CEdit.Get*', 'CRichEditCtrl.Get*',
'CComboBox.Get*', 'GetWindowText*', 'istream.read*', 'Socket.Receive*', 'DDX_*', '_snprintf*', '_snwprintf*')
keywords_5 = ('*malloc',)
# Load the sensitive-API function names from the bundled spreadsheet:
# first column of every sheet, header row skipped.
xread = xlrd.open_workbook('./sysevr/ml_models/function.xls')
keywords_4 = []
for sheet in xread.sheets():
    col = sheet.col_values(0)[1:]
    keywords_4 += col
#print keywords_4
# Primitive, aggregate and allocation keyword groups (note: 'doubule' typo
# is preserved from the original table).
typewords_0 = ('short', 'int', 'long', 'float', 'doubule', 'char', 'unsigned', 'signed', 'void' ,'wchar_t', 'size_t', 'bool')
typewords_1 = ('struct', 'union', 'enum')
typewords_2 = ('new', 'delete')
operators = ('+', '-', '*', '/', '=', '%', '?', ':', '!=', '==', '<<', '&&', '||', '+=', '-=', '++', '--', '>>', '|=')
# Regexes: C identifier; identifier possibly containing '->' or '.';
# integer literal; quoted string/char constant.
function = '^[_a-zA-Z][_a-zA-Z0-9]*$'
variable = '^[_a-zA-Z][_a-zA-Z0-9(->)?(\.)?]*$'
number = '[0-9]+'
stringConst = '(^\'[\s|\S]*\'$)|(^"[\s|\S]*"$)'
constValue = ['NULL', 'false', 'true']
# Any non-identifier character; whitespace class; empty-string sentinel.
phla = '[^a-zA-Z0-9_]'
space = '\s'
spa = ''
def isinKeyword_3(token):
    """Return True if *token* starts with any wildcard pattern in keywords_3.

    Each entry in keywords_3 ends with '*'; the characters before the '*'
    are matched as a prefix of *token*.

    FIX: the original returned inside the loop on the very first keyword,
    so only one pattern was ever consulted.
    """
    for key in keywords_3:
        prefix = key[:-1]
        if len(token) >= len(prefix) and token[:len(prefix)] == prefix:
            return True
    return False
def isinKeyword_5(token):
    """Return True if *token* contains any wildcard pattern from keywords_5.

    Entries start with '*' (e.g. '*malloc'); a token matches when it contains
    the remainder and carries no underscore (presumably to exclude
    underscore-named wrappers — confirm against the original intent).

    FIX: the original returned inside the loop on the very first keyword,
    so only one pattern was ever consulted.
    """
    for key in keywords_5:
        pattern = key[1:]
        if pattern in token and "_" not in token:
            return True
    return False
def isphor(s, liter):
    """Return True when the regex *liter* matches anywhere inside *s*."""
    return re.search(liter, s) is not None
def var(s):
    """Return True when *s* matches the module-level ``function`` identifier regex."""
    return re.match(function, s) is not None
def CreateVariable(string, token):
    """Append lexical elements from *string* onto the *token* list.

    Identifier-like elements (per var()) are flushed together with any
    fragments pending on stack1; everything else is copied through as-is.
    Note: stack1 is currently never populated because the member-access
    ('->' / '.') merging branch below is commented out, so identifiers are
    appended individually.
    """
    length = len(string)
    stack1 = []
    s = ''
    i = 0
    while (i < length):
        if var(string[i]):
            #if i + 1 < length and (string[i + 1] == '->' or string[i + 1] == '.'):
            # stack1.append(string[i])
            # stack1.append(string[i + 1])
            # i = i + 2
            #else:
            # Drain any pending prefix fragments before this identifier.
            while stack1 != []:
                s = stack1.pop() + s
            s = s + string[i]
            token.append(s)
            s = ''
            i = i + 1
        else:
            # Non-identifier element (operator, punctuation, literal).
            token.append(string[i])
            i = i + 1
def mapping(list_sentence):
    """Normalize tokenized C/C++ source lines for the ML pipeline.

    Each entry of `list_sentence` is a list of code tokens; entries are
    joined into lines, re-tokenized (whitespace/punctuation split, keeping
    two-character operators and quoted literals whole), and then every
    user-defined function is renamed to 'func_N' and every user-defined
    variable to 'variable_N'.  Keywords, type names, known library calls
    (keywords_0..keywords_5 / typewords_*), numbers and string constants
    pass through untouched.

    Returns:
        (list_code, list_func): the normalized code lines, plus the original
        names of user functions containing 'good' or 'bad' (test-case
        markers).
    """
    # Two-character operators that must survive tokenization as one token.
    two_char_ops = ('->', '<<', '>>', '&&', '||', '|=', '==', '!=',
                    '++', '--', '+=', '-=')

    def _canonical(name, table, prefix):
        # Return the stable placeholder for `name`, allocating 'prefix_N'
        # with the next free N the first time the name is seen.
        if name not in table:
            if table:
                nxt = max(int(v.split('_')[-1]) for v in table.values()) + 1
            else:
                nxt = 0
            table[name] = prefix + '_' + str(nxt)
        return table[name]

    def _tokenize(line):
        # Split `line` into raw tokens: identifiers/words, operators and
        # quoted literals (kept verbatim, including the quote characters).
        raw = []
        i = 0
        j = 0                 # start index of the word currently being read
        quote = None          # quote char while inside a string literal
        literal = ''
        while i < len(line):
            ch = line[i]
            if quote is not None:
                literal += ch
                if ch == quote:          # closing quote: flush the literal
                    raw.append(literal)
                    literal = ''
                    quote = None
                    j = i + 1
                i += 1
            elif isphor(ch, space):
                if i > 0:
                    raw.append(line[j:i])
                j = i + 1
                i += 1
            elif i + 1 == len(line):     # last character: flush what's left
                raw.append(line[j:i + 1])
                break
            elif isphor(ch, phla):       # punctuation / operator character
                if i + 1 < len(line) and ch + line[i + 1] in two_char_ops:
                    raw.append(ch + line[i + 1])
                    j = i + 2
                    i += 2
                elif ch == '"' or ch == '\'':
                    quote = ch
                    literal = ch
                    i += 1
                else:
                    raw.append(ch)
                    j = i + 1
                    i += 1
            else:                        # word character: keep scanning
                i += 1
        # Drop empty tokens produced by consecutive separators.
        return [t for t in raw if t != spa]

    # Re-join the pre-tokenized sentences into plain source lines.
    list_code = []
    list_func = []
    for code in list_sentence:
        list_code.append(' '.join(code))

    _func_dict = {}
    _variable_dict = {}
    for index in range(len(list_code)):
        token = []
        CreateVariable(_tokenize(list_code[index]), token)
        j = 0
        while j < len(token):
            tok = token[j]
            if tok in constValue:
                j += 1
            elif isphor(tok, variable):
                if (tok in keywords_0) or (tok in typewords_0) or \
                        (tok in typewords_1) or (tok in typewords_2):
                    j += 1
                elif j - 1 >= 0 and j + 1 < len(token) and \
                        token[j - 1] == 'new' and token[j + 1] == '[':
                    # 'new T [' — type name of an array allocation.
                    j = j + 2
                elif j + 1 < len(token) and token[j + 1] == '(':
                    # A call site: known API names pass through untouched,
                    # anything else is a user-defined function.
                    if (tok in keywords_1 or tok in keywords_2 or
                            isinKeyword_3(tok) or tok in keywords_4 or
                            isinKeyword_5(tok)):
                        j = j + 2
                    else:
                        if "good" in tok or "bad" in tok:
                            list_func.append(str(tok))
                        token[j] = _canonical(tok, _func_dict, 'func')
                        j = j + 2
                elif j + 1 < len(token) and (not isphor(token[j + 1], variable)):
                    if token[j + 1] == '*':
                        # Disambiguate pointer declarations from multiplies.
                        if j + 2 < len(token) and token[j + 2] == 'const':
                            j = j + 3
                        elif j - 1 >= 0 and token[j - 1] == 'const':
                            j = j + 2
                        elif j - 1 > 0 and (token[j - 1] in operators):
                            token[j] = _canonical(tok, _variable_dict, 'variable')
                            j = j + 2
                        elif j + 2 < len(token) and token[j + 2] == ')':
                            j = j + 2
                        elif j - 2 > 0 and (token[j - 1] == '(' and
                                            token[j - 2] in operators):
                            token[j] = _canonical(tok, _variable_dict, 'variable')
                            j = j + 2
                        else:
                            token[j] = _canonical(tok, _variable_dict, 'variable')
                            j = j + 2
                    else:
                        token[j] = _canonical(tok, _variable_dict, 'variable')
                        j = j + 2
                elif j + 1 == len(token):
                    # Identifier at the very end of the line: a variable.
                    token[j] = _canonical(tok, _variable_dict, 'variable')
                    break
                else:
                    j += 1
            elif isphor(tok, number):
                j += 1
            elif isphor(tok, stringConst):
                j += 1
            else:
                j += 1
        list_code[index] = ' '.join(token)
    return list_code, list_func
| 38.866091 | 160 | 0.373159 |
95ef3fa428d1d310cb953139858993824869ba27 | 1,929 | py | Python | Flatipie/widgets/cards.py | zenqiwp/Flatipie | 441b1f120d78ec072b86d1c95b381e85fbe7661d | [
"MIT"
] | 11 | 2020-11-12T08:07:56.000Z | 2021-04-27T01:42:30.000Z | Flatipie/widgets/cards.py | zenqiwp/Flatipie | 441b1f120d78ec072b86d1c95b381e85fbe7661d | [
"MIT"
] | null | null | null | Flatipie/widgets/cards.py | zenqiwp/Flatipie | 441b1f120d78ec072b86d1c95b381e85fbe7661d | [
"MIT"
] | 2 | 2020-11-12T07:08:27.000Z | 2020-11-12T08:07:58.000Z |
"""
Copyright (c) 2020 Flatipie
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from PyQt5.QtWidgets import QGroupBox, QGraphicsDropShadowEffect
from PyQt5.QtCore import Qt, pyqtSignal, QObject, QPoint
class MaterialCard(QGroupBox):
    """A QGroupBox styled like a Material-design card.

    The card raises a drop shadow while the cursor hovers over it (when
    `shadow` is enabled) and emits `clicked` on any mouse press.
    """
    # Emitted whenever the card receives a mouse press.
    clicked = pyqtSignal()
    def __init__(self, parent=None, shadow=True):
        super(MaterialCard, self).__init__(parent)
        self._is_shadow = shadow
    @property
    def is_shadow(self):
        """Whether the hover drop-shadow effect is enabled."""
        return self._is_shadow
    def enterEvent(self, event):
        # Raise the card with a drop shadow while hovered.
        if not self.is_shadow:
            return
        self.setGraphicsEffect(
            QGraphicsDropShadowEffect(blurRadius=10, offset=QPoint(0, 0))
        )
    def leaveEvent(self, event):
        # Drop back flat when the cursor leaves.
        if self.is_shadow:
            self.setGraphicsEffect(None)
    def mousePressEvent(self, event):
        self.clicked.emit()
| 37.096154 | 79 | 0.709176 |
95ef71f8c3f9102a164ab9d3fc0c343aa7cbaaa5 | 7,035 | py | Python | lib/datasets/vrd/to_pascal_format.py | sx14/open-relation.pytorch | 3fe52a0c6129a80abbc84df53903d13b7dea05d6 | [
"MIT"
] | 2 | 2019-04-21T01:45:01.000Z | 2020-03-11T07:09:18.000Z | lib/datasets/vrd/to_pascal_format.py | sx14/open-relation.pytorch | 3fe52a0c6129a80abbc84df53903d13b7dea05d6 | [
"MIT"
] | null | null | null | lib/datasets/vrd/to_pascal_format.py | sx14/open-relation.pytorch | 3fe52a0c6129a80abbc84df53903d13b7dea05d6 | [
"MIT"
] | null | null | null | import os
import shutil
import xml.dom.minidom
def output_pascal_format(mid_data, output_path):
    """Write one image's annotations as a PASCAL VOC 2007 style XML file.

    Args:
        mid_data: dict with keys
            filename, width, height, depth: image metadata;
            objects: list of dicts with xmin/ymin/xmax/ymax (1-based pixel
                coordinates), name, pose, truncated, difficult.
        output_path: path of the XML file to create (overwritten).

    Each object's box is clamped to the image bounds; the clamping mutates
    the entries of mid_data['objects'] in place (side effect visible to the
    caller, matching the previous behavior).
    """
    # Fixed boilerplate fields expected by the VOC2007 layout.
    additional_data = {
        'folder': 'VOC2007',
        's_database': 'The VOC2007 Database',
        's_annotation': 'PASCAL VOC2007',
        's_image': 'flickr',
        's_flickrid': '123456789',
        'o_flickrid': 'Tom',
        'o_name': 'Tom',
        'segmented': '0',
    }

    dom = xml.dom.minidom.Document()

    def _text_element(tag, text):
        # Create a <tag>text</tag> element (deduplicates the repetitive
        # createElement/createTextNode/appendChild pattern).
        node = dom.createElement(tag)
        node.appendChild(dom.createTextNode(str(text)))
        return node

    root = dom.createElement('annotation')
    root.appendChild(_text_element('folder', additional_data['folder']))
    root.appendChild(_text_element('filename', mid_data['filename']))

    # <source>
    source = dom.createElement('source')
    source.appendChild(_text_element('database', additional_data['s_database']))
    source.appendChild(_text_element('annotation', additional_data['s_annotation']))
    source.appendChild(_text_element('image', additional_data['s_image']))
    source.appendChild(_text_element('flickrid', additional_data['s_flickrid']))
    root.appendChild(source)

    # <owner>
    owner = dom.createElement('owner')
    owner.appendChild(_text_element('flickrid', additional_data['o_flickrid']))
    owner.appendChild(_text_element('name', additional_data['o_name']))
    root.appendChild(owner)

    # <size>
    size = dom.createElement('size')
    size.appendChild(_text_element('width', mid_data['width']))
    size.appendChild(_text_element('height', mid_data['height']))
    size.appendChild(_text_element('depth', mid_data['depth']))
    root.appendChild(size)

    root.appendChild(_text_element('segmented', additional_data['segmented']))

    # One <object> element per annotated box.
    for obj in mid_data['objects']:
        # Clamp the box to the (1-based) image bounds, in the same order as
        # the original implementation: xmin, ymin, ymax, xmax.
        if int(obj['xmin']) <= 0:
            obj['xmin'] = '1'
        if int(obj['ymin']) <= 0:
            obj['ymin'] = '1'
        if int(obj['ymax']) > mid_data['height']:
            obj['ymax'] = mid_data['height']
        if int(obj['xmax']) > mid_data['width']:
            obj['xmax'] = mid_data['width']

        obj_node = dom.createElement('object')
        obj_node.appendChild(_text_element('name', obj['name']))
        obj_node.appendChild(_text_element('pose', obj['pose']))
        obj_node.appendChild(_text_element('truncated', obj['truncated']))
        obj_node.appendChild(_text_element('difficult', obj['difficult']))

        bndbox = dom.createElement('bndbox')
        bndbox.appendChild(_text_element('xmin', obj['xmin']))
        bndbox.appendChild(_text_element('ymin', obj['ymin']))
        bndbox.appendChild(_text_element('xmax', obj['xmax']))
        bndbox.appendChild(_text_element('ymax', obj['ymax']))
        obj_node.appendChild(bndbox)

        root.appendChild(obj_node)

    with open(output_path, 'w') as des_file:
        root.writexml(des_file, addindent='\t', newl='\n')
95f03d2ec095743360ac14d2a11b057617f86d87 | 4,880 | py | Python | stellar/cognition/planning.py | strfx/stellar | 41b190eed016d2d6ad8548490a0c9620a02d711e | [
"MIT"
] | null | null | null | stellar/cognition/planning.py | strfx/stellar | 41b190eed016d2d6ad8548490a0c9620a02d711e | [
"MIT"
] | null | null | null | stellar/cognition/planning.py | strfx/stellar | 41b190eed016d2d6ad8548490a0c9620a02d711e | [
"MIT"
] | null | null | null | """
Contains path planning logic.
"""
import math
import numpy as np
from heapq import heappush, heappop
def heuristics(a, b):
    """Euclidean-distance heuristic between grid points `a` and `b`.

    Args:
        a, b: (x, y) coordinate pairs.

    Returns:
        The straight-line distance between the two points.

    Cleanup: removed the unused `weight` local and the dead commented-out
    math.hypot alternative.
    """
    x1, y1 = a
    x2, y2 = b
    return np.sqrt(np.square(x2 - x1) + np.square(y2 - y1))
def motion_model_4():
    """8-connected motion model as (dx, dy, step-cost) triples.

    Note: despite the name, this includes the four diagonal moves as well,
    each with the same unit cost.
    """
    deltas = ((1, 0), (0, 1), (-1, 0), (0, -1),
              (-1, -1), (-1, 1), (1, -1), (1, 1))
    return [[dx, dy, 1] for dx, dy in deltas]
class AStarPlanner:
    """Grid-based A* path planner with optional gradient-descent smoothing."""

    def __init__(self):
        pass

    def plan(self, occupancy_grid_map, start_node, goal_node):
        """Plan a path through the occupancy grid map.

        Args:
            occupancy_grid_map: 2D array indexed as [y][x]; a cell is
                traversable when its value is <= 0.
            start_node: (x, y) start coordinates.
            goal_node: (x, y) goal coordinates.

        Returns:
            A list of (x, y) coordinates from the start towards the goal.
            If the goal is unreachable (or the iteration cap is hit), the
            best partial path found is returned instead of crashing.
        """
        # Frontier entries: (estimated total cost, cost so far, node, previous).
        start_node_costs = 0
        node_to_goal = heuristics(start_node, goal_node) + start_node_costs
        frontier = [(node_to_goal, start_node_costs, start_node, None)]
        visited = set()   # set instead of list: O(1) membership tests
        history = {}      # node -> predecessor, for path reconstruction
        possible_movements = motion_model_4()

        # Safety guard against runaway searches.
        iterations = 0
        max_iterations = 10000

        position = start_node
        # Bug fix: the original guard (`while frontier or i >= limit`) never
        # enforced the iteration cap and popped from an empty heap when the
        # goal was unreachable.
        while frontier and iterations < max_iterations:
            iterations += 1
            total_cost, cost, position, previous = heappop(frontier)
            # Skip nodes already expanded via a cheaper route.
            if position in visited:
                continue
            visited.add(position)
            history[position] = previous
            if position == goal_node:
                break
            for dx, dy, dcost in possible_movements:
                xn = position[0] + dx
                yn = position[1] + dy
                if xn < 0 or yn < 0:
                    continue
                if (xn, yn) in visited:
                    continue
                if yn >= occupancy_grid_map.shape[0] or xn >= occupancy_grid_map.shape[1]:
                    continue
                # Only non-positive cells are traversable.
                if occupancy_grid_map[yn][xn] <= 0:
                    new_cost = cost + dcost
                    estimate = new_cost + heuristics((xn, yn), goal_node)
                    heappush(frontier, (estimate, new_cost, (xn, yn), position))

        # Walk back through the predecessors to reconstruct the path.
        path = []
        while position:
            path.append(position)
            position = history[position]
        return list(reversed(path))

    def smoothen(self, occupancy_grid_map, path):
        """Smooth `path` with iterative gradient descent.

        Args:
            occupancy_grid_map: unused; kept for interface compatibility.
            path: list of (x, y) points; not modified.

        Returns:
            A new list of [x, y] points with the endpoints fixed and the
            interior relaxed towards both the original data and the
            neighbouring points.
        """
        # Tuning constants: pull towards the original data vs. neighbours.
        weight_data = 0.01
        weight_smooth = 0.8
        tolerance = 0.0000001

        # Mutable working copy (the redundant deepcopy was removed: the
        # comprehension already builds fresh lists).
        smoothed_path = [list(point) for point in path]

        while True:
            # Track the total movement to detect convergence.
            total_of_changes = 0
            # Keep the endpoints fixed; relax only the interior points.
            for i in range(1, len(path) - 1):
                for dim in range(len(path[i])):
                    previous = smoothed_path[i][dim]
                    smoothed_path[i][dim] = smoothed_path[i][dim] + \
                        weight_data * (path[i][dim] - smoothed_path[i][dim]) + \
                        weight_smooth * \
                        (smoothed_path[i + 1][dim] + smoothed_path[i - 1][dim]
                         - 2 * smoothed_path[i][dim])
                    total_of_changes += abs(previous -
                                            smoothed_path[i][dim])
            if total_of_changes < tolerance:
                break
        return smoothed_path
def get_nearest_point(robot, aa):
    """Return the point of `reference_trajectory` closest to `robot`.

    NOTE(review): this function looks unfinished/dead code — it references
    `edist` and `reference_trajectory`, which are not defined in this
    module, and the parameter `aa` is unused.  Calling it as-is raises
    NameError; confirm before use.
    """
    r = (robot.x, robot.y)
    a = [edist(k, r) for k in list(reference_trajectory)]
    i = np.argmin(a)
    p1 = reference_trajectory[i]
    p2 = reference_trajectory[i+5]  # presumably a 5-sample look-ahead — TODO confirm
    aaa = np.arctan2(p2[1] - p1[1], p2[0] - p1[0])
    print(f"l => {aaa:.4f}, {p1}, {p2}")
    return reference_trajectory[i]
| 28.87574 | 92 | 0.527664 |
95f0995fad2f82fbbbdccca26aa5605a1c0767e1 | 84 | py | Python | libcity/model/traffic_od_prediction/__init__.py | LibCity/Bigscity-LibCity-Docs-zh_CN | 2be639c3fe7d75727ade18f473d6f625900f73f2 | [
"Apache-2.0"
] | 5 | 2021-09-28T12:32:50.000Z | 2022-02-03T09:04:35.000Z | libcity/model/traffic_od_prediction/__init__.py | aptx1231/Bigscity-TrafficDL-Docs-zh_CN | 2be639c3fe7d75727ade18f473d6f625900f73f2 | [
"Apache-2.0"
] | null | null | null | libcity/model/traffic_od_prediction/__init__.py | aptx1231/Bigscity-TrafficDL-Docs-zh_CN | 2be639c3fe7d75727ade18f473d6f625900f73f2 | [
"Apache-2.0"
] | 1 | 2021-12-16T05:10:35.000Z | 2021-12-16T05:10:35.000Z | from libcity.model.traffic_od_prediction.GEML import GEML
# Public API of this sub-package: only the GEML model is exported.
__all__ = [
    "GEML"
]
| 14 | 57 | 0.738095 |
95f29beeb0a5add129f6eb5d02625efa724d1d4e | 699 | py | Python | core/migrations/0008_auto_20151203_1519.py | rafaelbantu/timtec | 86c51b7440a044704ed33c3e752a6cf6b15ceae3 | [
"BSD-3-Clause"
] | 21 | 2015-09-23T14:07:16.000Z | 2022-02-18T01:35:18.000Z | core/migrations/0008_auto_20151203_1519.py | rafaelbantu/timtec | 86c51b7440a044704ed33c3e752a6cf6b15ceae3 | [
"BSD-3-Clause"
] | 178 | 2016-05-10T16:16:19.000Z | 2021-12-15T20:21:21.000Z | core/migrations/0008_auto_20151203_1519.py | rafaelbantu/timtec | 86c51b7440a044704ed33c3e752a6cf6b15ceae3 | [
"BSD-3-Clause"
] | 18 | 2015-10-23T13:28:17.000Z | 2021-09-22T13:08:28.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Adds CertificationProcess.active and relaxes its certificate FK.

    - `active`: new boolean flag on CertificationProcess, defaulting True.
    - `course_certification`: now nullable, reverse-accessible as
      'processes' on CourseCertification.
    """
    dependencies = [
        ('core', '0007_auto_20151202_1434'),
    ]
    operations = [
        migrations.AddField(
            model_name='certificationprocess',
            name='active',
            field=models.BooleanField(default=True, verbose_name='Active'),
        ),
        migrations.AlterField(
            model_name='certificationprocess',
            name='course_certification',
            field=models.ForeignKey(related_name='processes', verbose_name='Certificate', to='core.CourseCertification', null=True),
        ),
    ]
| 27.96 | 132 | 0.635193 |
95f38c9fc1b89ab08b48f547bb8603c9adde90bb | 628 | py | Python | hipshare/lib/util.py | erg0dic/hipshare | 993f0edee7e9156b7154d578ef6a4e50cfcdd632 | [
"BSD-2-Clause"
] | 1 | 2015-11-03T19:33:44.000Z | 2015-11-03T19:33:44.000Z | hipshare/lib/util.py | erg0dic/hipshare | 993f0edee7e9156b7154d578ef6a4e50cfcdd632 | [
"BSD-2-Clause"
] | 1 | 2015-11-03T19:35:19.000Z | 2015-11-03T19:35:19.000Z | hipshare/lib/util.py | erg0dic/hipshare | 993f0edee7e9156b7154d578ef6a4e50cfcdd632 | [
"BSD-2-Clause"
] | null | null | null | import json
import logging
import sys
log = logging.getLogger(__name__)
def die(s):
    """Log `s` as an error and terminate the process with exit status -1."""
    log.error(s)
    sys.exit(-1)
def load_json(path):
    """Load and return the JSON value stored at `path`.

    Exits the process via die() when the file cannot be opened or does not
    contain valid JSON.

    Bug fix: the original never closed the file handle; the context manager
    guarantees it is released.
    """
    try:
        with open(path) as fp:
            return json.load(fp)
    except OSError as err:
        die("Could not open {}: {}".format(path, str(err)))
    except ValueError as err:
        die("Invalid JSON in {}: {}".format(path, str(err)))
def load_jsons(*paths):
return [load_json(path) for path in paths]
def merge_dicts(a, b):
    """Return a shallow copy of `a` with `b`'s entries layered on top.

    Neither input dict is modified; on key collisions `b` wins.
    """
    merged = a.copy()
    merged.update(b)
    return merged
def usage():
    """Log the command-line usage hint and exit with an error status."""
    log.error("usage: hipshare <strategy>")
    sys.exit(-1)
| 17.942857 | 60 | 0.598726 |
95f41e13e20cbb1290f9018c568b80de61a76953 | 95 | py | Python | vistrails/tests/resources/test_upgrades_layout/__init__.py | remram44/VisTrails-mybinder | ee7477b471920d738f3ac430932f01901b56ed44 | [
"BSD-3-Clause"
] | 83 | 2015-01-05T14:50:50.000Z | 2021-09-17T19:45:26.000Z | vistrails/tests/resources/test_upgrades_layout/__init__.py | remram44/VisTrails-mybinder | ee7477b471920d738f3ac430932f01901b56ed44 | [
"BSD-3-Clause"
] | 254 | 2015-01-02T20:39:19.000Z | 2018-11-28T17:16:44.000Z | vistrails/tests/resources/test_upgrades_layout/__init__.py | remram44/VisTrails-mybinder | ee7477b471920d738f3ac430932f01901b56ed44 | [
"BSD-3-Clause"
] | 40 | 2015-04-17T16:46:36.000Z | 2021-09-28T22:43:24.000Z | identifier = 'org.vistrails.test.upgrades_layout'
# VisTrails test-package metadata (the package identifier is declared on the
# preceding line).
name ='test_upgrades_layout'
# Package version string.
version = '0.3'
| 23.75 | 49 | 0.778947 |
95f557240d66982761b806e370f3acd7dca5c78f | 480 | py | Python | python_tips_three.py | obetron/python_tips | 9b6299db4f9dbc51638bba310deaf0c1dec759fe | [
"MIT"
] | null | null | null | python_tips_three.py | obetron/python_tips | 9b6299db4f9dbc51638bba310deaf0c1dec759fe | [
"MIT"
] | null | null | null | python_tips_three.py | obetron/python_tips | 9b6299db4f9dbc51638bba310deaf0c1dec759fe | [
"MIT"
] | null | null | null |
# Manual file handling: the handle must be closed explicitly once we are done.
f = open('test.txt', 'r')
file_contents = f.read()
f.close()
words = file_contents.split(' ')
word_count = len(words)
print(word_count)
"""
OUTPUT:
3
"""
# Better: a `with` block closes the file automatically, even on errors.
with open('test.txt', 'r') as f:
    # Python manages the file's lifecycle; it is closed when the block exits.
    file_contents = f.read()
    words = file_contents.split(' ')
    word_count = len(words)
    print(word_count)
"""
OUTPUT:
3
"""
| 17.777778 | 66 | 0.675 |
95f79173711e478161ccc6939dc1c76c706f0ae5 | 1,192 | py | Python | albert/helper.py | vvvm23/albert | 548fa03832f7e64ab79f2dfa16aa5ac42469333d | [
"MIT"
] | 1 | 2021-06-10T10:54:59.000Z | 2021-06-10T10:54:59.000Z | albert/helper.py | vvvm23/albert | 548fa03832f7e64ab79f2dfa16aa5ac42469333d | [
"MIT"
] | null | null | null | albert/helper.py | vvvm23/albert | 548fa03832f7e64ab79f2dfa16aa5ac42469333d | [
"MIT"
] | null | null | null | import torch
from einops import rearrange
"""
@openai - VD-VAE
https://github.com/openai/vdvae/blob/main/vae_helpers.py
Nice helper module as calling super.__init__() gets annoying
"""
class HelperModule(torch.nn.Module):
    """nn.Module base whose constructor forwards every argument to build().

    Subclasses override build() instead of __init__ and never need to
    remember the super().__init__() call (pattern borrowed from OpenAI's
    VD-VAE helpers, per the module header).
    """
    def __init__(self, *args, **kwargs):
        super().__init__()
        self.build(*args, **kwargs)
    def build(self, *args, **kwargs):
        # Abstract hook: subclasses must override this.
        raise NotImplementedError
def get_parameter_count(net: torch.nn.Module) -> int:
    """Count the trainable (requires_grad) parameters of `net`."""
    total = 0
    for param in net.parameters():
        if param.requires_grad:
            total += param.numel()
    return total
"""
@lucidrains - Phil Wang (nystrom-attention)
https://github.com/lucidrains/nystrom-attention/blob/main/nystrom_attention/nystrom_attention.py
"""
def exists(val):
    """True when `val` is not None (lucidrains-style helper)."""
    return val is not None
def moore_penrose_iter_pinv(x, iters = 6):
    """Approximate the Moore-Penrose pseudoinverse of (batched) `x`.

    Uses the Newton-Schulz style polynomial iteration from lucidrains'
    nystrom-attention; the einops rearrange calls are replaced with the
    equivalent torch transpose/unsqueeze operations.
    """
    device = x.device
    abs_x = torch.abs(x)
    col = abs_x.sum(dim = -1)
    row = abs_x.sum(dim = -2)
    # Initial guess: scaled transpose, x^T / (max row-sum * max col-sum).
    z = x.transpose(-1, -2) / (torch.max(col) * torch.max(row))
    I = torch.eye(x.shape[-1], device = device).unsqueeze(0)
    for _ in range(iters):
        xz = x @ z
        z = 0.25 * z @ (13 * I - (xz @ (15 * I - (xz @ (7 * I - xz)))))
    return z
| 27.090909 | 100 | 0.613255 |
95f8d504586e0cc5968ca2a0c621d00c07ae2c40 | 2,078 | py | Python | p2_mahjong/utils.py | yata0/Mahjong | 764cd607df715b879f3f8a54b6def55e0b7d4706 | [
"MIT"
] | null | null | null | p2_mahjong/utils.py | yata0/Mahjong | 764cd607df715b879f3f8a54b6def55e0b7d4706 | [
"MIT"
] | null | null | null | p2_mahjong/utils.py | yata0/Mahjong | 764cd607df715b879f3f8a54b6def55e0b7d4706 | [
"MIT"
] | null | null | null | # coding=utf-8
import sys
import numpy as np
from p2_mahjong.card import MahjongCard as Card
log_head = "utils.py"
# Traits (plus the 'characters' suit) whose tiles are actually dealt.
CARD_USED_TYPE = ['characters', 'green', 'red', 'white', 'east', 'west', 'north', 'south',
                  'spring', 'summer', 'autumn', 'winter', 'mei', 'lan', 'zhu', 'ju']
# card name ('type-trait') -> numeric card id
card_encoding_dict = {}
card_id = 0
# card ids eligible for a chow (suited tiles only)
DIC_CHOW = {}
character_list = []
wind_list = []
dragon_list = []
# card ids that are in play (subset selected via CARD_USED_TYPE)
card_used = {}
# Suited tiles 1-9 (bamboo, characters, dots); only 'characters' are used.
for _type in ['bamboo', 'characters', 'dots']:
    for _trait in ['1', '2', '3', '4', '5', '6', '7', '8', '9']:
        card = _type+"-"+_trait
        card_encoding_dict[card] = card_id
        DIC_CHOW[card_id] = 1
        if _type in ['characters']:
            card_used[card_id] = 1
            character_list.append(card_id)
        card_id += 1
# Dragon tiles.
for _trait in ['green', 'red', 'white']:
    card = 'dragons-'+_trait
    card_encoding_dict[card] = card_id
    if _trait in CARD_USED_TYPE:
        card_used[card_id] = 1
        dragon_list.append(card_id)
    card_id += 1
# Wind tiles.
for _trait in ['east', 'west', 'north', 'south']:
    card = 'winds-'+_trait
    card_encoding_dict[card] = card_id
    if _trait in CARD_USED_TYPE:
        card_used[card_id] = 1
        wind_list.append(card_id)
    card_id += 1
# Flower/season tiles (only one copy of each exists in a deck).
for _trait in ['spring', 'summer', 'autumn', 'winter', 'mei', 'lan', 'zhu', 'ju']:
    card = 'flowers-'+_trait
    card_encoding_dict[card] = card_id
    if _trait in CARD_USED_TYPE:
        card_used[card_id] = 1
    card_id += 1
# Reverse map: numeric card id -> card name.
card_decoding_dict = {card_encoding_dict[key]: key for key in card_encoding_dict.keys()}
def init_deck(game_id=""):
    """Build the full deck: 4 copies of each used tile, 1 of each flower."""
    func_head = "init_deck()" + game_id
    deck = []
    idx = 0
    for card_id in card_decoding_dict:
        # Tiles outside the used set are skipped entirely.
        if card_id not in card_used:
            continue
        card_type, card_trait = card_decoding_dict[card_id].split("-")
        copies = 1 if card_type == "flowers" else 4
        for _ in range(copies):
            card = Card(runtime_id=idx, card_id=card_id)
            card.type = card_type
            card.trait = card_trait
            deck.append(card)
            idx += 1
    return deck
| 29.685714 | 90 | 0.590472 |
95fa5390eed432169e5e44214698604b6c85fcde | 1,062 | py | Python | Chapter 01/int_sqrt.py | bpbpublications/Python-Quick-Interview-Guide | ab4ff3e670b116a4db6b9e1f0ccba8424640704d | [
"MIT"
] | 1 | 2021-05-14T19:53:41.000Z | 2021-05-14T19:53:41.000Z | Chapter 01/int_sqrt.py | bpbpublications/Python-Quick-Interview-Guide | ab4ff3e670b116a4db6b9e1f0ccba8424640704d | [
"MIT"
] | null | null | null | Chapter 01/int_sqrt.py | bpbpublications/Python-Quick-Interview-Guide | ab4ff3e670b116a4db6b9e1f0ccba8424640704d | [
"MIT"
] | null | null | null |
class Solution:
    def mySqrt(self, x: int) -> int:
        """Return floor(sqrt(x)) for a non-negative integer x.

        Uses math.isqrt (exact integer square root, O(polylog) in the bit
        length) instead of the original O(sqrt(x)) linear scan.
        """
        if x < 0:
            # Preserve the original behavior for negative input (it fell
            # through the loop and returned 0).
            return 0
        from math import isqrt
        return isqrt(x)
'''
class Solution:
def mySqrt(self,x) :
# Base cases
if (x == 0 or x == 1) :
return x
# Do Binary Search for integer square root
start = 1
end = x
while (start <= end) :
mid = (start + end) // 2
# If x is a perfect square
if (mid*mid == x) :
return mid
# when mid^2 is smaller than x, check if (mid+1)^2 >x
if (mid * mid < x) :
if (mid+1)*(mid+1) > x:return mid
start = mid + 1
else :
# If mid*mid is greater than x
end = mid-1
'''
# Quick manual check: print the integer square root of 1..9.
sol=Solution()
for i in range(1,10):
    print(i,sol.mySqrt(i))
| 24.136364 | 62 | 0.415254 |
95ff75475d347ef322808cfa526e253df07b5f81 | 13,517 | py | Python | meg_runtime/ui/manager.py | MultimediaExtensibleGit/Runtime | ba2e469666163177034e44077b02378dfc6649c9 | [
"MIT"
] | null | null | null | meg_runtime/ui/manager.py | MultimediaExtensibleGit/Runtime | ba2e469666163177034e44077b02378dfc6649c9 | [
"MIT"
] | 5 | 2020-03-24T19:59:38.000Z | 2020-04-22T03:44:43.000Z | meg_runtime/ui/manager.py | MultimediaExtensibleGit/Runtime | ba2e469666163177034e44077b02378dfc6649c9 | [
"MIT"
] | 2 | 2020-03-13T18:35:46.000Z | 2020-04-11T20:19:20.000Z | """MEG UI Manager
"""
import pkg_resources
from PyQt5 import QtCore, QtWidgets, QtGui, uic
from meg_runtime.config import Config
from meg_runtime.logger import Logger
from meg_runtime.app import App
class UIManager(QtWidgets.QMainWindow):
"""Main UI manager for the MEG system."""
UI_FILE = 'mainwindow.ui'
# The window class widgets
__widgets = None
    def __init__(self, **kwargs):
        """UI manager constructor.

        Loads the Qt Designer .ui resource (cached on the class), wires the
        tab widget and menu actions to App callbacks, and restores the
        window state/geometry previously saved in Config by closeEvent().
        """
        # Load window resource if needed (cached so uic runs only once).
        if UIManager.__widgets is None:
            # Load the resource setup from the package
            UIManager.__widgets = uic.loadUiType(pkg_resources.resource_filename(__name__, UIManager.UI_FILE))
        # Initialize the super class
        super().__init__(**kwargs)
        # Setup window resource: instantiate the generated form class and
        # populate this window with its widgets.
        UIManager.__widgets[0]().setupUi(self)
        # Set the window panel stack
        self._panels = []
        self._current_panel = None
        self._current_popup = None
        # Set handler for closing a panel (tab close button on each tab).
        self._panel = self.findChild(QtWidgets.QTabWidget, 'panelwidget')
        self._panel.tabCloseRequested.connect(self.remove_view_by_index)
        self._panel.currentChanged.connect(self._show_view_by_index)
        # Get status widget
        self._statusbar = self.findChild(QtWidgets.QStatusBar, 'statusbar')
        # Set handlers for main buttons (menu actions -> App callbacks).
        # TODO: Add more handlers for these
        self._action_clone = self.findChild(QtWidgets.QAction, 'action_Clone')
        self._action_clone.triggered.connect(App.open_clone_panel)
        self._action_open = self.findChild(QtWidgets.QAction, 'action_Open')
        self._action_open.triggered.connect(App.open_repo_panel)
        self._action_quit = self.findChild(QtWidgets.QAction, 'action_Quit')
        self._action_quit.triggered.connect(App.quit)
        self._action_about = self.findChild(QtWidgets.QAction, 'action_About')
        self._action_about.triggered.connect(App.open_about)
        self._action_preferences = self.findChild(QtWidgets.QAction, 'action_Preferences')
        self._action_preferences.triggered.connect(App.open_prefs_panel)
        self._action_manage_plugins = self.findChild(QtWidgets.QAction, 'action_Manage_Plugins')
        self._action_manage_plugins.triggered.connect(App.open_plugins_panel)
        # Set the default title
        self.set_title()
        # Set the icon (only if the application provides one).
        icon_path = App.get_icon()
        if icon_path is not None:
            self.setWindowIcon(QtGui.QIcon(icon_path))
        # Restore the state from the configuration if needed; each branch
        # clears the other state bits before setting the saved one.
        window_state = Config.get('window/state', 'none')
        state = self.windowState()
        if window_state == 'maximized':
            state &= ~(QtCore.Qt.WindowMinimized | QtCore.Qt.WindowFullScreen)
            state |= QtCore.Qt.WindowMaximized
        elif window_state == 'minimized':
            state &= ~(QtCore.Qt.WindowMaximized | QtCore.Qt.WindowFullScreen)
            state |= QtCore.Qt.WindowMinimized
        elif window_state == 'fullscreen':
            state &= ~(QtCore.Qt.WindowMinimized | QtCore.Qt.WindowMaximized)
            state |= QtCore.Qt.WindowFullScreen
        self.setWindowState(state)
        # Restore the window geometry from the configuration if needed
        # (expects the [x, y, width, height] list saved by closeEvent).
        geometry = Config.get('window/geometry', None)
        if isinstance(geometry, list) and len(geometry) == 4:
            self.setGeometry(geometry[0], geometry[1], geometry[2], geometry[3])
    def closeEvent(self, event):
        """Qt close handler: persist window state and geometry to Config, then close.

        Args:
            event: the QCloseEvent delivered by Qt.
        """
        # Determine the window state
        state = self.windowState()
        window_state = 'none'
        if state & QtCore.Qt.WindowFullScreen:
            window_state = 'fullscreen'
        elif state & QtCore.Qt.WindowMaximized:
            window_state = 'maximized'
        elif state & QtCore.Qt.WindowMinimized:
            window_state = 'minimized'
        else:
            # Save the window geometry only for the normal state, so a later
            # restore does not reuse a maximized/fullscreen rectangle
            geometry = self.geometry()
            Config.set('window/geometry', [
                geometry.x(),
                geometry.y(),
                geometry.width(),
                geometry.height()
            ])
        # Save the window state
        Config.set('window/state', window_state)
        # Save the configuration
        Config.save()
        # Continue to close the window
        QtWidgets.QMainWindow.closeEvent(self, event)
    def set_title(self, panel=None):
        """Update the window title (and the panel's tab text/icon) from a panel.

        When the panel provides a title, the window title becomes
        "<app name> - <panel title>" and the panel's tab label and icon are
        refreshed; otherwise the bare application name is used.

        Args:
            panel: the panel supplying the title, or None for the default title.
        """
        # Set the new window title, if provided by the panel
        if panel is not None and panel.get_title():
            title = panel.get_title()
            self.setWindowTitle(f'{App.get_name()} - {title}')
            container = self.get_panel_container()
            if container is not None:
                index = container.indexOf(panel.get_widgets())
                if index >= 0:
                    # Keep the tab label and icon in sync with the panel title
                    container.setTabText(index, title)
                    container.setTabIcon(index, panel.get_icon())
        else:
            self.setWindowTitle(f'{App.get_name()}')
def set_status(self, panel=None, timeout=0):
"""Update the window status from the current panel"""
self.set_status_text('' if panel is None else panel.get_status(), timeout)
def set_status_text(self, message, timeout=0):
"""Update the window status from the current panel"""
if self._statusbar is not None:
self._statusbar.showMessage('' if message is None else message, timeout)
    def get_panel_container(self):
        """Get the panel container widget (the tab widget hosting panel views)."""
        return self._panel
    def get_panels(self):
        """Get all the panels in the window panel stack.

        Lazily (re)initializes the backing list so callers always receive a
        mutable list, even before any panel has been pushed.
        """
        if not isinstance(self._panels, list):
            self._panels = []
        return self._panels
def get_panel(self, name):
"""Get a panel in the window panel stack by name"""
# Check panels by name
for panel in self.get_panels():
if panel.get_name() == name:
# Return the panel
return panel
# Panel not found
return None
def get_panel_by_index(self, index):
"""Get a panel in the window panel stack by index"""
# Get panel container
container = self.get_panel_container()
if container is not None:
# Get the widgets of the panel
widgets = container.widget(index)
if widgets is not None:
# Check the panels for matching widgets
for panel in self.get_panels():
if panel.get_widgets() == widgets:
# Found the panel
return panel
# Panel not found
return None
    def get_current_panel(self):
        """Get the current (visible) panel in the window stack."""
        return self._current_panel
    def get_current_popup(self):
        """Get the currently open popup dialog, or None when no popup is shown."""
        return self._current_popup
    def push_view(self, panel):
        """Push a panel onto the stack being viewed.

        Hides the currently shown panel, fires the new panel's on_show hook,
        updates title/status, adds the panel's widgets as a new tab (stripping
        the close button when the panel is not closable) and makes that tab
        current.

        Args:
            panel: the panel to add; None is ignored.
        """
        if panel is not None:
            Logger.debug(f'MEG UI: Adding panel "{panel.get_name()}"')
            # Hide the current panel
            current_panel = self.get_current_panel()
            if current_panel is not None:
                current_panel.on_hide()
            # Show the current panel
            panel.on_show()
            # Update the title for the panel
            self.set_title(panel)
            # Update the status for the panel
            self.set_status(panel)
            # Get the window central widget
            container = self.get_panel_container()
            if container is not None:
                # Add the panel to the view stack
                widgets = panel.get_widgets()
                widgets.setParent(container)
                title = panel.get_title()
                index = container.addTab(widgets, 'Home' if not title else title)
                # Remove the close button if not closable
                tabbar = container.tabBar()
                if not panel.get_is_closable():
                    # deleteLater() destroys the button widget; setTabButton(None)
                    # detaches it from the tab so no close control remains
                    tabbar.tabButton(index, QtWidgets.QTabBar.RightSide).deleteLater()
                    tabbar.setTabButton(index, QtWidgets.QTabBar.RightSide, None)
                # Add the panel icon
                tabbar.setTabIcon(index, panel.get_icon())
                # Add the panel to the panel stack
                self.get_panels().append(panel)
                # Set the panel to the view
                container.setCurrentIndex(index)
    def set_view(self, panel):
        """Set the panel to be viewed in the stack or push it if not yet present.

        If the panel's widgets are already a tab in the container, that tab is
        simply selected; otherwise the panel is pushed via push_view().

        Args:
            panel: the panel to display; None is ignored.
        """
        if panel is not None:
            # Get the window central widget
            container = self.get_panel_container()
            if container is not None:
                # Get the index of the panel
                index = container.indexOf(panel.get_widgets())
                if index >= 0:
                    # Set the new panel
                    container.setCurrentIndex(index)
                    # Do not continue since the panel was found do not push
                    Logger.debug(f'MEG UI: Setting panel "{panel.get_name()}"')
                    return
            # Push the panel instead because it was not found
            self.push_view(panel)
    def popup_view(self, panel, resizable=False):
        """Popup a modal dialog containing a panel.

        Temporarily swaps the current panel for the given one, runs the dialog
        modally, then restores the previous panel. Only one popup may be open
        at a time.

        Args:
            panel: the panel to embed in the dialog; None is rejected.
            resizable: whether the dialog may be resized (adds a size grip).

        Returns:
            The dialog's exec_() result, or QDialog.Rejected when a popup is
            already open or no panel was given.
        """
        if panel is None or self._current_popup is not None:
            return QtWidgets.QDialog.Rejected
        # Create a dialog window to popup
        dialog = QtWidgets.QDialog(None, QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowTitleHint | QtCore.Qt.WindowCloseButtonHint)
        dialog.setModal(True)
        dialog.setSizeGripEnabled(resizable)
        # Set the current popup
        self._current_popup = dialog
        # Set dialog layout
        layout = QtWidgets.QGridLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        dialog.setLayout(layout)
        # Add the panel widgets to the popup
        widgets = panel.get_widgets()
        layout.addWidget(widgets)
        widgets.setParent(dialog)
        # Set the dialog icon
        icon = panel.get_icon()
        dialog.setWindowIcon(icon if icon else QtWidgets.QIcon(App.get_icon()))
        title = panel.get_title()
        # Set the dialog title
        dialog.setWindowTitle(title if title else App.get_name())
        previous_panel = self._current_panel
        # Hide the current panel
        if previous_panel is not None:
            previous_panel.on_hide()
        # Make the panel the current
        self._current_panel = panel
        # Show the panel
        panel.on_show()
        # Show the dialog
        if not resizable:
            # Freeze the size so a non-resizable dialog cannot be stretched
            dialog.setFixedSize(dialog.size())
        result = dialog.exec_()
        # Hide the panel
        panel.on_hide()
        # Remove the popup
        self._current_popup = None
        # Restore the previous panel to current
        self._current_panel = previous_panel
        # Show the previous panel
        if previous_panel is not None:
            previous_panel.on_show()
        return result
    def remove_view(self, panel):
        """Remove a panel from the stack being viewed.

        Fires the panel's on_hide/on_close hooks, removes it from the panel
        list (clearing the current-panel reference if needed) and removes its
        tab from the container. Non-closable panels are ignored.

        Args:
            panel: the panel to remove; None or non-closable panels are no-ops.
        """
        # Check if the panel is closable
        if panel is not None and panel.get_is_closable():
            Logger.debug(f'MEG UI: Removing panel "{panel.get_name()}"')
            # Close the panel
            panel.on_hide()
            panel.on_close()
            # Remove the panel from the list
            panels = self.get_panels()
            if panel in panels:
                panels.remove(panel)
            if self._current_panel == panel:
                self._current_panel = None
            # Get the window central widget
            container = self.get_panel_container()
            if container:
                # Get the index of this panel
                index = container.indexOf(panel.get_widgets())
                if index >= 0:
                    # Remove the panel from the view stack
                    container.removeTab(index)
                    # Detach the widgets so Qt releases the tab's ownership
                    panel.get_widgets().setParent(None)
def remove_view_by_index(self, index):
"""Remove a panel from the stack being viewed."""
# Get the panel by index
Logger.debug(f'MEG UI: Removing panel by index ({index})')
panel = self.get_panel_by_index(index)
if panel is not None and panel.get_is_closable():
# Remove the panel
self.remove_view(panel)
    def _show_view_by_index(self, index):
        """Show the panel whose tab was selected (currentChanged handler).

        Swaps the current panel for the one at the given tab index, firing
        on_hide/on_show hooks and refreshing title and status.

        Args:
            index: tab index within the panel container.
        """
        # Get the panel by index
        panel = self.get_panel_by_index(index)
        if panel is not None:
            # Get the current panel
            current_panel = self.get_current_panel()
            # Check if the panel is not the current panel
            if current_panel != panel:
                # Hide the current panel
                if current_panel is not None:
                    current_panel.on_hide()
                # Set the current panel
                self._current_panel = panel
                # Update the title
                self.set_title(panel)
                # Update the status
                self.set_status(panel)
                # Show the new panel
                if panel is not None:
                    panel.on_show()
| 41.719136 | 134 | 0.600059 |
95ffe673bc040b87ba5fb46405be623006c98d02 | 1,459 | py | Python | tests/scripts/test_s_rs_raw_shapes.py | nismod/energy_demand | 247fcea074a846026710ed9b039b22f8b9835643 | [
"MIT"
] | 14 | 2018-02-23T10:03:45.000Z | 2022-03-03T13:59:30.000Z | tests/scripts/test_s_rs_raw_shapes.py | nismod/energy_demand | 247fcea074a846026710ed9b039b22f8b9835643 | [
"MIT"
] | 59 | 2017-02-22T15:03:30.000Z | 2020-12-16T12:26:17.000Z | tests/scripts/test_s_rs_raw_shapes.py | nismod/energy_demand | 247fcea074a846026710ed9b039b22f8b9835643 | [
"MIT"
] | 5 | 2017-08-22T11:31:42.000Z | 2020-06-24T18:30:12.000Z | """testing
"""
from energy_demand.scripts import s_rs_raw_shapes
import numpy as np
def test_assign_hes_data_to_year():
    """assign_hes_data_to_year() spreads day-type profiles over a full year.

    Builds synthetic HES data where every (24 x 2) day profile holds a single
    constant per month, so an individual year-day can be traced back to the
    day type (working day vs. holiday) and month it came from.
    """
    # working days carry 10 * (month + 1); holidays carry (month + 1)
    hes_data = {
        'working_day': {
            month: np.zeros((24, 2)) + (month + 1) * 10 for month in range(12)},
        'holiday': {
            month: np.zeros((24, 2)) + (month + 1) for month in range(12)},
    }

    result = s_rs_raw_shapes.assign_hes_data_to_year(
        nr_of_appliances=2,
        hes_data=hes_data,
        base_yr=2015)

    # index order: yearday, hour, appliance
    assert result[10][0][1] == 1   # 2015-01-11 is a Sunday -> holiday profile
    assert result[11][0][1] == 10  # 2015-01-12 is a Monday -> working-day profile
| 31.717391 | 70 | 0.437286 |
2501aa9e0452052b19ad9fe91a29c5a969b9d03e | 1,935 | py | Python | release/davis16/evaluate.py | MSiam/segment-any-moving | 82cb782867d866d2f4eb68230edb75f613e15a02 | [
"Apache-2.0"
] | 70 | 2019-09-16T17:55:55.000Z | 2022-03-07T00:26:53.000Z | release/davis16/evaluate.py | MSiam/segment-any-moving | 82cb782867d866d2f4eb68230edb75f613e15a02 | [
"Apache-2.0"
] | 9 | 2019-09-30T09:15:11.000Z | 2021-07-21T11:33:13.000Z | release/davis16/evaluate.py | MSiam/segment-any-moving | 82cb782867d866d2f4eb68230edb75f613e15a02 | [
"Apache-2.0"
] | 5 | 2019-09-25T05:14:37.000Z | 2021-07-08T20:13:47.000Z | import argparse
import logging
import yaml
from pathlib import Path
from script_utils.common import common_setup
from release.davis16.compute_flow import link_splits
from release.helpers.misc import msg, subprocess_call
def check_tracks(track_output, splits):
    """Verify that a track directory exists for every split.

    Args:
        track_output (Path): root directory expected to contain one
            sub-directory per split.
        splits (iterable of str): split names to check.

    Raises:
        ValueError: when any split's directory is missing.
    """
    missing = [split for split in splits
               if not (track_output / split).exists()]
    if missing:
        np_dir = track_output / missing[0]
        raise ValueError(f'Did not find tracks in {np_dir}; '
                         f'did you run release/davis17/track.py?')
def evaluate_proposed(config, output_stage):
    """Run DAVIS-2016 foreground/background evaluation on our outputs.

    Args:
        config (dict): parsed release config; reads
            config['davis16']['output_dir'] and config['davis16']['splits'].
        output_stage (str): 'detection' or 'tracking'; selects which output
            sub-directory to evaluate.

    Raises:
        ValueError: if output_stage is not one of the known stages.
    """
    if output_stage == 'detection':
        input_dir = (Path(config['davis16']['output_dir']) / 'detections')
    elif output_stage == 'tracking':
        input_dir = (Path(config['davis16']['output_dir']) / 'tracks')
    else:
        raise ValueError(f'Unknown output stage: {output_stage}')

    for split in config['davis16']['splits']:
        # NOTE(review): masks are expected under <split>/masks/masks — confirm
        # this matches the layout written by the earlier pipeline stages.
        masks_dir = input_dir / split / 'masks' / 'masks'
        cmd = [
            'python', 'davis/eval_fgbg.py',
            '--masks-dir', masks_dir
        ]
        msg(f'Evaluating {split}')
        subprocess_call(cmd)
def main():
    """CLI entry point: parse arguments, load the config, run the evaluation."""
    # Use first line of file docstring as description if it exists.
    parser = argparse.ArgumentParser(
        description=__doc__.split('\n')[0] if __doc__ else '',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # nargs='?' makes the positional optional so the declared default
    # ('detection') can actually take effect; previously the argument was
    # required and the default was dead.
    parser.add_argument('output_stage',
                        choices=['detection', 'tracking'],
                        nargs='?',
                        default='detection')
    parser.add_argument('--config', default=Path('./release/config.yaml'))
    args = parser.parse_args()

    logging.getLogger().setLevel(logging.INFO)
    logging.basicConfig(format='%(asctime)s.%(msecs).03d: %(message)s',
                        datefmt='%H:%M:%S')

    with open(args.config, 'r') as f:
        # safe_load avoids executing arbitrary YAML tags and works without the
        # Loader= argument that bare yaml.load() requires in PyYAML >= 6.
        config = yaml.safe_load(f)

    evaluate_proposed(config, args.output_stage)


if __name__ == "__main__":
    main()
| 31.209677 | 74 | 0.632041 |
2503cb791f9ad674e778396da993788db1fa44bb | 4,712 | py | Python | qq/mention.py | foxwhite25/qq.py | 92e744205e57b4c8922aa5843095ae900b3c1d84 | [
"MIT"
] | 40 | 2021-12-07T02:18:14.000Z | 2022-03-28T13:14:16.000Z | qq/mention.py | foxwhite25/qq.py | 92e744205e57b4c8922aa5843095ae900b3c1d84 | [
"MIT"
] | 2 | 2021-12-12T17:34:29.000Z | 2021-12-17T04:43:03.000Z | qq/mention.py | foxwhite25/qq.py | 92e744205e57b4c8922aa5843095ae900b3c1d84 | [
"MIT"
] | 5 | 2021-12-10T11:17:41.000Z | 2022-03-05T13:53:50.000Z | # The MIT License (MIT)
# Copyright (c) 2021-present foxwhite25
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from __future__ import annotations
from typing import Type, TypeVar, List, TYPE_CHECKING, Any, Union
__all__ = (
'AllowedMentions',
)
if TYPE_CHECKING:
from .types.message import AllowedMentions as AllowedMentionsPayload
from .member import Member
from .role import Role
class _FakeBool:
def __repr__(self):
return 'True'
def __eq__(self, other):
return other is True
def __bool__(self):
return True
# Shared sentinel meaning "value not explicitly provided"; compares equal to True.
default: Any = _FakeBool()
# TypeVar bound to AllowedMentions so the factory classmethods below return
# the subclass they are invoked on.
A = TypeVar('A', bound='AllowedMentions')
class AllowedMentions:
    """Represents what mentions are allowed in a message.

    This can be set during :class:`Client` initialisation to apply to every
    message sent. It can also be applied on a per-message basis via
    :meth:`abc.Messageable.send` for more fine-grained control.

    Attributes
    ------------
    everyone: :class:`bool`
        Whether to allow everyone and here mentions. Defaults to ``True``.
    users: Union[:class:`bool`, List[:class:`Member`]]
        Controls which users are mentioned. If ``True`` (the default), users
        are mentioned based on the message content. If ``False`` then no users
        are mentioned at all. If a list of :class:`Member` is given, then only
        the users provided are mentioned, provided those users appear in the
        message content.
    roles: Union[:class:`bool`, List[:class:`Role`]]
        Controls which roles are mentioned. If ``True`` (the default), roles
        are mentioned based on the message content. If ``False`` then no roles
        are mentioned at all. If a list of :class:`Role` is given, then only
        the roles provided are mentioned, provided those roles appear in the
        message content.
    replied_user: :class:`bool`
        Whether to mention the author of the message being replied to.
        Defaults to ``True``.
    """

    __slots__ = ('everyone', 'users', 'roles', 'replied_user')

    def __init__(
            self,
            *,
            everyone: bool = default,
            users: Union[bool, List[Member]] = default,
            roles: Union[bool, List[Role]] = default,
            replied_user: bool = default,
    ):
        self.everyone = everyone
        self.users = users
        self.roles = roles
        self.replied_user = replied_user

    @classmethod
    def all(cls: Type[A]) -> A:
        """Factory method returning an :class:`AllowedMentions` with every field explicitly set to ``True``."""
        return cls(everyone=True, users=True, roles=True, replied_user=True)

    @classmethod
    def none(cls: Type[A]) -> A:
        """Factory method returning an :class:`AllowedMentions` with every field set to ``False``."""
        return cls(everyone=False, users=False, roles=False, replied_user=False)

    def to_dict(self) -> AllowedMentionsPayload:
        # Build the API payload; '== True' (not 'is True') is deliberate so the
        # _FakeBool default sentinel also takes the "mention everything" branch.
        parse = []
        data = {}

        if self.everyone:
            parse.append('everyone')

        if self.users == True:
            parse.append('users')
        elif self.users != False:
            # a list of members: send explicit user ids instead of 'users'
            data['users'] = [x.id for x in self.users]

        if self.roles == True:
            parse.append('roles')
        elif self.roles != False:
            # a list of roles: send explicit role ids instead of 'roles'
            data['roles'] = [x.id for x in self.roles]

        if self.replied_user:
            data['replied_user'] = True

        data['parse'] = parse
        return data  # type: ignore

    def merge(self, other: AllowedMentions) -> AllowedMentions:
        # Creates a new AllowedMentions by merging from another one.
        # Merge is done by using the 'self' values unless explicitly
        # overridden by the 'other' values.
        everyone = self.everyone if other.everyone is default else other.everyone
        users = self.users if other.users is default else other.users
        roles = self.roles if other.roles is default else other.roles
        replied_user = self.replied_user if other.replied_user is default else other.replied_user
        return AllowedMentions(everyone=everyone, roles=roles, users=users, replied_user=replied_user)

    def __repr__(self) -> str:
        return (
            f'{self.__class__.__name__}(everyone={self.everyone}, '
            f'users={self.users}, roles={self.roles}, replied_user={self.replied_user})'
        )
2504849dd9ac056161c71352758bc395f3e09c7d | 6,395 | py | Python | cupcake/editor/autocomplete/itemkinds.py | billyeatcookies/cupcake | 2f2d1d5f8a1a454e50283547cf433cc82d1825d6 | [
"MIT"
] | 3 | 2022-03-29T12:55:24.000Z | 2022-03-30T17:06:11.000Z | cupcake/editor/autocomplete/itemkinds.py | billyeatcookies/Cupcake | 2f2d1d5f8a1a454e50283547cf433cc82d1825d6 | [
"MIT"
] | null | null | null | cupcake/editor/autocomplete/itemkinds.py | billyeatcookies/Cupcake | 2f2d1d5f8a1a454e50283547cf433cc82d1825d6 | [
"MIT"
] | null | null | null | import tkinter as tk
class Kinds:
    """Icon images for the autocomplete popup, one per completion-item kind.

    Each attribute is a base64-encoded PNG wrapped in a ``tk.PhotoImage``
    (methods, fields, classes, interfaces, modules, properties, keywords,
    words). The images are stored on the instance so Tk does not
    garbage-collect them while the widget is alive.
    """

    def __init__(self, master, *args, **kwargs):
        # parent widget; kept so the PhotoImages share its Tk interpreter
        self.master = master

        # icon for method completions
        self.imethods = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAACXBIWXMAAA7DAAAOwwHHb6hk
AAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAddJREFUKJGNkjFrk3EQxn/3vtEW8QMICoK
CNZYqDn4BndQ2NIMZRHES0TqILomW9toG7aJiW4cOQkGKoAhNagUHEZwspHQQm9SiDoI4iBqJJNb3/Z9D+0IaE+
iz3d3z4+7gEZpoLru0OwjkDpAE5p3H5eRAfKHRJ/XF5GRhy46v2y9hNmxITiCNcCqqfeRqj3Z8+w/Oa+kouAlDq
oL1JfTAm2j2VFd2+YQ3PeykiQwvWse4qjhZA5eyhlxAuL5o+x+oimv2Tn5o+biZGxMoJjSe8AAMuQJUzPg9qFgz
cM1nNYGyQc8zfb/HW+/7AvcFRvJaKswOvTtWD80MF7vyWprD7IkTmwYqq4QxAchpseog/hf3pU38i5j1AwXDbnv
IaYOUIRNGdTSph3/mtFgO4UisfkNKO1eBe49HP0y111bTIC8MHlksiPf2d31ufMNrbACk0nvLCY1ngKrzY4PNwJ
bwZhXBoeC2bQZ4pZ/aAX8rfrAO213Be50bKp5XtZbXzOjSiV/U3gIvu3Xfxw0JM2wcqEUJy2mx4vzYIRfyp2XCI
j0fW2kLvgfXDDIC0wbnwKZAzoA99NuCG92Zgz8i/wY40mx2eWcY2C3BzgosOM/19Q50zjf6/gEMUNa2RFgfkAAA
AABJRU5ErkJggg==""")

        # NOTE(review): no variable icon is defined yet — placeholder only
        self.ivariables = None

        # icon for field completions
        self.ifields = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAANCAYAAAB2HjRBAAAACX
BIWXMAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAcBJREFUKJF1k
jFoUwEQhr97SQzaqODi5KCTRRA3K6iQxA5BnIpZHCwIioJK074XqEZSi2hIK0gpilBBcNStVMX4nkWtFDqVLi5F
wUlECBrRNu/9Tq0xbW68+7/j/uM3OtSQr8MOlAQ5jPkoYnQ8a89bNbYBqumIGcMYPSYmYwkmm01yghLQMHGrmuE
pZlqH3ZqO4XADOGgwts24X07bz7X5+QUldv7gLGLY4HsUcdncQIeAu4huQTXV4EH5lP3qaGdWey1kBkjFEc8Mmq
HoGc/a505Q0deBEFxC+gxmBHkH2CfjoxlLnq+H3qy6W6HB1zru+ZqO4D3wNRT7V1e4CBAHaP6hP5lkRyiuEPLB8
/UOMS2jH9gjuOeEnKn0Wh1g4IV2rcMAd9L2CSgUX2kkinEVYwK40PWNJ+W8rWxmxWlvVHqt3hRTwGo1Y4/awYE5
bU1s4Ryw7ABfYklOlwPF2xf99+WX6hryVYj/ZllGDqMvroi84zDRAM8NdLu+ncfU/0HlQKmGuAQMAotE5Ksn7C2
sJUwy9w0nESVgt2DK4JrBiKAALDhitJK1udZrNsSzGOhoBDcRaaAmuD6WsfnNrPwFMR6ziXNIHQ8AAAAASUVORK
5CYII=""")

        # icon for class completions
        self.iclasses = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAAC
XBIWXMAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAdFJREFUKJGN
0k1IVFEYxvH/c+908QODu7GgRQS1sUDb3RlcJQpOi2gRLWvVooigVtEmKlrVwpCQ5qOgQJigaJXZQMsizaCFMy
m0CIJcSE0ySc2c867aDCNjPcvD+b0893CEJsXF3n5zSQWziuLVZjD9rrlPvHHBqcRcmByZ3bLj6/ZLmA0bkhNI
I5yKah+52qMd3/6D81o6Cm7CkKpgfQk98CaaPdWVXT7hTQ876eNWLtiVbf9gavZw/mtpgGt8cB6HUNuBsHg3ty
H9VawUsyM4tSX3lOd0Go+eoSsz/luyGt4eWBQsrNhduFZK1wtZsYwTQGLwLysTCr+FT1ANgq8BdLAT0lLzdCZ+
YIzeN6wkuSUSZcFYGVStbVo5HcQzHbUN+dM/BD25Z80pYFxTMMShwy7KtyAtlQr9LyR7FZzbZs+EcT++hOgD+g
SbiDMfZ73WvF20YX39bArdQ5ZwWDEl1uCpk+y44DzHzelXVPAZN3873Ehc9rf7qGh7lrx8MHmu4Sg09zGY0EVr
Gzo+RZs8EnGtGspJRKAGZmumKgAD7csbLss3z/aHXiNr0g3MBsHXobZ6qX/wgArxZ6TMl4AM2G2elHC/gCskr1
TRm4JBAAAAABJRU5ErkJggg==""")

        # icon for interface completions
        self.iinterfaces = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAICAYAAAAm06XyAA
AACXBIWXMAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAQRJREFUG
JWFkD9LgnEAhJ/7qbODQw211JLRVlMEofYFgqIhglpsqSnfCJeaChRXgz5Aa30A/1ARCbnXJ7Aai5rC95oEkcAb
D4777sQYFbtOpb+IMLuIKeBJCQ4qq3pJAkRNzwoylTzPSB4Opz+pI5YtIsf0QmDDfTrRnZdUantfpg4EzG21oPV
BsNT2gsx9KiZ7vqaPgR+1fYHJKmr5DZgcKrsGvgEEc8B7Ja+tYZqjhudDoBPGbf5PyYAAkoJTwyUQEDfVnLZHsB
/KDU8MY/fNjqApgJOGZ34hUyvQHT3suOUrmxUFysT0HNjEFBNiUeMQz9pO/sSUJPYM08BjLA5rOb3+AQBaXk0WW
bcyAAAAAElFTkSuQmCC""")

        # icon for module completions
        self.imodules = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAAC
XBIWXMAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAXtJREFUKJGN
0TuLE2EYxfH/edNIkkWCF7S1TWGhyzKTThUCU4jY+BG0007wEwh22ilabmNpF1CbZCbeECzS2iqsMrmJu2bzvBa
OQkJMfMr3nB8ceEVxnU7nSLVavQ9cb7VaJ1i6NE13gWfT6fROu93+DmB/wlqt9gC4FEK4uQwBJN0ALha9328Ag8
HgUHA4HJnZ5SiK0lUYoN/vt9z9xWg0Opokyb4B5Hl+HNiazWaf/gUBinyrXq8fW5gNYGZah5dzAyiVSqeAAzPLN
+AcOAghnAZQmqbngYeSduM4vrIOA2RZ9hw4CdwqA2+Bubuf3QQB3P2upI/AG5O0Lem9md37H1z03gEXBNDtds+Z
2WtJ9TiOf6yZfDiEMDaz7SiKPhiApM9A2d0bGyY3gPJ8Pv8CS1/l7mEDXsgNYDwefwP2KpXKmXW4yPcajcbXvzh
Jkn1JO+7+JMuyq6tgr9e75u5PJe00m82fC7Mnk8lt4FUI4fEqLOkR8LLoAfALta2R2TiY6xMAAAAASUVORK5CYI
I=""")

        # icon for property completions
        self.iproperties = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAACXB
IWXMAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAdFJREFUKJGN0r
9rFEEUB/Dvd+8uxzUhoBCCEkhtEbTbHVdZIiQQfySKWmkjWtkIWtj4D1imsDQIRlCDoBbauOLNzR5aGrBVEY0g2
gQCtzfztTALinenrxt4n++8Nwwxorrd7ni/379O8gyAKQDvAdwyxqwAQDQCTnrvLcndJBcBnAewC0Be9QzF3vub
JJ8aYy6GEGYkrZBcMMZsWGv3WWsvcxDM83xibGzsQ6/X29NoNPaTfExyPkmS1+12ezaKoucAfgy8udVqTZP8lGX
ZVlmW7yR9BLBYFMUBks9IXgEwM2zsr5Km8jyvZ1n2rSzLuRDCUgjhhaRLkt4C2ByIvfezANBsNicBoAqQtJym6R
NJ1wCs/7VzURRHQgh3oyg6Gcdxx1o7XZbl5yzL+t1ud9x7fwPA0RBC/Ad2zs1JWvsNZiQfAQgANgHsBfCQ5NUkS
b7XK2itzSStSToVx3GnCgJwnOSG936iVqt9SZJkuzLcgQdJrocQzqZp+tI5d1jS/eo85FHBTqdzCMADAKeNMa+c
c/OS7lSjD4MAUAdwG8C5HbggaRXAUhzHxSgI/PqeLUnbzrllSashhBPGmH9CAKhLukDynqQtAMfSNH3zPxAAfgL
2/u9cQzl88QAAAABJRU5ErkJggg==""")

        # icon for keyword completions
        self.ikeywords = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAPCAYAAAA71pVKAAAACXBIW
XMAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAAPRJREFUKJGlkrFK
xEAURc97TGtrYW0LWtskxkBsrez8CFvFD1DQcv2O/Y5MCBERQbCwWLBdG8FKEpPmyRZ+xMy2d+Hcw2XgHxHv/W
QMmtkiy7LFGFdgR0QOzextwC6cc1sARVEcicjNWBkzuwTCgO0CzwCqeh9jPPtRFpGTsVkhhCeAJEnWwHrMmxQp
y/J6kxRjfMiybFlV1TzGeAq8p2l6pWY221TWOacATdOIqs7MrP8/E2Z7749DYPTeP+d5vgKoqmreed9+dxQ4Hyq
r6i2w+nponogM3uRIXdfbv5VDCB9pmr5+r2vbdvS2B3IHHPxl3Wg+iYtrzbkbl0AAAAAASUVORK5CYII=""")

        # icon for plain word completions
        self.iwords = tk.PhotoImage(data="""iVBORw0KGgoAAAANSUhEUgAAAA8AAAAICAYAAAAm06XyAAAACXBIWXMAAA7DAAAOwwHHb6hkAAAA
GXRFWHRTb2Z0d2FyZQB3d3cuaW5rc2NhcGUub3Jnm+48GgAAATBJREFUGJWFjyFLQ2EYhc95t31JEQYiGExaLBO
DYbvJaFBEGBqHGNW4n6BFBUGw2ZUJCoaxMGT3Xm+1LwkLgrILwzLZdwy7wTD0iec9z4EXmEAcx40wDPcn3X5j/x
X+gmEYLgA4JLkE4KFSqdzEcdwA0Jc0A0CSzoMgSNrt9pRzrg5gRVLXSNZIfki6AnDa6XRKGBsbku5IPpNsJkky5
5x7AjAv6ZLkV344PJ4VCoUdM9uUNMrlcsuZfBEEwS0ARFG0672/BrDY6/XWq9XqCEAr75xrABgAeASwKilHElmG
bIgkpwF8ZuL45yiKUgCBpG+SLwCOSG5LmvXebwEomdk9yTVJife+ViwWm/1+f9tI1iW1ssIrgIGkd0lmZm9mdk1
yr1wudwFUzewkTdOU5MEPhImOHsSTJnYAAAAASUVORK5CYII=""")
2505a01273ca3c6e0204f8f0d9964f3bd464761f | 1,125 | py | Python | register/migrations/0001_initial.py | duks500/Nilestone-Project | 6b45ac21f9d9e7a4839d6f616711cbe8daa6e051 | [
"bzip2-1.0.6"
] | null | null | null | register/migrations/0001_initial.py | duks500/Nilestone-Project | 6b45ac21f9d9e7a4839d6f616711cbe8daa6e051 | [
"bzip2-1.0.6"
] | null | null | null | register/migrations/0001_initial.py | duks500/Nilestone-Project | 6b45ac21f9d9e7a4839d6f616711cbe8daa6e051 | [
"bzip2-1.0.6"
] | null | null | null | # Generated by Django 3.0 on 2020-03-29 14:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the register app: creates the UserProfile table."""

    # First migration of this app
    initial = True

    dependencies = [
        # UserProfile links one-to-one to the configured (swappable) auth user model
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location', models.CharField(blank=True, default=0, max_length=30, null=True)),
                ('age', models.IntegerField()),
                ('occupation', models.IntegerField(choices=[(0, 'Undergraduate Student'), (1, 'Graduate Student'), (2, 'Faculty Member'), (3, 'Staff')], default=4)),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=150)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 37.5 | 165 | 0.623111 |
250a44eb50bdd484b59b76e217165e7deeb8a326 | 9,686 | py | Python | utils/iwr6843_utils/parse_tlv.py | ApocalyVec/mGesf | 21e0bf37a9d11a3cdde86a8d54e2f6c6a2211ab5 | [
"MIT"
] | 18 | 2020-06-02T11:21:47.000Z | 2022-03-25T08:16:57.000Z | utils/iwr6843_utils/parse_tlv.py | ApocalyVec/mGesf | 21e0bf37a9d11a3cdde86a8d54e2f6c6a2211ab5 | [
"MIT"
] | 4 | 2020-06-20T13:53:44.000Z | 2021-09-11T22:58:21.000Z | utils/iwr6843_utils/parse_tlv.py | ApocalyVec/mGesf | 21e0bf37a9d11a3cdde86a8d54e2f6c6a2211ab5 | [
"MIT"
] | 6 | 2020-04-23T21:30:17.000Z | 2021-08-03T19:59:12.000Z | import struct
import sys
import math
import numpy as np
#
# TODO 1: (NOW FIXED) Find the first occurrence of magic and start from there
# TODO 2: Warn if we cannot parse a specific section and try to recover
# TODO 3: Remove error at end of file if we have only fragment of TLV
#
def tlvHeaderDecode(data):
    """Unpack an 8-byte TLV header into its (type, length) pair."""
    header = struct.unpack('2I', data)
    return header[0], header[1]
def parseDetectedObjects(data, numObj, tlvLength):
    """Decode the detected-points TLV: 4 float32 values per object.

    Returns a (numObj, 4) numpy array.
    """
    flat = struct.unpack(str(numObj * 4) + 'f', data[:tlvLength])
    return np.asarray(flat).reshape(numObj, 4)
def parseRangeProfile(data, tlvLength):
    """Decode the range-profile TLV: one unsigned 16-bit integer per range bin."""
    # each range bin is a 2-byte unsigned integer
    n_bins = tlvLength // 2
    return struct.unpack(str(n_bins) + 'H', data[:tlvLength])
def parseRDheatmap(data, tlvLength, range_bins, rm_clutter=True):
    """Decode the range-Doppler heatmap TLV into a (range_bins, doppler_bins) array.

    The payload holds range_bins * doppler_bins uint16 values; the number of
    doppler bins is chirps-per-frame divided by the number of TX antennas (3),
    e.g. the default (128 / 3) = 42 chirps * 2 * 256.

    The final call to replace_left_right mirror-flips left and right halves,
    equivalent to this line from mmWave.js in the TI visualizer code:
        rangeDoppler = rangeDoppler.slice((rangeDoppler.length + 1) / 2).concat(
            rangeDoppler.slice(0, (rangeDoppler.length + 1) / 2));

    :param data: the incoming byte stream to be interpreted as the heatmap
    :param tlvLength: payload length in bytes (2 bytes per heatmap cell)
    :param range_bins: number of range bins; doppler bins are inferred
    :param rm_clutter: when True, subtract each range row's mean to remove
        static clutter
    :return: np.ndarray heatmap with its halves swapped
    """
    doppler_bins = (tlvLength / 2) / range_bins
    rd_heatmap = struct.unpack(str(int(range_bins * doppler_bins)) + 'H', data[:tlvLength])
    rd_heatmap = np.reshape(rd_heatmap, (int(range_bins), int(doppler_bins)))
    # (an unused whole-array mean was previously computed here; removed)
    if rm_clutter:
        # zero-mean every range row (vectorized version of the per-row loop)
        rd_heatmap = rd_heatmap - rd_heatmap.mean(axis=1, keepdims=True)
    return replace_left_right(rd_heatmap)
def chg_val(val):
    """Reinterpret an unsigned 16-bit value as signed (two's complement)."""
    if val > 32767:
        return val - 65536
    return val
def parseAziheatmap(data, tlvLength, range_bins):
    """Decode the static azimuth heatmap TLV.

    For each range bin, four complex antenna samples are extracted from fixed
    byte offsets, zero-padded to 64 entries, FFT'd across the antenna axis and
    the magnitudes are fftshift-swapped.

    :param data: the incoming byte stream to be interpreted as the heatmap
    :param tlvLength: payload length in bytes
    :param range_bins: number of range bins in the heatmap
    :return: np.ndarray of shape (range_bins, 64) with FFT magnitudes
    """
    # range_bins = 256
    azi_bins = (tlvLength / 2) / range_bins
    # NOTE(review): azi_heatmap below is unpacked/reshaped but never used —
    # looks like dead code; confirm before removing.
    azi_heatmap = struct.unpack(str(int(range_bins * azi_bins)) + 'H', data[:tlvLength])
    # azi_heatmap = [chg_val(x) for x in azi_heatmap]
    azi_heatmap = np.reshape(azi_heatmap, (int(range_bins), int(azi_bins)))

    # use the default order of 3 Tx's and ordering is TX0, TX1, TX2
    row_indices = [7, 5, 11, 9]
    qrows = 4
    qcols = range_bins
    rowSizeBytes = 48
    q = data[:tlvLength]
    qq = []
    for col in range(qcols):
        real = []
        img = []
        for row in range(qrows):
            index = col * rowSizeBytes + 4 * row_indices[row]
            # 16-bit words stored low byte first (little-endian)
            real.append(q[index + 1] * 256 + q[index])
            img.append(q[index + 3] * 256 + q[index + 2])
        # reinterpret the unsigned words as signed 16-bit values
        real = [chg_val(x) for x in real]
        img = [chg_val(x) for x in img]
        # convert to complex numbers
        data = np.array([real, img]).transpose()
        # zero-pad the 4 antenna samples to 64 before the FFT
        data = np.pad(data, ((0, 60), (0, 0)), 'constant', constant_values=0)
        data = data[..., 0] + 1j * data[..., 1]
        transformed = np.fft.fft(data)
        # take the magnitude
        transformed = np.absolute(transformed)
        # fftshift-style swap of the two halves
        qq.append(np.concatenate((transformed[int(len(transformed) / 2):], transformed[:int(len(transformed) / 2)])))
    qq = np.array(qq)
    return qq
def replace_left_right(a):
    """Swap the left and right halves of a 2-D array along axis 1.

    Returns a new float array; the input is not modified.
    """
    half = int(a.shape[1] / 2)
    swapped = np.empty(shape=a.shape)
    swapped[:, :half] = a[:, half:]
    swapped[:, half:] = a[:, :half]
    return swapped
def parseStats(data):
    """Decode the stats TLV: six uint32 processing/load counters.

    Returns (interProcess, transmitOut, frameMargin, chirpMargin,
    activeCPULoad, interCPULoad).
    """
    fields = struct.unpack('6I', data[:24])
    interProcess, transmitOut, frameMargin, chirpMargin, activeCPULoad, interCPULoad = fields
    return interProcess, transmitOut, frameMargin, chirpMargin, activeCPULoad, interCPULoad
# Sentinel tuple returned by decode_iwr_tlv when no complete/valid frame was
# parsed: (ok, leftover data, points, range profile, rd heatmap, azi heatmap).
negative_rtn = False, None, None, None, None, None
# NOTE(review): empty placeholder class; nothing in this module uses it —
# confirm before removing.
class tlv_header_decoder():
    def __init__(self):
        pass
def decode_iwr_tlv(in_data):
    """Decode one IWR6843 output frame (header + TLV sections) from raw bytes.

    Must disable range profile for the quick RD heatmap to work; this way the
    number of range bins will be calculated from the absent range profile. You
    can still get the range profile by inferring it from the RD heatmap.

    :param in_data: raw byte stream, expected to contain the magic word
    :return: tuple (ok, leftover_data, detected_points, range_profile,
        rd_heatmap, azi_heatmap); ``negative_rtn`` (ok=False) is returned when
        no complete frame could be parsed. If no point was detected in this
        frame, ``detected_points`` stays None.
    """
    magic = b'\x02\x01\x04\x03\x06\x05\x08\x07'
    header_length = 36
    # skip everything before the first occurrence of the magic word
    offset = in_data.find(magic)
    data = in_data[offset:]
    if len(data) < header_length:
        return negative_rtn
    try:
        data_magic, version, length, platform, frameNum, cpuCycles, numObj, numTLVs = struct.unpack('Q7I',
                                                                                                    data[
                                                                                                    :header_length])
    except struct.error:
        print("Improper TLV structure found: ", (data,))
        return negative_rtn
    # print("Packet ID:\t%d "%(frameNum))
    # print("Version:\t%x "%(version))
    # print("Data Len:\t\t%d", length)
    # print("TLV:\t\t%d "%(numTLVs))
    # print("Detect Obj:\t%d "%(numObj))
    # print("Platform:\t%X "%(platform))
    # 50462726 == 0x03020006; newer firmware adds the sub-frame field
    if version >= 50462726 and len(data) >= length:
        # if version > 0x01000005 and len(data) >= length:
        try:
            sub_frame_num = struct.unpack('I', data[36:40])[0]
            header_length = 40
            # print("Subframe:\t%d "%(subFrameNum))
            pending_bytes = length - header_length
            data = data[header_length:]

            detected_points = None
            range_profile = None
            rd_heatmap = None
            azi_heatmap = None
            # NOTE(review): range_bins is hard-coded here; confirm it matches
            # the radar chirp configuration in use.
            range_bins = 8
            statistics = None

            for i in range(numTLVs):
                tlvType, tlvLength = tlvHeaderDecode(data[:8])
                data = data[8:]
                if tlvType == 1:
                    # print('Outputting Points')
                    detected_points = parseDetectedObjects(data, numObj,
                                                           tlvLength)  # if no detected points, tlvType won't have 1
                elif tlvType == 2:
                    # the range bins is modified in the range profile is enabled
                    range_profile = parseRangeProfile(data, tlvLength)
                elif tlvType == 4:
                    # resolving static azimuth heatmap
                    pass
                elif tlvType == 5:
                    # try:
                    #     assert range_bins
                    # except AssertionError:
                    #     raise Exception('Must enable range-profile while enabling range-doppler-profile, in order to'
                    #                     'interpret the number of range bins')
                    rd_heatmap = parseRDheatmap(data, tlvLength, range_bins)
                elif tlvType == 6:
                    # TODO why is the states' TLV not present?
                    interProcess, transmitOut, frameMargin, chirpMargin, activeCPULoad, interCPULoad = parseStats(data)
                    pass
                elif tlvType == 7:
                    pass
                elif tlvType == 8:
                    # resolving static azimuth-elevation heatmap
                    try:
                        azi_heatmap = parseAziheatmap(data, tlvLength, range_bins)
                    except:
                        print('bad azimuth')
                        azi_heatmap = None
                    pass
                elif tlvType == 9:  # only for AoP EV2
                    pass
                else:
                    # print("Unidentified tlv type %d" % tlvType, '. Its len is ' + str(tlvLength))
                    # unknown TLV: try to resync on the next magic word
                    n_offset = data.find(magic)
                    if n_offset != offset and n_offset != -1:
                        print('New magic found, discarding previous frame with unknown tlv')
                        data = data[n_offset:]
                        return True, data, detected_points, range_profile, rd_heatmap, azi_heatmap
                data = data[tlvLength:]
                pending_bytes -= (8 + tlvLength)
            data = data[pending_bytes:]  # data that are left
            # infer range profile from heatmap is the former is not enabled
            if range_profile is None and rd_heatmap is not None and len(rd_heatmap) > 0:
                range_profile = rd_heatmap[:, 0]
            return True, data, detected_points, range_profile, rd_heatmap, azi_heatmap
        except struct.error as se:
            print('Failed to parse tlv message, type = ' + str(tlvType) + ', error: ')
            print(se)
            pass
    return negative_rtn
if __name__ == "__main__":
    # Smoke test: read a previously captured raw serial dump from disk.
    magic = b'\x02\x01\x04\x03\x06\x05\x08\x07'
    # NOTE(review): hard-coded local path; adjust before running.
    fileName = 'D:/PycharmProjects/mmWave_gesture_iwr6843/test_data2.dat'
    rawDataFile = open(fileName, "rb")
    rawData = rawDataFile.read()
    rawDataFile.close()
    # align the buffer to the first magic word
    offset = rawData.find(magic)
    rawData = rawData[offset:]
    # for i in range(len(rawData/36))
    #
    # for length, frameNum in tlvHeader(rawData):
    #     print
250b9e3749e20788bc502eca2699fe190f414709 | 11,285 | py | Python | shape_generator/helpers.py | MarkusPic/SWMM_xsections_shape_generator | daf19c9508f3727e8cd12c450485f7f21f3e30ed | [
"MIT"
] | null | null | null | shape_generator/helpers.py | MarkusPic/SWMM_xsections_shape_generator | daf19c9508f3727e8cd12c450485f7f21f3e30ed | [
"MIT"
] | null | null | null | shape_generator/helpers.py | MarkusPic/SWMM_xsections_shape_generator | daf19c9508f3727e8cd12c450485f7f21f3e30ed | [
"MIT"
] | 1 | 2020-12-29T13:02:28.000Z | 2020-12-29T13:02:28.000Z | from io import StringIO
from os import path, listdir, remove
from math import radians, tan, cos, pi, atan, sin
from pandas import read_csv
import sympy as sy
import numpy as np
# these variables are used to solve symbolic mathematical equations
# x is the control variable over the height ... max(x) = H_cross_section
x = sy.Symbol('x', real=True, positive=True)
accuracy = 10
def to_num(x):
    """Best-effort conversion of a CSV cell string to a number.

    Args:
        x (str): raw cell content.

    Returns:
        int | float | str | None: ``None`` for an empty cell, an ``int`` or
        ``float`` when the text looks numeric, otherwise the string itself.
    """
    if x == '':
        return None
    if x.replace('-', '').isdecimal():
        return int(x)
    # strip the characters a float literal may contain; if only digits
    # remain (and a dot was present) it is parseable as float
    digits_only = x.lower().replace('.', '').replace('-', '').replace('e', '')
    if ('.' in x) and digits_only.isdecimal():
        return float(x)
    return x
def csv(txt, comment=None):
    """Parse CSV text into a DataFrame indexed by its first column.

    Rows with a null index are dropped and the index is coerced to ``str``.

    Args:
        txt (str): content of the csv.
        comment (str): comment sign; lines starting with it are ignored.

    Returns:
        pandas.DataFrame: parsed table with a string index.
    """
    frame = read_csv(StringIO(txt), index_col=0, skipinitialspace=True,
                     skip_blank_lines=True, comment=comment)
    frame = frame.loc[frame.index.notnull()].copy()
    frame.index = frame.index.astype(str)
    return frame
def to_xs_dict(txt, comment=None):
    """Parse CSV-like profile text into a dictionary.

    The first non-comment line is treated as the header; its column names
    (excluding the leading label column) are stored under the key
    ``'_names'``. Every following line maps its first cell (the profile
    label) to the list of its values converted with :func:`to_num`.

    Args:
        txt (str): content of the csv.
        comment (str): comment sign; lines starting with it are skipped.

    Returns:
        dict: ``{'_names': [...], label: [values, ...], ...}``
    """
    result = {}
    header = []
    for line in txt.split('\n'):
        if not line:
            continue
        if isinstance(comment, str) and line.startswith(comment):
            continue
        cells = [cell.strip() for cell in line.split(',')]
        if not header:
            header = cells[1:]
            result['_names'] = header
        else:
            result[cells[0]] = [to_num(cell) for cell in cells[1:]]
    return result
def deg2slope(degree):
    """Convert an angle in degrees to a slope value.

    The slope is the tangent of the angle (:math:`\\Delta x / \\Delta y`).

    Args:
        degree (float): angle in degrees.

    Returns:
        float: slope of the given angle.
    """
    angle_rad = radians(degree)
    return tan(angle_rad)
def channel_end(r, end_degree):
    """Height at which a circular channel reaches a given opening angle.

    Args:
        r (float): radius of the channel.
        end_degree (float): end angle in degrees.

    Returns:
        float: vertical height of the circle arc at that angle
        (``r * (1 - cos(angle))``).
    """
    angle = radians(end_degree)
    return r * (1 - cos(angle))
def sqrt(i):
    """Return the square root of ``i``.

    Implemented as ``i ** 0.5`` so it also works element-wise on numpy
    arrays and symbolically on sympy expressions.
    """
    return i ** 0.5
def combine_input_files(shape_path, delete_original=False):
    """Combine all generated shape text files into a single inp-like text file.

    When running the :func:`shape_generator.shape_generator.Profile.input_file`
    function, a ``*_shape.txt`` file is created per profile. This function
    concatenates them into ``all_shapes.txt`` (separated by blank lines),
    which makes importing all shapes into the .inp file easier.

    Args:
        shape_path (str): path where the shapes are stored.
        delete_original (bool): whether to delete the original single files.
    """
    # BUGFIX: the module only does `from os import path, listdir, remove`,
    # so the previous `os.path.join(...)` raised NameError; use `path.join`.
    with open(path.join(shape_path, 'all_shapes.txt'), 'w') as outfile:
        for fname in listdir(shape_path):
            # only the generated single-shape files are merged; this also
            # keeps the output file itself from being re-read
            if not fname.endswith('_shape.txt'):
                continue
            in_fn = path.join(shape_path, fname)
            with open(in_fn) as infile:
                outfile.write(infile.read())
            outfile.write('\n\n')
            if delete_original:
                remove(in_fn)

    print('Files are combined and originals {}deleted.'.format('' if delete_original else 'NOT '))
####################################################################################################################
class CustomExpr:
    """Abstract base for the piecewise boundary functions of a cross-section.

    Subclasses (straight lines, circles, ...) implement :meth:`expr`,
    :meth:`solve`, :meth:`length` and :meth:`area`; the base methods are
    intentional no-ops returning ``None``.
    """

    def __init__(self):
        """No state is kept in the base class."""

    def __repr__(self):
        return 'Custom Function'

    def expr(self):
        """Return the sympy expression of the curve (subclass responsibility)."""

    def solve(self, i):
        """Return the curve value(s) at position ``i`` (subclass responsibility)."""

    def length(self, i0, i1):
        """Return the arc length between ``i0`` and ``i1`` (subclass responsibility)."""

    def area(self, i0, i1):
        """Return the area between ``i0`` and ``i1`` (subclass responsibility)."""
####################################################################################################################
class Slope(CustomExpr):
    """Straight line segment defined by its gradient and a start point.

    The gradient is interpreted as :math:`\\Delta x / \\Delta y`
    (see :func:`deg2slope`), matching the module's cross-section convention.

    .. figure:: images/gerade.gif
        :align: center
        :alt: straight line
        :figclass: align-center

        Straight line
    """

    def __init__(self, slope, unit=None):
        """
        Args:
            slope (float): gradient value.
            unit (str | None): ``None``/``''`` for a plain ratio,
                ``'°'`` for degrees, ``'%'`` for percent.

        Raises:
            NotImplementedError: for any other unit string.
        """
        if unit in (None, ''):
            self.slope = slope
        elif unit == '°':
            self.slope = deg2slope(slope)
        elif unit == '%':
            self.slope = slope / 100
        else:
            raise NotImplementedError('Unknown Unit for slope function')

        # start and end point of the segment; filled in later
        self.x0 = None
        self.y0 = None
        self.x1 = None
        self.y1 = None
        CustomExpr.__init__(self)

    def __repr__(self):
        return f'Slope Function (k={self.slope:0.2f}, zero=[{self.x0:0.2f}, {self.y0:0.2f}])'

    def set_start_point(self, point):
        """remember the (x, y) point where the segment starts"""
        self.x0, self.y0 = point

    def set_end_point(self, point):
        """remember the (x, y) point where the segment ends"""
        self.x1, self.y1 = point

    def expr(self):
        """sympy expression of the line in the module symbol ``x``"""
        return self.y0 + (x - self.x0) / self.slope

    def solve(self, i):
        """evaluate the line at position ``i``"""
        return self.y0 + (i - self.x0) / self.slope

    @classmethod
    def from_points(cls, start, end):
        """Build the segment from its two end points.

        Args:
            start (tuple[float, float]): first point (x, y).
            end (tuple[float, float]): second point (x, y).

        Returns:
            Slope | Vertical | Horizontal: degenerate cases fall back to
            the corresponding special segment type.
        """
        x0, f0 = start
        x1, f1 = end
        if abs(f0 - f1) < 1.0e-6:
            # no change in the function value -> vertical segment
            return Vertical(f0)
        elif abs(x0 - x1) < 1.0e-6:
            # no change in the position variable -> horizontal segment
            return Horizontal.from_points(start, end)

        line = cls((x1 - x0) / (f1 - f0))
        line.set_start_point(start)
        line.set_end_point(end)
        return line

    def end_point(self):
        """(x, y) of the segment end"""
        return self.x1, self.y1

    def length(self, i0, i1):
        """straight-line distance between the curve points at ``i0`` and ``i1``"""
        dy = self.solve(i0) - self.solve(i1)
        return sqrt(dy ** 2 + (i0 - i1) ** 2)

    def area(self, i0, i1):
        """trapezoid area under the segment between ``i0`` and ``i1``"""
        return (self.solve(i0) + self.solve(i1)) / 2 * np.abs(i0 - i1)
####################################################################################################################
class Vertical(CustomExpr):
    """Segment with a constant value over the whole interval.

    Used for parts of the cross-section boundary where the width does not
    change with height.
    """

    def __init__(self, y):
        """
        Args:
            y (float): constant value of the vertical line.
        """
        CustomExpr.__init__(self)
        self.y = y

    def __repr__(self):
        return f'Vertical Function (y={self.y:0.2f})'

    def expr(self):
        """constant sympy expression (kept dependent on ``x`` on purpose)"""
        return self.y + 0 * x

    def solve(self, i):
        """constant value; ``i`` only shapes the result (scalar or array)"""
        return self.y + 0 * i

    def length(self, i0, i1):
        """extent of the segment along the position axis"""
        return i1 - i0

    def area(self, i0, i1):
        """rectangle area: extent times the constant value"""
        return (i1 - i0) * self.y
####################################################################################################################
class Horizontal(CustomExpr):
    """Segment at a fixed position ``x``, jumping between two values.

    Represents a horizontal step in the cross-section boundary.
    """

    def __init__(self):
        CustomExpr.__init__(self)
        # fixed position and the two values the step connects
        self.x = None
        self.y0 = None
        self.y1 = None

    def __repr__(self):
        return 'Horizontal Function'

    @classmethod
    def from_points(cls, start, end):
        """build a horizontal segment connecting two points"""
        segment = cls()
        segment.set_points(start, end)
        return segment

    def set_x(self, i):
        """fix the position of the segment"""
        self.x = i

    def set_points(self, start, end):
        """store both end points; ``x`` is taken from whichever point has one"""
        x0, y0 = start
        x1, y1 = end
        if x0 == x1:
            self.x = x0
        elif x0 is not None:
            self.x = x0
        elif x1 is not None:
            self.x = x1
        self.y0 = y0
        self.y1 = y1

    def expr(self):
        return self.y1

    def solve(self, i):
        return self.y1

    def length(self, i0, i1):
        """size of the jump between the two values"""
        return np.abs(self.y1 - self.y0)

    def area(self, i0, i1):
        """a step has zero extent along the position axis, hence no area"""
        return 0

    def start_point(self):
        """(x, y) of the segment start"""
        return self.x, self.y0

    def end_point(self):
        """(x, y) of the segment end"""
        return self.x, self.y1
####################################################################################################################
class Circle(CustomExpr):
    """
    function of a circle

    .. figure:: images/kreis.gif
        :align: center
        :alt: circle
        :figclass: align-center

        Circle
    """

    def __init__(self, r, x_m=0, y_m=0, clockwise=False):
        """
        Args:
            r (float): radius
            x_m (float): x axis value of the mid point
            y_m (float): y axis value of the mid point
            clockwise (bool): whether the circle is clockwise or anticlockwise
                (``True`` selects the lower semicircle in :meth:`expr`/:meth:`solve`,
                ``False`` the upper one)
        """
        self.r = float(r)
        self.x_m = float(x_m)
        self.y_m = float(y_m)
        self.clockwise = clockwise
        CustomExpr.__init__(self)

    def __repr__(self):
        return f'Circle Function (radius={self.r:0.2f}, mid=[{self.x_m:0.2f}, {self.y_m:0.2f}])'

    def expr(self):
        """
        get function/expression of a circle with a given mid point

        Returns:
            sympy.core.expr.Expr: function of the circle
        """
        # semicircle: y_m ± sqrt(r² − (x − x_m)²); sign selected by `clockwise`
        return sy.sqrt(sy.Float(self.r) ** 2 - (x - sy.Float(self.x_m)) ** 2) * (-1 if self.clockwise else 1) + \
               sy.Float(self.y_m)

    def _alpha(self, i):
        """
        angle in the circle of a point to the horizontal

        Args:
            i: variable

        Returns:
            float: angle in rad
        """
        if isinstance(i, np.ndarray):
            # vectorised path; NOTE(review): unlike the scalar branch below it
            # does not guard against a zero denominator — presumably callers
            # never pass an array containing the point level with the centre.
            # TODO confirm
            return np.arctan((i - self.x_m) / (self.solve(i) - self.y_m))
        else:
            if (self.solve(i) - self.y_m) == 0:
                # point level with the centre -> angle is ±90° (sign by side)
                a = pi / 2
                if (i - self.x_m) < 0:
                    a *= -1
            else:
                a = np.arctan((i - self.x_m) / (self.solve(i) - self.y_m))
            return a

    def _d_alpha(self, i0, i1):
        """
        difference of the angle in the circle of two points

        Args:
            i0: start variable
            i1: end variable

        Returns:
            float: difference of the angle in rad
        """
        return np.abs(self._alpha(i0) - self._alpha(i1))

    def solve(self, i):
        # numeric counterpart of expr(): y_m ± sqrt(r² − (i − x_m)²)
        return sqrt(self.r ** 2 - (i - self.x_m) ** 2) * (-1 if self.clockwise else 1) + self.y_m

    def length(self, i0, i1):
        # arc length = subtended angle × radius
        return self._d_alpha(i0, i1) * self.r

    def area(self, i0, i1):
        # circular-segment area r²/2·(α − sin α) plus the trapezoid under
        # the chord between the two curve points
        alpha = self._d_alpha(i0, i1)
        return self.r ** 2 / 2 * (alpha - np.sin(alpha)) + (self.solve(i0) + self.solve(i1)) / 2 * (i1 - i0)
| 25.823799 | 118 | 0.517767 |
250eb809dd09ad7a9b6aa51c271e231f078546da | 1,772 | py | Python | bunq/sdk/util/util.py | mwiekens/sdk_python | 9333636083bc63dca4353e8f497588f57617efec | [
"MIT"
] | null | null | null | bunq/sdk/util/util.py | mwiekens/sdk_python | 9333636083bc63dca4353e8f497588f57617efec | [
"MIT"
] | null | null | null | bunq/sdk/util/util.py | mwiekens/sdk_python | 9333636083bc63dca4353e8f497588f57617efec | [
"MIT"
] | null | null | null | from __future__ import annotations
import json
import socket
import requests
from bunq.sdk.context.api_context import ApiContext, ApiEnvironmentType
from bunq.sdk.exception.bunq_exception import BunqException
from bunq.sdk.http.api_client import ApiClient
from bunq.sdk.model.generated import endpoint
from bunq.sdk.model.generated.endpoint import SandboxUser
__UNIQUE_REQUEST_ID = "uniqueness-is-required"
__FIELD_API_KEY = "ApiKey"
__INDEX_FIRST = 0
__FIELD_RESPONSE = "Response"
__ENDPOINT_SANDBOX_USER = "sandbox-user"
_ERROR_COULD_NOT_CREATE_NEW_SANDBOX_USER = "Could not create new sandbox user."
def automatic_sandbox_install() -> ApiContext:
    """Create a fresh sandbox user and build an ApiContext for it.

    Returns:
        ApiContext: context for the bunq sandbox environment, authenticated
        with the new user's api key and this machine's hostname as the
        device description.
    """
    new_sandbox_user = __generate_new_sandbox_user()

    return ApiContext.create(
        ApiEnvironmentType.SANDBOX,
        new_sandbox_user.api_key,
        socket.gethostname()
    )
def __generate_new_sandbox_user() -> SandboxUser:
    """Request a brand-new sandbox user from the public bunq sandbox endpoint.

    Returns:
        SandboxUser: the freshly created sandbox user (carries the api key).

    Raises:
        BunqException: if the sandbox endpoint does not answer with HTTP 200.
    """
    url = ApiEnvironmentType.SANDBOX.uri_base + __ENDPOINT_SANDBOX_USER

    headers = {
        ApiClient.HEADER_REQUEST_ID: __UNIQUE_REQUEST_ID,
        ApiClient.HEADER_CACHE_CONTROL: ApiClient.CACHE_CONTROL_NONE,
        ApiClient.HEADER_GEOLOCATION: ApiClient.GEOLOCATION_ZERO,
        ApiClient.HEADER_LANGUAGE: ApiClient.LANGUAGE_EN_US,
        ApiClient.HEADER_REGION: ApiClient.REGION_NL_NL,
    }

    response = requests.request(ApiClient.METHOD_POST, url, headers=headers)

    # BUGFIX: status codes are plain ints — compare with `==`, not `is`.
    # The old identity check only worked by accident of CPython's small-int
    # cache (ints up to 256 are interned).
    if response.status_code == ApiClient.STATUS_CODE_OK:
        response_json = json.loads(response.text)
        return endpoint.SandboxUser.from_json(
            json.dumps(response_json[__FIELD_RESPONSE][__INDEX_FIRST][
                           __FIELD_API_KEY]))

    raise BunqException(_ERROR_COULD_NOT_CREATE_NEW_SANDBOX_USER)
| 34.076923 | 79 | 0.744921 |