max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
src/tipboard/app/views/dashboard.py | adeo/benchmark-tipboard | 0 | 6617151 | from django.http import JsonResponse, HttpResponse, Http404, HttpResponseRedirect
from django.contrib.staticfiles import finders
from django.shortcuts import render
from src.tipboard.app.flipboard import Flipboard
from src.tipboard.app.parser import parse_xml_layout
from src.tipboard.app.properties import TIPBOARD_CSS_STYLES, FLIPBOARD_INTERVAL, LOG, TIPBOARD_JAVASCRIPTS
from src.tipboard.app.utils import getTimeStr
from src.sensors.sensors_main import scheduleYourSensors, stopTheSensors
from apscheduler.schedulers.background import BackgroundScheduler
scheduler = BackgroundScheduler()
def renderFlipboardHtml(request): # pragma: no cover
""" Render the Html Flipboard, wich start the js tipboard mecanism """
return render(request,
'flipboard.html',
dict(page_title=Flipboard().get_flipboard_title(),
flipboard_interval=FLIPBOARD_INTERVAL,
tipboard_css=TIPBOARD_CSS_STYLES,
tipboard_js=['js/flipboard.js']))
def getDashboardsPaths(request): # pragma: no cover
""" Return the path of layout prensent in the ./tipboard/app/Config """
return JsonResponse(dict(paths=Flipboard().get_paths()), safe=False)
def hijackNamesTiles(tiles_name):
""" Replace name_tile when it's the same JS tile :), duplicate code is bad """
listOfTiles = list()
for name_tile in tiles_name:
if not listOfTiles.__contains__(name_tile):
if name_tile == "vbar_chart":
name_tile = "bar_chart"
elif name_tile == "cumulative_flow":
name_tile = "line_chart"
elif name_tile == "doughnut_chart":
name_tile = "radar_chart"
listOfTiles.append(name_tile)
return listOfTiles
def getTilesDependency(layout_name):
""" Build CSS / JS tiles dependency from the tile referenced in layout.yaml """
config = parse_xml_layout(layout_name)
tiles_template = hijackNamesTiles(config['tiles_names'])
data = dict(details=config['details'],
layout=config['layout'],
tipboard_css=TIPBOARD_CSS_STYLES,
tipboard_js=TIPBOARD_JAVASCRIPTS,
tiles_css=['tiles/' + '.'.join((name, 'css')) for name in tiles_template],
tiles_js=['tiles/' + '.'.join((name, 'js')) for name in tiles_template])
tiles_css = list() # TODO i think this need to be deleted, no css anymore for specific tile
for tile_css in data['tiles_css']:
if finders.find(tile_css):
tiles_css.append(tile_css)
data['tiles_css'] = tiles_css
return data
def renderHtmlForTiles(request, layout_name='benchmark'): # pragma: no cover
""" Render Htlm page with CSS/JS dependency for all the tiles needed in layout.yaml(dashboard) """
try:
data = getTilesDependency(layout_name)
return render(request, 'layout.html', data)
except FileNotFoundError as e:
if LOG:
print(f'{getTimeStr()}: (+)Config file:{layout_name} not found', flush=True)
msg = f"<br> <div style='color: red'> " \
f'No config file found for dashboard: {layout_name} ' \
f"Make sure that file: '{e.filename}' exists. </div>"
return HttpResponse(msg, status=404)
def demo_controller(request, flagSensors):
""" activate or not the sensors by api """
if request.method == 'GET':
print('Starting the demo: ' + flagSensors)
if flagSensors == "on":
scheduleYourSensors(scheduler)
elif flagSensors == "off":
stopTheSensors(scheduler)
return HttpResponseRedirect('/')
raise Http404
| from django.http import JsonResponse, HttpResponse, Http404, HttpResponseRedirect
from django.contrib.staticfiles import finders
from django.shortcuts import render
from src.tipboard.app.flipboard import Flipboard
from src.tipboard.app.parser import parse_xml_layout
from src.tipboard.app.properties import TIPBOARD_CSS_STYLES, FLIPBOARD_INTERVAL, LOG, TIPBOARD_JAVASCRIPTS
from src.tipboard.app.utils import getTimeStr
from src.sensors.sensors_main import scheduleYourSensors, stopTheSensors
from apscheduler.schedulers.background import BackgroundScheduler
scheduler = BackgroundScheduler()
def renderFlipboardHtml(request): # pragma: no cover
""" Render the Html Flipboard, wich start the js tipboard mecanism """
return render(request,
'flipboard.html',
dict(page_title=Flipboard().get_flipboard_title(),
flipboard_interval=FLIPBOARD_INTERVAL,
tipboard_css=TIPBOARD_CSS_STYLES,
tipboard_js=['js/flipboard.js']))
def getDashboardsPaths(request): # pragma: no cover
""" Return the path of layout prensent in the ./tipboard/app/Config """
return JsonResponse(dict(paths=Flipboard().get_paths()), safe=False)
def hijackNamesTiles(tiles_name):
""" Replace name_tile when it's the same JS tile :), duplicate code is bad """
listOfTiles = list()
for name_tile in tiles_name:
if not listOfTiles.__contains__(name_tile):
if name_tile == "vbar_chart":
name_tile = "bar_chart"
elif name_tile == "cumulative_flow":
name_tile = "line_chart"
elif name_tile == "doughnut_chart":
name_tile = "radar_chart"
listOfTiles.append(name_tile)
return listOfTiles
def getTilesDependency(layout_name):
""" Build CSS / JS tiles dependency from the tile referenced in layout.yaml """
config = parse_xml_layout(layout_name)
tiles_template = hijackNamesTiles(config['tiles_names'])
data = dict(details=config['details'],
layout=config['layout'],
tipboard_css=TIPBOARD_CSS_STYLES,
tipboard_js=TIPBOARD_JAVASCRIPTS,
tiles_css=['tiles/' + '.'.join((name, 'css')) for name in tiles_template],
tiles_js=['tiles/' + '.'.join((name, 'js')) for name in tiles_template])
tiles_css = list() # TODO i think this need to be deleted, no css anymore for specific tile
for tile_css in data['tiles_css']:
if finders.find(tile_css):
tiles_css.append(tile_css)
data['tiles_css'] = tiles_css
return data
def renderHtmlForTiles(request, layout_name='benchmark'): # pragma: no cover
""" Render Htlm page with CSS/JS dependency for all the tiles needed in layout.yaml(dashboard) """
try:
data = getTilesDependency(layout_name)
return render(request, 'layout.html', data)
except FileNotFoundError as e:
if LOG:
print(f'{getTimeStr()}: (+)Config file:{layout_name} not found', flush=True)
msg = f"<br> <div style='color: red'> " \
f'No config file found for dashboard: {layout_name} ' \
f"Make sure that file: '{e.filename}' exists. </div>"
return HttpResponse(msg, status=404)
def demo_controller(request, flagSensors):
""" activate or not the sensors by api """
if request.method == 'GET':
print('Starting the demo: ' + flagSensors)
if flagSensors == "on":
scheduleYourSensors(scheduler)
elif flagSensors == "off":
stopTheSensors(scheduler)
return HttpResponseRedirect('/')
raise Http404
| en | 0.763531 | # pragma: no cover Render the Html Flipboard, wich start the js tipboard mecanism # pragma: no cover Return the path of layout prensent in the ./tipboard/app/Config Replace name_tile when it's the same JS tile :), duplicate code is bad Build CSS / JS tiles dependency from the tile referenced in layout.yaml # TODO i think this need to be deleted, no css anymore for specific tile # pragma: no cover Render Htlm page with CSS/JS dependency for all the tiles needed in layout.yaml(dashboard) activate or not the sensors by api | 2.037247 | 2 |
fatiando/datasets/tests/test_utils.py | XuesongDing/fatiando | 179 | 6617152 | from __future__ import absolute_import
import os
from pytest import raises
from .. import check_hash
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
def test_check_hash():
"Make sure check_hash works for the Surfer test data"
fname = os.path.join(TEST_DATA_DIR, 'simple_surfer.grd')
# Hashes gotten from openssl
sha256 = "9cbdae1c020797231ff45a18594f80c68c3147d0b976103767a0c2c333b07ff6"
check_hash(fname, sha256, hash_type='sha256')
md5 = '70e2e6f0f37fba97a3545fcab8ffab21'
check_hash(fname, md5, hash_type='md5')
def test_check_hash_fails():
"Test if check_hash fails properly for a wrong known hash"
fname = os.path.join(TEST_DATA_DIR, 'simple_surfer.grd')
# Hashes gotten from openssl and changed by a single character
sha256 = "acbdae1c020797231ff45a18594f80c68c3147d0b976103767a0c2c333b07ff6"
with raises(AssertionError):
check_hash(fname, sha256, hash_type='sha256')
md5 = 'a0e2e6f0f37fba97a3545fcab8ffab21'
with raises(AssertionError):
check_hash(fname, md5, hash_type='md5')
| from __future__ import absolute_import
import os
from pytest import raises
from .. import check_hash
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
def test_check_hash():
"Make sure check_hash works for the Surfer test data"
fname = os.path.join(TEST_DATA_DIR, 'simple_surfer.grd')
# Hashes gotten from openssl
sha256 = "9cbdae1c020797231ff45a18594f80c68c3147d0b976103767a0c2c333b07ff6"
check_hash(fname, sha256, hash_type='sha256')
md5 = '70e2e6f0f37fba97a3545fcab8ffab21'
check_hash(fname, md5, hash_type='md5')
def test_check_hash_fails():
"Test if check_hash fails properly for a wrong known hash"
fname = os.path.join(TEST_DATA_DIR, 'simple_surfer.grd')
# Hashes gotten from openssl and changed by a single character
sha256 = "acbdae1c020797231ff45a18594f80c68c3147d0b976103767a0c2c333b07ff6"
with raises(AssertionError):
check_hash(fname, sha256, hash_type='sha256')
md5 = 'a0e2e6f0f37fba97a3545fcab8ffab21'
with raises(AssertionError):
check_hash(fname, md5, hash_type='md5')
| en | 0.958931 | # Hashes gotten from openssl # Hashes gotten from openssl and changed by a single character | 2.188277 | 2 |
Collusion/plot_xmni_ylayer_perprobe.py | kuz/Human-Intracranial-Recordings-and-DCNN-to-Compare-Biological-and-Artificial-Mechanisms-of-Vision | 9 | 6617153 | <gh_stars>1-10
import os
import numpy as np
import scipy.io as sio
from matplotlib import pylab as plt
# parameters
nlayers = 9
featureset = 'meangamma_bipolar_noscram_artif_responsive_brodmann'
suffix = '.permatrix.nothresh'
# list of subjects
subjects = sorted(os.listdir('../../Data/Intracranial/Processed/%s/' % featureset))
def collect_rhos(subjects, featureset, suffix, actual, prun):
"""
Compute mapping matrix areas-to-layer with the values showing variance explained
"""
# compute mapping for the origianl data
alldata = {}
for sfile in subjects:
# load probe-to-area mapping for a given subject
s = sio.loadmat('../../Data/Intracranial/Processed/%s/%s' % (featureset, sfile))
sname = s['s']['name'][0][0][0]
mni = s['s']['probes'][0][0][0][0][2]
areas = np.ravel(s['s']['probes'][0][0][0][0][3])
nprobes = len(areas)
# load rho scores for a given subject
if actual:
rhos = np.loadtxt('../../Data/Intracranial/Probe_to_Layer_Maps/rsa_euclidean_%s%s/%s.txt' % (featureset, suffix, sname))
else:
rhos = np.loadtxt('../../Data/Intracranial/Probe_to_Layer_Maps/Permutation/rsa_euclidean_%s%s/%d/%s.txt' % (featureset, suffix, prun, sname))
alldata[sname] = {'rhos': rhos, 'areas': areas, 'mni': mni}
return alldata
# compute mapping matrix on the actual data
true = collect_rhos(subjects, featureset, suffix, actual=True, prun=None)
# load p-values from the permutation test
pvals = {}
for sname in true.keys():
try:
pvals[sname] = np.loadtxt('../../Data/Intracranial/Probe_to_Layer_Maps/Permutation/rsa_euclidean_%s%s_pvalues/%s.txt' % (featureset, suffix, sname))
except:
print '%s.txt not found' % sname
# collect all probes into a matrix with each row having [2nd MNI, layer, rho]
allprobes = np.zeros((0, 2))
for sname in true:
for pid in range(len(true[sname]['areas'])):
if true[sname]['areas'][pid] in [17, 18, 19, 37, 20]:
if np.max(true[sname]['rhos'][pid, :]) > 0.0:
ties = np.where(true[sname]['rhos'][pid, :] == np.max(true[sname]['rhos'][pid, :]))[0]
for lid in ties:
if pvals[sname][pid, lid] <= 0.001:
record = np.array([true[sname]['mni'][pid, 1], lid]).reshape((1, 2))
allprobes = np.concatenate((allprobes, record))
# sort probes by sagittalcoordinate
allprobes = allprobes[allprobes[:,0].argsort()]
# plot
plt.figure(figsize=(10, 10), dpi=300);
for lid in range(nlayers):
x = allprobes[allprobes[:, 1] == lid, 0]
y = allprobes[allprobes[:, 1] == lid, 1]
plt.plot(x, y, 'o');
plt.legend(['Layer %d' % x for x in range(nlayers)]);
plt.savefig('../../Outcome/Figures/Single probe/xmni_ylayer_perprobe_%s%s.png' % (featureset, suffix), bbox_inches='tight');
plt.clf();
# boxplot
plt.figure(figsize=(10, 10), dpi=300);
data = []
for lid in range(nlayers):
data.append(allprobes[allprobes[:, 1] == lid, 0])
plt.boxplot(data, 0, 'rs', 0);
plt.savefig('../../Outcome/Figures/Single probe/xmni_ylayer_perprobe_%s%s_boxplot.png' % (featureset, suffix), bbox_inches='tight');
plt.clf();
| import os
import numpy as np
import scipy.io as sio
from matplotlib import pylab as plt
# parameters
nlayers = 9
featureset = 'meangamma_bipolar_noscram_artif_responsive_brodmann'
suffix = '.permatrix.nothresh'
# list of subjects
subjects = sorted(os.listdir('../../Data/Intracranial/Processed/%s/' % featureset))
def collect_rhos(subjects, featureset, suffix, actual, prun):
"""
Compute mapping matrix areas-to-layer with the values showing variance explained
"""
# compute mapping for the origianl data
alldata = {}
for sfile in subjects:
# load probe-to-area mapping for a given subject
s = sio.loadmat('../../Data/Intracranial/Processed/%s/%s' % (featureset, sfile))
sname = s['s']['name'][0][0][0]
mni = s['s']['probes'][0][0][0][0][2]
areas = np.ravel(s['s']['probes'][0][0][0][0][3])
nprobes = len(areas)
# load rho scores for a given subject
if actual:
rhos = np.loadtxt('../../Data/Intracranial/Probe_to_Layer_Maps/rsa_euclidean_%s%s/%s.txt' % (featureset, suffix, sname))
else:
rhos = np.loadtxt('../../Data/Intracranial/Probe_to_Layer_Maps/Permutation/rsa_euclidean_%s%s/%d/%s.txt' % (featureset, suffix, prun, sname))
alldata[sname] = {'rhos': rhos, 'areas': areas, 'mni': mni}
return alldata
# compute mapping matrix on the actual data
true = collect_rhos(subjects, featureset, suffix, actual=True, prun=None)
# load p-values from the permutation test
pvals = {}
for sname in true.keys():
try:
pvals[sname] = np.loadtxt('../../Data/Intracranial/Probe_to_Layer_Maps/Permutation/rsa_euclidean_%s%s_pvalues/%s.txt' % (featureset, suffix, sname))
except:
print '%s.txt not found' % sname
# collect all probes into a matrix with each row having [2nd MNI, layer, rho]
allprobes = np.zeros((0, 2))
for sname in true:
for pid in range(len(true[sname]['areas'])):
if true[sname]['areas'][pid] in [17, 18, 19, 37, 20]:
if np.max(true[sname]['rhos'][pid, :]) > 0.0:
ties = np.where(true[sname]['rhos'][pid, :] == np.max(true[sname]['rhos'][pid, :]))[0]
for lid in ties:
if pvals[sname][pid, lid] <= 0.001:
record = np.array([true[sname]['mni'][pid, 1], lid]).reshape((1, 2))
allprobes = np.concatenate((allprobes, record))
# sort probes by sagittalcoordinate
allprobes = allprobes[allprobes[:,0].argsort()]
# plot
plt.figure(figsize=(10, 10), dpi=300);
for lid in range(nlayers):
x = allprobes[allprobes[:, 1] == lid, 0]
y = allprobes[allprobes[:, 1] == lid, 1]
plt.plot(x, y, 'o');
plt.legend(['Layer %d' % x for x in range(nlayers)]);
plt.savefig('../../Outcome/Figures/Single probe/xmni_ylayer_perprobe_%s%s.png' % (featureset, suffix), bbox_inches='tight');
plt.clf();
# boxplot
plt.figure(figsize=(10, 10), dpi=300);
data = []
for lid in range(nlayers):
data.append(allprobes[allprobes[:, 1] == lid, 0])
plt.boxplot(data, 0, 'rs', 0);
plt.savefig('../../Outcome/Figures/Single probe/xmni_ylayer_perprobe_%s%s_boxplot.png' % (featureset, suffix), bbox_inches='tight');
plt.clf(); | en | 0.724222 | # parameters # list of subjects Compute mapping matrix areas-to-layer with the values showing variance explained # compute mapping for the origianl data # load probe-to-area mapping for a given subject # load rho scores for a given subject # compute mapping matrix on the actual data # load p-values from the permutation test # collect all probes into a matrix with each row having [2nd MNI, layer, rho] # sort probes by sagittalcoordinate # plot # boxplot | 2.233593 | 2 |
tests/imforge/test_crop.py | antoinehumbert/pimento | 0 | 6617154 | <reponame>antoinehumbert/pimento<gh_stars>0
from io import BytesIO
# noinspection PyPackageRequirements
import cv2
from PIL import Image, ImageDraw
from imforge.crop import crop_pil, crop_cv2, crop
class TestCropPil:
DEBUG = False
def test_full_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_1.png"
crop_box = [(0, 0), (389, 0), (389, 177), (0, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_2.png"
crop_box = [(389, 0), (389, 177), (0, 177), (0, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_3.png"
crop_box = [(389, 177), (0, 177), (0, 0), (389, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_4.png"
crop_box = [(0, 177), (0, 0), (389, 0), (389, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_1.png"
crop_box = [(0, 0), (0, 177), (389, 177), (389, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_2.png"
crop_box = [(389, 0), (0, 0), (0, 177), (389, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_3.png"
crop_box = [(389, 177), (389, 0), (0, 0), (0, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_4.png"
crop_box = [(0, 177), (389, 177), (389, 0), (0, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_out(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_out.png"
crop_box = [(-10, -10), (400, -10), (400, 200), (-10, 200)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_clip(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_clip.png"
crop_box = [(-10, -10), (400, -10), (400, 200), (-10, 200)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_1.png"
crop_box = [(15, 8), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_2.png"
crop_box = [(368, 78), (325, 161), (14, 71), (15, 8)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_3.png"
crop_box = [(325, 161), (14, 71), (15, 8), (368, 78)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_4.png"
crop_box = [(14, 71), (15, 8), (368, 78), (325, 161)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_1.png"
crop_box = [(15, 8), (14, 71), (325, 161), (368, 78)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_2.png"
crop_box = [(368, 78), (15, 8), (14, 71), (325, 161)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_3.png"
crop_box = [(325, 161), (368, 78), (15, 8), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_4.png"
crop_box = [(14, 71), (325, 161), (368, 78), (15, 8)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_fill(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_fill.png"
crop_box = [(15, 8), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, fillcolor=(255, 0, 255))
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_cut_out(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_cut_out.png"
crop_box = [(15, 8), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, cut_out=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_cut_out_fill(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_cut_out_fill.png"
crop_box = [(15, 8), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, fillcolor=(255, 0, 255), cut_out=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_out_of_image(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_out_of_image.png"
crop_box = [(-15, 0), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_out_of_image_clip(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_out_of_image_clip.png"
crop_box = [(-15, 0), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_out_of_image_clip(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_out_of_image_clip.png"
crop_box = [(-15, 0), (325, 161), (14, 71), (368, 78)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_complex(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_complex.png"
crop_box = [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)]
with Image.open(resources / "some_text.jpg") as image:
ImageDraw.ImageDraw(image).polygon(crop_box, outline=(0, 0, 255))
cropped_image = crop_pil(image, crop_box, fillcolor=(255, 0, 255), cut_out=True, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_complex(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_complex.png"
crop_box = [(15, 8), (-15, 50), (200, 200), (450, 161), (368, 78)]
with Image.open(resources / "some_text.jpg") as image:
ImageDraw.ImageDraw(image).polygon(crop_box, outline=(0, 0, 255))
cropped_image = crop_pil(image, crop_box, fillcolor=(255, 0, 255), cut_out=True, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_near_edge(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_near_edge.png"
crop_box = [(102, 750), (101, 529), (324, 528), (322, 750)]
with Image.open(resources / "qrcode_multi.png") as image:
cropped_image = crop_pil(image, crop_box, fillcolor=(255, 0, 0))
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_generic_crop_complex(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_complex.png"
crop_box = [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)]
with Image.open(resources / "some_text.jpg") as image:
ImageDraw.ImageDraw(image).polygon(crop_box, outline=(0, 0, 255))
cropped_image = crop(image, crop_box, fillcolor=(255, 0, 255), cut_out=True, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
class TestCropCv2:
    """Golden-file tests for :func:`crop_cv2` (and the generic :func:`crop`) on OpenCV images.

    Each test crops a source image with a polygonal crop box and compares the
    PNG-encoded result byte-for-byte against a stored "golden" file.  When a
    golden file does not exist yet, it is written from the current result
    (bootstrap mode) and the test trivially passes on that first run.
    """

    # Set to True locally to display every cropped image while debugging.
    DEBUG = False

    def _check(self, resources, expected_name, crop_box, source_name="some_text.jpg",
               crop_function=None, **crop_kwargs):
        """Crop *source_name* with *crop_box* and compare against golden *expected_name*.

        :param resources: pytest fixture pointing at the test resources directory
        :param expected_name: file name of the golden PNG under .../expected/cv2/
        :param crop_box: polygon (list of (x, y) points) passed to the crop function
        :param source_name: image file loaded from the resources directory
        :param crop_function: crop callable under test; defaults to ``crop_cv2``
        :param crop_kwargs: extra keyword arguments (``fillcolor``, ``cut_out``, ``clip``)
        """
        if crop_function is None:
            crop_function = crop_cv2
        expected_result = resources / "imforge" / "crop" / "expected" / "cv2" / expected_name
        image = cv2.imread(str(resources / source_name), cv2.IMREAD_UNCHANGED)
        cropped_image = crop_function(image, crop_box, **crop_kwargs)
        # Use PIL image for saving and comparison of result
        cropped_image = Image.fromarray(cv2.cvtColor(cropped_image, cv2.COLOR_BGR2RGB), "RGB")
        if self.DEBUG:
            cropped_image.show()
        if not expected_result.exists():
            # Bootstrap: record the current result as the new golden file.
            cropped_image.save(expected_result, optimize=True)
        image_bytes = BytesIO()
        cropped_image.save(image_bytes, format="png", optimize=True)
        image_bytes.seek(0)
        assert image_bytes.read() == expected_result.read_bytes()

    # -- full-image crop boxes: 4 rotations of the point order ----------------

    def test_full_1(self, resources):
        self._check(resources, "full_1.png", [(0, 0), (389, 0), (389, 177), (0, 177)])

    def test_full_2(self, resources):
        self._check(resources, "full_2.png", [(389, 0), (389, 177), (0, 177), (0, 0)])

    def test_full_3(self, resources):
        self._check(resources, "full_3.png", [(389, 177), (0, 177), (0, 0), (389, 0)])

    def test_full_4(self, resources):
        self._check(resources, "full_4.png", [(0, 177), (0, 0), (389, 0), (389, 177)])

    # -- full-image crop boxes traversed in the reverse ("flipped") order -----

    def test_full_flip_1(self, resources):
        self._check(resources, "full_flip_1.png", [(0, 0), (0, 177), (389, 177), (389, 0)])

    def test_full_flip_2(self, resources):
        self._check(resources, "full_flip_2.png", [(389, 0), (0, 0), (0, 177), (389, 177)])

    def test_full_flip_3(self, resources):
        self._check(resources, "full_flip_3.png", [(389, 177), (389, 0), (0, 0), (0, 177)])

    def test_full_flip_4(self, resources):
        self._check(resources, "full_flip_4.png", [(0, 177), (389, 177), (389, 0), (0, 0)])

    # -- crop box larger than the image, with and without clipping ------------

    def test_full_out(self, resources):
        self._check(resources, "full_out.png", [(-10, -10), (400, -10), (400, 200), (-10, 200)])

    def test_full_clip(self, resources):
        self._check(resources, "full_clip.png", [(-10, -10), (400, -10), (400, 200), (-10, 200)],
                    clip=True)

    # -- quadrilateral crop boxes: 4 rotations of the point order -------------

    def test_crop_1(self, resources):
        self._check(resources, "crop_1.png", [(15, 8), (368, 78), (325, 161), (14, 71)])

    def test_crop_2(self, resources):
        self._check(resources, "crop_2.png", [(368, 78), (325, 161), (14, 71), (15, 8)])

    def test_crop_3(self, resources):
        self._check(resources, "crop_3.png", [(325, 161), (14, 71), (15, 8), (368, 78)])

    def test_crop_4(self, resources):
        self._check(resources, "crop_4.png", [(14, 71), (15, 8), (368, 78), (325, 161)])

    # -- quadrilateral crop boxes traversed in the reverse order --------------

    def test_crop_flip_1(self, resources):
        self._check(resources, "crop_flip_1.png", [(15, 8), (14, 71), (325, 161), (368, 78)])

    def test_crop_flip_2(self, resources):
        self._check(resources, "crop_flip_2.png", [(368, 78), (15, 8), (14, 71), (325, 161)])

    def test_crop_flip_3(self, resources):
        self._check(resources, "crop_flip_3.png", [(325, 161), (368, 78), (15, 8), (14, 71)])

    def test_crop_flip_4(self, resources):
        self._check(resources, "crop_flip_4.png", [(14, 71), (325, 161), (368, 78), (15, 8)])

    # -- fillcolor / cut_out option combinations ------------------------------

    def test_crop_fill(self, resources):
        self._check(resources, "crop_fill.png", [(15, 8), (368, 78), (325, 161), (14, 71)],
                    fillcolor=(255, 0, 255))

    def test_crop_cut_out(self, resources):
        self._check(resources, "crop_cut_out.png", [(15, 8), (368, 78), (325, 161), (14, 71)],
                    cut_out=True)

    def test_crop_cut_out_fill(self, resources):
        self._check(resources, "crop_cut_out_fill.png", [(15, 8), (368, 78), (325, 161), (14, 71)],
                    fillcolor=(255, 0, 255), cut_out=True)

    # -- crop boxes with vertices outside the image ---------------------------

    def test_crop_out_of_image(self, resources):
        self._check(resources, "crop_out_of_image.png", [(-15, 0), (368, 78), (325, 161), (14, 71)])

    def test_crop_out_of_image_clip(self, resources):
        self._check(resources, "crop_out_of_image_clip.png",
                    [(-15, 0), (368, 78), (325, 161), (14, 71)], clip=True)

    def test_crop_flip_out_of_image_clip(self, resources):
        self._check(resources, "crop_flip_out_of_image_clip.png",
                    [(-15, 0), (325, 161), (14, 71), (368, 78)], clip=True)

    # -- five-point polygons with all options enabled -------------------------

    def test_crop_complex(self, resources):
        self._check(resources, "crop_complex.png",
                    [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)],
                    fillcolor=(255, 0, 255), cut_out=True, clip=True)

    def test_crop_flip_complex(self, resources):
        self._check(resources, "crop_flip_complex.png",
                    [(15, 8), (-15, 50), (200, 200), (450, 161), (368, 78)],
                    fillcolor=(255, 0, 255), cut_out=True, clip=True)

    # -- other cases ----------------------------------------------------------

    def test_crop_near_edge(self, resources):
        self._check(resources, "crop_near_edge.png",
                    [(102, 750), (101, 529), (324, 528), (322, 750)],
                    source_name="qrcode_multi.png", fillcolor=(255, 0, 255))

    def test_generic_crop_complex(self, resources):
        # The generic crop() dispatcher must produce the same golden output as crop_cv2.
        self._check(resources, "crop_complex.png",
                    [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)],
                    crop_function=crop, fillcolor=(255, 0, 255), cut_out=True, clip=True)
| from io import BytesIO
# noinspection PyPackageRequirements
import cv2
from PIL import Image, ImageDraw
from imforge.crop import crop_pil, crop_cv2, crop
class TestCropPil:
DEBUG = False
def test_full_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_1.png"
crop_box = [(0, 0), (389, 0), (389, 177), (0, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_2.png"
crop_box = [(389, 0), (389, 177), (0, 177), (0, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_3.png"
crop_box = [(389, 177), (0, 177), (0, 0), (389, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_4.png"
crop_box = [(0, 177), (0, 0), (389, 0), (389, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_1.png"
crop_box = [(0, 0), (0, 177), (389, 177), (389, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_2.png"
crop_box = [(389, 0), (0, 0), (0, 177), (389, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_3.png"
crop_box = [(389, 177), (389, 0), (0, 0), (0, 177)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_flip_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_flip_4.png"
crop_box = [(0, 177), (389, 177), (389, 0), (0, 0)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_out(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_out.png"
crop_box = [(-10, -10), (400, -10), (400, 200), (-10, 200)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_full_clip(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "full_clip.png"
crop_box = [(-10, -10), (400, -10), (400, 200), (-10, 200)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, clip=True)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_1.png"
crop_box = [(15, 8), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_2.png"
crop_box = [(368, 78), (325, 161), (14, 71), (15, 8)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_3.png"
crop_box = [(325, 161), (14, 71), (15, 8), (368, 78)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_4.png"
crop_box = [(14, 71), (15, 8), (368, 78), (325, 161)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_1(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_1.png"
crop_box = [(15, 8), (14, 71), (325, 161), (368, 78)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_2(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_2.png"
crop_box = [(368, 78), (15, 8), (14, 71), (325, 161)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_3(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_3.png"
crop_box = [(325, 161), (368, 78), (15, 8), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_flip_4(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_4.png"
crop_box = [(14, 71), (325, 161), (368, 78), (15, 8)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box)
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_fill(self, resources):
expected_result = resources / "imforge" / "crop" / "expected" / "pil" / "crop_fill.png"
crop_box = [(15, 8), (368, 78), (325, 161), (14, 71)]
with Image.open(resources / "some_text.jpg") as image:
cropped_image = crop_pil(image, crop_box, fillcolor=(255, 0, 255))
if self.DEBUG:
cropped_image.show()
if not expected_result.exists():
cropped_image.save(expected_result, optimize=True)
image_bytes = BytesIO()
cropped_image.save(image_bytes, format="png", optimize=True)
image_bytes.seek(0)
assert image_bytes.read() == expected_result.read_bytes()
def test_crop_cut_out(self, resources):
    """Crop with cut_out enabled (result reduced to the polygon's bounding area)."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_cut_out.png"
    box = [(15, 8), (368, 78), (325, 161), (14, 71)]
    with Image.open(resources / "some_text.jpg") as source:
        result = crop_pil(source, box, cut_out=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_cut_out_fill(self, resources):
    """Crop with both cut_out and a custom fill color."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_cut_out_fill.png"
    box = [(15, 8), (368, 78), (325, 161), (14, 71)]
    with Image.open(resources / "some_text.jpg") as source:
        result = crop_pil(source, box, fillcolor=(255, 0, 255), cut_out=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_out_of_image(self, resources):
    """Crop box partially outside the image bounds, without clipping."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_out_of_image.png"
    box = [(-15, 0), (368, 78), (325, 161), (14, 71)]
    with Image.open(resources / "some_text.jpg") as source:
        result = crop_pil(source, box)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_out_of_image_clip(self, resources):
    """Crop box partially outside the image bounds, with clipping enabled."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_out_of_image_clip.png"
    box = [(-15, 0), (368, 78), (325, 161), (14, 71)]
    with Image.open(resources / "some_text.jpg") as source:
        result = crop_pil(source, box, clip=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_flip_out_of_image_clip(self, resources):
    """Flipped-winding crop box partially outside the image, with clipping enabled."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_out_of_image_clip.png"
    box = [(-15, 0), (325, 161), (14, 71), (368, 78)]
    with Image.open(resources / "some_text.jpg") as source:
        result = crop_pil(source, box, clip=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_complex(self, resources):
    """Five-vertex polygon partially out of bounds, with fill, cut_out and clip all enabled."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_complex.png"
    box = [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)]
    with Image.open(resources / "some_text.jpg") as source:
        # Outline the crop polygon on the source so it is visible in the result.
        ImageDraw.ImageDraw(source).polygon(box, outline=(0, 0, 255))
        result = crop_pil(source, box, fillcolor=(255, 0, 255), cut_out=True, clip=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_flip_complex(self, resources):
    """Flipped-winding five-vertex polygon with fill, cut_out and clip all enabled."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_flip_complex.png"
    box = [(15, 8), (-15, 50), (200, 200), (450, 161), (368, 78)]
    with Image.open(resources / "some_text.jpg") as source:
        # Outline the crop polygon on the source so it is visible in the result.
        ImageDraw.ImageDraw(source).polygon(box, outline=(0, 0, 255))
        result = crop_pil(source, box, fillcolor=(255, 0, 255), cut_out=True, clip=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_crop_near_edge(self, resources):
    """Crop box very close to the image border of a different source image."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_near_edge.png"
    box = [(102, 750), (101, 529), (324, 528), (322, 750)]
    with Image.open(resources / "qrcode_multi.png") as source:
        result = crop_pil(source, box, fillcolor=(255, 0, 0))
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
def test_generic_crop_complex(self, resources):
    """The generic crop() dispatcher must produce the same result as crop_pil for the complex case."""
    expected = resources / "imforge" / "crop" / "expected" / "pil" / "crop_complex.png"
    box = [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)]
    with Image.open(resources / "some_text.jpg") as source:
        # Outline the crop polygon on the source so it is visible in the result.
        ImageDraw.ImageDraw(source).polygon(box, outline=(0, 0, 255))
        result = crop(source, box, fillcolor=(255, 0, 255), cut_out=True, clip=True)
    if self.DEBUG:
        result.show()
    # Bootstrap: persist the produced image as the reference when none exists.
    if not expected.exists():
        result.save(expected, optimize=True)
    buffer = BytesIO()
    result.save(buffer, format="png", optimize=True)
    assert buffer.getvalue() == expected.read_bytes()
class TestCropCv2:
    """Reference-image tests for ``crop_cv2`` (and the generic ``crop`` dispatcher).

    Every test crops a sample image with a polygonal crop box and compares the
    PNG-encoded result byte-for-byte against a stored reference image.  A
    missing reference image is created on first run (bootstrap mode).
    """

    DEBUG = False  # set to True to display each cropped image while debugging

    def _check(self, resources, expected_name, crop_box,
               source_name="some_text.jpg", crop_func=None, **crop_kwargs):
        """Run one crop and assert the PNG output matches the stored reference.

        :param resources: pytest fixture pointing at the test-resources directory
        :param expected_name: file name of the reference PNG under expected/cv2/
        :param crop_box: polygon (list of (x, y) vertices) passed to the crop function
        :param source_name: input image file name inside the resources directory
        :param crop_func: crop callable to exercise (defaults to ``crop_cv2``)
        :param crop_kwargs: extra keyword arguments (fillcolor, cut_out, clip, ...)
        """
        if crop_func is None:
            crop_func = crop_cv2
        expected_result = resources / "imforge" / "crop" / "expected" / "cv2" / expected_name
        image = cv2.imread(str(resources / source_name), cv2.IMREAD_UNCHANGED)
        cropped_image = crop_func(image, crop_box, **crop_kwargs)
        # Use PIL image for saving and comparison of result
        cropped_image = Image.fromarray(cv2.cvtColor(cropped_image, cv2.COLOR_BGR2RGB), "RGB")
        if self.DEBUG:
            cropped_image.show()
        if not expected_result.exists():
            # Bootstrap: store the produced image as the new reference.
            cropped_image.save(expected_result, optimize=True)
        image_bytes = BytesIO()
        cropped_image.save(image_bytes, format="png", optimize=True)
        image_bytes.seek(0)
        assert image_bytes.read() == expected_result.read_bytes()

    # -- full-image crops: four rotations of the vertex order ------------------

    def test_full_1(self, resources):
        self._check(resources, "full_1.png", [(0, 0), (389, 0), (389, 177), (0, 177)])

    def test_full_2(self, resources):
        self._check(resources, "full_2.png", [(389, 0), (389, 177), (0, 177), (0, 0)])

    def test_full_3(self, resources):
        self._check(resources, "full_3.png", [(389, 177), (0, 177), (0, 0), (389, 0)])

    def test_full_4(self, resources):
        self._check(resources, "full_4.png", [(0, 177), (0, 0), (389, 0), (389, 177)])

    # -- full-image crops with flipped (reversed) winding ----------------------

    def test_full_flip_1(self, resources):
        self._check(resources, "full_flip_1.png", [(0, 0), (0, 177), (389, 177), (389, 0)])

    def test_full_flip_2(self, resources):
        self._check(resources, "full_flip_2.png", [(389, 0), (0, 0), (0, 177), (389, 177)])

    def test_full_flip_3(self, resources):
        self._check(resources, "full_flip_3.png", [(389, 177), (389, 0), (0, 0), (0, 177)])

    def test_full_flip_4(self, resources):
        self._check(resources, "full_flip_4.png", [(0, 177), (389, 177), (389, 0), (0, 0)])

    # -- full-image crop boxes extending beyond the image ----------------------

    def test_full_out(self, resources):
        self._check(resources, "full_out.png", [(-10, -10), (400, -10), (400, 200), (-10, 200)])

    def test_full_clip(self, resources):
        self._check(resources, "full_clip.png", [(-10, -10), (400, -10), (400, 200), (-10, 200)], clip=True)

    # -- quadrilateral crops: four rotations of the vertex order ---------------

    def test_crop_1(self, resources):
        self._check(resources, "crop_1.png", [(15, 8), (368, 78), (325, 161), (14, 71)])

    def test_crop_2(self, resources):
        self._check(resources, "crop_2.png", [(368, 78), (325, 161), (14, 71), (15, 8)])

    def test_crop_3(self, resources):
        self._check(resources, "crop_3.png", [(325, 161), (14, 71), (15, 8), (368, 78)])

    def test_crop_4(self, resources):
        self._check(resources, "crop_4.png", [(14, 71), (15, 8), (368, 78), (325, 161)])

    # -- quadrilateral crops with flipped (reversed) winding -------------------

    def test_crop_flip_1(self, resources):
        self._check(resources, "crop_flip_1.png", [(15, 8), (14, 71), (325, 161), (368, 78)])

    def test_crop_flip_2(self, resources):
        self._check(resources, "crop_flip_2.png", [(368, 78), (15, 8), (14, 71), (325, 161)])

    def test_crop_flip_3(self, resources):
        self._check(resources, "crop_flip_3.png", [(325, 161), (368, 78), (15, 8), (14, 71)])

    def test_crop_flip_4(self, resources):
        self._check(resources, "crop_flip_4.png", [(14, 71), (325, 161), (368, 78), (15, 8)])

    # -- option combinations ---------------------------------------------------

    def test_crop_fill(self, resources):
        self._check(resources, "crop_fill.png", [(15, 8), (368, 78), (325, 161), (14, 71)],
                    fillcolor=(255, 0, 255))

    def test_crop_cut_out(self, resources):
        self._check(resources, "crop_cut_out.png", [(15, 8), (368, 78), (325, 161), (14, 71)],
                    cut_out=True)

    def test_crop_cut_out_fill(self, resources):
        self._check(resources, "crop_cut_out_fill.png", [(15, 8), (368, 78), (325, 161), (14, 71)],
                    fillcolor=(255, 0, 255), cut_out=True)

    def test_crop_out_of_image(self, resources):
        self._check(resources, "crop_out_of_image.png", [(-15, 0), (368, 78), (325, 161), (14, 71)])

    def test_crop_out_of_image_clip(self, resources):
        self._check(resources, "crop_out_of_image_clip.png", [(-15, 0), (368, 78), (325, 161), (14, 71)],
                    clip=True)

    def test_crop_flip_out_of_image_clip(self, resources):
        self._check(resources, "crop_flip_out_of_image_clip.png", [(-15, 0), (325, 161), (14, 71), (368, 78)],
                    clip=True)

    def test_crop_complex(self, resources):
        self._check(resources, "crop_complex.png", [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)],
                    fillcolor=(255, 0, 255), cut_out=True, clip=True)

    def test_crop_flip_complex(self, resources):
        self._check(resources, "crop_flip_complex.png", [(15, 8), (-15, 50), (200, 200), (450, 161), (368, 78)],
                    fillcolor=(255, 0, 255), cut_out=True, clip=True)

    def test_crop_near_edge(self, resources):
        self._check(resources, "crop_near_edge.png", [(102, 750), (101, 529), (324, 528), (322, 750)],
                    source_name="qrcode_multi.png", fillcolor=(255, 0, 255))

    def test_generic_crop_complex(self, resources):
        # Exercises the generic crop() dispatcher; shares the crop_complex reference.
        self._check(resources, "crop_complex.png", [(15, 8), (368, 78), (450, 161), (200, 200), (-15, 50)],
                    crop_func=crop, fillcolor=(255, 0, 255), cut_out=True, clip=True)
prev_ob_models/Migliore2015operators/odordisp.py | fameshpatel/olfactorybulb | 5 | 6617155 | <reponame>fameshpatel/olfactorybulb
from colors import palette as nrncolors
try:
from enthought.traits.ui.api import Handler
from enthought.traits.ui.api import UIInfo
except:
from traitsui.api import Handler
from traitsui.api import UIInfo
class OdorHandler(Handler):
    """TraitsUI handler that colors bulb glomeruli according to odor activation patterns.

    One ``_show<i>`` action method is generated dynamically per entry so that
    menu actions (wired up in ``initOdorsDisp``) can dispatch to ``show(i)``.
    """

    def __init__(self, fig, bulb, glomval):
        # fig: scene figure; bulb: object exposing glomerulus actors via .real_h;
        # glomval: per-odor lists of palette indices (one list per odor).
        from copy import copy
        self.__glomval = copy(glomval)
        self.__fig = fig; self.__bulb = bulb
        # create method for every odor
        # NOTE(review): the loop count is len(bulb.real_h) (number of gloms),
        # while the generated methods index odors in show() -- verify intended count.
        code = "def _show%g(self, info): self.show(%g)"
        for i in range(len(self.__bulb.real_h)):
            x = code % (i, i)
            compcode = {}
            # Python 2 exec statement: compiles "_show<i>" into the compcode namespace.
            exec x.strip() in compcode
            name = '_show%g' % i
            # Attach the generated action method to the class for TraitsUI dispatch.
            setattr(self.__class__, name, compcode[name])

    # clean the glomerulus
    def clean(self):
        # Reset every glomerulus actor to the default red color and redraw.
        for i in range(len(self.__bulb.real_h)):
            self.__bulb.real_h[i].property.color = (1., 0., 0.)
        self.__fig.scene.render()

    # TraitsUI action thunks: delegate to the public methods below.
    def _clean(self, info): self.clean()
    def _setview(self, info): self.setview()

    # set best view for the camera
    def setview(self):
        # set the camera
        self.__fig.scene.camera.position = [ -294.68175837, 2301.60733878, 5760.60463821 ]
        self.__fig.scene.camera.focal_point = [ 972.90044282, 1081.82497137, -90.96137608 ]
        self.__fig.scene.camera.view_angle = 30.
        self.__fig.scene.camera.view_up = [ 0.04971769, 0.97984323, -0.19348226 ]
        self.__fig.scene.render()

    # color gloms
    def show(self, i):
        # Color each glomerulus actor with the palette entry for odor *i*, then redraw.
        for j, x in enumerate(self.__glomval[i]):
            self.__bulb.real_h[j].property.color = nrncolors[x]
        self.__fig.scene.render()
def OdorsInput(fname):
    """Parse an odor activation file into labels and palette-index lists.

    Each line holds an odor label (underscores become spaces) followed by
    per-glomerulus activation values.  Values are normalized over the global
    min/max and mapped to indices into the ``nrncolors`` palette.

    Returns a pair ``(odorlbl, glomval)``.
    """
    odorlbl = []
    glomval = []
    # Seed values reproduce the original clamping behaviour of the scan below.
    minval = 100.
    maxval = -1.
    with open(fname, 'r') as handle:
        for line in handle:
            token = line.split()
            # labels
            odorlbl.append(token[0].replace('_', ' '))
            # glom values
            vals = [float(t) for t in token[1:]]
            minval = min(minval, min(vals))
            maxval = max(maxval, max(vals))
            glomval.append(vals)
    # normalization for a best visualization of colors
    span = maxval - minval
    top = len(nrncolors) - 1
    glomval = [[int((v - minval) / span * top) for v in vals] for vals in glomval]
    return odorlbl, glomval
def initOdorsDisp(fname, fig, bulb):
    """Build the odor/view menu bar and its handler from the activation file *fname*."""
    labels, values = OdorsInput(fname)
    try:
        from enthought.traits.ui.menu import Action, MenuBar, Menu, Separator  # create odor list
    except:
        from traitsui.menu import Action, MenuBar, Menu, Separator  # create odor list
    # One menu entry per odor; action names match the _show<i> methods on OdorHandler.
    odor_menu = Menu(name='Odors')
    for idx, label in enumerate(labels):
        odor_menu.append(Action(name=label, action='_show%g' % idx))
    view_menu = Menu(name='View')
    view_menu.append(Action(name='Set View as Vinci\'s', action='_setview'))
    view_menu.append(Action(name='Clean gloms', action='_clean'))
    return MenuBar(odor_menu, view_menu), OdorHandler(fig, bulb, values)
| from colors import palette as nrncolors
try:
from enthought.traits.ui.api import Handler
from enthought.traits.ui.api import UIInfo
except:
from traitsui.api import Handler
from traitsui.api import UIInfo
class OdorHandler(Handler):
    """TraitsUI handler that colors bulb glomeruli according to odor activation patterns.

    One ``_show<i>`` action method is generated dynamically per entry so that
    menu actions (wired up in ``initOdorsDisp``) can dispatch to ``show(i)``.
    """

    def __init__(self, fig, bulb, glomval):
        # fig: scene figure; bulb: object exposing glomerulus actors via .real_h;
        # glomval: per-odor lists of palette indices (one list per odor).
        from copy import copy
        self.__glomval = copy(glomval)
        self.__fig = fig; self.__bulb = bulb
        # create method for every odor
        # NOTE(review): the loop count is len(bulb.real_h) (number of gloms),
        # while the generated methods index odors in show() -- verify intended count.
        code = "def _show%g(self, info): self.show(%g)"
        for i in range(len(self.__bulb.real_h)):
            x = code % (i, i)
            compcode = {}
            # Python 2 exec statement: compiles "_show<i>" into the compcode namespace.
            exec x.strip() in compcode
            name = '_show%g' % i
            # Attach the generated action method to the class for TraitsUI dispatch.
            setattr(self.__class__, name, compcode[name])

    # clean the glomerulus
    def clean(self):
        # Reset every glomerulus actor to the default red color and redraw.
        for i in range(len(self.__bulb.real_h)):
            self.__bulb.real_h[i].property.color = (1., 0., 0.)
        self.__fig.scene.render()

    # TraitsUI action thunks: delegate to the public methods below.
    def _clean(self, info): self.clean()
    def _setview(self, info): self.setview()

    # set best view for the camera
    def setview(self):
        # set the camera
        self.__fig.scene.camera.position = [ -294.68175837, 2301.60733878, 5760.60463821 ]
        self.__fig.scene.camera.focal_point = [ 972.90044282, 1081.82497137, -90.96137608 ]
        self.__fig.scene.camera.view_angle = 30.
        self.__fig.scene.camera.view_up = [ 0.04971769, 0.97984323, -0.19348226 ]
        self.__fig.scene.render()

    # color gloms
    def show(self, i):
        # Color each glomerulus actor with the palette entry for odor *i*, then redraw.
        for j, x in enumerate(self.__glomval[i]):
            self.__bulb.real_h[j].property.color = nrncolors[x]
        self.__fig.scene.render()
def OdorsInput(fname):
    """Read an odor table and return (labels, palette indices).

    Each line of *fname* is '<label> <v1> <v2> ...'; underscores in the
    label become spaces.  Values are min-max normalized over the whole
    file and quantized to indices into the nrncolors palette.
    Returns (odorlbl, glomval): list of str, list of list of int.
    """
    odorlbl = []
    glomval = []
    minval = 100.; maxval = -1.
    # 'with' closes the file even if a line fails to parse (the original
    # leaked the handle in that case).
    with open(fname, 'r') as f:
        for line in f:
            token = line.split()
            if not token:
                continue  # tolerate blank lines (the original crashed)
            # labels
            odorlbl.append(token[0].replace('_', ' '))
            # glom values
            vals = [float(t) for t in token[1:]]
            minval = min(minval, min(vals))
            maxval = max(maxval, max(vals))
            glomval.append(vals)
    # normalization for a best visualization of colors
    span = maxval - minval
    if span <= 0:
        span = 1.  # degenerate file (all values equal): avoid ZeroDivisionError
    for vals in glomval:
        for j in range(len(vals)):
            vals[j] = int((vals[j] - minval) / span * (len(nrncolors) - 1))
    return odorlbl, glomval
def initOdorsDisp(fname, fig, bulb):
    """Build the odor menu bar and its TraitsUI handler for the bulb view.

    fname -- path of the odor table consumed by OdorsInput()
    fig, bulb -- figure and bulb model forwarded to OdorHandler
    Returns (MenuBar, OdorHandler).
    """
    odorlbl, glomval = OdorsInput(fname)
    # Enthought renamed the package from enthought.traits.ui to traitsui;
    # support both layouts.  Catch only ImportError -- the original bare
    # except also hid unrelated failures inside the package import.
    try:
        from enthought.traits.ui.menu import Action, MenuBar, Menu
    except ImportError:
        from traitsui.menu import Action, MenuBar, Menu
    # One entry per odor; the action string dispatches to the _show<i>
    # methods generated dynamically by OdorHandler.__init__.
    menu = Menu(name='Odors')
    for i, name in enumerate(odorlbl):
        menu.append(Action(name=name, action='_show%g' % i))
    menu1 = Menu(name='View')
    menu1.append(Action(name='Set View as Vinci\'s', action='_setview'))
    menu1.append(Action(name='Clean gloms', action='_clean'))
    return MenuBar(menu, menu1), OdorHandler(fig, bulb, glomval)
jinjadp/highlight.py | afg984/happpycoding | 6 | 6617156 | from pygments import highlight as _highlight
from pygments.lexers import CppLexer
from pygments.formatters import HtmlFormatter
from .utils import safe_rendering
@safe_rendering
def highlight(code):
    """Render *code* as C++-highlighted HTML via Pygments."""
    lexer = CppLexer()
    formatter = HtmlFormatter()
    return _highlight(code, lexer, formatter)
| from pygments import highlight as _highlight
from pygments.lexers import CppLexer
from pygments.formatters import HtmlFormatter
from .utils import safe_rendering
@safe_rendering
def highlight(code):
    """Render *code* as C++-highlighted HTML via Pygments."""
    lexer = CppLexer()
    formatter = HtmlFormatter()
    return _highlight(code, lexer, formatter)
| none | 1 | 2.070953 | 2 | |
thefuck/rules/brew_unknown_command.py | Archstacker/thefuck | 1 | 6617157 | import os
import re
import subprocess
from thefuck.utils import get_closest, replace_command
# Paths below the Homebrew prefix (as reported by `brew --prefix`).
BREW_CMD_PATH = '/Library/Homebrew/cmd'  # built-in command implementations (*.rb)
TAP_PATH = '/Library/Taps'               # per-user tap hierarchy
TAP_CMD_PATH = '/%s/%s/cmd'              # filled with (user, tap) below
def _get_brew_path_prefix():
"""To get brew path"""
try:
return subprocess.check_output(['brew', '--prefix'],
universal_newlines=True).strip()
except:
return None
def _get_brew_commands(brew_path_prefix):
    """List Homebrew's built-in commands (the *.rb files under cmd/)."""
    cmd_dir = brew_path_prefix + BREW_CMD_PATH
    commands = []
    for entry in os.listdir(cmd_dir):
        if entry.endswith('.rb'):
            commands.append(entry.replace('.rb', ''))
    return commands
def _get_brew_tap_specific_commands(brew_path_prefix):
    """List commands contributed by installed taps.

    https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115
    """
    commands = []
    taps_root = brew_path_prefix + TAP_PATH
    for user in _get_directory_names_only(taps_root):
        # Brew Taps's naming rule:
        # https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations
        for tap in _get_directory_names_only(taps_root + '/%s' % user):
            if not tap.startswith('homebrew-'):
                continue
            tap_cmd_path = taps_root + TAP_CMD_PATH % (user, tap)
            if not os.path.isdir(tap_cmd_path):
                continue
            for entry in os.listdir(tap_cmd_path):
                if _is_brew_tap_cmd_naming(entry):
                    commands.append(entry.replace('brew-', '').replace('.rb', ''))
    return commands
def _is_brew_tap_cmd_naming(name):
if name.startswith('brew-') and name.endswith('.rb'):
return True
return False
def _get_directory_names_only(path):
return [d for d in os.listdir(path)
if os.path.isdir(os.path.join(path, d))]
# Resolve the brew prefix once at import time; enumerating commands hits
# the filesystem, so it is done eagerly here rather than per invocation.
brew_path_prefix = _get_brew_path_prefix()
# Failback commands for testing (Based on Homebrew 0.9.5)
brew_commands = ['info', 'home', 'options', 'install', 'uninstall',
                 'search', 'list', 'update', 'upgrade', 'pin', 'unpin',
                 'doctor', 'create', 'edit']
if brew_path_prefix:
    try:
        brew_commands = _get_brew_commands(brew_path_prefix) \
            + _get_brew_tap_specific_commands(brew_path_prefix)
    except OSError:
        # Unreadable brew tree -- keep the static fallback list above.
        pass
def match(command, settings):
    """Rule applies when brew reports an unknown command we can correct."""
    if 'brew' not in command.script or 'Unknown command' not in command.stderr:
        return False
    broken_cmd = re.findall(r'Error: Unknown command: ([a-z]+)',
                            command.stderr)[0]
    return bool(get_closest(broken_cmd, brew_commands))
def get_new_command(command, settings):
    """Suggest the corrected brew invocation for the unknown subcommand."""
    broken = re.findall(r'Error: Unknown command: ([a-z]+)', command.stderr)
    return replace_command(command, broken[0], brew_commands)
| import os
import re
import subprocess
from thefuck.utils import get_closest, replace_command
# Paths below the Homebrew prefix (as reported by `brew --prefix`).
BREW_CMD_PATH = '/Library/Homebrew/cmd'  # built-in command implementations (*.rb)
TAP_PATH = '/Library/Taps'               # per-user tap hierarchy
TAP_CMD_PATH = '/%s/%s/cmd'              # filled with (user, tap) below
def _get_brew_path_prefix():
"""To get brew path"""
try:
return subprocess.check_output(['brew', '--prefix'],
universal_newlines=True).strip()
except:
return None
def _get_brew_commands(brew_path_prefix):
    """List Homebrew's built-in commands (the *.rb files under cmd/)."""
    cmd_dir = brew_path_prefix + BREW_CMD_PATH
    commands = []
    for entry in os.listdir(cmd_dir):
        if entry.endswith('.rb'):
            commands.append(entry.replace('.rb', ''))
    return commands
def _get_brew_tap_specific_commands(brew_path_prefix):
    """List commands contributed by installed taps.

    https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115
    """
    commands = []
    taps_root = brew_path_prefix + TAP_PATH
    for user in _get_directory_names_only(taps_root):
        # Brew Taps's naming rule:
        # https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations
        for tap in _get_directory_names_only(taps_root + '/%s' % user):
            if not tap.startswith('homebrew-'):
                continue
            tap_cmd_path = taps_root + TAP_CMD_PATH % (user, tap)
            if not os.path.isdir(tap_cmd_path):
                continue
            for entry in os.listdir(tap_cmd_path):
                if _is_brew_tap_cmd_naming(entry):
                    commands.append(entry.replace('brew-', '').replace('.rb', ''))
    return commands
def _is_brew_tap_cmd_naming(name):
if name.startswith('brew-') and name.endswith('.rb'):
return True
return False
def _get_directory_names_only(path):
return [d for d in os.listdir(path)
if os.path.isdir(os.path.join(path, d))]
# Resolve the brew prefix once at import time; enumerating commands hits
# the filesystem, so it is done eagerly here rather than per invocation.
brew_path_prefix = _get_brew_path_prefix()
# Failback commands for testing (Based on Homebrew 0.9.5)
brew_commands = ['info', 'home', 'options', 'install', 'uninstall',
                 'search', 'list', 'update', 'upgrade', 'pin', 'unpin',
                 'doctor', 'create', 'edit']
if brew_path_prefix:
    try:
        brew_commands = _get_brew_commands(brew_path_prefix) \
            + _get_brew_tap_specific_commands(brew_path_prefix)
    except OSError:
        # Unreadable brew tree -- keep the static fallback list above.
        pass
def match(command, settings):
    """Rule applies when brew reports an unknown command we can correct."""
    if 'brew' not in command.script or 'Unknown command' not in command.stderr:
        return False
    broken_cmd = re.findall(r'Error: Unknown command: ([a-z]+)',
                            command.stderr)[0]
    return bool(get_closest(broken_cmd, brew_commands))
def get_new_command(command, settings):
    """Suggest the corrected brew invocation for the unknown subcommand."""
    broken = re.findall(r'Error: Unknown command: ([a-z]+)', command.stderr)
    return replace_command(command, broken[0], brew_commands)
| en | 0.782956 | To get brew path To get brew default commands on local environment To get tap's specific commands https://github.com/Homebrew/homebrew/blob/master/Library/brew.rb#L115 # Brew Taps's naming rule # https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/brew-tap.md#naming-conventions-and-limitations # Failback commands for testing (Based on Homebrew 0.9.5) | 2.493396 | 2 |
mkqa_eval_util.py | LaudateCorpus1/ml-mkqa | 92 | 6617158 | <filename>mkqa_eval_util.py
import collections
import os
import re
import string
from collections import Counter, OrderedDict
from multiprocessing import Pool
from typing import Dict, List, Optional
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
# Non-interactive backend: plots are written to files, never shown
# (must be selected before pyplot draws anything).
matplotlib.use("Agg")
# Languages with no whitespace word boundaries -- tokenized per character.
MIXED_SEGMENTATION_LANGS = ["zh_cn", "zh_hk", "zh_tw", "ja", "th", "km"]
# Per-language regex matching articles stripped during answer
# normalization.  Languages missing from this map keep their articles.
ARTICLE_REGEX_BY_LANG = {
    "en": r"\b(a|an|the)\b",
    "es": r"\b(un|una|unos|unas|el|la|los|las)\b",
    "vi": r"\b(của|là|cái|chiếc|những)\b",
    "de": r"\b(ein|eine|einen|einem|eines|einer|der|die|das|den|dem|des)\b",
    "ar": "\sال^|ال",
    "nl": r"\b(de|het|een|des|der|den)\b",
    "sv": r"\b(en|ett)\b",
    "da": r"\b(en|et)\b",
    "no": r"\b(en|et|ei)\b",
    "fr": r"\b(le|la|l'|les|du|de|d'|des|un|une|des)",
    "pt": r"\b(o|a|os|as|um|uma|uns|umas)\b",
    "it": r"\b(il|lo|la|l'|i|gli|le|del|dello|della|dell'|dei|degli|degl'|delle|un'|uno|una|un)",
    "fi": r"\b(se|yks|yksi)\b",
    "hu": r"\b(a|az|egy)\b",
}
def map_em_value(prediction, gold_answers, lang):
    """Best exact-match score of *prediction* over all gold answers."""
    best = compute_max_score_over_answers(calculate_em, prediction, gold_answers, lang)
    return float(best)
def map_f1_value(prediction, gold_answers, lang):
    """Best token-F1 score of *prediction* over all gold answers."""
    best = compute_max_score_over_answers(calculate_f1, prediction, gold_answers, lang)
    return float(best)
def get_text_metrics(
    predictions: List[str], gold_answers: List[List[str]], lang: str, serial=True, workers=None
) -> Dict[str, List[float]]:
    """Compute metrics from the predicted and answer texts.

    With serial=False the per-example scoring is fanned out over a
    multiprocessing Pool of *workers* processes.
    """
    pairs = [(predictions[i], gold_answers[i], lang) for i in range(len(predictions))]
    if serial:
        f1_scores = [map_f1_value(*args) for args in pairs]
        em_scores = [map_em_value(*args) for args in pairs]
    else:
        with Pool(workers) as pool:
            f1_scores = pool.starmap(map_f1_value, pairs, chunksize=64)
            em_scores = pool.starmap(map_em_value, pairs, chunksize=64)
    return {"f1": f1_scores, "exact_match": em_scores}
def summarize_default_metrics(
    em_scores, f1_scores, qid_is_answerable, metrics: Optional[Dict[str, float]] = None,
):
    """Summarize EM and F1 based on default threshold.

    All three dicts are keyed by question id; results are percentages
    rounded to two decimals.  If *metrics* is a non-empty dict it is
    updated in place with the summary as well.
    """
    assert set(em_scores.keys()) == set(f1_scores.keys()) == set(qid_is_answerable.keys())

    def pct(values):
        # Mean expressed as a 0-100 percentage, rounded to 2 decimals.
        return round(100.0 * np.mean(list(values)), 2)

    answerable = [qid for qid in em_scores if qid_is_answerable[qid]]
    unanswerable = [qid for qid in em_scores if not qid_is_answerable[qid]]
    summary = OrderedDict(
        [
            ("exact_match", pct(em_scores.values())),
            ("f1", pct(f1_scores.values())),
            ("answerable_exact_match", pct(em_scores[q] for q in answerable)),
            ("answerable_f1", pct(f1_scores[q] for q in answerable)),
            ("unanswerable_exact_match", pct(em_scores[q] for q in unanswerable)),
        ]
    )
    if metrics:
        metrics.update(summary)
    return summary
def aggregate_summaries(dicts):
    """Average numeric values per key across summary dicts (2-dp rounded)."""
    grouped = collections.defaultdict(list)
    for summary in dicts:
        for key, value in summary.items():
            assert isinstance(value, float) or isinstance(value, int)
            grouped[key].append(value)
    return {key: round(float(np.mean(values)), 2) for key, values in grouped.items()}
def whitespace_tokenize(text):
    """Split *text* on runs of whitespace; never yields empty tokens."""
    return text.split()
def mixed_segmentation(text):
    """Character-level segmentation for languages without space-delimited words.

    NOTE(review): temp_str is only ever assigned "", so both
    `if temp_str != ""` branches are dead code and the function effectively
    yields one token per character, whitespace included (callers filter
    whitespace out afterwards).  It looks like an accumulate-then-flush
    step was lost -- confirm against the reference implementation.
    """
    segs_out = []
    temp_str = ""
    for char in text:
        if temp_str != "":  # dead: temp_str is never non-empty
            ss = whitespace_tokenize(temp_str)
            segs_out.extend(ss)
            temp_str = ""
        segs_out.append(char)
    if temp_str != "":  # dead for the same reason
        ss = whitespace_tokenize(temp_str)
        segs_out.extend(ss)
    return segs_out
def normalize_answer_by_language(s, lang):
    """Lower text, remove punctuation, articles and extra whitespace.

    This function is customized by language.
    """
    def _drop_articles(text):
        # Languages absent from the map keep their articles unchanged.
        pattern = ARTICLE_REGEX_BY_LANG.get(lang)
        return re.sub(pattern, " ", text) if pattern else text

    def _collapse_whitespace(text):
        if lang in MIXED_SEGMENTATION_LANGS:
            tokens = mixed_segmentation(text)
        else:
            tokens = whitespace_tokenize(text)
        return " ".join(t for t in tokens if t.strip() != "")

    def _strip_punct(text):
        punct = set(string.punctuation)
        return "".join(ch for ch in text if ch not in punct)

    return _collapse_whitespace(_drop_articles(_strip_punct(s.lower())))
def plot_f1(answerable_f1_by_id, unanswerable_em_by_id, na_probs_by_id, qid_to_has_ans, image_dir):
    """Plot answerable / unanswerable / overall F1 as the no-answer threshold
    sweeps across the dataset; saves f1_plot.png under *image_dir*.

    Questions are visited in increasing no-answer probability; each step
    flips one question from "predicted no-answer" to "predicted answer"
    and the running sums are re-recorded.
    """
    # At threshold 0 every question is predicted no-answer, so we start
    # with full credit for the genuinely unanswerable ones.
    num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k])
    qid_list = sorted(na_probs_by_id, key=lambda k: na_probs_by_id[k])
    question_counts = len(qid_list)
    answerable_f1 = []
    overall_f1 = []
    unanswerable_em = []
    thresholds = []
    sum_answerable_f1 = 0
    sum_unanswerable_em = num_no_ans
    for i, qid in enumerate(qid_list):
        thresholds.append(na_probs_by_id[qid])
        if qid in answerable_f1_by_id:
            # Now predicted as answered: gain this question's F1 credit.
            sum_answerable_f1 += answerable_f1_by_id[qid]
        elif qid in unanswerable_em_by_id:
            # Gold is no-answer but we now answer: em - 1 <= 0 removes credit.
            sum_unanswerable_em += unanswerable_em_by_id[qid] - 1
        else:
            raise ValueError(f"{qid} is not in either answerable or unanswerable predictions")
        # All three curves are fractions of the full dataset size.
        answerable_f1.append(sum_answerable_f1 / question_counts)
        unanswerable_em.append(sum_unanswerable_em / question_counts)
        overall_f1.append((sum_answerable_f1 + sum_unanswerable_em) / question_counts)
    plt.plot(thresholds, answerable_f1, color="green", label="Answerable F1")
    plt.plot(thresholds, unanswerable_em, color="red", label="Unanswerable F1")
    plt.plot(thresholds, overall_f1, color="blue", label="Overall F1")
    plt.legend()
    plt.xlabel("No Answer Threshold")
    plt.ylabel("F1")
    plt.title("F1 plot for different answer types")
    plt.savefig(os.path.join(image_dir, "f1_plot.png"))
    plt.clf()
def calculate_em(prediction, gold_answer, language):
    """1 if prediction and gold answer normalize to the same string, else 0."""
    return int(
        normalize_answer_by_language(prediction, language)
        == normalize_answer_by_language(gold_answer, language)
    )
def calculate_f1(prediction, gold_answer, language):
    """Token-overlap F1 between *prediction* and one gold answer."""
    gold_toks = normalize_answer_by_language(gold_answer, language).split() if gold_answer else []
    pred_toks = normalize_answer_by_language(prediction, language).split() if prediction else []
    overlap = Counter(gold_toks) & Counter(pred_toks)
    num_common = sum(overlap.values())
    # If the prediction or gold_answer is No Answer, F1 is 1 iff they agree.
    if not gold_toks or not pred_toks:
        return int(gold_toks == pred_toks)
    if num_common == 0:
        return 0.0
    precision = num_common / float(len(pred_toks))
    recall = num_common / float(len(gold_toks))
    return 2.0 * precision * recall / (precision + recall)
def compute_max_score_over_answers(metric_fn, prediction, ground_truths, language):
    """Best metric_fn(prediction, truth, language) over all gold answers."""
    assert len(ground_truths) > 0, "Gold truth answers list should never be empty."
    return max(metric_fn(prediction, truth, language) for truth in ground_truths)
def compute_best_score_and_threshold(
    predictions, scores, no_answer_probs, qid_has_answer
) -> Dict[str, float]:
    """Sweep the no-answer threshold and return the best score/threshold.

    Starts at threshold 0 (everything predicted no-answer), then admits
    predictions in increasing no-answer probability, tracking the running
    score; result is a percentage of len(scores).
    """
    best_threshold = 0.0
    # Threshold 0 earns one point per genuinely unanswerable question.
    current = best = sum(1 for qid in qid_has_answer if not qid_has_answer[qid])
    for qid in sorted(no_answer_probs, key=no_answer_probs.get):
        if qid_has_answer[qid]:
            delta = scores[qid]   # gold has an answer and we now predict one
        elif predictions[qid]:
            delta = -1            # gold no-answer, prediction answers: lose credit
        else:
            delta = 0             # both are no-answer
        current += delta
        if current > best:
            best = current
            best_threshold = no_answer_probs[qid]
    return {
        "best_score": 100.0 * best / len(scores),
        "best_threshold": best_threshold,
    }
def apply_no_answer_threshold(scores, no_answer_probs, qid_has_answer, no_answer_thresh):
    """Re-score: above-threshold examples count as predicting no-answer."""
    rescored = {}
    for qid, score in scores.items():
        if no_answer_probs[qid] > no_answer_thresh:
            # Predicted no-answer: full credit iff gold is also no-answer.
            rescored[qid] = float(not qid_has_answer[qid])
        else:
            rescored[qid] = score
    return rescored
def plot_na_prob_histogram(no_answer_probs, qid_list, outdir, name):
    """Histogram of no-answer probabilities for the given qids.

    Bars are weighted to sum to 1; saved as na_prob_histogram_<name>.png
    under *outdir*.
    """
    x = [no_answer_probs[k] for k in qid_list]
    # Equal weights 1/len(x) turn counts into dataset proportions.
    weights = np.ones_like(x) / float(len(x))
    plt.hist(x, weights=weights, bins=20, range=(0.0, 1.0))
    plt.xlabel("No Answer Probability")
    plt.ylabel("Proportion of Dataset")
    plt.title(f"No Answer Probability Histogram: {name}")
    plt.savefig(os.path.join(outdir, f"na_prob_histogram_{name}.png"))
    plt.clf()
| <filename>mkqa_eval_util.py
import collections
import os
import re
import string
from collections import Counter, OrderedDict
from multiprocessing import Pool
from typing import Dict, List, Optional
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
# Non-interactive backend: plots are written to files, never shown
# (must be selected before pyplot draws anything).
matplotlib.use("Agg")
# Languages with no whitespace word boundaries -- tokenized per character.
MIXED_SEGMENTATION_LANGS = ["zh_cn", "zh_hk", "zh_tw", "ja", "th", "km"]
# Per-language regex matching articles stripped during answer
# normalization.  Languages missing from this map keep their articles.
ARTICLE_REGEX_BY_LANG = {
    "en": r"\b(a|an|the)\b",
    "es": r"\b(un|una|unos|unas|el|la|los|las)\b",
    "vi": r"\b(của|là|cái|chiếc|những)\b",
    "de": r"\b(ein|eine|einen|einem|eines|einer|der|die|das|den|dem|des)\b",
    "ar": "\sال^|ال",
    "nl": r"\b(de|het|een|des|der|den)\b",
    "sv": r"\b(en|ett)\b",
    "da": r"\b(en|et)\b",
    "no": r"\b(en|et|ei)\b",
    "fr": r"\b(le|la|l'|les|du|de|d'|des|un|une|des)",
    "pt": r"\b(o|a|os|as|um|uma|uns|umas)\b",
    "it": r"\b(il|lo|la|l'|i|gli|le|del|dello|della|dell'|dei|degli|degl'|delle|un'|uno|una|un)",
    "fi": r"\b(se|yks|yksi)\b",
    "hu": r"\b(a|az|egy)\b",
}
def map_em_value(prediction, gold_answers, lang):
    """Best exact-match score of *prediction* over all gold answers."""
    best = compute_max_score_over_answers(calculate_em, prediction, gold_answers, lang)
    return float(best)
def map_f1_value(prediction, gold_answers, lang):
    """Best token-F1 score of *prediction* over all gold answers."""
    best = compute_max_score_over_answers(calculate_f1, prediction, gold_answers, lang)
    return float(best)
def get_text_metrics(
    predictions: List[str], gold_answers: List[List[str]], lang: str, serial=True, workers=None
) -> Dict[str, List[float]]:
    """Compute metrics from the predicted and answer texts.

    With serial=False the per-example scoring is fanned out over a
    multiprocessing Pool of *workers* processes.
    """
    pairs = [(predictions[i], gold_answers[i], lang) for i in range(len(predictions))]
    if serial:
        f1_scores = [map_f1_value(*args) for args in pairs]
        em_scores = [map_em_value(*args) for args in pairs]
    else:
        with Pool(workers) as pool:
            f1_scores = pool.starmap(map_f1_value, pairs, chunksize=64)
            em_scores = pool.starmap(map_em_value, pairs, chunksize=64)
    return {"f1": f1_scores, "exact_match": em_scores}
def summarize_default_metrics(
    em_scores, f1_scores, qid_is_answerable, metrics: Optional[Dict[str, float]] = None,
):
    """Summarize EM and F1 based on default threshold.

    All three dicts are keyed by question id; results are percentages
    rounded to two decimals.  If *metrics* is a non-empty dict it is
    updated in place with the summary as well.
    """
    assert set(em_scores.keys()) == set(f1_scores.keys()) == set(qid_is_answerable.keys())

    def pct(values):
        # Mean expressed as a 0-100 percentage, rounded to 2 decimals.
        return round(100.0 * np.mean(list(values)), 2)

    answerable = [qid for qid in em_scores if qid_is_answerable[qid]]
    unanswerable = [qid for qid in em_scores if not qid_is_answerable[qid]]
    summary = OrderedDict(
        [
            ("exact_match", pct(em_scores.values())),
            ("f1", pct(f1_scores.values())),
            ("answerable_exact_match", pct(em_scores[q] for q in answerable)),
            ("answerable_f1", pct(f1_scores[q] for q in answerable)),
            ("unanswerable_exact_match", pct(em_scores[q] for q in unanswerable)),
        ]
    )
    if metrics:
        metrics.update(summary)
    return summary
def aggregate_summaries(dicts):
    """Average numeric values per key across summary dicts (2-dp rounded)."""
    grouped = collections.defaultdict(list)
    for summary in dicts:
        for key, value in summary.items():
            assert isinstance(value, float) or isinstance(value, int)
            grouped[key].append(value)
    return {key: round(float(np.mean(values)), 2) for key, values in grouped.items()}
def whitespace_tokenize(text):
    """Split *text* on runs of whitespace; never yields empty tokens."""
    return text.split()
def mixed_segmentation(text):
    """Character-level segmentation for languages without space-delimited words.

    NOTE(review): temp_str is only ever assigned "", so both
    `if temp_str != ""` branches are dead code and the function effectively
    yields one token per character, whitespace included (callers filter
    whitespace out afterwards).  It looks like an accumulate-then-flush
    step was lost -- confirm against the reference implementation.
    """
    segs_out = []
    temp_str = ""
    for char in text:
        if temp_str != "":  # dead: temp_str is never non-empty
            ss = whitespace_tokenize(temp_str)
            segs_out.extend(ss)
            temp_str = ""
        segs_out.append(char)
    if temp_str != "":  # dead for the same reason
        ss = whitespace_tokenize(temp_str)
        segs_out.extend(ss)
    return segs_out
def normalize_answer_by_language(s, lang):
    """Lower text, remove punctuation, articles and extra whitespace.

    This function is customized by language.
    """
    def _drop_articles(text):
        # Languages absent from the map keep their articles unchanged.
        pattern = ARTICLE_REGEX_BY_LANG.get(lang)
        return re.sub(pattern, " ", text) if pattern else text

    def _collapse_whitespace(text):
        if lang in MIXED_SEGMENTATION_LANGS:
            tokens = mixed_segmentation(text)
        else:
            tokens = whitespace_tokenize(text)
        return " ".join(t for t in tokens if t.strip() != "")

    def _strip_punct(text):
        punct = set(string.punctuation)
        return "".join(ch for ch in text if ch not in punct)

    return _collapse_whitespace(_drop_articles(_strip_punct(s.lower())))
def plot_f1(answerable_f1_by_id, unanswerable_em_by_id, na_probs_by_id, qid_to_has_ans, image_dir):
    """Plot answerable / unanswerable / overall F1 as the no-answer threshold
    sweeps across the dataset; saves f1_plot.png under *image_dir*.

    Questions are visited in increasing no-answer probability; each step
    flips one question from "predicted no-answer" to "predicted answer"
    and the running sums are re-recorded.
    """
    # At threshold 0 every question is predicted no-answer, so we start
    # with full credit for the genuinely unanswerable ones.
    num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k])
    qid_list = sorted(na_probs_by_id, key=lambda k: na_probs_by_id[k])
    question_counts = len(qid_list)
    answerable_f1 = []
    overall_f1 = []
    unanswerable_em = []
    thresholds = []
    sum_answerable_f1 = 0
    sum_unanswerable_em = num_no_ans
    for i, qid in enumerate(qid_list):
        thresholds.append(na_probs_by_id[qid])
        if qid in answerable_f1_by_id:
            # Now predicted as answered: gain this question's F1 credit.
            sum_answerable_f1 += answerable_f1_by_id[qid]
        elif qid in unanswerable_em_by_id:
            # Gold is no-answer but we now answer: em - 1 <= 0 removes credit.
            sum_unanswerable_em += unanswerable_em_by_id[qid] - 1
        else:
            raise ValueError(f"{qid} is not in either answerable or unanswerable predictions")
        # All three curves are fractions of the full dataset size.
        answerable_f1.append(sum_answerable_f1 / question_counts)
        unanswerable_em.append(sum_unanswerable_em / question_counts)
        overall_f1.append((sum_answerable_f1 + sum_unanswerable_em) / question_counts)
    plt.plot(thresholds, answerable_f1, color="green", label="Answerable F1")
    plt.plot(thresholds, unanswerable_em, color="red", label="Unanswerable F1")
    plt.plot(thresholds, overall_f1, color="blue", label="Overall F1")
    plt.legend()
    plt.xlabel("No Answer Threshold")
    plt.ylabel("F1")
    plt.title("F1 plot for different answer types")
    plt.savefig(os.path.join(image_dir, "f1_plot.png"))
    plt.clf()
def calculate_em(prediction, gold_answer, language):
    """1 if prediction and gold answer normalize to the same string, else 0."""
    return int(
        normalize_answer_by_language(prediction, language)
        == normalize_answer_by_language(gold_answer, language)
    )
def calculate_f1(prediction, gold_answer, language):
    """Token-overlap F1 between *prediction* and one gold answer."""
    gold_toks = normalize_answer_by_language(gold_answer, language).split() if gold_answer else []
    pred_toks = normalize_answer_by_language(prediction, language).split() if prediction else []
    overlap = Counter(gold_toks) & Counter(pred_toks)
    num_common = sum(overlap.values())
    # If the prediction or gold_answer is No Answer, F1 is 1 iff they agree.
    if not gold_toks or not pred_toks:
        return int(gold_toks == pred_toks)
    if num_common == 0:
        return 0.0
    precision = num_common / float(len(pred_toks))
    recall = num_common / float(len(gold_toks))
    return 2.0 * precision * recall / (precision + recall)
def compute_max_score_over_answers(metric_fn, prediction, ground_truths, language):
    """Best metric_fn(prediction, truth, language) over all gold answers."""
    assert len(ground_truths) > 0, "Gold truth answers list should never be empty."
    return max(metric_fn(prediction, truth, language) for truth in ground_truths)
def compute_best_score_and_threshold(
    predictions, scores, no_answer_probs, qid_has_answer
) -> Dict[str, float]:
    """Sweep the no-answer threshold and return the best score/threshold.

    Starts at threshold 0 (everything predicted no-answer), then admits
    predictions in increasing no-answer probability, tracking the running
    score; result is a percentage of len(scores).
    """
    best_threshold = 0.0
    # Threshold 0 earns one point per genuinely unanswerable question.
    current = best = sum(1 for qid in qid_has_answer if not qid_has_answer[qid])
    for qid in sorted(no_answer_probs, key=no_answer_probs.get):
        if qid_has_answer[qid]:
            delta = scores[qid]   # gold has an answer and we now predict one
        elif predictions[qid]:
            delta = -1            # gold no-answer, prediction answers: lose credit
        else:
            delta = 0             # both are no-answer
        current += delta
        if current > best:
            best = current
            best_threshold = no_answer_probs[qid]
    return {
        "best_score": 100.0 * best / len(scores),
        "best_threshold": best_threshold,
    }
def apply_no_answer_threshold(scores, no_answer_probs, qid_has_answer, no_answer_thresh):
    """Re-score: above-threshold examples count as predicting no-answer."""
    rescored = {}
    for qid, score in scores.items():
        if no_answer_probs[qid] > no_answer_thresh:
            # Predicted no-answer: full credit iff gold is also no-answer.
            rescored[qid] = float(not qid_has_answer[qid])
        else:
            rescored[qid] = score
    return rescored
def plot_na_prob_histogram(no_answer_probs, qid_list, outdir, name):
    """Histogram of no-answer probabilities for the given qids.

    Bars are weighted to sum to 1; saved as na_prob_histogram_<name>.png
    under *outdir*.
    """
    x = [no_answer_probs[k] for k in qid_list]
    # Equal weights 1/len(x) turn counts into dataset proportions.
    weights = np.ones_like(x) / float(len(x))
    plt.hist(x, weights=weights, bins=20, range=(0.0, 1.0))
    plt.xlabel("No Answer Probability")
    plt.ylabel("Proportion of Dataset")
    plt.title(f"No Answer Probability Histogram: {name}")
    plt.savefig(os.path.join(outdir, f"na_prob_histogram_{name}.png"))
    plt.clf()
| en | 0.830271 | Compute metrics from the predicted and answer texts. Summarize EM and F1 based on default threshold Lower text, remove punctuation, articles and extra whitespace. This function is customized by language. # If the prediction or gold_answer is No Answer, then F1 is 1 if they agree, 0 otherwise # Begin at threshold of 0, where all predictions are No Answer. # Gold truth is answer, and we predict an answer # If gold truth is No Answer, but we predict an answer # If gold truth and prediction are both No Answer # Update best score and threshold if new max value | 2.148356 | 2 |
poloniex/client.py | t4ko/closegaps_tradebot | 13 | 6617159 | <gh_stars>10-100
import pycurl
import json
import time
import hmac, hashlib
from io import BytesIO
def create_time_stamp(datestr, format="%Y-%m-%d %H:%M:%S"):
    """Parse *datestr* with *format* and return a local-time Unix timestamp."""
    parsed = time.strptime(datestr, format)
    return time.mktime(parsed)
class poloniex:
    """Minimal Poloniex REST client built on pycurl.

    Public (unauthenticated) endpoints are fetched via GET from /public;
    trading endpoints are POSTed to /tradingApi with an HMAC-SHA512
    signature of the urlencoded body in the 'Sign' header.
    NOTE(review): request parameters are sent with snake_case keys
    ('currency_pair', 'order_number'); Poloniex's HTTP API documents
    camelCase names (currencyPair, orderNumber) -- confirm against the
    server before relying on these calls.
    """

    API_URL = 'https://192.168.127.12'  # NOTE(review): never used below
    API_VERSION = 'v1'                  # NOTE(review): never used below

    def __init__(self, APIKey, Secret):
        self.APIKey = APIKey
        self.Secret = Secret
        self.session = self._init_session()

    def _init_session(self):
        """Create one reusable curl handle (keep-alive, Nagle disabled)."""
        session = pycurl.Curl()
        # NOTE(review): disabling peer/host TLS verification is insecure
        # for a trading API; preserved as-is to avoid a behavior change.
        session.setopt(pycurl.SSL_VERIFYPEER, 0)
        session.setopt(pycurl.SSL_VERIFYHOST, 0)
        session.setopt(pycurl.TCP_NODELAY, 1)
        session.setopt(pycurl.TCP_KEEPALIVE, 1)
        return session

    def _order_params_for_sig(self, data):
        """Serialize *data* as 'k=v&...' with keys sorted (the signed form).

        NOTE(review): values are not percent-encoded -- fine for the
        numeric/pair parameters used here, but not general urlencoding.
        """
        pairs = ["%s=%s" % (key, data[key]) for key in sorted(data.keys())]
        return '&'.join(pairs)

    def post_process(self, before):
        """Add a float 'timestamp' next to any 'datetime' string in 'return'."""
        after = before
        # Add timestamps if there isnt one but is a datetime
        if 'return' in after and isinstance(after['return'], list):
            # Fix: xrange is Python-2 only (NameError on Python 3).
            for x in range(0, len(after['return'])):
                if isinstance(after['return'][x], dict):
                    if 'datetime' in after['return'][x] and 'timestamp' not in after['return'][x]:
                        after['return'][x]['timestamp'] = float(create_time_stamp(after['return'][x]['datetime']))
        return after

    def api_query(self, method, command, req=None):
        """Issue one API call and return the decoded JSON response.

        method  -- "get" for public endpoints, anything else for trading
        command -- API command name, sent as the 'command' parameter
        req     -- extra request parameters (a passed-in dict is still
                   mutated, as before)

        Fix: the original default req={} was a shared mutable dict, so a
        trading call's leftover 'nonce' leaked into later public calls.
        """
        if req is None:
            req = {}
        session = self.session
        # Prepare the basic headers
        headers = ['User-Agent: ']
        must_process = False
        req['command'] = command
        if method == "get":
            url = "https://poloniex.com/public"
            data_string = self._order_params_for_sig(req)
            if data_string != "":
                url = "%s?%s" % (url, data_string)
            session.setopt(pycurl.HTTPGET, True)
        else:
            must_process = True
            # Millisecond nonce; must be strictly increasing per API key.
            req['nonce'] = int(time.time() * 1000)
            data_string = self._order_params_for_sig(req)
            # Fix: hmac.new requires bytes on Python 3 (Python 2 accepted
            # str); encode defensively so both interpreters work.
            key = self.Secret.encode('utf-8') if isinstance(self.Secret, str) else self.Secret
            sign = hmac.new(key, data_string.encode('utf-8'), hashlib.sha512).hexdigest()
            url = "https://poloniex.com/tradingApi"
            session.setopt(pycurl.POSTFIELDS, data_string)
            session.setopt(pycurl.POST, True)
            headers.append('Sign: %s' % sign)
            headers.append('Key: %s' % self.APIKey)
        # Apply the settings
        buff = BytesIO()
        session.setopt(pycurl.HTTPHEADER, headers)
        session.setopt(pycurl.URL, url)
        session.setopt(pycurl.WRITEDATA, buff)
        session.perform()
        # Handle the response
        json_ret = json.loads(buff.getvalue())
        if must_process:
            return self.post_process(json_ret)
        return json_ret

    # ---- Public (unauthenticated) endpoints ----

    def return_ticker(self):
        """Ticker data for all markets."""
        return self.api_query("get", "returnTicker")

    def return_24volume(self):
        """24-hour volume for all markets."""
        return self.api_query("get", "return24Volume")

    def return_order_book(self, currency_pair, depth=1):
        """Order book for *currency_pair* with *depth* levels per side."""
        return self.api_query(
            "get",
            "returnOrderBook",
            {
                'currency_pair': currency_pair,
                'depth': '%d' % depth
            }
        )

    def return_market_trade_history(self, currency_pair):
        """Recent public trades for *currency_pair*."""
        return self.api_query(
            "get",
            "returnTradeHistory",
            {
                'currency_pair': currency_pair
            }
        )

    def return_chart_data(self, currency_pair, start, end):
        """Candlestick data for *currency_pair* between Unix times start/end."""
        return self.api_query(
            "get",
            "returnChartData",
            {
                'currency_pair': currency_pair,
                'start': start,
                'end': end
            }
        )

    def return_currencies(self):
        """Information about all listed currencies."""
        return self.api_query("get", "returnCurrencies")

    def return_loan_orders(self):
        """Open loan orders.

        Fix: the command string was misspelled 'returnLoadOrders'; the
        method name says Loan, so the endpoint name is corrected to match.
        """
        return self.api_query("get", "returnLoanOrders")

    # ---- Trading (signed) endpoints ----

    def return_balances(self):
        """All account balances, e.g. {"BTC": "0.59098578", "LTC": ...}."""
        return self.api_query(
            'post',
            'returnBalances'
        )

    def return_open_orders(self, currency_pair):
        """Your open orders for *currency_pair* (e.g. "BTC_XCP").

        Each order carries: order_number, type (buy/sell), rate, amount,
        total (rate * amount).
        """
        return self.api_query(
            'post',
            'returnOpenOrders',
            {
                "currency_pair": currency_pair
            }
        )

    def return_trade_history(self, currency_pair):
        """Your trade history for *currency_pair*.

        Rows carry: date ("2014-02-19 03:44:59"), rate, amount, total, type.
        """
        return self.api_query(
            'post',
            'returnTradeHistory',
            {
                "currency_pair": currency_pair
            }
        )

    def buy(self, currency_pair, rate, amount):
        """Place a buy order; returns the order number on success."""
        return self.api_query(
            'post',
            'buy',
            {
                "currency_pair": currency_pair,
                "rate": rate,
                "amount": amount
            }
        )

    def sell(self, currency_pair, rate, amount):
        """Place a sell order; returns the order number on success."""
        return self.api_query(
            'post',
            'sell',
            {
                "currency_pair": currency_pair,
                "rate": rate,
                "amount": amount
            }
        )

    def cancel(self, currency_pair, order_number):
        """Cancel an open order; response indicates success (1) or not (0)."""
        return self.api_query(
            'post',
            'cancelOrder',
            {
                "currency_pair": currency_pair,
                "order_number": order_number
            }
        )

    def withdraw(self, currency, amount, address):
        """Withdraw *amount* of *currency* to *address* without email confirm.

        Requires the withdrawal privilege on the API key.  Sample output:
        {"response": "Withdrew 2398 NXT."}
        """
        return self.api_query(
            'post',
            'withdraw',
            {
                "currency": currency,
                "amount": amount,
                "address": address
            }
        )
import json
import time
import hmac, hashlib
from io import BytesIO
def create_time_stamp(datestr, format="%Y-%m-%d %H:%M:%S"):
    """Parse *datestr* with *format* and return a local-time Unix timestamp."""
    parsed = time.strptime(datestr, format)
    return time.mktime(parsed)
class poloniex:
API_URL = 'https://192.168.127.12'
API_VERSION = 'v1'
def __init__(self, APIKey, Secret):
self.APIKey = APIKey
self.Secret = Secret
self.session = self._init_session()
def _init_session(self):
session = pycurl.Curl()
session.setopt(pycurl.SSL_VERIFYPEER, 0)
session.setopt(pycurl.SSL_VERIFYHOST, 0)
session.setopt(pycurl.TCP_NODELAY, 1)
session.setopt(pycurl.TCP_KEEPALIVE, 1)
return session
def _order_params_for_sig(self, data):
strs = []
for key in sorted(data.keys()):
strs.append("%s=%s" % (key, data[key]))
return '&'.join(strs)
def post_process(self, before):
after = before
# Add timestamps if there isnt one but is a datetime
if 'return' in after and isinstance(after['return'], list):
for x in xrange(0, len(after['return'])):
if isinstance(after['return'][x], dict):
if 'datetime' in after['return'][x] and 'timestamp' not in after['return'][x]:
after['return'][x]['timestamp'] = float(create_time_stamp(after['return'][x]['datetime']))
return after
def api_query(self, method, command, req={}):
session = self.session
# Prepare the basic headers
headers = ['User-Agent: ']
must_process = False
req['command'] = command
if method == "get":
url = "https://poloniex.com/public"
data_string = self._order_params_for_sig(req)
if data_string != "":
url = "%s?%s" % (url, data_string)
session.setopt(pycurl.HTTPGET, True)
else:
must_process = True
req['nonce'] = int(time.time() * 1000)
data_string = self._order_params_for_sig(req)
sign = hmac.new(self.Secret, data_string, hashlib.sha512).hexdigest()
url = "https://poloniex.com/tradingApi"
session.setopt(pycurl.POSTFIELDS, data_string)
session.setopt(pycurl.POST, True)
headers.append('Sign: %s' % sign)
headers.append('Key: %s' % self.APIKey)
# Apply the settings
buff = BytesIO()
session.setopt(pycurl.HTTPHEADER, headers)
session.setopt(pycurl.URL, url)
session.setopt(pycurl.WRITEDATA, buff)
session.perform()
# Handle the response
json_ret = json.loads(buff.getvalue())
if must_process:
return self.post_process(json_ret)
return json_ret
def return_ticker(self):
return self.api_query(
"get",
"returnTicker"
)
def return_24volume(self):
return self.api_query(
"get",
"return24Volume"
)
def return_order_book(self, currency_pair, depth=1):
return self.api_query(
"get",
"returnOrderBook",
{
'currency_pair': currency_pair,
'depth': '%d' % depth
}
)
def return_market_trade_history(self, currency_pair):
return self.api_query(
"get",
"returnTradeHistory",
{
'currency_pair': currency_pair
}
)
def return_chart_data(self, currency_pair, start, end):
return self.api_query(
"get",
"returnChartData",
{
'currency_pair': currency_pair,
'start': start,
'end': end
}
)
def return_currencies(self):
return self.api_query("get", "returnCurrencies")
def return_loan_orders(self):
return self.api_query("get", "returnLoadOrders")
# Returns all of your balances.
# Outputs:
# {"BTC":"0.59098578","LTC":"3.31117268", ... }
def return_balances(self):
return self.api_query(
'post',
'returnBalances'
)
# Returns your open orders for a given market, specified by the "currency_pair" POST parameter, e.g. "BTC_XCP"
# Inputs:
# currency_pair The currency pair e.g. "BTC_XCP"
# Outputs:
# order_number The order number
# type sell or buy
# rate Price the order is selling or buying at
# Amount Quantity of order
# total Total value of order (price * quantity)
def return_open_orders(self, currency_pair):
return self.api_query(
'post',
'returnOpenOrders',
{
"currency_pair": currency_pair
}
)
# Returns your trade history for a given market, specified by the "currency_pair" POST parameter
# Inputs:
# currency_pair The currency pair e.g. "BTC_XCP"
# Outputs:
# date Date in the form: "2014-02-19 03:44:59"
# rate Price the order is selling or buying at
# amount Quantity of order
# total Total value of order (price * quantity)
# type sell or buy
def return_trade_history(self, currency_pair):
return self.api_query(
'post',
'returnTradeHistory',
{
"currency_pair": currency_pair
}
)
# Places a buy order in a given market. Required POST parameters are "currency_pair", "rate", and "amount". If successful, the method will return the order number.
# Inputs:
# currency_pair The currency pair
# rate price the order is buying at
# amount Amount of coins to buy
# Outputs:
# order_number The order number
def buy(self, currency_pair, rate, amount):
return self.api_query(
'post',
'buy',
{
"currency_pair": currency_pair,
"rate": rate,
"amount": amount
}
)
# Places a sell order in a given market. Required POST parameters are "currency_pair", "rate", and "amount". If successful, the method will return the order number.
# Inputs:
# currency_pair The currency pair
# rate price the order is selling at
# amount Amount of coins to sell
# Outputs:
# order_number The order number
def sell(self, currency_pair, rate, amount):
return self.api_query(
'post',
'sell',
{
"currency_pair": currency_pair,
"rate": rate,
"amount": amount
}
)
# Cancels an order you have placed in a given market. Required POST parameters are "currency_pair" and "order_number".
# Inputs:
# currency_pair The currency pair
# order_number The order number to cancel
# Outputs:
# succes 1 or 0
def cancel(self, currency_pair, order_number):
return self.api_query(
'post',
'cancelOrder',
{
"currency_pair": currency_pair,
"order_number": order_number
}
)
# Immediately places a withdrawal for a given currency, with no email confirmation. In order to use this method, the withdrawal privilege must be enabled for your API key. Required POST parameters are "currency", "amount", and "address". Sample output: {"response":"Withdrew 2398 NXT."}
# Inputs:
# currency The currency to withdraw
# amount The amount of this coin to withdraw
# address The withdrawal address
# Outputs:
# response Text containing message about the withdrawal
def withdraw(self, currency, amount, address):
return self.api_query(
'post',
'withdraw',
{
"currency": currency,
"amount": amount,
"address": address
}
) | en | 0.757305 | # Add timestamps if there isnt one but is a datetime # Prepare the basic headers # Apply the settings # Handle the response # Returns all of your balances. # Outputs: # {"BTC":"0.59098578","LTC":"3.31117268", ... } # Returns your open orders for a given market, specified by the "currency_pair" POST parameter, e.g. "BTC_XCP" # Inputs: # currency_pair The currency pair e.g. "BTC_XCP" # Outputs: # order_number The order number # type sell or buy # rate Price the order is selling or buying at # Amount Quantity of order # total Total value of order (price * quantity) # Returns your trade history for a given market, specified by the "currency_pair" POST parameter # Inputs: # currency_pair The currency pair e.g. "BTC_XCP" # Outputs: # date Date in the form: "2014-02-19 03:44:59" # rate Price the order is selling or buying at # amount Quantity of order # total Total value of order (price * quantity) # type sell or buy # Places a buy order in a given market. Required POST parameters are "currency_pair", "rate", and "amount". If successful, the method will return the order number. # Inputs: # currency_pair The currency pair # rate price the order is buying at # amount Amount of coins to buy # Outputs: # order_number The order number # Places a sell order in a given market. Required POST parameters are "currency_pair", "rate", and "amount". If successful, the method will return the order number. # Inputs: # currency_pair The currency pair # rate price the order is selling at # amount Amount of coins to sell # Outputs: # order_number The order number # Cancels an order you have placed in a given market. Required POST parameters are "currency_pair" and "order_number". # Inputs: # currency_pair The currency pair # order_number The order number to cancel # Outputs: # succes 1 or 0 # Immediately places a withdrawal for a given currency, with no email confirmation. In order to use this method, the withdrawal privilege must be enabled for your API key. 
Required POST parameters are "currency", "amount", and "address". Sample output: {"response":"Withdrew 2398 NXT."} # Inputs: # currency The currency to withdraw # amount The amount of this coin to withdraw # address The withdrawal address # Outputs: # response Text containing message about the withdrawal | 2.583699 | 3 |
tools/tools/ccv.py | jpanikulam/sonder | 1 | 6617160 | <reponame>jpanikulam/sonder
import sympy
import esym
if __name__ == '__main__':
p1 = esym.vector('p1')
c1 = esym.vector('c1')
c2 = esym.vector('c2')
n1 = esym.vector('n1')
n2 = esym.vector('n2')
r1 = sympy.Symbol('r1', positive=True)
r2 = sympy.Symbol('r2', positive=True)
p2 = esym.vector('p2')
facts = [
(esym.norm(c1 - p1) ** 2) - (r1 ** 2),
(esym.norm(c2 - p1) ** 2) - (r2 ** 2),
(p1 - c1).dot(n1),
(p1 - c2).dot(n2),
esym.norm(c1 - p2) - r1,
esym.norm(c2 - p2) - r2,
(p2 - c1).dot(n1),
(p2 - c2).dot(n2),
]
t = sympy.Symbol('t')
print facts[0]
facts_2 = [
esym.norm((n1 * t) - c1) - r1,
# esym.norm((n1 * t) - c2) - r2,
]
nrm1 = esym.norm((n1 * t) - c1)
nrm2 = esym.norm((n1 * t) - c2)
soln = sympy.solve(facts_2, t)
print soln
import IPython; IPython.embed() | import sympy
import esym
if __name__ == '__main__':
p1 = esym.vector('p1')
c1 = esym.vector('c1')
c2 = esym.vector('c2')
n1 = esym.vector('n1')
n2 = esym.vector('n2')
r1 = sympy.Symbol('r1', positive=True)
r2 = sympy.Symbol('r2', positive=True)
p2 = esym.vector('p2')
facts = [
(esym.norm(c1 - p1) ** 2) - (r1 ** 2),
(esym.norm(c2 - p1) ** 2) - (r2 ** 2),
(p1 - c1).dot(n1),
(p1 - c2).dot(n2),
esym.norm(c1 - p2) - r1,
esym.norm(c2 - p2) - r2,
(p2 - c1).dot(n1),
(p2 - c2).dot(n2),
]
t = sympy.Symbol('t')
print facts[0]
facts_2 = [
esym.norm((n1 * t) - c1) - r1,
# esym.norm((n1 * t) - c2) - r2,
]
nrm1 = esym.norm((n1 * t) - c1)
nrm2 = esym.norm((n1 * t) - c2)
soln = sympy.solve(facts_2, t)
print soln
import IPython; IPython.embed() | es | 0.330563 | # esym.norm((n1 * t) - c2) - r2, | 2.797474 | 3 |
dlgo/zobrist.py | abjordan/dlgo | 0 | 6617161 | <gh_stars>0
from .gotypes import Player, Point
__all__ = ['HASH_CODE', 'EMPTY_BOARD']
HASH_CODE = {
(Point(row=1, col=1), Player.black): 3358364442651403282,
(Point(row=1, col=1), Player.white): 7470933530584932282,
(Point(row=1, col=2), Player.black): 3908477744752221130,
(Point(row=1, col=2), Player.white): 1562668501702532755,
(Point(row=1, col=3), Player.black): 2935795650793747641,
(Point(row=1, col=3), Player.white): 6785400224249859256,
(Point(row=1, col=4), Player.black): 8009767182471757917,
(Point(row=1, col=4), Player.white): 35550824089951956,
(Point(row=1, col=5), Player.black): 6773097344601636250,
(Point(row=1, col=5), Player.white): 8847166111597618811,
(Point(row=1, col=6), Player.black): 8670100078235412825,
(Point(row=1, col=6), Player.white): 7008955978465777678,
(Point(row=1, col=7), Player.black): 5464139211459645769,
(Point(row=1, col=7), Player.white): 8171595736379093123,
(Point(row=1, col=8), Player.black): 351909656387621001,
(Point(row=1, col=8), Player.white): 7238015216612105884,
(Point(row=1, col=9), Player.black): 3396724620650770147,
(Point(row=1, col=9), Player.white): 4209004723882000576,
(Point(row=1, col=10), Player.black): 4842779035545402021,
(Point(row=1, col=10), Player.white): 6886319625847032459,
(Point(row=1, col=11), Player.black): 524803705257336124,
(Point(row=1, col=11), Player.white): 1056449941733952409,
(Point(row=1, col=12), Player.black): 6489365225745754382,
(Point(row=1, col=12), Player.white): 2437438899944803018,
(Point(row=1, col=13), Player.black): 8947346722438719161,
(Point(row=1, col=13), Player.white): 2599070081342983667,
(Point(row=1, col=14), Player.black): 2930941330745387947,
(Point(row=1, col=14), Player.white): 2007749035277495621,
(Point(row=1, col=15), Player.black): 6065841185823253414,
(Point(row=1, col=15), Player.white): 193120643649747335,
(Point(row=1, col=16), Player.black): 2588400943218332221,
(Point(row=1, col=16), Player.white): 2029576827960769569,
(Point(row=1, col=17), Player.black): 3721163493106219606,
(Point(row=1, col=17), Player.white): 10216008672517638,
(Point(row=1, col=18), Player.black): 4912989059172363883,
(Point(row=1, col=18), Player.white): 5569829595169653615,
(Point(row=1, col=19), Player.black): 2138513457095070154,
(Point(row=1, col=19), Player.white): 1320455088185461752,
(Point(row=2, col=1), Player.black): 7136038498507050230,
(Point(row=2, col=1), Player.white): 6372693857312078170,
(Point(row=2, col=2), Player.black): 1876075245466805558,
(Point(row=2, col=2), Player.white): 5869045570448901886,
(Point(row=2, col=3), Player.black): 2243429705853894366,
(Point(row=2, col=3), Player.white): 9107310619990862130,
(Point(row=2, col=4), Player.black): 6974778138575169367,
(Point(row=2, col=4), Player.white): 6106156854666751401,
(Point(row=2, col=5), Player.black): 7556636260769482629,
(Point(row=2, col=5), Player.white): 3084523102838529514,
(Point(row=2, col=6), Player.black): 7111911632335533472,
(Point(row=2, col=6), Player.white): 6028072164826460819,
(Point(row=2, col=7), Player.black): 1915335096391320529,
(Point(row=2, col=7), Player.white): 8061998162183177218,
(Point(row=2, col=8), Player.black): 4016149980235550064,
(Point(row=2, col=8), Player.white): 7047649178198708366,
(Point(row=2, col=9), Player.black): 4708166101478557489,
(Point(row=2, col=9), Player.white): 2215219507826833770,
(Point(row=2, col=10), Player.black): 3340274879306913100,
(Point(row=2, col=10), Player.white): 1959796252649520039,
(Point(row=2, col=11), Player.black): 6105253527464181164,
(Point(row=2, col=11), Player.white): 5464577752823254244,
(Point(row=2, col=12), Player.black): 5729047054197537807,
(Point(row=2, col=12), Player.white): 198009915777229217,
(Point(row=2, col=13), Player.black): 6601814483141386855,
(Point(row=2, col=13), Player.white): 5869590559312693351,
(Point(row=2, col=14), Player.black): 8628386210791566486,
(Point(row=2, col=14), Player.white): 4877653724634908127,
(Point(row=2, col=15), Player.black): 7890344071649023278,
(Point(row=2, col=15), Player.white): 8998144387626287997,
(Point(row=2, col=16), Player.black): 913453038731891113,
(Point(row=2, col=16), Player.white): 4209879683093159541,
(Point(row=2, col=17), Player.black): 7407760281507574211,
(Point(row=2, col=17), Player.white): 3661338262012502056,
(Point(row=2, col=18), Player.black): 8068287257985125442,
(Point(row=2, col=18), Player.white): 6356154620615642781,
(Point(row=2, col=19), Player.black): 6774018702842503738,
(Point(row=2, col=19), Player.white): 5902567971825953990,
(Point(row=3, col=1), Player.black): 4048345810909377744,
(Point(row=3, col=1), Player.white): 9196143852617131564,
(Point(row=3, col=2), Player.black): 6975287365584650059,
(Point(row=3, col=2), Player.white): 6899074477900044532,
(Point(row=3, col=3), Player.black): 7772648527475620349,
(Point(row=3, col=3), Player.white): 2310556956700779283,
(Point(row=3, col=4), Player.black): 7451894683190357140,
(Point(row=3, col=4), Player.white): 2178858547315922304,
(Point(row=3, col=5), Player.black): 344601012743458982,
(Point(row=3, col=5), Player.white): 7439600122260057570,
(Point(row=3, col=6), Player.black): 3587854205726735566,
(Point(row=3, col=6), Player.white): 8901337557448145585,
(Point(row=3, col=7), Player.black): 1573954452452139533,
(Point(row=3, col=7), Player.white): 3403114058165668101,
(Point(row=3, col=8), Player.black): 8367254702727005883,
(Point(row=3, col=8), Player.white): 6955872926229753235,
(Point(row=3, col=9), Player.black): 3619111347943775729,
(Point(row=3, col=9), Player.white): 6386208164298176371,
(Point(row=3, col=10), Player.black): 3148441976799681341,
(Point(row=3, col=10), Player.white): 7856395865548432460,
(Point(row=3, col=11), Player.black): 6409076893157255060,
(Point(row=3, col=11), Player.white): 124395272292207998,
(Point(row=3, col=12), Player.black): 3143880823288471964,
(Point(row=3, col=12), Player.white): 1531234352959259847,
(Point(row=3, col=13), Player.black): 7399262572288059981,
(Point(row=3, col=13), Player.white): 1551835472134981779,
(Point(row=3, col=14), Player.black): 6966735695951350693,
(Point(row=3, col=14), Player.white): 2226180808931432928,
(Point(row=3, col=15), Player.black): 7950221191774108447,
(Point(row=3, col=15), Player.white): 5372070084430614066,
(Point(row=3, col=16), Player.black): 4932087973874359151,
(Point(row=3, col=16), Player.white): 4861217845376437768,
(Point(row=3, col=17), Player.black): 6051641886456887859,
(Point(row=3, col=17), Player.white): 8682525157318542658,
(Point(row=3, col=18), Player.black): 5219460463872050236,
(Point(row=3, col=18), Player.white): 2006674587315476121,
(Point(row=3, col=19), Player.black): 1858501859510840375,
(Point(row=3, col=19), Player.white): 1149751197252921630,
(Point(row=4, col=1), Player.black): 2398662377782215974,
(Point(row=4, col=1), Player.white): 2022847834450689858,
(Point(row=4, col=2), Player.black): 5069135804981210434,
(Point(row=4, col=2), Player.white): 8163668278688808633,
(Point(row=4, col=3), Player.black): 6861279557339037816,
(Point(row=4, col=3), Player.white): 403173259000503783,
(Point(row=4, col=4), Player.black): 6742465731882952113,
(Point(row=4, col=4), Player.white): 7236553203790249172,
(Point(row=4, col=5), Player.black): 237100204552564806,
(Point(row=4, col=5), Player.white): 2253480256099219716,
(Point(row=4, col=6), Player.black): 8653445792663566538,
(Point(row=4, col=6), Player.white): 8332972213608264449,
(Point(row=4, col=7), Player.black): 5078569967117087361,
(Point(row=4, col=7), Player.white): 1557213285863104694,
(Point(row=4, col=8), Player.black): 2827551827228022681,
(Point(row=4, col=8), Player.white): 3349858998829898310,
(Point(row=4, col=9), Player.black): 8066286474284701662,
(Point(row=4, col=9), Player.white): 3028979353583824304,
(Point(row=4, col=10), Player.black): 7922214592449460781,
(Point(row=4, col=10), Player.white): 4401701324525596990,
(Point(row=4, col=11), Player.black): 3886897718507761264,
(Point(row=4, col=11), Player.white): 6460451817414461410,
(Point(row=4, col=12), Player.black): 7028395470388266531,
(Point(row=4, col=12), Player.white): 6397147923820227503,
(Point(row=4, col=13), Player.black): 8673992362436018560,
(Point(row=4, col=13), Player.white): 558308551984182854,
(Point(row=4, col=14), Player.black): 5813119482407248397,
(Point(row=4, col=14), Player.white): 486055615340767652,
(Point(row=4, col=15), Player.black): 1429987174565568248,
(Point(row=4, col=15), Player.white): 8412546385952468176,
(Point(row=4, col=16), Player.black): 3611103175382149140,
(Point(row=4, col=16), Player.white): 6903982908273117840,
(Point(row=4, col=17), Player.black): 3592518960087764980,
(Point(row=4, col=17), Player.white): 7828295962542054267,
(Point(row=4, col=18), Player.black): 4525724720334310980,
(Point(row=4, col=18), Player.white): 267756311306906827,
(Point(row=4, col=19), Player.black): 6370693981989277912,
(Point(row=4, col=19), Player.white): 6786204491859299846,
(Point(row=5, col=1), Player.black): 819992008768999570,
(Point(row=5, col=1), Player.white): 5766528066731209464,
(Point(row=5, col=2), Player.black): 140046101684148532,
(Point(row=5, col=2), Player.white): 2432801741437513948,
(Point(row=5, col=3), Player.black): 849966200711015504,
(Point(row=5, col=3), Player.white): 3608309475012801311,
(Point(row=5, col=4), Player.black): 7768307362647367400,
(Point(row=5, col=4), Player.white): 1401509817423818336,
(Point(row=5, col=5), Player.black): 978308582591520472,
(Point(row=5, col=5), Player.white): 8157243956377698400,
(Point(row=5, col=6), Player.black): 6729878835345781212,
(Point(row=5, col=6), Player.white): 6046318031522706143,
(Point(row=5, col=7), Player.black): 3931891382545183014,
(Point(row=5, col=7), Player.white): 4565326691920452325,
(Point(row=5, col=8), Player.black): 3751207472807055819,
(Point(row=5, col=8), Player.white): 1750985725059676286,
(Point(row=5, col=9), Player.black): 4739768336697872363,
(Point(row=5, col=9), Player.white): 4663188651196470664,
(Point(row=5, col=10), Player.black): 8709261564447766898,
(Point(row=5, col=10), Player.white): 1989905579540671866,
(Point(row=5, col=11), Player.black): 8386773075655989811,
(Point(row=5, col=11), Player.white): 9126047819967038123,
(Point(row=5, col=12), Player.black): 3638879086648420737,
(Point(row=5, col=12), Player.white): 5802750860279031868,
(Point(row=5, col=13), Player.black): 6302226771349348014,
(Point(row=5, col=13), Player.white): 391189191613921792,
(Point(row=5, col=14), Player.black): 7508186241717994621,
(Point(row=5, col=14), Player.white): 2980849813380702168,
(Point(row=5, col=15), Player.black): 7178012196189677148,
(Point(row=5, col=15), Player.white): 3152954361793989785,
(Point(row=5, col=16), Player.black): 9046974077767159071,
(Point(row=5, col=16), Player.white): 5896322307150774831,
(Point(row=5, col=17), Player.black): 8644164651450999910,
(Point(row=5, col=17), Player.white): 8372756108283824593,
(Point(row=5, col=18), Player.black): 6205959058144257403,
(Point(row=5, col=18), Player.white): 3658039270432317209,
(Point(row=5, col=19), Player.black): 815296926362577910,
(Point(row=5, col=19), Player.white): 7244805626828897449,
(Point(row=6, col=1), Player.black): 1711429897047583139,
(Point(row=6, col=1), Player.white): 3796916877444870134,
(Point(row=6, col=2), Player.black): 9178534209148476838,
(Point(row=6, col=2), Player.white): 910115998406578065,
(Point(row=6, col=3), Player.black): 2165067175098864523,
(Point(row=6, col=3), Player.white): 2519141993243517052,
(Point(row=6, col=4), Player.black): 5978107613277318827,
(Point(row=6, col=4), Player.white): 109336010425203641,
(Point(row=6, col=5), Player.black): 4943941116244090107,
(Point(row=6, col=5), Player.white): 132140693945639607,
(Point(row=6, col=6), Player.black): 6045300607696569417,
(Point(row=6, col=6), Player.white): 6277531507132282289,
(Point(row=6, col=7), Player.black): 6699260733919604273,
(Point(row=6, col=7), Player.white): 5635216845699331450,
(Point(row=6, col=8), Player.black): 2635626576307750647,
(Point(row=6, col=8), Player.white): 165956464313047071,
(Point(row=6, col=9), Player.black): 2633270617992739718,
(Point(row=6, col=9), Player.white): 9177199448496737764,
(Point(row=6, col=10), Player.black): 4633272803934465732,
(Point(row=6, col=10), Player.white): 2304944580014465590,
(Point(row=6, col=11), Player.black): 7159833061097635087,
(Point(row=6, col=11), Player.white): 4817018317389969148,
(Point(row=6, col=12), Player.black): 499241394719117871,
(Point(row=6, col=12), Player.white): 8227903593761465195,
(Point(row=6, col=13), Player.black): 8364921257158333077,
(Point(row=6, col=13), Player.white): 3843280891352272396,
(Point(row=6, col=14), Player.black): 2406128505165246278,
(Point(row=6, col=14), Player.white): 3243820097331539459,
(Point(row=6, col=15), Player.black): 2538402448163358592,
(Point(row=6, col=15), Player.white): 5713242412303705912,
(Point(row=6, col=16), Player.black): 1622840734849354897,
(Point(row=6, col=16), Player.white): 2670353397930539318,
(Point(row=6, col=17), Player.black): 6811172777951878745,
(Point(row=6, col=17), Player.white): 4287438501612118956,
(Point(row=6, col=18), Player.black): 7361038496467019309,
(Point(row=6, col=18), Player.white): 1178537626058696537,
(Point(row=6, col=19), Player.black): 7194193256565057849,
(Point(row=6, col=19), Player.white): 4531567429014641722,
(Point(row=7, col=1), Player.black): 7505223898204509614,
(Point(row=7, col=1), Player.white): 3523880371388781823,
(Point(row=7, col=2), Player.black): 7758104689270626726,
(Point(row=7, col=2), Player.white): 5154901945034984635,
(Point(row=7, col=3), Player.black): 7908884449190200159,
(Point(row=7, col=3), Player.white): 6091771335169468026,
(Point(row=7, col=4), Player.black): 5496243953189431987,
(Point(row=7, col=4), Player.white): 1072781641727085262,
(Point(row=7, col=5), Player.black): 2259729783707561481,
(Point(row=7, col=5), Player.white): 3792565084245892139,
(Point(row=7, col=6), Player.black): 6937440406685817812,
(Point(row=7, col=6), Player.white): 7347417551552588410,
(Point(row=7, col=7), Player.black): 274363203168610705,
(Point(row=7, col=7), Player.white): 2991158354686106602,
(Point(row=7, col=8), Player.black): 3543462707343638869,
(Point(row=7, col=8), Player.white): 6897241357902524968,
(Point(row=7, col=9), Player.black): 1803449132465680439,
(Point(row=7, col=9), Player.white): 1805461137455131380,
(Point(row=7, col=10), Player.black): 1344205374809929348,
(Point(row=7, col=10), Player.white): 7421761836628972954,
(Point(row=7, col=11), Player.black): 2469075855792438661,
(Point(row=7, col=11), Player.white): 8737157820839145170,
(Point(row=7, col=12), Player.black): 4645139307448094535,
(Point(row=7, col=12), Player.white): 2013571706545530756,
(Point(row=7, col=13), Player.black): 4440052020092490667,
(Point(row=7, col=13), Player.white): 2198253462349413335,
(Point(row=7, col=14), Player.black): 8385158062981409426,
(Point(row=7, col=14), Player.white): 7396242276331402070,
(Point(row=7, col=15), Player.black): 7873991431591633416,
(Point(row=7, col=15), Player.white): 5245134034253075294,
(Point(row=7, col=16), Player.black): 3545473594019224005,
(Point(row=7, col=16), Player.white): 5362036617969886734,
(Point(row=7, col=17), Player.black): 3399820090180437921,
(Point(row=7, col=17), Player.white): 6304594393136496423,
(Point(row=7, col=18), Player.black): 6808871930382356840,
(Point(row=7, col=18), Player.white): 7238493628329406408,
(Point(row=7, col=19), Player.black): 2941984664248357117,
(Point(row=7, col=19), Player.white): 8777325077230667191,
(Point(row=8, col=1), Player.black): 8555550697672412722,
(Point(row=8, col=1), Player.white): 4441764663700603604,
(Point(row=8, col=2), Player.black): 8114022609843408184,
(Point(row=8, col=2), Player.white): 4880052519835673534,
(Point(row=8, col=3), Player.black): 8383176396540237499,
(Point(row=8, col=3), Player.white): 2301192195844013630,
(Point(row=8, col=4), Player.black): 3131193544615645612,
(Point(row=8, col=4), Player.white): 6496246924947363002,
(Point(row=8, col=5), Player.black): 2206697360492955428,
(Point(row=8, col=5), Player.white): 4633890430473590804,
(Point(row=8, col=6), Player.black): 3311088317107600064,
(Point(row=8, col=6), Player.white): 2553631626031439660,
(Point(row=8, col=7), Player.black): 3729471844600876976,
(Point(row=8, col=7), Player.white): 4630838550204380176,
(Point(row=8, col=8), Player.black): 3230962609338477572,
(Point(row=8, col=8), Player.white): 4025708318954871104,
(Point(row=8, col=9), Player.black): 7403400805718996734,
(Point(row=8, col=9), Player.white): 1120604723508655748,
(Point(row=8, col=10), Player.black): 9032266673291657252,
(Point(row=8, col=10), Player.white): 6256949844472726520,
(Point(row=8, col=11), Player.black): 8823919370741438055,
(Point(row=8, col=11), Player.white): 8094495405381248685,
(Point(row=8, col=12), Player.black): 3334094283060363945,
(Point(row=8, col=12), Player.white): 7246504600406415928,
(Point(row=8, col=13), Player.black): 3322886122246972265,
(Point(row=8, col=13), Player.white): 6478044293231350930,
(Point(row=8, col=14), Player.black): 7134935804336880955,
(Point(row=8, col=14), Player.white): 5440616859773655111,
(Point(row=8, col=15), Player.black): 5622883183821794068,
(Point(row=8, col=15), Player.white): 6876298074949829141,
(Point(row=8, col=16), Player.black): 9208319379299370358,
(Point(row=8, col=16), Player.white): 4279884586140213756,
(Point(row=8, col=17), Player.black): 6972919005596349799,
(Point(row=8, col=17), Player.white): 1188432116140140788,
(Point(row=8, col=18), Player.black): 4125474500416311866,
(Point(row=8, col=18), Player.white): 8307027400913603825,
(Point(row=8, col=19), Player.black): 3809353618715193806,
(Point(row=8, col=19), Player.white): 7558139490627685539,
(Point(row=9, col=1), Player.black): 1234119251793759246,
(Point(row=9, col=1), Player.white): 659297191654056693,
(Point(row=9, col=2), Player.black): 3261231943328448969,
(Point(row=9, col=2), Player.white): 1057278803599943353,
(Point(row=9, col=3), Player.black): 839483174647562586,
(Point(row=9, col=3), Player.white): 7194207691870590917,
(Point(row=9, col=4), Player.black): 5606650524210787489,
(Point(row=9, col=4), Player.white): 1895686981850231399,
(Point(row=9, col=5), Player.black): 4920547008176236549,
(Point(row=9, col=5), Player.white): 7565282943460912257,
(Point(row=9, col=6), Player.black): 3019587166136150912,
(Point(row=9, col=6), Player.white): 6707744018245089668,
(Point(row=9, col=7), Player.black): 6047206723760136289,
(Point(row=9, col=7), Player.white): 1193385319107794151,
(Point(row=9, col=8), Player.black): 7350618264927500762,
(Point(row=9, col=8), Player.white): 4066371643782662552,
(Point(row=9, col=9), Player.black): 2562145697504571198,
(Point(row=9, col=9), Player.white): 8175127414110258267,
(Point(row=9, col=10), Player.black): 6330184047076642702,
(Point(row=9, col=10), Player.white): 6088733421600405183,
(Point(row=9, col=11), Player.black): 8770761754687024922,
(Point(row=9, col=11), Player.white): 2303893899043859828,
(Point(row=9, col=12), Player.black): 4397892265666622492,
(Point(row=9, col=12), Player.white): 171511519220923650,
(Point(row=9, col=13), Player.black): 8556804551828495508,
(Point(row=9, col=13), Player.white): 6157667425414523251,
(Point(row=9, col=14), Player.black): 5240233257427575055,
(Point(row=9, col=14), Player.white): 380479988002564699,
(Point(row=9, col=15), Player.black): 4101188141905754953,
(Point(row=9, col=15), Player.white): 1188661981929465449,
(Point(row=9, col=16), Player.black): 2361008046500479745,
(Point(row=9, col=16), Player.white): 3420251487035079939,
(Point(row=9, col=17), Player.black): 5215788347647149491,
(Point(row=9, col=17), Player.white): 2847796804167233977,
(Point(row=9, col=18), Player.black): 4069384458604210450,
(Point(row=9, col=18), Player.white): 2667905631455246850,
(Point(row=9, col=19), Player.black): 4621288857791839751,
(Point(row=9, col=19), Player.white): 6192346017211071691,
(Point(row=10, col=1), Player.black): 343025508629360270,
(Point(row=10, col=1), Player.white): 6080370293523829540,
(Point(row=10, col=2), Player.black): 1276762341542167984,
(Point(row=10, col=2), Player.white): 174121238909199648,
(Point(row=10, col=3), Player.black): 2702169347958700192,
(Point(row=10, col=3), Player.white): 6176058303263585605,
(Point(row=10, col=4), Player.black): 177207636035983233,
(Point(row=10, col=4), Player.white): 5447343180839162201,
(Point(row=10, col=5), Player.black): 6273473410032224609,
(Point(row=10, col=5), Player.white): 2606986174731848762,
(Point(row=10, col=6), Player.black): 679745883356101322,
(Point(row=10, col=6), Player.white): 1860549189994338815,
(Point(row=10, col=7), Player.black): 2212940180756290274,
(Point(row=10, col=7), Player.white): 2133459037170470402,
(Point(row=10, col=8), Player.black): 1151882381744130865,
(Point(row=10, col=8), Player.white): 1649565364735167801,
(Point(row=10, col=9), Player.black): 2227196407143349414,
(Point(row=10, col=9), Player.white): 392846935006568137,
(Point(row=10, col=10), Player.black): 5502109392080621468,
(Point(row=10, col=10), Player.white): 8203674047163161602,
(Point(row=10, col=11), Player.black): 8204569383492405547,
(Point(row=10, col=11), Player.white): 7856323060487033755,
(Point(row=10, col=12), Player.black): 4312995889218800335,
(Point(row=10, col=12), Player.white): 1309294955384986112,
(Point(row=10, col=13), Player.black): 6596319202406351081,
(Point(row=10, col=13), Player.white): 8472831084798519156,
(Point(row=10, col=14), Player.black): 7648621437688537580,
(Point(row=10, col=14), Player.white): 8298210324352215352,
(Point(row=10, col=15), Player.black): 7902326659402518162,
(Point(row=10, col=15), Player.white): 1026162954916652360,
(Point(row=10, col=16), Player.black): 453485620993637077,
(Point(row=10, col=16), Player.white): 5025150808266618120,
(Point(row=10, col=17), Player.black): 1368378052776399459,
(Point(row=10, col=17), Player.white): 5541073040166699043,
(Point(row=10, col=18), Player.black): 7371506648251071272,
(Point(row=10, col=18), Player.white): 4032893015872145474,
(Point(row=10, col=19), Player.black): 6643168683325655773,
(Point(row=10, col=19), Player.white): 4009809049430315338,
(Point(row=11, col=1), Player.black): 3096446795331724776,
(Point(row=11, col=1), Player.white): 2633382253148960257,
(Point(row=11, col=2), Player.black): 8998600115078807949,
(Point(row=11, col=2), Player.white): 6713751278338346518,
(Point(row=11, col=3), Player.black): 5208695325110140904,
(Point(row=11, col=3), Player.white): 1082179704516872692,
(Point(row=11, col=4), Player.black): 2523720905537884611,
(Point(row=11, col=4), Player.white): 3884059083352746776,
(Point(row=11, col=5), Player.black): 8545932761490838754,
(Point(row=11, col=5), Player.white): 245119635031855594,
(Point(row=11, col=6), Player.black): 1768635026383023070,
(Point(row=11, col=6), Player.white): 5181208296292712804,
(Point(row=11, col=7), Player.black): 5573685131613925947,
(Point(row=11, col=7), Player.white): 9163096949906583680,
(Point(row=11, col=8), Player.black): 3184510654753152217,
(Point(row=11, col=8), Player.white): 8161887973309818925,
(Point(row=11, col=9), Player.black): 4589957902412425606,
(Point(row=11, col=9), Player.white): 6614686928272258750,
(Point(row=11, col=10), Player.black): 1434970922067050951,
(Point(row=11, col=10), Player.white): 1542376642545734726,
(Point(row=11, col=11), Player.black): 3961857311564892297,
(Point(row=11, col=11), Player.white): 1749905237812230915,
(Point(row=11, col=12), Player.black): 2144619526750562568,
(Point(row=11, col=12), Player.white): 3221030485317486750,
(Point(row=11, col=13), Player.black): 4041826991333486485,
(Point(row=11, col=13), Player.white): 5942340235373680092,
(Point(row=11, col=14), Player.black): 9002038659705724757,
(Point(row=11, col=14), Player.white): 7138245409904158102,
(Point(row=11, col=15), Player.black): 5838259029170475524,
(Point(row=11, col=15), Player.white): 5672234601146253184,
(Point(row=11, col=16), Player.black): 1045733023448870563,
(Point(row=11, col=16), Player.white): 679611892779038914,
(Point(row=11, col=17), Player.black): 1298269724157302892,
(Point(row=11, col=17), Player.white): 5800391453045182497,
(Point(row=11, col=18), Player.black): 7223484368370892141,
(Point(row=11, col=18), Player.white): 5013843226800780409,
(Point(row=11, col=19), Player.black): 6717520096129358646,
(Point(row=11, col=19), Player.white): 3979410374790927324,
(Point(row=12, col=1), Player.black): 4237432067300266239,
(Point(row=12, col=1), Player.white): 1900326310330401594,
(Point(row=12, col=2), Player.black): 6020336884909591346,
(Point(row=12, col=2), Player.white): 4978369381451594153,
(Point(row=12, col=3), Player.black): 2875147101496747986,
(Point(row=12, col=3), Player.white): 5232195392172726139,
(Point(row=12, col=4), Player.black): 4160918800097480322,
(Point(row=12, col=4), Player.white): 4446606576908106867,
(Point(row=12, col=5), Player.black): 301901673100119914,
(Point(row=12, col=5), Player.white): 1381555220051745289,
(Point(row=12, col=6), Player.black): 5369120539341183152,
(Point(row=12, col=6), Player.white): 644216968574007553,
(Point(row=12, col=7), Player.black): 7146906404801466533,
(Point(row=12, col=7), Player.white): 7370308202015538185,
(Point(row=12, col=8), Player.black): 5146719683181996868,
(Point(row=12, col=8), Player.white): 1638481902956858698,
(Point(row=12, col=9), Player.black): 2966254449544871752,
(Point(row=12, col=9), Player.white): 7166366522903388539,
(Point(row=12, col=10), Player.black): 8669835952454086115,
(Point(row=12, col=10), Player.white): 3999739516462966514,
(Point(row=12, col=11), Player.black): 855708420875865809,
(Point(row=12, col=11), Player.white): 7828618074418290839,
(Point(row=12, col=12), Player.black): 5044662642018461692,
(Point(row=12, col=12), Player.white): 3554905510548596463,
(Point(row=12, col=13), Player.black): 2423221727702418569,
(Point(row=12, col=13), Player.white): 7195366456353503774,
(Point(row=12, col=14), Player.black): 7961160117947599979,
(Point(row=12, col=14), Player.white): 4514564854734422812,
(Point(row=12, col=15), Player.black): 8303001813249370739,
(Point(row=12, col=15), Player.white): 304459573680012461,
(Point(row=12, col=16), Player.black): 379920350548093466,
(Point(row=12, col=16), Player.white): 7347478704353768904,
(Point(row=12, col=17), Player.black): 534784805381573035,
(Point(row=12, col=17), Player.white): 4491128130790950050,
(Point(row=12, col=18), Player.black): 8137182860817142099,
(Point(row=12, col=18), Player.white): 7620379291808426848,
(Point(row=12, col=19), Player.black): 1534801273780498768,
(Point(row=12, col=19), Player.white): 4906197558596922854,
(Point(row=13, col=1), Player.black): 628519232311208342,
(Point(row=13, col=1), Player.white): 9012322733551751208,
(Point(row=13, col=2), Player.black): 8968880824205860309,
(Point(row=13, col=2), Player.white): 4265106643919859481,
(Point(row=13, col=3), Player.black): 7172697365550842776,
(Point(row=13, col=3), Player.white): 7788270755108520750,
(Point(row=13, col=4), Player.black): 5934746258269661794,
(Point(row=13, col=4), Player.white): 1709526351801541778,
(Point(row=13, col=5), Player.black): 919479787036723555,
(Point(row=13, col=5), Player.white): 4617408397607156904,
(Point(row=13, col=6), Player.black): 3017909277574588438,
(Point(row=13, col=6), Player.white): 7714590061989806584,
(Point(row=13, col=7), Player.black): 7314539837012564425,
(Point(row=13, col=7), Player.white): 94478039237250124,
(Point(row=13, col=8), Player.black): 8855572335534100334,
(Point(row=13, col=8), Player.white): 3314306729734646544,
(Point(row=13, col=9), Player.black): 6383574213562494992,
(Point(row=13, col=9), Player.white): 3757348259985433820,
(Point(row=13, col=10), Player.black): 3218528706928942381,
(Point(row=13, col=10), Player.white): 8755498210776823764,
(Point(row=13, col=11), Player.black): 3871250504067913307,
(Point(row=13, col=11), Player.white): 4308010933751666358,
(Point(row=13, col=12), Player.black): 5447423350043419269,
(Point(row=13, col=12), Player.white): 4492692585168184810,
(Point(row=13, col=13), Player.black): 6621210539793049111,
(Point(row=13, col=13), Player.white): 4594886184845142207,
(Point(row=13, col=14), Player.black): 862191208922178062,
(Point(row=13, col=14), Player.white): 7092871381086951781,
(Point(row=13, col=15), Player.black): 6866995587800386541,
(Point(row=13, col=15), Player.white): 6639527157322646114,
(Point(row=13, col=16), Player.black): 2888342856649783777,
(Point(row=13, col=16), Player.white): 1820400694587150928,
(Point(row=13, col=17), Player.black): 8154638980954009758,
(Point(row=13, col=17), Player.white): 1107948918448171521,
(Point(row=13, col=18), Player.black): 7349691568439432277,
(Point(row=13, col=18), Player.white): 4143663161241702655,
(Point(row=13, col=19), Player.black): 8222110816052728820,
(Point(row=13, col=19), Player.white): 2975311221132972775,
(Point(row=14, col=1), Player.black): 1863961998848803501,
(Point(row=14, col=1), Player.white): 7104725216272792042,
(Point(row=14, col=2), Player.black): 5130133929769739441,
(Point(row=14, col=2), Player.white): 5515413891651105130,
(Point(row=14, col=3), Player.black): 6375493983351865171,
(Point(row=14, col=3), Player.white): 1463469150697238708,
(Point(row=14, col=4), Player.black): 252795421472879551,
(Point(row=14, col=4), Player.white): 7952614238200037422,
(Point(row=14, col=5), Player.black): 9192476257961760960,
(Point(row=14, col=5), Player.white): 7237280507776858765,
(Point(row=14, col=6), Player.black): 3226782926372388843,
(Point(row=14, col=6), Player.white): 6555655830423192450,
(Point(row=14, col=7), Player.black): 4941625599644320622,
(Point(row=14, col=7), Player.white): 1550340492340576377,
(Point(row=14, col=8), Player.black): 8596556921098917884,
(Point(row=14, col=8), Player.white): 5399793192187999531,
(Point(row=14, col=9), Player.black): 1913157374311153115,
(Point(row=14, col=9), Player.white): 2090100440980625023,
(Point(row=14, col=10), Player.black): 3884906940515336836,
(Point(row=14, col=10), Player.white): 1094744694082516003,
(Point(row=14, col=11), Player.black): 511237614993445060,
(Point(row=14, col=11), Player.white): 870696949153471276,
(Point(row=14, col=12), Player.black): 8112979758197630309,
(Point(row=14, col=12), Player.white): 4104435499041060474,
(Point(row=14, col=13), Player.black): 4349505038784158528,
(Point(row=14, col=13), Player.white): 3687577306600975975,
(Point(row=14, col=14), Player.black): 6284350769554616357,
(Point(row=14, col=14), Player.white): 4476656798978243543,
(Point(row=14, col=15), Player.black): 6794476950564796792,
(Point(row=14, col=15), Player.white): 7565334783614946406,
(Point(row=14, col=16), Player.black): 8650038375122719192,
(Point(row=14, col=16), Player.white): 5330719931269832842,
(Point(row=14, col=17), Player.black): 5474151832658659964,
(Point(row=14, col=17), Player.white): 8898789792690767615,
(Point(row=14, col=18), Player.black): 9107979683561190367,
(Point(row=14, col=18), Player.white): 7386253956306785607,
(Point(row=14, col=19), Player.black): 9123697209982643191,
(Point(row=14, col=19), Player.white): 6974485572415237028,
(Point(row=15, col=1), Player.black): 4337644991122521375,
(Point(row=15, col=1), Player.white): 1304217693852367506,
(Point(row=15, col=2), Player.black): 3915759986910688774,
(Point(row=15, col=2), Player.white): 3021592151500229552,
(Point(row=15, col=3), Player.black): 9152414171465063482,
(Point(row=15, col=3), Player.white): 5011221247388892469,
(Point(row=15, col=4), Player.black): 1663741174692729438,
(Point(row=15, col=4), Player.white): 970787474517028793,
(Point(row=15, col=5), Player.black): 6054076358328179929,
(Point(row=15, col=5), Player.white): 8873489489970631395,
(Point(row=15, col=6), Player.black): 1930655201443325074,
(Point(row=15, col=6), Player.white): 3755209027942385337,
(Point(row=15, col=7), Player.black): 2800688815387392340,
(Point(row=15, col=7), Player.white): 971278159515243294,
(Point(row=15, col=8), Player.black): 5723849708320704378,
(Point(row=15, col=8), Player.white): 3062832581215699049,
(Point(row=15, col=9), Player.black): 5008650182265349991,
(Point(row=15, col=9), Player.white): 7970740444508237001,
(Point(row=15, col=10), Player.black): 1945630331907886676,
(Point(row=15, col=10), Player.white): 7837072882802447145,
(Point(row=15, col=11), Player.black): 1559032358509347907,
(Point(row=15, col=11), Player.white): 8195124455274423927,
(Point(row=15, col=12), Player.black): 3080717012381103321,
(Point(row=15, col=12), Player.white): 2116189025776235672,
(Point(row=15, col=13), Player.black): 4438915457264671544,
(Point(row=15, col=13), Player.white): 3544646783687733085,
(Point(row=15, col=14), Player.black): 9182647124447361118,
(Point(row=15, col=14), Player.white): 7152109489993458264,
(Point(row=15, col=15), Player.black): 7599834199622978070,
(Point(row=15, col=15), Player.white): 4036331458128460820,
(Point(row=15, col=16), Player.black): 1415266564916260710,
(Point(row=15, col=16), Player.white): 5796893193674302860,
(Point(row=15, col=17), Player.black): 5539890679357278387,
(Point(row=15, col=17), Player.white): 3248025904750380682,
(Point(row=15, col=18), Player.black): 9194567845794524408,
(Point(row=15, col=18), Player.white): 6026377756896667687,
(Point(row=15, col=19), Player.black): 6868019737008349080,
(Point(row=15, col=19), Player.white): 6471698776878323096,
(Point(row=16, col=1), Player.black): 8991487232045549389,
(Point(row=16, col=1), Player.white): 5292450315230397106,
(Point(row=16, col=2), Player.black): 7723531242393393408,
(Point(row=16, col=2), Player.white): 8640602358328257048,
(Point(row=16, col=3), Player.black): 36045184498053208,
(Point(row=16, col=3), Player.white): 2556814445019739422,
(Point(row=16, col=4), Player.black): 5422600141578148808,
(Point(row=16, col=4), Player.white): 3641709947971188688,
(Point(row=16, col=5), Player.black): 3941905135271360926,
(Point(row=16, col=5), Player.white): 5600531804657799353,
(Point(row=16, col=6), Player.black): 8560618806127429120,
(Point(row=16, col=6), Player.white): 7554582082083704046,
(Point(row=16, col=7), Player.black): 711440492874123203,
(Point(row=16, col=7), Player.white): 5720976881304707141,
(Point(row=16, col=8), Player.black): 9087984682276420089,
(Point(row=16, col=8), Player.white): 8657421347777767052,
(Point(row=16, col=9), Player.black): 7197332612673537633,
(Point(row=16, col=9), Player.white): 1828614306412416161,
(Point(row=16, col=10), Player.black): 4545490557918492692,
(Point(row=16, col=10), Player.white): 532147880755185042,
(Point(row=16, col=11), Player.black): 8411029884315206204,
(Point(row=16, col=11), Player.white): 4652796430036246273,
(Point(row=16, col=12), Player.black): 3507493795367353287,
(Point(row=16, col=12), Player.white): 6470926671418166497,
(Point(row=16, col=13), Player.black): 2063273199615108609,
(Point(row=16, col=13), Player.white): 446552949043624027,
(Point(row=16, col=14), Player.black): 1084184415353974420,
(Point(row=16, col=14), Player.white): 2132042809237663779,
(Point(row=16, col=15), Player.black): 1164947278522434713,
(Point(row=16, col=15), Player.white): 8113913068687439739,
(Point(row=16, col=16), Player.black): 4738038481647783630,
(Point(row=16, col=16), Player.white): 320417080161112040,
(Point(row=16, col=17), Player.black): 6828331179952690545,
(Point(row=16, col=17), Player.white): 7521943669316976736,
(Point(row=16, col=18), Player.black): 3665889502745802761,
(Point(row=16, col=18), Player.white): 320329133695211709,
(Point(row=16, col=19), Player.black): 5796780711744761775,
(Point(row=16, col=19), Player.white): 2676583062155451949,
(Point(row=17, col=1), Player.black): 3484635079624457265,
(Point(row=17, col=1), Player.white): 6069213419291382027,
(Point(row=17, col=2), Player.black): 6835249005884985106,
(Point(row=17, col=2), Player.white): 1714916385018676562,
(Point(row=17, col=3), Player.black): 5149738028957454524,
(Point(row=17, col=3), Player.white): 6819673557238720111,
(Point(row=17, col=4), Player.black): 6846920340321586680,
(Point(row=17, col=4), Player.white): 5650517261180495057,
(Point(row=17, col=5), Player.black): 6859895030673951287,
(Point(row=17, col=5), Player.white): 3277513334765081493,
(Point(row=17, col=6), Player.black): 1781421777867771146,
(Point(row=17, col=6), Player.white): 3898205830612768585,
(Point(row=17, col=7), Player.black): 2862248799182911371,
(Point(row=17, col=7), Player.white): 4885801817779977498,
(Point(row=17, col=8), Player.black): 640910451840955527,
(Point(row=17, col=8), Player.white): 6184530106675719419,
(Point(row=17, col=9), Player.black): 8388784745661908862,
(Point(row=17, col=9), Player.white): 6394645469104644915,
(Point(row=17, col=10), Player.black): 8356532995058933724,
(Point(row=17, col=10), Player.white): 35742002312566326,
(Point(row=17, col=11), Player.black): 7981877101829988437,
(Point(row=17, col=11), Player.white): 8135915718523895351,
(Point(row=17, col=12), Player.black): 7759432412829636136,
(Point(row=17, col=12), Player.white): 5316827354945357851,
(Point(row=17, col=13), Player.black): 4292051684425464986,
(Point(row=17, col=13), Player.white): 3625805327819456798,
(Point(row=17, col=14), Player.black): 6961131621182590848,
(Point(row=17, col=14), Player.white): 7745696647646107839,
(Point(row=17, col=15), Player.black): 6209362538359629951,
(Point(row=17, col=15), Player.white): 8099791659260235772,
(Point(row=17, col=16), Player.black): 257615418633975847,
(Point(row=17, col=16), Player.white): 8769897204170535168,
(Point(row=17, col=17), Player.black): 8464341603580847613,
(Point(row=17, col=17), Player.white): 5455593262036144966,
(Point(row=17, col=18), Player.black): 956745645481024571,
(Point(row=17, col=18), Player.white): 5230033481442448385,
(Point(row=17, col=19), Player.black): 7178070736944945619,
(Point(row=17, col=19), Player.white): 1767245828672898173,
(Point(row=18, col=1), Player.black): 1488274364920839354,
(Point(row=18, col=1), Player.white): 2736154559139493439,
(Point(row=18, col=2), Player.black): 2707528031970491560,
(Point(row=18, col=2), Player.white): 3590892185812303723,
(Point(row=18, col=3), Player.black): 7955289994222595950,
(Point(row=18, col=3), Player.white): 4893715292640171992,
(Point(row=18, col=4), Player.black): 9073979189337898629,
(Point(row=18, col=4), Player.white): 5188402570778007682,
(Point(row=18, col=5), Player.black): 6100171414922106651,
(Point(row=18, col=5), Player.white): 3097614018122640439,
(Point(row=18, col=6), Player.black): 2716657759579873547,
(Point(row=18, col=6), Player.white): 2211120146174758744,
(Point(row=18, col=7), Player.black): 1250011601734050975,
(Point(row=18, col=7), Player.white): 6044916952500492878,
(Point(row=18, col=8), Player.black): 2791686430399746459,
(Point(row=18, col=8), Player.white): 9024779735456073531,
(Point(row=18, col=9), Player.black): 1180528573623622328,
(Point(row=18, col=9), Player.white): 3195794075210655928,
(Point(row=18, col=10), Player.black): 5352540450218185348,
(Point(row=18, col=10), Player.white): 8324497323245206107,
(Point(row=18, col=11), Player.black): 1561159839581437198,
(Point(row=18, col=11), Player.white): 1150004927503836540,
(Point(row=18, col=12), Player.black): 3592193043639446072,
(Point(row=18, col=12), Player.white): 5961001420408436479,
(Point(row=18, col=13), Player.black): 9170590488411860263,
(Point(row=18, col=13), Player.white): 4393550962680193281,
(Point(row=18, col=14), Player.black): 4459136446402603669,
(Point(row=18, col=14), Player.white): 3678532975382333317,
(Point(row=18, col=15), Player.black): 3407376268502759002,
(Point(row=18, col=15), Player.white): 7004392700522659827,
(Point(row=18, col=16), Player.black): 7025224267149331137,
(Point(row=18, col=16), Player.white): 4266828283927072776,
(Point(row=18, col=17), Player.black): 3820477092415434327,
(Point(row=18, col=17), Player.white): 6987910699423728036,
(Point(row=18, col=18), Player.black): 4404475055628968454,
(Point(row=18, col=18), Player.white): 7263273303364001536,
(Point(row=18, col=19), Player.black): 3861799992048340088,
(Point(row=18, col=19), Player.white): 4745260880005722357,
(Point(row=19, col=1), Player.black): 3204765945343917947,
(Point(row=19, col=1), Player.white): 6471475421459578330,
(Point(row=19, col=2), Player.black): 7217255437762615997,
(Point(row=19, col=2), Player.white): 2042324360141252017,
(Point(row=19, col=3), Player.black): 1573271722322701274,
(Point(row=19, col=3), Player.white): 4251862130437553808,
(Point(row=19, col=4), Player.black): 4874022962146164970,
(Point(row=19, col=4), Player.white): 5580404813819044185,
(Point(row=19, col=5), Player.black): 3353651861083802778,
(Point(row=19, col=5), Player.white): 4365684057282895693,
(Point(row=19, col=6), Player.black): 7183087483554163443,
(Point(row=19, col=6), Player.white): 6124351538038844700,
(Point(row=19, col=7), Player.black): 552362206998334380,
(Point(row=19, col=7), Player.white): 4001317785718717269,
(Point(row=19, col=8), Player.black): 2460342757764868330,
(Point(row=19, col=8), Player.white): 4363761240248314342,
(Point(row=19, col=9), Player.black): 555305416298891847,
(Point(row=19, col=9), Player.white): 8119791214296736653,
(Point(row=19, col=10), Player.black): 3279046237149845916,
(Point(row=19, col=10), Player.white): 7923879540147169965,
(Point(row=19, col=11), Player.black): 4458693056595344855,
(Point(row=19, col=11), Player.white): 3651817262257460536,
(Point(row=19, col=12), Player.black): 5185309799039877575,
(Point(row=19, col=12), Player.white): 8262961320022098574,
(Point(row=19, col=13), Player.black): 2439407622347387200,
(Point(row=19, col=13), Player.white): 8796788343413577457,
(Point(row=19, col=14), Player.black): 6880826668981171262,
(Point(row=19, col=14), Player.white): 4043062046885032638,
(Point(row=19, col=15), Player.black): 4955624846223592658,
(Point(row=19, col=15), Player.white): 6576542069929904305,
(Point(row=19, col=16), Player.black): 7983787934871059066,
(Point(row=19, col=16), Player.white): 2035781571561811243,
(Point(row=19, col=17), Player.black): 5069116270545998666,
(Point(row=19, col=17), Player.white): 6753388973612546863,
(Point(row=19, col=18), Player.black): 2454745758168160778,
(Point(row=19, col=18), Player.white): 8486626821235143188,
(Point(row=19, col=19), Player.black): 8788385670717328555,
(Point(row=19, col=19), Player.white): 8976707363182962042,
}
# Hash value representing a board with no stones; presumably the starting
# accumulator that the per-(Point, Player) codes above are XOR-combined
# into (Zobrist hashing) — confirm against the board implementation.
EMPTY_BOARD = 0
# Module header for the generated Zobrist-hash table below.
# Fix: the original line began with a stray "| " (file-concatenation
# artifact), which is a Python syntax error; the import itself is kept as-is.
from .gotypes import Player, Point

# Public API of this generated module: the per-(Point, Player) hash codes
# and the empty-board base value.
__all__ = ['HASH_CODE', 'EMPTY_BOARD']
HASH_CODE = {
(Point(row=1, col=1), Player.black): 3358364442651403282,
(Point(row=1, col=1), Player.white): 7470933530584932282,
(Point(row=1, col=2), Player.black): 3908477744752221130,
(Point(row=1, col=2), Player.white): 1562668501702532755,
(Point(row=1, col=3), Player.black): 2935795650793747641,
(Point(row=1, col=3), Player.white): 6785400224249859256,
(Point(row=1, col=4), Player.black): 8009767182471757917,
(Point(row=1, col=4), Player.white): 35550824089951956,
(Point(row=1, col=5), Player.black): 6773097344601636250,
(Point(row=1, col=5), Player.white): 8847166111597618811,
(Point(row=1, col=6), Player.black): 8670100078235412825,
(Point(row=1, col=6), Player.white): 7008955978465777678,
(Point(row=1, col=7), Player.black): 5464139211459645769,
(Point(row=1, col=7), Player.white): 8171595736379093123,
(Point(row=1, col=8), Player.black): 351909656387621001,
(Point(row=1, col=8), Player.white): 7238015216612105884,
(Point(row=1, col=9), Player.black): 3396724620650770147,
(Point(row=1, col=9), Player.white): 4209004723882000576,
(Point(row=1, col=10), Player.black): 4842779035545402021,
(Point(row=1, col=10), Player.white): 6886319625847032459,
(Point(row=1, col=11), Player.black): 524803705257336124,
(Point(row=1, col=11), Player.white): 1056449941733952409,
(Point(row=1, col=12), Player.black): 6489365225745754382,
(Point(row=1, col=12), Player.white): 2437438899944803018,
(Point(row=1, col=13), Player.black): 8947346722438719161,
(Point(row=1, col=13), Player.white): 2599070081342983667,
(Point(row=1, col=14), Player.black): 2930941330745387947,
(Point(row=1, col=14), Player.white): 2007749035277495621,
(Point(row=1, col=15), Player.black): 6065841185823253414,
(Point(row=1, col=15), Player.white): 193120643649747335,
(Point(row=1, col=16), Player.black): 2588400943218332221,
(Point(row=1, col=16), Player.white): 2029576827960769569,
(Point(row=1, col=17), Player.black): 3721163493106219606,
(Point(row=1, col=17), Player.white): 10216008672517638,
(Point(row=1, col=18), Player.black): 4912989059172363883,
(Point(row=1, col=18), Player.white): 5569829595169653615,
(Point(row=1, col=19), Player.black): 2138513457095070154,
(Point(row=1, col=19), Player.white): 1320455088185461752,
(Point(row=2, col=1), Player.black): 7136038498507050230,
(Point(row=2, col=1), Player.white): 6372693857312078170,
(Point(row=2, col=2), Player.black): 1876075245466805558,
(Point(row=2, col=2), Player.white): 5869045570448901886,
(Point(row=2, col=3), Player.black): 2243429705853894366,
(Point(row=2, col=3), Player.white): 9107310619990862130,
(Point(row=2, col=4), Player.black): 6974778138575169367,
(Point(row=2, col=4), Player.white): 6106156854666751401,
(Point(row=2, col=5), Player.black): 7556636260769482629,
(Point(row=2, col=5), Player.white): 3084523102838529514,
(Point(row=2, col=6), Player.black): 7111911632335533472,
(Point(row=2, col=6), Player.white): 6028072164826460819,
(Point(row=2, col=7), Player.black): 1915335096391320529,
(Point(row=2, col=7), Player.white): 8061998162183177218,
(Point(row=2, col=8), Player.black): 4016149980235550064,
(Point(row=2, col=8), Player.white): 7047649178198708366,
(Point(row=2, col=9), Player.black): 4708166101478557489,
(Point(row=2, col=9), Player.white): 2215219507826833770,
(Point(row=2, col=10), Player.black): 3340274879306913100,
(Point(row=2, col=10), Player.white): 1959796252649520039,
(Point(row=2, col=11), Player.black): 6105253527464181164,
(Point(row=2, col=11), Player.white): 5464577752823254244,
(Point(row=2, col=12), Player.black): 5729047054197537807,
(Point(row=2, col=12), Player.white): 198009915777229217,
(Point(row=2, col=13), Player.black): 6601814483141386855,
(Point(row=2, col=13), Player.white): 5869590559312693351,
(Point(row=2, col=14), Player.black): 8628386210791566486,
(Point(row=2, col=14), Player.white): 4877653724634908127,
(Point(row=2, col=15), Player.black): 7890344071649023278,
(Point(row=2, col=15), Player.white): 8998144387626287997,
(Point(row=2, col=16), Player.black): 913453038731891113,
(Point(row=2, col=16), Player.white): 4209879683093159541,
(Point(row=2, col=17), Player.black): 7407760281507574211,
(Point(row=2, col=17), Player.white): 3661338262012502056,
(Point(row=2, col=18), Player.black): 8068287257985125442,
(Point(row=2, col=18), Player.white): 6356154620615642781,
(Point(row=2, col=19), Player.black): 6774018702842503738,
(Point(row=2, col=19), Player.white): 5902567971825953990,
(Point(row=3, col=1), Player.black): 4048345810909377744,
(Point(row=3, col=1), Player.white): 9196143852617131564,
(Point(row=3, col=2), Player.black): 6975287365584650059,
(Point(row=3, col=2), Player.white): 6899074477900044532,
(Point(row=3, col=3), Player.black): 7772648527475620349,
(Point(row=3, col=3), Player.white): 2310556956700779283,
(Point(row=3, col=4), Player.black): 7451894683190357140,
(Point(row=3, col=4), Player.white): 2178858547315922304,
(Point(row=3, col=5), Player.black): 344601012743458982,
(Point(row=3, col=5), Player.white): 7439600122260057570,
(Point(row=3, col=6), Player.black): 3587854205726735566,
(Point(row=3, col=6), Player.white): 8901337557448145585,
(Point(row=3, col=7), Player.black): 1573954452452139533,
(Point(row=3, col=7), Player.white): 3403114058165668101,
(Point(row=3, col=8), Player.black): 8367254702727005883,
(Point(row=3, col=8), Player.white): 6955872926229753235,
(Point(row=3, col=9), Player.black): 3619111347943775729,
(Point(row=3, col=9), Player.white): 6386208164298176371,
(Point(row=3, col=10), Player.black): 3148441976799681341,
(Point(row=3, col=10), Player.white): 7856395865548432460,
(Point(row=3, col=11), Player.black): 6409076893157255060,
(Point(row=3, col=11), Player.white): 124395272292207998,
(Point(row=3, col=12), Player.black): 3143880823288471964,
(Point(row=3, col=12), Player.white): 1531234352959259847,
(Point(row=3, col=13), Player.black): 7399262572288059981,
(Point(row=3, col=13), Player.white): 1551835472134981779,
(Point(row=3, col=14), Player.black): 6966735695951350693,
(Point(row=3, col=14), Player.white): 2226180808931432928,
(Point(row=3, col=15), Player.black): 7950221191774108447,
(Point(row=3, col=15), Player.white): 5372070084430614066,
(Point(row=3, col=16), Player.black): 4932087973874359151,
(Point(row=3, col=16), Player.white): 4861217845376437768,
(Point(row=3, col=17), Player.black): 6051641886456887859,
(Point(row=3, col=17), Player.white): 8682525157318542658,
(Point(row=3, col=18), Player.black): 5219460463872050236,
(Point(row=3, col=18), Player.white): 2006674587315476121,
(Point(row=3, col=19), Player.black): 1858501859510840375,
(Point(row=3, col=19), Player.white): 1149751197252921630,
(Point(row=4, col=1), Player.black): 2398662377782215974,
(Point(row=4, col=1), Player.white): 2022847834450689858,
(Point(row=4, col=2), Player.black): 5069135804981210434,
(Point(row=4, col=2), Player.white): 8163668278688808633,
(Point(row=4, col=3), Player.black): 6861279557339037816,
(Point(row=4, col=3), Player.white): 403173259000503783,
(Point(row=4, col=4), Player.black): 6742465731882952113,
(Point(row=4, col=4), Player.white): 7236553203790249172,
(Point(row=4, col=5), Player.black): 237100204552564806,
(Point(row=4, col=5), Player.white): 2253480256099219716,
(Point(row=4, col=6), Player.black): 8653445792663566538,
(Point(row=4, col=6), Player.white): 8332972213608264449,
(Point(row=4, col=7), Player.black): 5078569967117087361,
(Point(row=4, col=7), Player.white): 1557213285863104694,
(Point(row=4, col=8), Player.black): 2827551827228022681,
(Point(row=4, col=8), Player.white): 3349858998829898310,
(Point(row=4, col=9), Player.black): 8066286474284701662,
(Point(row=4, col=9), Player.white): 3028979353583824304,
(Point(row=4, col=10), Player.black): 7922214592449460781,
(Point(row=4, col=10), Player.white): 4401701324525596990,
(Point(row=4, col=11), Player.black): 3886897718507761264,
(Point(row=4, col=11), Player.white): 6460451817414461410,
(Point(row=4, col=12), Player.black): 7028395470388266531,
(Point(row=4, col=12), Player.white): 6397147923820227503,
(Point(row=4, col=13), Player.black): 8673992362436018560,
(Point(row=4, col=13), Player.white): 558308551984182854,
(Point(row=4, col=14), Player.black): 5813119482407248397,
(Point(row=4, col=14), Player.white): 486055615340767652,
(Point(row=4, col=15), Player.black): 1429987174565568248,
(Point(row=4, col=15), Player.white): 8412546385952468176,
(Point(row=4, col=16), Player.black): 3611103175382149140,
(Point(row=4, col=16), Player.white): 6903982908273117840,
(Point(row=4, col=17), Player.black): 3592518960087764980,
(Point(row=4, col=17), Player.white): 7828295962542054267,
(Point(row=4, col=18), Player.black): 4525724720334310980,
(Point(row=4, col=18), Player.white): 267756311306906827,
(Point(row=4, col=19), Player.black): 6370693981989277912,
(Point(row=4, col=19), Player.white): 6786204491859299846,
(Point(row=5, col=1), Player.black): 819992008768999570,
(Point(row=5, col=1), Player.white): 5766528066731209464,
(Point(row=5, col=2), Player.black): 140046101684148532,
(Point(row=5, col=2), Player.white): 2432801741437513948,
(Point(row=5, col=3), Player.black): 849966200711015504,
(Point(row=5, col=3), Player.white): 3608309475012801311,
(Point(row=5, col=4), Player.black): 7768307362647367400,
(Point(row=5, col=4), Player.white): 1401509817423818336,
(Point(row=5, col=5), Player.black): 978308582591520472,
(Point(row=5, col=5), Player.white): 8157243956377698400,
(Point(row=5, col=6), Player.black): 6729878835345781212,
(Point(row=5, col=6), Player.white): 6046318031522706143,
(Point(row=5, col=7), Player.black): 3931891382545183014,
(Point(row=5, col=7), Player.white): 4565326691920452325,
(Point(row=5, col=8), Player.black): 3751207472807055819,
(Point(row=5, col=8), Player.white): 1750985725059676286,
(Point(row=5, col=9), Player.black): 4739768336697872363,
(Point(row=5, col=9), Player.white): 4663188651196470664,
(Point(row=5, col=10), Player.black): 8709261564447766898,
(Point(row=5, col=10), Player.white): 1989905579540671866,
(Point(row=5, col=11), Player.black): 8386773075655989811,
(Point(row=5, col=11), Player.white): 9126047819967038123,
(Point(row=5, col=12), Player.black): 3638879086648420737,
(Point(row=5, col=12), Player.white): 5802750860279031868,
(Point(row=5, col=13), Player.black): 6302226771349348014,
(Point(row=5, col=13), Player.white): 391189191613921792,
(Point(row=5, col=14), Player.black): 7508186241717994621,
(Point(row=5, col=14), Player.white): 2980849813380702168,
(Point(row=5, col=15), Player.black): 7178012196189677148,
(Point(row=5, col=15), Player.white): 3152954361793989785,
(Point(row=5, col=16), Player.black): 9046974077767159071,
(Point(row=5, col=16), Player.white): 5896322307150774831,
(Point(row=5, col=17), Player.black): 8644164651450999910,
(Point(row=5, col=17), Player.white): 8372756108283824593,
(Point(row=5, col=18), Player.black): 6205959058144257403,
(Point(row=5, col=18), Player.white): 3658039270432317209,
(Point(row=5, col=19), Player.black): 815296926362577910,
(Point(row=5, col=19), Player.white): 7244805626828897449,
(Point(row=6, col=1), Player.black): 1711429897047583139,
(Point(row=6, col=1), Player.white): 3796916877444870134,
(Point(row=6, col=2), Player.black): 9178534209148476838,
(Point(row=6, col=2), Player.white): 910115998406578065,
(Point(row=6, col=3), Player.black): 2165067175098864523,
(Point(row=6, col=3), Player.white): 2519141993243517052,
(Point(row=6, col=4), Player.black): 5978107613277318827,
(Point(row=6, col=4), Player.white): 109336010425203641,
(Point(row=6, col=5), Player.black): 4943941116244090107,
(Point(row=6, col=5), Player.white): 132140693945639607,
(Point(row=6, col=6), Player.black): 6045300607696569417,
(Point(row=6, col=6), Player.white): 6277531507132282289,
(Point(row=6, col=7), Player.black): 6699260733919604273,
(Point(row=6, col=7), Player.white): 5635216845699331450,
(Point(row=6, col=8), Player.black): 2635626576307750647,
(Point(row=6, col=8), Player.white): 165956464313047071,
(Point(row=6, col=9), Player.black): 2633270617992739718,
(Point(row=6, col=9), Player.white): 9177199448496737764,
(Point(row=6, col=10), Player.black): 4633272803934465732,
(Point(row=6, col=10), Player.white): 2304944580014465590,
(Point(row=6, col=11), Player.black): 7159833061097635087,
(Point(row=6, col=11), Player.white): 4817018317389969148,
(Point(row=6, col=12), Player.black): 499241394719117871,
(Point(row=6, col=12), Player.white): 8227903593761465195,
(Point(row=6, col=13), Player.black): 8364921257158333077,
(Point(row=6, col=13), Player.white): 3843280891352272396,
(Point(row=6, col=14), Player.black): 2406128505165246278,
(Point(row=6, col=14), Player.white): 3243820097331539459,
(Point(row=6, col=15), Player.black): 2538402448163358592,
(Point(row=6, col=15), Player.white): 5713242412303705912,
(Point(row=6, col=16), Player.black): 1622840734849354897,
(Point(row=6, col=16), Player.white): 2670353397930539318,
(Point(row=6, col=17), Player.black): 6811172777951878745,
(Point(row=6, col=17), Player.white): 4287438501612118956,
(Point(row=6, col=18), Player.black): 7361038496467019309,
(Point(row=6, col=18), Player.white): 1178537626058696537,
(Point(row=6, col=19), Player.black): 7194193256565057849,
(Point(row=6, col=19), Player.white): 4531567429014641722,
(Point(row=7, col=1), Player.black): 7505223898204509614,
(Point(row=7, col=1), Player.white): 3523880371388781823,
(Point(row=7, col=2), Player.black): 7758104689270626726,
(Point(row=7, col=2), Player.white): 5154901945034984635,
(Point(row=7, col=3), Player.black): 7908884449190200159,
(Point(row=7, col=3), Player.white): 6091771335169468026,
(Point(row=7, col=4), Player.black): 5496243953189431987,
(Point(row=7, col=4), Player.white): 1072781641727085262,
(Point(row=7, col=5), Player.black): 2259729783707561481,
(Point(row=7, col=5), Player.white): 3792565084245892139,
(Point(row=7, col=6), Player.black): 6937440406685817812,
(Point(row=7, col=6), Player.white): 7347417551552588410,
(Point(row=7, col=7), Player.black): 274363203168610705,
(Point(row=7, col=7), Player.white): 2991158354686106602,
(Point(row=7, col=8), Player.black): 3543462707343638869,
(Point(row=7, col=8), Player.white): 6897241357902524968,
(Point(row=7, col=9), Player.black): 1803449132465680439,
(Point(row=7, col=9), Player.white): 1805461137455131380,
(Point(row=7, col=10), Player.black): 1344205374809929348,
(Point(row=7, col=10), Player.white): 7421761836628972954,
(Point(row=7, col=11), Player.black): 2469075855792438661,
(Point(row=7, col=11), Player.white): 8737157820839145170,
(Point(row=7, col=12), Player.black): 4645139307448094535,
(Point(row=7, col=12), Player.white): 2013571706545530756,
(Point(row=7, col=13), Player.black): 4440052020092490667,
(Point(row=7, col=13), Player.white): 2198253462349413335,
(Point(row=7, col=14), Player.black): 8385158062981409426,
(Point(row=7, col=14), Player.white): 7396242276331402070,
(Point(row=7, col=15), Player.black): 7873991431591633416,
(Point(row=7, col=15), Player.white): 5245134034253075294,
(Point(row=7, col=16), Player.black): 3545473594019224005,
(Point(row=7, col=16), Player.white): 5362036617969886734,
(Point(row=7, col=17), Player.black): 3399820090180437921,
(Point(row=7, col=17), Player.white): 6304594393136496423,
(Point(row=7, col=18), Player.black): 6808871930382356840,
(Point(row=7, col=18), Player.white): 7238493628329406408,
(Point(row=7, col=19), Player.black): 2941984664248357117,
(Point(row=7, col=19), Player.white): 8777325077230667191,
(Point(row=8, col=1), Player.black): 8555550697672412722,
(Point(row=8, col=1), Player.white): 4441764663700603604,
(Point(row=8, col=2), Player.black): 8114022609843408184,
(Point(row=8, col=2), Player.white): 4880052519835673534,
(Point(row=8, col=3), Player.black): 8383176396540237499,
(Point(row=8, col=3), Player.white): 2301192195844013630,
(Point(row=8, col=4), Player.black): 3131193544615645612,
(Point(row=8, col=4), Player.white): 6496246924947363002,
(Point(row=8, col=5), Player.black): 2206697360492955428,
(Point(row=8, col=5), Player.white): 4633890430473590804,
(Point(row=8, col=6), Player.black): 3311088317107600064,
(Point(row=8, col=6), Player.white): 2553631626031439660,
(Point(row=8, col=7), Player.black): 3729471844600876976,
(Point(row=8, col=7), Player.white): 4630838550204380176,
(Point(row=8, col=8), Player.black): 3230962609338477572,
(Point(row=8, col=8), Player.white): 4025708318954871104,
(Point(row=8, col=9), Player.black): 7403400805718996734,
(Point(row=8, col=9), Player.white): 1120604723508655748,
(Point(row=8, col=10), Player.black): 9032266673291657252,
(Point(row=8, col=10), Player.white): 6256949844472726520,
(Point(row=8, col=11), Player.black): 8823919370741438055,
(Point(row=8, col=11), Player.white): 8094495405381248685,
(Point(row=8, col=12), Player.black): 3334094283060363945,
(Point(row=8, col=12), Player.white): 7246504600406415928,
(Point(row=8, col=13), Player.black): 3322886122246972265,
(Point(row=8, col=13), Player.white): 6478044293231350930,
(Point(row=8, col=14), Player.black): 7134935804336880955,
(Point(row=8, col=14), Player.white): 5440616859773655111,
(Point(row=8, col=15), Player.black): 5622883183821794068,
(Point(row=8, col=15), Player.white): 6876298074949829141,
(Point(row=8, col=16), Player.black): 9208319379299370358,
(Point(row=8, col=16), Player.white): 4279884586140213756,
(Point(row=8, col=17), Player.black): 6972919005596349799,
(Point(row=8, col=17), Player.white): 1188432116140140788,
(Point(row=8, col=18), Player.black): 4125474500416311866,
(Point(row=8, col=18), Player.white): 8307027400913603825,
(Point(row=8, col=19), Player.black): 3809353618715193806,
(Point(row=8, col=19), Player.white): 7558139490627685539,
(Point(row=9, col=1), Player.black): 1234119251793759246,
(Point(row=9, col=1), Player.white): 659297191654056693,
(Point(row=9, col=2), Player.black): 3261231943328448969,
(Point(row=9, col=2), Player.white): 1057278803599943353,
(Point(row=9, col=3), Player.black): 839483174647562586,
(Point(row=9, col=3), Player.white): 7194207691870590917,
(Point(row=9, col=4), Player.black): 5606650524210787489,
(Point(row=9, col=4), Player.white): 1895686981850231399,
(Point(row=9, col=5), Player.black): 4920547008176236549,
(Point(row=9, col=5), Player.white): 7565282943460912257,
(Point(row=9, col=6), Player.black): 3019587166136150912,
(Point(row=9, col=6), Player.white): 6707744018245089668,
(Point(row=9, col=7), Player.black): 6047206723760136289,
(Point(row=9, col=7), Player.white): 1193385319107794151,
(Point(row=9, col=8), Player.black): 7350618264927500762,
(Point(row=9, col=8), Player.white): 4066371643782662552,
(Point(row=9, col=9), Player.black): 2562145697504571198,
(Point(row=9, col=9), Player.white): 8175127414110258267,
(Point(row=9, col=10), Player.black): 6330184047076642702,
(Point(row=9, col=10), Player.white): 6088733421600405183,
(Point(row=9, col=11), Player.black): 8770761754687024922,
(Point(row=9, col=11), Player.white): 2303893899043859828,
(Point(row=9, col=12), Player.black): 4397892265666622492,
(Point(row=9, col=12), Player.white): 171511519220923650,
(Point(row=9, col=13), Player.black): 8556804551828495508,
(Point(row=9, col=13), Player.white): 6157667425414523251,
(Point(row=9, col=14), Player.black): 5240233257427575055,
(Point(row=9, col=14), Player.white): 380479988002564699,
(Point(row=9, col=15), Player.black): 4101188141905754953,
(Point(row=9, col=15), Player.white): 1188661981929465449,
(Point(row=9, col=16), Player.black): 2361008046500479745,
(Point(row=9, col=16), Player.white): 3420251487035079939,
(Point(row=9, col=17), Player.black): 5215788347647149491,
(Point(row=9, col=17), Player.white): 2847796804167233977,
(Point(row=9, col=18), Player.black): 4069384458604210450,
(Point(row=9, col=18), Player.white): 2667905631455246850,
(Point(row=9, col=19), Player.black): 4621288857791839751,
(Point(row=9, col=19), Player.white): 6192346017211071691,
(Point(row=10, col=1), Player.black): 343025508629360270,
(Point(row=10, col=1), Player.white): 6080370293523829540,
(Point(row=10, col=2), Player.black): 1276762341542167984,
(Point(row=10, col=2), Player.white): 174121238909199648,
(Point(row=10, col=3), Player.black): 2702169347958700192,
(Point(row=10, col=3), Player.white): 6176058303263585605,
(Point(row=10, col=4), Player.black): 177207636035983233,
(Point(row=10, col=4), Player.white): 5447343180839162201,
(Point(row=10, col=5), Player.black): 6273473410032224609,
(Point(row=10, col=5), Player.white): 2606986174731848762,
(Point(row=10, col=6), Player.black): 679745883356101322,
(Point(row=10, col=6), Player.white): 1860549189994338815,
(Point(row=10, col=7), Player.black): 2212940180756290274,
(Point(row=10, col=7), Player.white): 2133459037170470402,
(Point(row=10, col=8), Player.black): 1151882381744130865,
(Point(row=10, col=8), Player.white): 1649565364735167801,
(Point(row=10, col=9), Player.black): 2227196407143349414,
(Point(row=10, col=9), Player.white): 392846935006568137,
(Point(row=10, col=10), Player.black): 5502109392080621468,
(Point(row=10, col=10), Player.white): 8203674047163161602,
(Point(row=10, col=11), Player.black): 8204569383492405547,
(Point(row=10, col=11), Player.white): 7856323060487033755,
(Point(row=10, col=12), Player.black): 4312995889218800335,
(Point(row=10, col=12), Player.white): 1309294955384986112,
(Point(row=10, col=13), Player.black): 6596319202406351081,
(Point(row=10, col=13), Player.white): 8472831084798519156,
(Point(row=10, col=14), Player.black): 7648621437688537580,
(Point(row=10, col=14), Player.white): 8298210324352215352,
(Point(row=10, col=15), Player.black): 7902326659402518162,
(Point(row=10, col=15), Player.white): 1026162954916652360,
(Point(row=10, col=16), Player.black): 453485620993637077,
(Point(row=10, col=16), Player.white): 5025150808266618120,
(Point(row=10, col=17), Player.black): 1368378052776399459,
(Point(row=10, col=17), Player.white): 5541073040166699043,
(Point(row=10, col=18), Player.black): 7371506648251071272,
(Point(row=10, col=18), Player.white): 4032893015872145474,
(Point(row=10, col=19), Player.black): 6643168683325655773,
(Point(row=10, col=19), Player.white): 4009809049430315338,
(Point(row=11, col=1), Player.black): 3096446795331724776,
(Point(row=11, col=1), Player.white): 2633382253148960257,
(Point(row=11, col=2), Player.black): 8998600115078807949,
(Point(row=11, col=2), Player.white): 6713751278338346518,
(Point(row=11, col=3), Player.black): 5208695325110140904,
(Point(row=11, col=3), Player.white): 1082179704516872692,
(Point(row=11, col=4), Player.black): 2523720905537884611,
(Point(row=11, col=4), Player.white): 3884059083352746776,
(Point(row=11, col=5), Player.black): 8545932761490838754,
(Point(row=11, col=5), Player.white): 245119635031855594,
(Point(row=11, col=6), Player.black): 1768635026383023070,
(Point(row=11, col=6), Player.white): 5181208296292712804,
(Point(row=11, col=7), Player.black): 5573685131613925947,
(Point(row=11, col=7), Player.white): 9163096949906583680,
(Point(row=11, col=8), Player.black): 3184510654753152217,
(Point(row=11, col=8), Player.white): 8161887973309818925,
(Point(row=11, col=9), Player.black): 4589957902412425606,
(Point(row=11, col=9), Player.white): 6614686928272258750,
(Point(row=11, col=10), Player.black): 1434970922067050951,
(Point(row=11, col=10), Player.white): 1542376642545734726,
(Point(row=11, col=11), Player.black): 3961857311564892297,
(Point(row=11, col=11), Player.white): 1749905237812230915,
(Point(row=11, col=12), Player.black): 2144619526750562568,
(Point(row=11, col=12), Player.white): 3221030485317486750,
(Point(row=11, col=13), Player.black): 4041826991333486485,
(Point(row=11, col=13), Player.white): 5942340235373680092,
(Point(row=11, col=14), Player.black): 9002038659705724757,
(Point(row=11, col=14), Player.white): 7138245409904158102,
(Point(row=11, col=15), Player.black): 5838259029170475524,
(Point(row=11, col=15), Player.white): 5672234601146253184,
(Point(row=11, col=16), Player.black): 1045733023448870563,
(Point(row=11, col=16), Player.white): 679611892779038914,
(Point(row=11, col=17), Player.black): 1298269724157302892,
(Point(row=11, col=17), Player.white): 5800391453045182497,
(Point(row=11, col=18), Player.black): 7223484368370892141,
(Point(row=11, col=18), Player.white): 5013843226800780409,
(Point(row=11, col=19), Player.black): 6717520096129358646,
(Point(row=11, col=19), Player.white): 3979410374790927324,
(Point(row=12, col=1), Player.black): 4237432067300266239,
(Point(row=12, col=1), Player.white): 1900326310330401594,
(Point(row=12, col=2), Player.black): 6020336884909591346,
(Point(row=12, col=2), Player.white): 4978369381451594153,
(Point(row=12, col=3), Player.black): 2875147101496747986,
(Point(row=12, col=3), Player.white): 5232195392172726139,
(Point(row=12, col=4), Player.black): 4160918800097480322,
(Point(row=12, col=4), Player.white): 4446606576908106867,
(Point(row=12, col=5), Player.black): 301901673100119914,
(Point(row=12, col=5), Player.white): 1381555220051745289,
(Point(row=12, col=6), Player.black): 5369120539341183152,
(Point(row=12, col=6), Player.white): 644216968574007553,
(Point(row=12, col=7), Player.black): 7146906404801466533,
(Point(row=12, col=7), Player.white): 7370308202015538185,
(Point(row=12, col=8), Player.black): 5146719683181996868,
(Point(row=12, col=8), Player.white): 1638481902956858698,
(Point(row=12, col=9), Player.black): 2966254449544871752,
(Point(row=12, col=9), Player.white): 7166366522903388539,
(Point(row=12, col=10), Player.black): 8669835952454086115,
(Point(row=12, col=10), Player.white): 3999739516462966514,
(Point(row=12, col=11), Player.black): 855708420875865809,
(Point(row=12, col=11), Player.white): 7828618074418290839,
(Point(row=12, col=12), Player.black): 5044662642018461692,
(Point(row=12, col=12), Player.white): 3554905510548596463,
(Point(row=12, col=13), Player.black): 2423221727702418569,
(Point(row=12, col=13), Player.white): 7195366456353503774,
(Point(row=12, col=14), Player.black): 7961160117947599979,
(Point(row=12, col=14), Player.white): 4514564854734422812,
(Point(row=12, col=15), Player.black): 8303001813249370739,
(Point(row=12, col=15), Player.white): 304459573680012461,
(Point(row=12, col=16), Player.black): 379920350548093466,
(Point(row=12, col=16), Player.white): 7347478704353768904,
(Point(row=12, col=17), Player.black): 534784805381573035,
(Point(row=12, col=17), Player.white): 4491128130790950050,
(Point(row=12, col=18), Player.black): 8137182860817142099,
(Point(row=12, col=18), Player.white): 7620379291808426848,
(Point(row=12, col=19), Player.black): 1534801273780498768,
(Point(row=12, col=19), Player.white): 4906197558596922854,
(Point(row=13, col=1), Player.black): 628519232311208342,
(Point(row=13, col=1), Player.white): 9012322733551751208,
(Point(row=13, col=2), Player.black): 8968880824205860309,
(Point(row=13, col=2), Player.white): 4265106643919859481,
(Point(row=13, col=3), Player.black): 7172697365550842776,
(Point(row=13, col=3), Player.white): 7788270755108520750,
(Point(row=13, col=4), Player.black): 5934746258269661794,
(Point(row=13, col=4), Player.white): 1709526351801541778,
(Point(row=13, col=5), Player.black): 919479787036723555,
(Point(row=13, col=5), Player.white): 4617408397607156904,
(Point(row=13, col=6), Player.black): 3017909277574588438,
(Point(row=13, col=6), Player.white): 7714590061989806584,
(Point(row=13, col=7), Player.black): 7314539837012564425,
(Point(row=13, col=7), Player.white): 94478039237250124,
(Point(row=13, col=8), Player.black): 8855572335534100334,
(Point(row=13, col=8), Player.white): 3314306729734646544,
(Point(row=13, col=9), Player.black): 6383574213562494992,
(Point(row=13, col=9), Player.white): 3757348259985433820,
(Point(row=13, col=10), Player.black): 3218528706928942381,
(Point(row=13, col=10), Player.white): 8755498210776823764,
(Point(row=13, col=11), Player.black): 3871250504067913307,
(Point(row=13, col=11), Player.white): 4308010933751666358,
(Point(row=13, col=12), Player.black): 5447423350043419269,
(Point(row=13, col=12), Player.white): 4492692585168184810,
(Point(row=13, col=13), Player.black): 6621210539793049111,
(Point(row=13, col=13), Player.white): 4594886184845142207,
(Point(row=13, col=14), Player.black): 862191208922178062,
(Point(row=13, col=14), Player.white): 7092871381086951781,
(Point(row=13, col=15), Player.black): 6866995587800386541,
(Point(row=13, col=15), Player.white): 6639527157322646114,
(Point(row=13, col=16), Player.black): 2888342856649783777,
(Point(row=13, col=16), Player.white): 1820400694587150928,
(Point(row=13, col=17), Player.black): 8154638980954009758,
(Point(row=13, col=17), Player.white): 1107948918448171521,
(Point(row=13, col=18), Player.black): 7349691568439432277,
(Point(row=13, col=18), Player.white): 4143663161241702655,
(Point(row=13, col=19), Player.black): 8222110816052728820,
(Point(row=13, col=19), Player.white): 2975311221132972775,
(Point(row=14, col=1), Player.black): 1863961998848803501,
(Point(row=14, col=1), Player.white): 7104725216272792042,
(Point(row=14, col=2), Player.black): 5130133929769739441,
(Point(row=14, col=2), Player.white): 5515413891651105130,
(Point(row=14, col=3), Player.black): 6375493983351865171,
(Point(row=14, col=3), Player.white): 1463469150697238708,
(Point(row=14, col=4), Player.black): 252795421472879551,
(Point(row=14, col=4), Player.white): 7952614238200037422,
(Point(row=14, col=5), Player.black): 9192476257961760960,
(Point(row=14, col=5), Player.white): 7237280507776858765,
(Point(row=14, col=6), Player.black): 3226782926372388843,
(Point(row=14, col=6), Player.white): 6555655830423192450,
(Point(row=14, col=7), Player.black): 4941625599644320622,
(Point(row=14, col=7), Player.white): 1550340492340576377,
(Point(row=14, col=8), Player.black): 8596556921098917884,
(Point(row=14, col=8), Player.white): 5399793192187999531,
(Point(row=14, col=9), Player.black): 1913157374311153115,
(Point(row=14, col=9), Player.white): 2090100440980625023,
(Point(row=14, col=10), Player.black): 3884906940515336836,
(Point(row=14, col=10), Player.white): 1094744694082516003,
(Point(row=14, col=11), Player.black): 511237614993445060,
(Point(row=14, col=11), Player.white): 870696949153471276,
(Point(row=14, col=12), Player.black): 8112979758197630309,
(Point(row=14, col=12), Player.white): 4104435499041060474,
(Point(row=14, col=13), Player.black): 4349505038784158528,
(Point(row=14, col=13), Player.white): 3687577306600975975,
(Point(row=14, col=14), Player.black): 6284350769554616357,
(Point(row=14, col=14), Player.white): 4476656798978243543,
(Point(row=14, col=15), Player.black): 6794476950564796792,
(Point(row=14, col=15), Player.white): 7565334783614946406,
(Point(row=14, col=16), Player.black): 8650038375122719192,
(Point(row=14, col=16), Player.white): 5330719931269832842,
(Point(row=14, col=17), Player.black): 5474151832658659964,
(Point(row=14, col=17), Player.white): 8898789792690767615,
(Point(row=14, col=18), Player.black): 9107979683561190367,
(Point(row=14, col=18), Player.white): 7386253956306785607,
(Point(row=14, col=19), Player.black): 9123697209982643191,
(Point(row=14, col=19), Player.white): 6974485572415237028,
(Point(row=15, col=1), Player.black): 4337644991122521375,
(Point(row=15, col=1), Player.white): 1304217693852367506,
(Point(row=15, col=2), Player.black): 3915759986910688774,
(Point(row=15, col=2), Player.white): 3021592151500229552,
(Point(row=15, col=3), Player.black): 9152414171465063482,
(Point(row=15, col=3), Player.white): 5011221247388892469,
(Point(row=15, col=4), Player.black): 1663741174692729438,
(Point(row=15, col=4), Player.white): 970787474517028793,
(Point(row=15, col=5), Player.black): 6054076358328179929,
(Point(row=15, col=5), Player.white): 8873489489970631395,
(Point(row=15, col=6), Player.black): 1930655201443325074,
(Point(row=15, col=6), Player.white): 3755209027942385337,
(Point(row=15, col=7), Player.black): 2800688815387392340,
(Point(row=15, col=7), Player.white): 971278159515243294,
(Point(row=15, col=8), Player.black): 5723849708320704378,
(Point(row=15, col=8), Player.white): 3062832581215699049,
(Point(row=15, col=9), Player.black): 5008650182265349991,
(Point(row=15, col=9), Player.white): 7970740444508237001,
(Point(row=15, col=10), Player.black): 1945630331907886676,
(Point(row=15, col=10), Player.white): 7837072882802447145,
(Point(row=15, col=11), Player.black): 1559032358509347907,
(Point(row=15, col=11), Player.white): 8195124455274423927,
(Point(row=15, col=12), Player.black): 3080717012381103321,
(Point(row=15, col=12), Player.white): 2116189025776235672,
(Point(row=15, col=13), Player.black): 4438915457264671544,
(Point(row=15, col=13), Player.white): 3544646783687733085,
(Point(row=15, col=14), Player.black): 9182647124447361118,
(Point(row=15, col=14), Player.white): 7152109489993458264,
(Point(row=15, col=15), Player.black): 7599834199622978070,
(Point(row=15, col=15), Player.white): 4036331458128460820,
(Point(row=15, col=16), Player.black): 1415266564916260710,
(Point(row=15, col=16), Player.white): 5796893193674302860,
(Point(row=15, col=17), Player.black): 5539890679357278387,
(Point(row=15, col=17), Player.white): 3248025904750380682,
(Point(row=15, col=18), Player.black): 9194567845794524408,
(Point(row=15, col=18), Player.white): 6026377756896667687,
(Point(row=15, col=19), Player.black): 6868019737008349080,
(Point(row=15, col=19), Player.white): 6471698776878323096,
(Point(row=16, col=1), Player.black): 8991487232045549389,
(Point(row=16, col=1), Player.white): 5292450315230397106,
(Point(row=16, col=2), Player.black): 7723531242393393408,
(Point(row=16, col=2), Player.white): 8640602358328257048,
(Point(row=16, col=3), Player.black): 36045184498053208,
(Point(row=16, col=3), Player.white): 2556814445019739422,
(Point(row=16, col=4), Player.black): 5422600141578148808,
(Point(row=16, col=4), Player.white): 3641709947971188688,
(Point(row=16, col=5), Player.black): 3941905135271360926,
(Point(row=16, col=5), Player.white): 5600531804657799353,
(Point(row=16, col=6), Player.black): 8560618806127429120,
(Point(row=16, col=6), Player.white): 7554582082083704046,
(Point(row=16, col=7), Player.black): 711440492874123203,
(Point(row=16, col=7), Player.white): 5720976881304707141,
(Point(row=16, col=8), Player.black): 9087984682276420089,
(Point(row=16, col=8), Player.white): 8657421347777767052,
(Point(row=16, col=9), Player.black): 7197332612673537633,
(Point(row=16, col=9), Player.white): 1828614306412416161,
(Point(row=16, col=10), Player.black): 4545490557918492692,
(Point(row=16, col=10), Player.white): 532147880755185042,
(Point(row=16, col=11), Player.black): 8411029884315206204,
(Point(row=16, col=11), Player.white): 4652796430036246273,
(Point(row=16, col=12), Player.black): 3507493795367353287,
(Point(row=16, col=12), Player.white): 6470926671418166497,
(Point(row=16, col=13), Player.black): 2063273199615108609,
(Point(row=16, col=13), Player.white): 446552949043624027,
(Point(row=16, col=14), Player.black): 1084184415353974420,
(Point(row=16, col=14), Player.white): 2132042809237663779,
(Point(row=16, col=15), Player.black): 1164947278522434713,
(Point(row=16, col=15), Player.white): 8113913068687439739,
(Point(row=16, col=16), Player.black): 4738038481647783630,
(Point(row=16, col=16), Player.white): 320417080161112040,
(Point(row=16, col=17), Player.black): 6828331179952690545,
(Point(row=16, col=17), Player.white): 7521943669316976736,
(Point(row=16, col=18), Player.black): 3665889502745802761,
(Point(row=16, col=18), Player.white): 320329133695211709,
(Point(row=16, col=19), Player.black): 5796780711744761775,
(Point(row=16, col=19), Player.white): 2676583062155451949,
(Point(row=17, col=1), Player.black): 3484635079624457265,
(Point(row=17, col=1), Player.white): 6069213419291382027,
(Point(row=17, col=2), Player.black): 6835249005884985106,
(Point(row=17, col=2), Player.white): 1714916385018676562,
(Point(row=17, col=3), Player.black): 5149738028957454524,
(Point(row=17, col=3), Player.white): 6819673557238720111,
(Point(row=17, col=4), Player.black): 6846920340321586680,
(Point(row=17, col=4), Player.white): 5650517261180495057,
(Point(row=17, col=5), Player.black): 6859895030673951287,
(Point(row=17, col=5), Player.white): 3277513334765081493,
(Point(row=17, col=6), Player.black): 1781421777867771146,
(Point(row=17, col=6), Player.white): 3898205830612768585,
(Point(row=17, col=7), Player.black): 2862248799182911371,
(Point(row=17, col=7), Player.white): 4885801817779977498,
(Point(row=17, col=8), Player.black): 640910451840955527,
(Point(row=17, col=8), Player.white): 6184530106675719419,
(Point(row=17, col=9), Player.black): 8388784745661908862,
(Point(row=17, col=9), Player.white): 6394645469104644915,
(Point(row=17, col=10), Player.black): 8356532995058933724,
(Point(row=17, col=10), Player.white): 35742002312566326,
(Point(row=17, col=11), Player.black): 7981877101829988437,
(Point(row=17, col=11), Player.white): 8135915718523895351,
(Point(row=17, col=12), Player.black): 7759432412829636136,
(Point(row=17, col=12), Player.white): 5316827354945357851,
(Point(row=17, col=13), Player.black): 4292051684425464986,
(Point(row=17, col=13), Player.white): 3625805327819456798,
(Point(row=17, col=14), Player.black): 6961131621182590848,
(Point(row=17, col=14), Player.white): 7745696647646107839,
(Point(row=17, col=15), Player.black): 6209362538359629951,
(Point(row=17, col=15), Player.white): 8099791659260235772,
(Point(row=17, col=16), Player.black): 257615418633975847,
(Point(row=17, col=16), Player.white): 8769897204170535168,
(Point(row=17, col=17), Player.black): 8464341603580847613,
(Point(row=17, col=17), Player.white): 5455593262036144966,
(Point(row=17, col=18), Player.black): 956745645481024571,
(Point(row=17, col=18), Player.white): 5230033481442448385,
(Point(row=17, col=19), Player.black): 7178070736944945619,
(Point(row=17, col=19), Player.white): 1767245828672898173,
(Point(row=18, col=1), Player.black): 1488274364920839354,
(Point(row=18, col=1), Player.white): 2736154559139493439,
(Point(row=18, col=2), Player.black): 2707528031970491560,
(Point(row=18, col=2), Player.white): 3590892185812303723,
(Point(row=18, col=3), Player.black): 7955289994222595950,
(Point(row=18, col=3), Player.white): 4893715292640171992,
(Point(row=18, col=4), Player.black): 9073979189337898629,
(Point(row=18, col=4), Player.white): 5188402570778007682,
(Point(row=18, col=5), Player.black): 6100171414922106651,
(Point(row=18, col=5), Player.white): 3097614018122640439,
(Point(row=18, col=6), Player.black): 2716657759579873547,
(Point(row=18, col=6), Player.white): 2211120146174758744,
(Point(row=18, col=7), Player.black): 1250011601734050975,
(Point(row=18, col=7), Player.white): 6044916952500492878,
(Point(row=18, col=8), Player.black): 2791686430399746459,
(Point(row=18, col=8), Player.white): 9024779735456073531,
(Point(row=18, col=9), Player.black): 1180528573623622328,
(Point(row=18, col=9), Player.white): 3195794075210655928,
(Point(row=18, col=10), Player.black): 5352540450218185348,
(Point(row=18, col=10), Player.white): 8324497323245206107,
(Point(row=18, col=11), Player.black): 1561159839581437198,
(Point(row=18, col=11), Player.white): 1150004927503836540,
(Point(row=18, col=12), Player.black): 3592193043639446072,
(Point(row=18, col=12), Player.white): 5961001420408436479,
(Point(row=18, col=13), Player.black): 9170590488411860263,
(Point(row=18, col=13), Player.white): 4393550962680193281,
(Point(row=18, col=14), Player.black): 4459136446402603669,
(Point(row=18, col=14), Player.white): 3678532975382333317,
(Point(row=18, col=15), Player.black): 3407376268502759002,
(Point(row=18, col=15), Player.white): 7004392700522659827,
(Point(row=18, col=16), Player.black): 7025224267149331137,
(Point(row=18, col=16), Player.white): 4266828283927072776,
(Point(row=18, col=17), Player.black): 3820477092415434327,
(Point(row=18, col=17), Player.white): 6987910699423728036,
(Point(row=18, col=18), Player.black): 4404475055628968454,
(Point(row=18, col=18), Player.white): 7263273303364001536,
(Point(row=18, col=19), Player.black): 3861799992048340088,
(Point(row=18, col=19), Player.white): 4745260880005722357,
(Point(row=19, col=1), Player.black): 3204765945343917947,
(Point(row=19, col=1), Player.white): 6471475421459578330,
(Point(row=19, col=2), Player.black): 7217255437762615997,
(Point(row=19, col=2), Player.white): 2042324360141252017,
(Point(row=19, col=3), Player.black): 1573271722322701274,
(Point(row=19, col=3), Player.white): 4251862130437553808,
(Point(row=19, col=4), Player.black): 4874022962146164970,
(Point(row=19, col=4), Player.white): 5580404813819044185,
(Point(row=19, col=5), Player.black): 3353651861083802778,
(Point(row=19, col=5), Player.white): 4365684057282895693,
(Point(row=19, col=6), Player.black): 7183087483554163443,
(Point(row=19, col=6), Player.white): 6124351538038844700,
(Point(row=19, col=7), Player.black): 552362206998334380,
(Point(row=19, col=7), Player.white): 4001317785718717269,
(Point(row=19, col=8), Player.black): 2460342757764868330,
(Point(row=19, col=8), Player.white): 4363761240248314342,
(Point(row=19, col=9), Player.black): 555305416298891847,
(Point(row=19, col=9), Player.white): 8119791214296736653,
(Point(row=19, col=10), Player.black): 3279046237149845916,
(Point(row=19, col=10), Player.white): 7923879540147169965,
(Point(row=19, col=11), Player.black): 4458693056595344855,
(Point(row=19, col=11), Player.white): 3651817262257460536,
(Point(row=19, col=12), Player.black): 5185309799039877575,
(Point(row=19, col=12), Player.white): 8262961320022098574,
(Point(row=19, col=13), Player.black): 2439407622347387200,
(Point(row=19, col=13), Player.white): 8796788343413577457,
(Point(row=19, col=14), Player.black): 6880826668981171262,
(Point(row=19, col=14), Player.white): 4043062046885032638,
(Point(row=19, col=15), Player.black): 4955624846223592658,
(Point(row=19, col=15), Player.white): 6576542069929904305,
(Point(row=19, col=16), Player.black): 7983787934871059066,
(Point(row=19, col=16), Player.white): 2035781571561811243,
(Point(row=19, col=17), Player.black): 5069116270545998666,
(Point(row=19, col=17), Player.white): 6753388973612546863,
(Point(row=19, col=18), Player.black): 2454745758168160778,
(Point(row=19, col=18), Player.white): 8486626821235143188,
(Point(row=19, col=19), Player.black): 8788385670717328555,
(Point(row=19, col=19), Player.white): 8976707363182962042,
}
# Zobrist hash value for a board with no stones on it.
EMPTY_BOARD = 0
# src/feature_engineering.py (repo: philippschmalen/ds-workbench)
"""
Methods to create features based on available columns.
Example: Median split on a column
"""
def median_split(df, columns, label_high='high', label_low='low'):
    """Replace numeric values in *columns* with a binary high/low label.

    For each listed column, values greater than or equal to the column
    median become *label_high*; everything else becomes *label_low*.
    Mutates *df* in place and also returns it.

    Args:
        df: pd.DataFrame containing the columns to split.
        columns: list of numeric column names.
        label_high: label assigned to values >= the median.
        label_low: label assigned to all remaining values.

    Returns:
        The same DataFrame with the listed columns relabelled.
    """
    import pandas as pd  # the module header omits this import; pull it in locally

    assert isinstance(columns, list), f"Columns has to be a list. Instead it is {type(columns)}"
    assert isinstance(df, pd.DataFrame), f"df has to be a pd.DataFrame. Instead it is {type(df)}"

    # Compute all medians once, up front, instead of per column in the loop.
    median_series = df[columns].median()
    assert set(columns).issubset(median_series.index), "Not all columns have numeric dtype to calculate .median()"

    for column in columns:
        # Two-step relabel: tag the high half first, then sweep the rest low.
        df[column] = df[column].mask(df[column] >= median_series[column], label_high)
        df[column] = df[column].mask(df[column] != label_high, label_low)
    return df
| """
Methods to create features based on available columns.
Example: Median split on a column
"""
def median_split(df, columns, label_high='high', label_low='low'):
"""Split numeric column into two distinct sets based on its median value"""
assert isinstance(columns, list), f"Columns has to be a list. Instead it is {type(columns)}"
assert isinstance(df, pd.DataFrame), f"df has to be a pd.DataFrame. Instead it is {type(df)}"
# calculate medians
median_series = df[columns].median()
assert set(columns).issubset(median_series.index) , "Not all columns have numeric dtype to calculate .median()"
for column in columns:
# split on median into high and low category
df[column] = df[column].mask(df[column] >= median_series[column], label_high)
df[column] = df[column].mask(df[column] != label_high, label_low)
return df | en | 0.902018 | Methods to create features based on available columns. Example: Median split on a column Split numeric column into two distinct sets based on its median value # calculate medians # split on median into high and low category | 4.130095 | 4 |
source/x86/_run_qemu.py | imallett/MOSS | 2 | 6617163 | <reponame>imallett/MOSS
from subprocess import call
from scripts import _paths
def main():
    """Boot the MOSS VMDK disk image in 32-bit QEMU."""
    command = [
        _paths.qemu32,
        "-hda", _paths.vmdk,
    ]
    call(command)
# Script entry point: launch QEMU when run directly.
if __name__ == "__main__": main()
| from subprocess import call
from scripts import _paths
def main():
call([
_paths.qemu32,
"-hda",_paths.vmdk
])
if __name__ == "__main__": main() | none | 1 | 1.742032 | 2 | |
cp1_3_03_chess_bishop.py | kukgini/my-py | 0 | 6617164 | # 비숍 위치가 주어졌을 때 한번에 잡히지 않도록 체스 말을 놓을 수 있는 빈칸 갯수 반환
# 예:
# [D5] ==> 50
# [D5, E8, G2] => 42
#
# 힌트: ord 는 ascii code 를 반환해 줌
def code_to_pos(code):
    """Translate a board code like 'D5' into a 0-based (row, col) tuple."""
    col = ord(code[0]) - ord('A')  # file letter: A -> 0, B -> 1, ..., H -> 7
    row = int(code[1]) - 1         # rank digit is 1-based on the board
    return (row, col)
def move(board, position, direction):
    """Zero out every in-bounds cell from *position* along *direction*.

    Fixes vs. the original: the local variable ``next`` shadowed the
    builtin of the same name, and the walk recursed once per cell; this
    version iterates.  The printed trace (one 'move to (r, c)' line per
    visited position, one for the first out-of-bounds position, then
    'end of move') matches the original output exactly.
    """
    row, col = position
    while 0 <= row < 8 and 0 <= col < 8:
        print(f'move to {(row, col)}')
        board[row][col] = 0
        row += direction[0]
        col += direction[1]
    # mirror the original's final print for the out-of-bounds position
    print(f'move to {(row, col)}')
    print('end of move')
def make_board(n):
    """Create an n x n board of 1s (1 = empty/safe square), print it, return it.

    Fixes vs. the original: the row count was hard-coded to 8 regardless of
    *n* (identical result for the existing n=8 caller), and the sibling
    print_board call is inlined so the function is self-contained.
    """
    board = [[1] * n for _ in range(n)]
    for row in board:
        print(row)
    return board
def print_board(board):
    """Print each row of *board* on its own line (debug helper)."""
    for current_row in board:
        print(current_row)
def solution(bishops):
    """Count squares where a new piece cannot be captured by any bishop.

    Marks every cell on each bishop's four diagonals (including the
    bishop's own square) as unsafe, then counts the remaining 1-cells.
    """
    board = make_board(8)
    deltas = ((1, 1), (-1, 1), (-1, -1), (1, -1))
    for code in bishops:
        print(f'bishop={code}')
        for delta in deltas:
            move(board, code_to_pos(code), delta)
    print_board(board)
    return sum(sum(row) for row in board)
if __name__ == "__main__":
    # Demo runs: expected answers are 42 for the three bishops and 50 for one.
    bishops = ["D5","E8","G2"]
    print(f'bishops={bishops}, answer={solution(bishops)}')
    bishops = ["D5"]
    print(f'bishops={bishops}, answer={solution(bishops)}')
# 예:
# [D5] ==> 50
# [D5, E8, G2] => 42
#
# 힌트: ord 는 ascii code 를 반환해 줌
def code_to_pos(code):
x = ord(code[0]) - ord('A') # A = 0, B = 1, ..., H = 7
y = int(code[1]) - 1 # board 의 좌표가 0 부터 시작하므로 1다 을 뺀다.
return (y, x)
def move(board, position, direction):
print(f'move to {position}')
if 0 <= position[0] < 8 and 0 <= position[1] < 8:
board[position[0]][position[1]] = 0
next = (position[0] + direction[0], position[1] + direction[1])
move(board, next, direction)
else:
print('end of move')
def make_board(n):
board = [[1]*n for _ in range(8)]
print_board(board)
return board
def print_board(board):
for row in board: print(row)
def solution(bishops):
board = make_board(8)
directions = [[1,1], [-1,1], [-1,-1], [1,-1]]
for bishop in bishops:
print(f'bishop={bishop}')
for direction in directions:
move(board, code_to_pos(bishop), direction)
print_board(board)
return sum([sum(x) for x in board])
if __name__ == "__main__":
bishops = ["D5","E8","G2"]
print(f'bishops={bishops}, answer={solution(bishops)}')
bishops = ["D5"]
print(f'bishops={bishops}, answer={solution(bishops)}') | ko | 0.998913 | # 비숍 위치가 주어졌을 때 한번에 잡히지 않도록 체스 말을 놓을 수 있는 빈칸 갯수 반환 # 예: # [D5] ==> 50 # [D5, E8, G2] => 42 # # 힌트: ord 는 ascii code 를 반환해 줌 # A = 0, B = 1, ..., H = 7 # board 의 좌표가 0 부터 시작하므로 1다 을 뺀다. | 3.79046 | 4 |
homework10.py | WillSkywalker/core-python-programming | 1 | 6617165 | #!/usr/bin/env python
# Author: <NAME>
# Core Python Pogramming - Homework 10
import math
def refined_open(filename, mode='r'):
    """Open *filename* and return the file object, or None on failure.

    Fixes vs. the original: also traps IOError (missing file, bad
    permissions), which the original let escape despite being the most
    common open() failure, and uses the ``except ... as`` syntax that is
    valid on both Python 2.6+ and Python 3.
    """
    try:
        fhand = open(filename, mode)
    except (IOError, ValueError, TypeError):
        fhand = None
    return fhand
def safe_input():
    """raw_input() wrapper that returns None on EOF or Ctrl-C (Python 2 only)."""
    try:
        output = raw_input()
    except (EOFError, KeyboardInterrupt), e:
        output = None
    return output
def safe_sqrt(num):
    """math.sqrt that yields a pure-imaginary complex root for negative input.

    Fixes vs. the original: uses the ``except`` syntax valid on both
    Python 2.6+ and Python 3; the bound exception object was unused and
    is dropped.
    """
    try:
        output = math.sqrt(num)
    except ValueError:
        # sqrt of a negative real: return the positive imaginary root
        output = complex(imag=math.sqrt(abs(num)))
    return output
if __name__ == '__main__':
    # Python 2 print statement: prompt for a number and print its safe sqrt.
    print safe_sqrt(int(raw_input('Number: ')))
# Author: <NAME>
# Core Python Pogramming - Homework 10
import math
def refined_open(filename, mode='r'):
try:
fhand = open(filename, mode)
except (ValueError, TypeError), e:
fhand = None
return fhand
def safe_input():
try:
output = raw_input()
except (EOFError, KeyboardInterrupt), e:
output = None
return output
def safe_sqrt(num):
try:
output = math.sqrt(num)
except ValueError, e:
output = complex(imag=math.sqrt(abs(num)))
return output
if __name__ == '__main__':
print safe_sqrt(int(raw_input('Number: '))) | en | 0.46614 | #!/usr/bin/env python # Author: <NAME> # Core Python Pogramming - Homework 10 | 3.893579 | 4 |
src/darwpy/__init__.py | andreaskranis/darwpy | 0 | 6617166 | #import numpy as np
from .operators import mutate,recombine,strategy_breed,init_pop,sort_pop,evaluate
from .algos import GA
from .firefly import FireflyPop
from .problems import get_problems
| #import numpy as np
from .operators import mutate,recombine,strategy_breed,init_pop,sort_pop,evaluate
from .algos import GA
from .firefly import FireflyPop
from .problems import get_problems
| en | 0.779721 | #import numpy as np | 0.970448 | 1 |
Source/Vehicle.py | Arkhorse/wot-Camo-Mod | 0 | 6617167 | <filename>Source/Vehicle.py
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/Vehicle.py
import logging
import math
import random
import weakref
from collections import namedtuple
import BigWorld
import Math
import NetworkFilters
import WoT
import AreaDestructibles
import ArenaType
import BattleReplay
import DestructiblesCache
import TriggersManager
import constants
import physics_shared
from AvatarInputHandler.aih_constants import ShakeReason
from TriggersManager import TRIGGER_TYPE
from VehicleEffects import DamageFromShotDecoder
from constants import SPT_MATKIND
from constants import VEHICLE_HIT_EFFECT, VEHICLE_SIEGE_STATE
from gui.battle_control.battle_constants import FEEDBACK_EVENT_ID as _GUI_EVENT_ID, VEHICLE_VIEW_STATE
from gun_rotation_shared import decodeGunAngles
from helpers import dependency
from helpers.EffectMaterialCalculation import calcSurfaceMaterialNearPoint
from helpers.EffectsList import SoundStartParam
from items import vehicles
from material_kinds import EFFECT_MATERIAL_INDEXES_BY_NAMES, EFFECT_MATERIALS
from skeletons.gui.battle_session import IBattleSessionProvider
from skeletons.gui.lobby_context import ILobbyContext
from soft_exception import SoftException
from special_sound import setSpecialVoice
from vehicle_systems import appearance_cache
from vehicle_systems.entity_components.battle_abilities_component import BattleAbilitiesComponent
from vehicle_systems.stricted_loading import loadingPriority
from vehicle_systems.tankStructure import TankPartNames, TankPartIndexes, TankSoundObjectsIndexes
_logger = logging.getLogger(__name__)
# Normalized collision-energy thresholds separating light and heavy impact effects.
LOW_ENERGY_COLLISION_D = 0.3
HIGH_ENERGY_COLLISION_D = 0.6
# vehicle id -> weakref to a Vehicle awaiting a possible respawn rebuild.
_g_waitingVehicle = dict()
class _Vector4Provider(object):
    """Holds a zero Math.Vector4 exposed through a read-only ``value``.

    Bug fix: the decompiled source named the constructor ``__int__``; the
    method assigns instance state and returns nothing, so it is plainly
    ``__init__`` (as ``__int__`` it would both fail as a converter and
    leave ``_v`` unset).
    """
    __slots__ = ('_v',)

    def __init__(self):
        self._v = Math.Vector4(0.0, 0.0, 0.0, 0.0)

    @property
    def value(self):
        return self._v
class _VehicleSpeedProvider(object):
    """Wraps a Math.Vector4Basic; ``reset`` restores a fresh zero vector."""
    __slots__ = ('__value',)

    def __init__(self):
        self.__value = Math.Vector4Basic()

    def set(self, val):
        # Replace the wrapped vector wholesale (e.g. with a filter output).
        self.__value = val

    def reset(self):
        self.__value = Math.Vector4Basic()

    @property
    def value(self):
        # Underlying vector payload of the wrapped provider.
        return self.__value.value
# Extended per-segment collision result: distance along the ray, cosine of
# the hit angle, resolved material info and the hit component's name.
SegmentCollisionResultExt = namedtuple('SegmentCollisionResultExt', ('dist',
 'hitAngleCos',
 'matInfo',
 'compName'))
# Stun timing snapshot; values are compared against BigWorld.serverTime().
StunInfo = namedtuple('StunInfo', ('startTime',
 'endTime',
 'duration',
 'totalTime'))
# Mixin components every Vehicle entity is composed with (see Vehicle.__init__).
VEHICLE_COMPONENTS = {BattleAbilitiesComponent}
class Vehicle(BigWorld.Entity, BattleAbilitiesComponent):
isEnteringWorld = property(lambda self: self.__isEnteringWorld)
isTurretDetached = property(lambda self: constants.SPECIAL_VEHICLE_HEALTH.IS_TURRET_DETACHED(self.health) and self.__turretDetachmentConfirmed)
isTurretMarkedForDetachment = property(lambda self: constants.SPECIAL_VEHICLE_HEALTH.IS_TURRET_DETACHED(self.health))
isTurretDetachmentConfirmationNeeded = property(lambda self: not self.__turretDetachmentConfirmed)
hasMovingFlags = property(lambda self: self.engineMode is not None and self.engineMode[1] & 3)
guiSessionProvider = dependency.descriptor(IBattleSessionProvider)
lobbyContext = dependency.descriptor(ILobbyContext)
    @property
    def speedInfo(self):
        # Filtered speed-vector provider (see _VehicleSpeedProvider).
        return self.__speedInfo
    @property
    def isWheeledTech(self):
        # True for wheeled vehicles (tag set on the vehicle type definition).
        return 'wheeledVehicle' in self.typeDescriptor.type.tags
    @property
    def wheelsScrollSmoothed(self):
        # Latency-smoothed scroll value per wheel; None before filters exist.
        if self.__wheelsScrollFilter is not None:
            return [ scrollFilter.output(BigWorld.time()) for scrollFilter in self.__wheelsScrollFilter ]
        else:
            return
    @property
    def wheelsScrollFilters(self):
        # Raw per-wheel scroll filters (None for non-wheeled vehicles).
        return self.__wheelsScrollFilter
    @property
    def wheelsSteeringSmoothed(self):
        # Latency-smoothed steering value per steerable wheel; None before filters exist.
        if self.__wheelsSteeringFilter is not None:
            return [ steeringFilter.output(BigWorld.time()) for steeringFilter in self.__wheelsSteeringFilter ]
        else:
            return
    @property
    def wheelsSteeringFilters(self):
        # Raw per-wheel steering filters (None for non-wheeled vehicles).
        return self.__wheelsSteeringFilter
def getBounds(self, partIdx):
return self.appearance.getBounds(partIdx) if self.appearance is not None else (Math.Vector3(0.0, 0.0, 0.0), Math.Vector3(0.0, 0.0, 0.0), 0)
    def getSpeed(self):
        # Scalar speed: first component of the filtered speed vector.
        return self.__speedInfo.value[0]
    def __init__(self):
        """Initialise entity state and register in the module respawn registry."""
        global _g_waitingVehicle
        for comp in VEHICLE_COMPONENTS:
            comp.__init__(self)
        self.proxy = weakref.proxy(self)
        self.extras = {}
        self.typeDescriptor = None
        self.appearance = None
        self.isPlayerVehicle = False
        self.isStarted = False
        self.__isEnteringWorld = False
        self.__turretDetachmentConfirmed = False
        self.__speedInfo = _VehicleSpeedProvider()
        # Weak registration so respawnVehicle() can find us without keeping us alive.
        _g_waitingVehicle[self.id] = weakref.ref(self)
        self.respawnCompactDescr = None
        self.respawnOutfitCompactDescr = None
        self.__cachedStunInfo = StunInfo(0.0, 0.0, 0.0, 0.0)
        self.__burnoutStarted = False
        self.__handbrakeFired = False
        self.__wheelsScrollFilter = None
        self.__wheelsSteeringFilter = None
        return
def __del__(self):
if _g_waitingVehicle.has_key(self.id):
del _g_waitingVehicle[self.id]
    def reload(self):
        """Dev helper: re-read vehicle item defs and rebuild this vehicle's visuals."""
        if self.isStarted:
            self.stopVisual()
        vehicles.reload()
        self.respawn(self.publicInfo.compDescr)
    def prerequisites(self, respawnCompactDescr=None):
        """Build the prerequisite resource list for creating the appearance.

        Returns (loadingPriority, prereqs), or None when the descriptor is
        already set and no respawn rebuild was requested.
        """
        if self.respawnCompactDescr is not None:
            respawnCompactDescr = self.respawnCompactDescr
        # Respawn descriptors are one-shot: consume and clear them here.
        self.isCrewActive = True
        self.respawnCompactDescr = None
        if self.respawnOutfitCompactDescr is not None:
            outfitDescr = self.respawnOutfitCompactDescr
            self.respawnOutfitCompactDescr = None
        else:
            outfitDescr = self.publicInfo.outfit
        if respawnCompactDescr is None and self.typeDescriptor is not None:
            return
        else:
            self.typeDescriptor = self.getDescr(respawnCompactDescr)
            # A non-None respawn descr forces the appearance cache to rebuild.
            forceReloading = respawnCompactDescr is not None
            self.appearance, _, prereqs = appearance_cache.createAppearance(self.id, self.typeDescriptor, self.health, self.isCrewActive, self.isTurretDetached, outfitDescr, forceReloading)
            return (loadingPriority(self.id), prereqs)
    def getDescr(self, respawnCompactDescr):
        """Return a VehicleDescr; a respawn descr also resets health to max."""
        if respawnCompactDescr is not None:
            descr = vehicles.VehicleDescr(respawnCompactDescr)
            self.health = descr.maxHealth
            return descr
        else:
            return vehicles.VehicleDescr(compactDescr=_stripVehCompDescrIfRoaming(self.publicInfo.compDescr))
    @staticmethod
    def respawnVehicle(vID, compactDescr=None, outfitCompactDescr=None):
        """Trigger a client-side rebuild of vehicle *vID* via its weak registry entry."""
        vehicleRef = _g_waitingVehicle.get(vID, None)
        if vehicleRef is not None:
            vehicle = vehicleRef()
            if vehicle is not None:
                vehicle.respawnCompactDescr = compactDescr
                vehicle.respawnOutfitCompactDescr = outfitCompactDescr
                if not BigWorld.entities.get(vID):
                    _logger.error('respawn vehicle: Vehicle ref is not None but entity does not exist anymore. Skip wg_respawn')
                else:
                    # Broad catch: a respawn failure must never crash the client.
                    try:
                        vehicle.wg_respawn()
                    except Exception:
                        _logger.error('respawn vehicle: Vehicle ref is not None but failed to call respawn: %s', vID)
        return
def __initAdditionalFilters(self):
self.__wheelsScrollFilter = None
self.__wheelsSteeringFilter = None
if self.typeDescriptor.chassis.generalWheelsAnimatorConfig is not None:
scrollableWheelsCount = self.typeDescriptor.chassis.generalWheelsAnimatorConfig.getWheelsCount()
self.__wheelsScrollFilter = []
for _ in range(scrollableWheelsCount):
self.__wheelsScrollFilter.append(NetworkFilters.FloatLatencyDelayingFilter())
self.__wheelsScrollFilter[-1].input(BigWorld.time(), 0.0)
steerableWheelsCount = self.typeDescriptor.chassis.generalWheelsAnimatorConfig.getSteerableWheelsCount()
self.__wheelsSteeringFilter = []
for _ in range(steerableWheelsCount):
self.__wheelsSteeringFilter.append(NetworkFilters.FloatLatencyDelayingFilter())
self.__wheelsSteeringFilter[-1].input(BigWorld.time(), 0.0)
return
    def onEnterWorld(self, prereqs):
        """BigWorld callback: the entity entered the client world."""
        self.__prereqs = prereqs
        self.__isEnteringWorld = True
        self.__prevDamageStickers = frozenset()
        self.__prevPublicStateModifiers = frozenset()
        self.targetFullBounds = True
        self.__initAdditionalFilters()
        player = BigWorld.player()
        player.vehicle_onEnterWorld(self)
        if self.isPlayerVehicle:
            # Own vehicle: request full server state and initialise the space.
            self.cell.sendStateToOwnClient()
            player.initSpace()
        self.__isEnteringWorld = False
        if self.respawnCompactDescr:
            _logger.debug('respawn compact descr is still valid, request reloading of tank resources')
            BigWorld.callback(0.0, lambda : Vehicle.respawnVehicle(self.id, self.respawnCompactDescr))
    def onLeaveWorld(self):
        """BigWorld callback: stop extras and notify the avatar of departure."""
        self.__stopExtras()
        BigWorld.player().vehicle_onLeaveWorld(self)
    def showShooting(self, burstCount, isPredictedShot=False):
        """Play the shot effect; suppressed mid siege-mode transition."""
        blockShooting = self.siegeState is not None and self.siegeState != VEHICLE_SIEGE_STATE.ENABLED and self.siegeState != VEHICLE_SIEGE_STATE.DISABLED
        if not self.isStarted or blockShooting:
            return
        else:
            if not isPredictedShot and self.isPlayerVehicle and not BigWorld.player().isWaitingForShot:
                # A server-confirmed shot we did not request locally is only
                # shown during replay playback.
                if not BattleReplay.g_replayCtrl.isPlaying:
                    return
            extra = self.typeDescriptor.extrasDict['shoot']
            extra.stopFor(self)
            extra.startFor(self, burstCount)
            if not isPredictedShot and self.isPlayerVehicle:
                ctrl = self.guiSessionProvider.shared.feedback
                if ctrl is not None:
                    ctrl.onShotDone()
                BigWorld.player().cancelWaitingForShot()
            return
    def showDamageFromShot(self, attackerID, points, effectsIndex, damageFactor):
        """Visualise an incoming shot: impulse, decals, camera shake, GUI feedback."""
        if not self.isStarted:
            return
        else:
            effectsDescr = vehicles.g_cache.shotEffects[effectsIndex]
            maxComponentIdx = TankPartIndexes.ALL[-1]
            wheelsConfig = self.appearance.typeDescriptor.chassis.generalWheelsAnimatorConfig
            if wheelsConfig:
                # Wheeled vehicles expose extra hit components beyond the hull set.
                maxComponentIdx = maxComponentIdx + wheelsConfig.getWheelsCount()
            maxHitEffectCode, decodedPoints, maxDamagedComponent = DamageFromShotDecoder.decodeHitPoints(points, self.appearance.collisions, maxComponentIdx)
            hasPiercedHit = DamageFromShotDecoder.hasDamaged(maxHitEffectCode)
            firstHitDir = Math.Vector3(0)
            if decodedPoints:
                firstHitPoint = decodedPoints[0]
                compoundModel = self.appearance.compoundModel
                compMatrix = Math.Matrix(compoundModel.node(firstHitPoint.componentName))
                firstHitDirLocal = firstHitPoint.matrix.applyToAxis(2)
                firstHitDir = compMatrix.applyVector(firstHitDirLocal)
                self.appearance.receiveShotImpulse(firstHitDir, effectsDescr['targetImpulse'])
            self.appearance.executeHitVibrations(maxHitEffectCode)
            player = BigWorld.player()
            # NOTE(review): compMatrix is only bound inside the `if decodedPoints:`
            # branch above; if decodedPoints can be empty this line would raise
            # NameError -- possibly a decompilation indentation artifact. Verify.
            player.inputHandler.onVehicleShaken(self, compMatrix.translation, firstHitDir, effectsDescr['caliber'], ShakeReason.HIT if hasPiercedHit else ShakeReason.HIT_NO_DAMAGE)
            sessionProvider = self.guiSessionProvider
            isAlly = sessionProvider.getArenaDP().isAlly(attackerID)
            showFriendlyFlashBang = sessionProvider.arenaVisitor.hasCustomAllyDamageEffect() and isAlly
            for shotPoint in decodedPoints:
                showFullscreenEffs = self.isPlayerVehicle and self.isAlive()
                keyPoints, effects, _ = effectsDescr[shotPoint.hitEffectGroup]
                self.appearance.boundEffects.addNewToNode(shotPoint.componentName, shotPoint.matrix, effects, keyPoints, isPlayerVehicle=self.isPlayerVehicle, showShockWave=showFullscreenEffs, showFlashBang=showFullscreenEffs and not showFriendlyFlashBang, showFriendlyFlashBang=showFullscreenEffs and showFriendlyFlashBang, entity_id=self.id, damageFactor=damageFactor, attackerID=attackerID, hitdir=firstHitDir)
            if not self.isAlive():
                return
            if attackerID == BigWorld.player().playerVehicleID:
                if maxHitEffectCode is not None and not self.isPlayerVehicle:
                    # Map the strongest hit code to a single GUI feedback event.
                    if maxHitEffectCode in VEHICLE_HIT_EFFECT.RICOCHETS:
                        eventID = _GUI_EVENT_ID.VEHICLE_RICOCHET
                    elif maxHitEffectCode == VEHICLE_HIT_EFFECT.CRITICAL_HIT:
                        if maxDamagedComponent == TankPartNames.CHASSIS:
                            if damageFactor:
                                eventID = _GUI_EVENT_ID.VEHICLE_CRITICAL_HIT_CHASSIS_PIERCED
                            else:
                                eventID = _GUI_EVENT_ID.VEHICLE_CRITICAL_HIT_CHASSIS
                        else:
                            eventID = _GUI_EVENT_ID.VEHICLE_CRITICAL_HIT
                    elif hasPiercedHit:
                        eventID = _GUI_EVENT_ID.VEHICLE_ARMOR_PIERCED
                    else:
                        eventID = _GUI_EVENT_ID.VEHICLE_HIT
                    ctrl = self.guiSessionProvider.shared.feedback
                    # Expression-statement form of: if ctrl is not None: ctrl.setVehicleState(...)
                    ctrl is not None and ctrl.setVehicleState(self.id, eventID)
            return
    def showDamageFromExplosion(self, attackerID, center, effectsIndex, damageFactor):
        """Visualise splash damage: impulse away from *center*, hit effect, shake."""
        if not self.isStarted:
            return
        else:
            impulse = vehicles.g_cache.shotEffects[effectsIndex]['targetImpulse']
            direction = self.position - center
            direction.normalise()
            # Splash impulse is a quarter of the direct-hit impulse.
            self.appearance.receiveShotImpulse(direction, impulse / 4.0)
            self.appearance.executeHitVibrations(VEHICLE_HIT_EFFECT.MAX_CODE + 1)
            if not self.isAlive():
                return
            self.showSplashHitEffect(effectsIndex, damageFactor)
            if self.id == attackerID:
                return
            player = BigWorld.player()
            player.inputHandler.onVehicleShaken(self, center, direction, vehicles.g_cache.shotEffects[effectsIndex]['caliber'], ShakeReason.SPLASH)
            if attackerID == BigWorld.player().playerVehicleID:
                ctrl = self.guiSessionProvider.shared.feedback
                if ctrl is not None:
                    ctrl.setVehicleState(self.id, _GUI_EVENT_ID.VEHICLE_ARMOR_PIERCED)
            return
def showVehicleCollisionEffect(self, pos, delta_spd, energy=0):
if not self.isStarted:
return
else:
if delta_spd >= 3:
effectName = 'collisionVehicleHeavy2'
mass = self.typeDescriptor.physics['weight']
if mass < 18000:
effectName = 'collisionVehicleHeavy1'
elif mass > 46000:
effectName = 'collisionVehicleHeavy3'
else:
effectName = 'collisionVehicleLight'
self.showCollisionEffect(pos, effectName, None, False, 0, None, energy)
self.appearance.executeRammingVibrations()
return
    def showCollisionEffect(self, hitPos, collisionEffectName='collisionVehicle', collisionNormal=None, isTracks=False, damageFactor=0, impulse=None, pcEnergy=None):
        """Spawn a collision effect bound to the hull at *hitPos* (world space)."""
        invWorldMatrix = Math.Matrix(self.matrix)
        invWorldMatrix.invert()
        rot = Math.Matrix()
        if collisionNormal is None:
            # No surface normal supplied: randomise the effect orientation.
            rot.setRotateYPR((random.uniform(-3.14, 3.14), random.uniform(-1.5, 1.5), 0.0))
        else:
            rot.setRotateYPR((0, 0, 0))
        # World-space hit position converted into hull-local space.
        mat = Math.Matrix()
        mat.setTranslate(hitPos)
        mat.preMultiply(rot)
        mat.postMultiply(invWorldMatrix)
        if pcEnergy is not None:
            collisionEnergy = [SoundStartParam('RTPC_ext_collision_impulse_tank', pcEnergy)]
        else:
            collisionEnergy = []
        effectsList = self.typeDescriptor.type.effects.get(collisionEffectName, [])
        if effectsList:
            keyPoints, effects, _ = random.choice(effectsList)
            self.appearance.boundEffects.addNewToNode(TankPartNames.HULL, mat, effects, keyPoints, entity=self, surfaceNormal=collisionNormal, isTracks=isTracks, impulse=impulse, damageFactor=damageFactor, hitPoint=hitPos, soundParams=collisionEnergy)
        return
def showSplashHitEffect(self, effectsIndex, damageFactor):
effectsList = vehicles.g_cache.shotEffects[effectsIndex].get('armorSplashHit', None)
if effectsList:
mat = Math.Matrix()
mat.setTranslate((0.0, 0.0, 0.0))
self.appearance.boundEffects.addNewToNode(TankPartNames.HULL, mat, effectsList[1], effectsList[0], entity=self, damageFactor=damageFactor)
return
    def set_burnoutLevel(self, prev):
        """Server property callback: burnout level changed; drive sound + GUI."""
        attachedVehicle = BigWorld.player().getVehicleAttached()
        if attachedVehicle is None:
            return
        else:
            isAttachedVehicle = self.id == attachedVehicle.id
            if self.appearance.detailedEngineState is not None:
                # Any level above the noise floor pins the throttle open.
                self.appearance.detailedEngineState.throttle = 1 if self.burnoutLevel > 0.01 else 0
            if self.burnoutLevel > 0 and not self.__handbrakeFired:
                if self.getSpeed() > 0.5:
                    if not self.__burnoutStarted:
                        soundObject = self.appearance.engineAudition.getSoundObject(TankSoundObjectsIndexes.CHASSIS)
                        soundObject.play('wheel_vehicle_burnout')
                        self.__burnoutStarted = True
            else:
                self.__burnoutStarted = False
            if isAttachedVehicle:
                self.guiSessionProvider.invalidateVehicleState(VEHICLE_VIEW_STATE.BURNOUT, self.burnoutLevel)
            return
    def set_wheelsState(self, prev):
        """Server property callback: wheelsState packs 2 bits of state per wheel."""
        if self.appearance is None:
            return
        else:
            __WHEEL_DESTROYED = 3
            # Up to 8 wheels; compare old vs new 2-bit state for each.
            for i in xrange(0, 8):
                prevState = prev >> i * 2 & 3
                newState = self.wheelsState >> i * 2 & 3
                if prevState != newState:
                    if newState == __WHEEL_DESTROYED:
                        self.appearance.onChassisDestroySound(False, True, i)
                    elif prevState == __WHEEL_DESTROYED:
                        self.appearance.onChassisDestroySound(False, False, i)
            return
    def set_damageStickers(self, prev=None):
        """Server property callback: diff sticker sets and add/remove decals.

        The *prev* argument is ignored; the previous set is tracked locally.
        """
        if self.isStarted:
            prev = self.__prevDamageStickers
            curr = frozenset(self.damageStickers)
            self.__prevDamageStickers = curr
            for sticker in prev.difference(curr):
                self.appearance.removeDamageSticker(sticker)
            maxComponentIdx = TankPartIndexes.ALL[-1]
            wheelsConfig = self.appearance.typeDescriptor.chassis.generalWheelsAnimatorConfig
            if wheelsConfig:
                # Wheeled vehicles have extra components past the hull set.
                maxComponentIdx = maxComponentIdx + wheelsConfig.getWheelsCount()
            for sticker in curr.difference(prev):
                self.appearance.addDamageSticker(sticker, *DamageFromShotDecoder.decodeSegment(sticker, self.appearance.collisions, maxComponentIdx))
    def set_publicStateModifiers(self, prev=None):
        """Server property callback: apply added/removed public state modifiers."""
        if self.isStarted:
            prev = self.__prevPublicStateModifiers
            curr = frozenset(self.publicStateModifiers)
            self.__prevPublicStateModifiers = curr
            self.__updateModifiers(curr.difference(prev), prev.difference(curr))
            if not self.isPlayerVehicle:
                self.updateStunInfo()
    def set_engineMode(self, prev):
        # Server property callback: forward the new engine mode to the visuals.
        if self.isStarted and self.isAlive():
            self.appearance.changeEngineMode(self.engineMode, True)
    def set_isStrafing(self, prev):
        # Forward the strafing flag to the movement filter when it supports it.
        if hasattr(self.filter, 'isStrafing'):
            self.filter.isStrafing = self.isStrafing
    def set_gunAnglesPacked(self, prev):
        # Unpack the server gun (yaw, pitch) and feed it to the filter, if supported.
        syncGunAngles = getattr(self.filter, 'syncGunAngles', None)
        if syncGunAngles:
            yaw, pitch = decodeGunAngles(self.gunAnglesPacked, self.typeDescriptor.gun.pitchLimits['absolute'])
            syncGunAngles(yaw, pitch)
        return
    def set_health(self, prev):
        # Intentional no-op: health reactions are driven by onHealthChanged (below).
        pass
    def set_isCrewActive(self, prev):
        """Server property callback: crew activity changed; refresh visuals and GUI."""
        if self.isStarted:
            self.appearance.onVehicleHealthChanged()
            if not self.isPlayerVehicle:
                ctrl = self.guiSessionProvider.shared.feedback
                if ctrl is not None:
                    ctrl.setVehicleNewHealth(self.id, self.health)
            if not self.isCrewActive and self.health > 0:
                # Crew fully incapacitated while the hull is intact: treat as death.
                self.__onVehicleDeath()
        return
    def set_siegeState(self, prev):
        # Remote vehicles apply siege transitions immediately (zero time to next mode).
        if not self.isPlayerVehicle:
            self.onSiegeStateUpdated(self.siegeState, 0.0)
    def set_isSpeedCapturing(self, prev=None):
        """Server property callback: forward the speed-capturing flag to the GUI."""
        _logger.debug('set_isSpeedCapturing %s', self.isSpeedCapturing)
        if not self.isPlayerVehicle:
            ctrl = self.guiSessionProvider.shared.feedback
            if ctrl is not None:
                # Tuple flag True selects the speed-capturing passive event.
                ctrl.invalidatePassiveEngineering(self.id, (True, self.isSpeedCapturing))
        return
    def set_isBlockingCapture(self, prev=None):
        """Server property callback: forward the capture-blocking flag to the GUI."""
        _logger.debug('set_isBlockingCapture %s', self.isBlockingCapture)
        if not self.isPlayerVehicle:
            ctrl = self.guiSessionProvider.shared.feedback
            if ctrl is not None:
                # Tuple flag False selects the capture-blocking passive event.
                ctrl.invalidatePassiveEngineering(self.id, (False, self.isBlockingCapture))
        return
    def set_steeringAngles(self, prev=None):
        # Feed packed per-wheel steering values through the latency filters.
        if self.__wheelsSteeringFilter is not None:
            for packedValue, steeringFilter in zip(self.steeringAngles, self.__wheelsSteeringFilter):
                unpackedValue = WoT.unpackWheelSteering(packedValue)
                steeringFilter.input(BigWorld.time(), unpackedValue)
        return
    def set_wheelsScroll(self, prev=None):
        # Feed packed per-wheel scroll values through the latency filters.
        if self.__wheelsScrollFilter is not None:
            for packedValue, scrollFilter in zip(self.wheelsScroll, self.__wheelsScrollFilter):
                unpackedValue = WoT.unpackWheelScroll(packedValue)
                scrollFilter.input(BigWorld.time(), unpackedValue)
        return
    def onHealthChanged(self, newHealth, attackerID, attackReasonID):
        """Server event: health changed; update GUI, visuals and death state."""
        if newHealth > 0 and self.health <= 0:
            # Transition back from a non-alive health value: just accept it.
            self.health = newHealth
            return
        if not self.isStarted:
            return
        self.guiSessionProvider.setVehicleHealth(self.isPlayerVehicle, self.id, newHealth, attackerID, attackReasonID)
        if not self.appearance.damageState.isCurrentModelDamaged:
            self.appearance.onVehicleHealthChanged()
        if self.health <= 0 and self.isCrewActive:
            self.__onVehicleDeath()
        if self.isPlayerVehicle:
            TriggersManager.g_manager.activateTrigger(TRIGGER_TYPE.PLAYER_RECEIVE_DAMAGE, attackerId=attackerID)
    def set_stunInfo(self, prev):
        # Server pushed a new stun end time; refresh the cached snapshot and GUI.
        _logger.debug('Set stun info(curr,~ prev): %s, %s', self.stunInfo, prev)
        self.updateStunInfo()
    def __updateCachedStunInfo(self, endTime):
        """Rebuild the cached StunInfo from a new server *endTime* (0 clears it)."""
        if endTime:
            cachedStartTime = self.__cachedStunInfo.startTime
            # Keep the original start when a stun is extended; otherwise start now.
            startTime = cachedStartTime if cachedStartTime > 0.0 else BigWorld.serverTime()
            totalTime = max(self.__cachedStunInfo.duration, endTime - startTime)
            duration = endTime - BigWorld.serverTime() if endTime > 0.0 else 0.0
            self.__cachedStunInfo = StunInfo(startTime, endTime, duration, totalTime)
        else:
            self.__cachedStunInfo = StunInfo(0.0, 0.0, 0.0, 0.0)
    def updateStunInfo(self):
        """Refresh cached stun data and push it to the GUI if stun is enabled."""
        attachedVehicle = BigWorld.player().getVehicleAttached()
        if attachedVehicle is None:
            return
        else:
            self.__updateCachedStunInfo(self.stunInfo)
            if self.lobbyContext.getServerSettings().spgRedesignFeatures.isStunEnabled():
                isAttachedVehicle = self.id == attachedVehicle.id
                if isAttachedVehicle:
                    self.guiSessionProvider.invalidateVehicleState(VEHICLE_VIEW_STATE.STUN, self.__cachedStunInfo)
                if not self.isPlayerVehicle:
                    ctrl = self.guiSessionProvider.shared.feedback
                    if ctrl is not None:
                        ctrl.invalidateStun(self.id, self.__cachedStunInfo)
            else:
                _logger.warning('Stun features is disabled!')
            return
    def showAmmoBayEffect(self, mode, fireballVolume, projectedTurretSpeed):
        # NOTE(review): projectedTurretSpeed is accepted but unused in this handler.
        if self.isStarted:
            self.appearance.showAmmoBayEffect(mode, fireballVolume)
    def onPushed(self, x, z):
        """Server nudge: snap position for far-away vehicles (>40m) only."""
        try:
            distSqr = BigWorld.player().position.distSqrTo(self.position)
            if distSqr > 1600.0:
                self.filter.setPosition(x, z)
        except Exception:
            # Best-effort correction; deliberately swallow any failure.
            pass
def showRammingEffect(self, energy, point):
if not self.isStarted:
return
if energy < 600:
self.showCollisionEffect(point, 'rammingCollisionLight')
else:
self.showCollisionEffect(point, 'rammingCollisionHeavy')
    def onStaticCollision(self, energy, point, normal, miscFlags, damage, destrEffectIdx, destrMaxHealth):
        """Server event: collision with static scenery/destructibles.

        miscFlags bit 0: track hit; bit 1: speedtree hit; bit 2: speedtree destroyed.
        """
        if not self.isStarted:
            return
        self.appearance.stopSwinging()
        BigWorld.player().inputHandler.onVehicleCollision(self, self.getSpeed())
        isTrackCollision = bool(miscFlags & 1)
        isSptCollision = bool(miscFlags >> 1 & 1)
        isSptDestroyed = bool(miscFlags >> 2 & 1)
        if isSptDestroyed:
            return
        hitPoint = point
        surfNormal = normal
        matKind = SPT_MATKIND.SOLID
        if destrEffectIdx < 0:
            # No destructible effect: resolve the surface material near the hit.
            if not isSptCollision:
                surfaceMaterial = calcSurfaceMaterialNearPoint(hitPoint, normal, self.spaceID)
                hitPoint, surfNormal, matKind, effectIdx = surfaceMaterial
            else:
                effectIdx = EFFECT_MATERIAL_INDEXES_BY_NAMES['wood']
            if matKind != 0:
                self.__showStaticCollisionEffect(energy, effectIdx, hitPoint, surfNormal, isTrackCollision, damage * 100.0)
        else:
            self.__showDynamicCollisionEffect(energy, destrMaxHealth, hitPoint, surfNormal)
        if self.isPlayerVehicle:
            self.appearance.executeRammingVibrations(matKind)
def getAimParams(self):
if self.appearance is not None:
turretYaw = Math.Matrix(self.appearance.turretMatrix).yaw
gunPitch = Math.Matrix(self.appearance.gunMatrix).pitch
return (turretYaw, gunPitch)
else:
return (0.0, 0.0)
    def onSiegeStateUpdated(self, newState, timeToNextMode):
        """Apply a siege state change to descriptor, visuals and input handling."""
        if self.typeDescriptor is not None and self.typeDescriptor.hasSiegeMode:
            self.typeDescriptor.onSiegeStateChanged(newState)
            if self.isStarted:
                self.appearance.onSiegeStateChanged(newState)
            if self.isPlayerVehicle:
                inputHandler = BigWorld.player().inputHandler
                inputHandler.siegeModeControl.notifySiegeModeChanged(self, newState, timeToNextMode)
        else:
            _logger.error('Wrong usage! Should be called only on vehicle with valid typeDescriptor and siege mode')
        return
    def collideSegmentExt(self, startPoint, endPoint):
        """Collide a world-space segment against this vehicle.

        Returns a list of SegmentCollisionResultExt ordered as reported by the
        collision engine, or None when there is no hit (or no collision model).
        """
        if self.appearance.collisions is not None:
            collisions = self.appearance.collisions.collideAllWorld(startPoint, endPoint)
            if collisions:
                res = []
                for collision in collisions:
                    # collision tuple: (dist, hitAngleCos, matKind, partIndex)
                    matInfo = self.getMatinfo(collision[3], collision[2])
                    res.append(SegmentCollisionResultExt(collision[0], collision[1], matInfo, collision[3]))
                return res
        return
def getMatinfo(self, parIndex, matKind):
matInfo = None
if parIndex == TankPartIndexes.CHASSIS:
matInfo = self.typeDescriptor.chassis.materials.get(matKind)
elif parIndex == TankPartIndexes.HULL:
matInfo = self.typeDescriptor.hull.materials.get(matKind)
elif parIndex == TankPartIndexes.TURRET:
matInfo = self.typeDescriptor.turret.materials.get(matKind)
elif parIndex == TankPartIndexes.GUN:
matInfo = self.typeDescriptor.gun.materials.get(matKind)
return matInfo
    def isAlive(self):
        # Alive = crew functional and hull hit points remaining.
        return self.isCrewActive and self.health > 0
    def isPitchHullAimingAvailable(self):
        # Hull pitch aiming is a per-vehicle-type capability flag.
        return self.typeDescriptor is not None and self.typeDescriptor.isPitchHullAimingAvailable
    def getServerGunAngles(self):
        # Decode the packed server-side (yaw, pitch) of the gun.
        return decodeGunAngles(self.gunAnglesPacked, self.typeDescriptor.gun.pitchLimits['absolute'])
    def startVisual(self):
        """Activate the cached appearance and bring all visual state in sync.

        Raises SoftException when the vehicle is already started.
        """
        if self.isStarted:
            raise SoftException('Vehicle is already started')
        avatar = BigWorld.player()
        self.appearance = appearance_cache.getAppearance(self.id, self.__prereqs)
        self.appearance.setVehicle(self)
        self.appearance.activate()
        self.appearance.changeEngineMode(self.engineMode)
        self.appearance.onVehicleHealthChanged(self.isPlayerVehicle)
        if self.isPlayerVehicle:
            if self.isAlive():
                self.appearance.setupGunMatrixTargets(avatar.gunRotator)
        if hasattr(self.filter, 'allowStrafeCompensation'):
            self.filter.allowStrafeCompensation = not self.isPlayerVehicle
        self.isStarted = True
        if not self.appearance.isObserver:
            self.show(True)
        # Re-run property callbacks so cached server state reaches the new visuals.
        self.set_publicStateModifiers()
        self.set_damageStickers()
        if TriggersManager.g_manager:
            TriggersManager.g_manager.activateTrigger(TriggersManager.TRIGGER_TYPE.VEHICLE_VISUAL_VISIBILITY_CHANGED, vehicleId=self.id, isVisible=True)
        self.guiSessionProvider.startVehicleVisual(self.proxy, True)
        if self.stunInfo > 0.0:
            self.updateStunInfo()
        self.set_inspiringEffect()
        self.set_inspired()
        if self.isSpeedCapturing:
            self.set_isSpeedCapturing()
        if self.isBlockingCapture:
            self.set_isBlockingCapture()
        if not self.isAlive():
            self.__onVehicleDeath(True)
        if self.isTurretMarkedForDetachment:
            self.confirmTurretDetachment()
        self.__startWGPhysics()
        self.refreshNationalVoice()
        self.__prereqs = None
        self.appearance.highlighter.setVehicleOwnership()
        if self.respawnCompactDescr:
            _logger.debug('respawn compact descr is still valid, request reloading of tank resources %s', self.id)
            BigWorld.callback(0.0, lambda : Vehicle.respawnVehicle(self.id, self.respawnCompactDescr))
        return
    def refreshNationalVoice(self):
        """Apply the crew's special voice only for the currently attached vehicle."""
        player = BigWorld.player()
        if self is not player.getVehicleAttached():
            return
        commanderSkinID = self.publicInfo.commanderSkinID
        vehicleType = self.typeDescriptor.type
        setSpecialVoice(self.publicInfo.crewGroup, commanderSkinID, vehicleType, self.id == player.playerVehicleID)
    def stopVisual(self, showStipple=False):
        """Deactivate and drop the visual appearance; returns the (always None) stipple model.

        Raises SoftException when the vehicle is already stopped.
        """
        if not self.isStarted:
            raise SoftException('Vehicle is already stopped')
        stippleModel = None
        # NOTE(review): the parameter is force-overridden here, so the stipple
        # branch below is dead code (stipple appears deliberately disabled).
        showStipple = False
        if showStipple:
            self.appearance.assembleStipple()
        self.__stopExtras()
        if TriggersManager.g_manager:
            TriggersManager.g_manager.activateTrigger(TriggersManager.TRIGGER_TYPE.VEHICLE_VISUAL_VISIBILITY_CHANGED, vehicleId=self.id, isVisible=False)
        self.guiSessionProvider.stopVehicleVisual(self.id, self.isPlayerVehicle)
        self.appearance.deactivate()
        self.appearance = None
        self.isStarted = False
        self.__speedInfo.reset()
        return stippleModel
def show(self, show):
if show:
drawFlags = BigWorld.DrawAll
else:
drawFlags = BigWorld.ShadowPassBit
if self.isStarted:
va = self.appearance
va.changeDrawPassVisibility(drawFlags)
va.showStickers(show)
    def addCameraCollider(self):
        # Register this vehicle's collider with the camera, if a visual exists.
        if self.appearance is not None:
            self.appearance.addCameraCollider()
        return
def removeCameraCollider(self):
    """Unregister this vehicle's collider from the camera, when a visual exists."""
    appearance = self.appearance
    if appearance is None:
        return
    appearance.removeCameraCollider()
def _isDestructibleMayBeBroken(self, chunkID, itemIndex, matKind, itemFilename, itemScale, vehSpeed):
    """Predict whether ramming a destructible object at vehSpeed would break it.

    Returns True when the item is already broken, or when the vehicle's
    instant kinetic damage exceeds the (scaled) destructible health.
    """
    desc = AreaDestructibles.g_cache.getDescByFilename(itemFilename)
    if desc is None:
        return False
    else:
        ctrl = AreaDestructibles.g_destructiblesManager.getController(chunkID)
        if ctrl is None:
            return False
        if ctrl.isDestructibleBroken(itemIndex, matKind, desc['type']):
            return True
        mass = self.typeDescriptor.physics['weight']
        # 0.5*m*v^2 kinetic energy, scaled down by a fixed damage coefficient.
        instantDamage = 0.5 * mass * vehSpeed * vehSpeed * 0.00015
        if desc['type'] == DestructiblesCache.DESTR_TYPE_STRUCTURE:
            # Structures are damaged per-module, keyed by material kind.
            moduleDesc = desc['modules'].get(matKind)
            if moduleDesc is None:
                return False
            refHealth = moduleDesc['health']
        else:
            # Non-structures scale the damage by mass relative to a reference vehicle mass.
            unitMass = AreaDestructibles.g_cache.unitVehicleMass
            instantDamage *= math.pow(mass / unitMass, desc['kineticDamageCorrection'])
            refHealth = desc['health']
        return DestructiblesCache.scaledDestructibleHealth(itemScale, refHealth) < instantDamage
def __showStaticCollisionEffect(self, energy, effectIdx, hitPoint, normal, isTrackCollision, damageFactor):
    """Play a static-world collision effect, choosing light/heavy by collision energy."""
    heavyVelocities = self.typeDescriptor.type.heavyCollisionEffectVelocities
    heavyEnergy = heavyVelocities['track'] if isTrackCollision else heavyVelocities['hull']
    # Convert the configured threshold velocity to energy-per-unit-mass (0.5*v^2).
    heavyEnergy = 0.5 * heavyEnergy * heavyEnergy
    postfix = '%sCollisionLight' if energy < heavyEnergy else '%sCollisionHeavy'
    effectName = ''
    if effectIdx < len(EFFECT_MATERIALS):
        effectName = EFFECT_MATERIALS[effectIdx]
    effectName = postfix % effectName
    if effectName in self.typeDescriptor.type.effects:
        self.showCollisionEffect(hitPoint, effectName, normal, isTrackCollision, damageFactor, self.__getImpulse(self.getSpeed()))
def __showDynamicCollisionEffect(self, energy, destrMaxHealth, hitPoint, surfNormal):
    """Play the collision effect for hitting a dynamic (destructible) object."""
    effectName = 'dynamicCollision'
    if effectName in self.typeDescriptor.type.effects:
        self.showCollisionEffect(hitPoint, effectName, surfNormal, False, 0, self.__getDynamicImpulse(self.getSpeed(), destrMaxHealth))
def __startWGPhysics(self):
    """Attach client-side WG physics (tank or wheeled) to the network filter."""
    # Replay/other filters may not support vehicle physics at all.
    if not hasattr(self.filter, 'setVehiclePhysics'):
        return
    typeDescr = self.typeDescriptor
    isWheeled = 'wheeledVehicle' in self.typeDescriptor.type.tags
    physics = BigWorld.WGWheeledPhysics() if isWheeled else BigWorld.WGTankPhysics()
    physics_shared.initVehiclePhysicsClient(physics, typeDescr)
    # Effectively unbounded arena on the client side.
    arenaMinBound, arenaMaxBound = (-10000, -10000), (10000, 10000)
    physics.setArenaBounds(arenaMinBound, arenaMaxBound)
    physics.owner = weakref.ref(self)
    physics.staticMode = False
    physics.movementSignals = 0
    self.filter.setVehiclePhysics(physics)
    # High 16 bits of arenaTypeID select the arena geometry/visibility variant.
    physics.visibilityMask = ArenaType.getVisibilityMask(BigWorld.player().arenaTypeID >> 16)
    yaw, pitch = decodeGunAngles(self.gunAnglesPacked, typeDescr.gun.pitchLimits['absolute'])
    self.filter.syncGunAngles(yaw, pitch)
    self.__speedInfo.set(self.filter.speedInfo)
def __stopWGPhysics(self):
    """Counterpart of __startWGPhysics: drop the cached speed info."""
    self.__speedInfo.reset()
def __getImpulse(self, speed):
    """Relative collision impulse: |speed| over the vehicle's top forward speed.

    Mass appears in both numerator and denominator and cancels out
    mathematically; the expression is kept as-is to preserve the original
    floating-point arithmetic exactly.
    """
    mass = self.typeDescriptor.physics['weight']
    maxSpeed = self.typeDescriptor.physics['speedLimits'][0]
    return math.fabs(speed * mass / (maxSpeed * mass))
def __getDynamicImpulse(self, speed, maxHealth):
    """Impulse in [0, 1]: average of squared relative speed and relative destructible health."""
    speedRatio = min(math.fabs(speed / 20.0), 1.0)
    healthRatio = min(min(maxHealth, 90.0) / 90.0, 1.0)
    return 0.5 * (speedRatio * speedRatio + healthRatio)
def __stopExtras(self):
    """Stop every active extra; each stop() is expected to deregister its entry."""
    extraTypes = self.typeDescriptor.extras
    for index, data in self.extras.items():
        extraTypes[index].stop(data)
    if self.extras:
        # stop() should have emptied self.extras; anything left is a bug.
        _logger.warning('this code point should have never been reached')
def __updateModifiers(self, addedExtras, removedExtras):
    """Start/stop public-state modifier extras to match the server-sent set."""
    extraTypes = self.typeDescriptor.extras
    for idx in removedExtras:
        extraTypes[idx].stopFor(self)
    for idx in addedExtras:
        try:
            extraTypes[idx].startFor(self)
        except Exception:
            # One failing extra must not prevent the rest from starting.
            _logger.exception('Update modifiers')
def __onVehicleDeath(self, isDeadStarted=False):
    """React to this vehicle dying: GUI state, destroy trigger, filter tuning."""
    if not self.isPlayerVehicle:
        ctrl = self.guiSessionProvider.shared.feedback
        if ctrl is not None:
            ctrl.setVehicleState(self.id, _GUI_EVENT_ID.VEHICLE_DEAD, isDeadStarted)
    # NOTE(review): g_manager is used here without the None-guard applied
    # elsewhere in this file — confirm it is always set once a battle runs.
    TriggersManager.g_manager.fireTrigger(TRIGGER_TYPE.VEHICLE_DESTROYED, vehicleId=self.id)
    self._removeInspire()
    bwfilter = self.filter
    if hasattr(bwfilter, 'velocityErrorCompensation'):
        # Loosen position error correction for a dead (uncontrolled) vehicle.
        bwfilter.velocityErrorCompensation = 100.0
    return
def confirmTurretDetachment(self):
    """Acknowledge a server-marked turret detachment and refresh turret visibility."""
    self.__turretDetachmentConfirmed = True
    if not self.isTurretDetached:
        _logger.error('Vehicle::confirmTurretDetachment: Confirming turret detachment, though the turret is not detached')
    self.appearance.updateTurretVisibility()
def drawEdge(self, forceSimpleEdge=False):
    """Enable the highlight edge around this vehicle's silhouette."""
    self.appearance.highlighter.highlight(True, forceSimpleEdge)

def removeEdge(self, forceSimpleEdge=False):
    """Disable the highlight edge around this vehicle's silhouette."""
    self.appearance.highlighter.highlight(False, forceSimpleEdge)
def addModel(self, model):
    """Attach a model to the entity, re-applying the highlight if it is active."""
    super(Vehicle, self).addModel(model)
    highlighter = self.appearance.highlighter
    if highlighter.enabled:
        highlighter.highlight(True)
def delModel(self, model):
    """Detach a model, temporarily dropping the highlight so it detaches cleanly."""
    highlighter = self.appearance.highlighter
    hlEnabled = highlighter.enabled
    hlSimpleEdge = highlighter.isSimpleEdge
    if hlEnabled:
        highlighter.removeHighlight()
    super(Vehicle, self).delModel(model)
    if hlEnabled:
        # Restore the highlight with the same edge style as before.
        highlighter.highlight(True, hlSimpleEdge)
def notifyInputKeysDown(self, movementDir, rotationDir, handbrakeFired):
    """Forward driver input to the movement filter and the engine sound state."""
    self.filter.notifyInputKeysDown(movementDir, rotationDir)
    self.__handbrakeFired = handbrakeFired
    if self.appearance.detailedEngineState is not None:
        # Any non-zero movement or rotation input counts as throttle applied.
        self.appearance.detailedEngineState.throttle = movementDir or rotationDir
    return
def turnoffThrottle(self):
    """Zero the audible engine throttle, if a detailed engine state exists."""
    engineState = self.appearance.detailedEngineState
    if engineState is not None:
        engineState.throttle = 0
@dependency.replace_none_kwargs(lobbyContext=ILobbyContext)
def _stripVehCompDescrIfRoaming(vehCompDescr, lobbyContext=None):
    """Strip customization from a vehicle compact descriptor while roaming.

    lobbyContext is injected by the dependency decorator when passed as None.
    Returns the (possibly stripped) compact descriptor.
    """
    serverSettings = lobbyContext.getServerSettings() if lobbyContext is not None else None
    if serverSettings is not None:
        if serverSettings.roaming.isInRoaming():
            vehCompDescr = vehicles.stripCustomizationFromVehicleCompactDescr(vehCompDescr, True, True, False)[0]
    return vehCompDescr
# Source/Vehicle.py
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/Vehicle.py
import logging
import math
import random
import weakref
from collections import namedtuple
import BigWorld
import Math
import NetworkFilters
import WoT
import AreaDestructibles
import ArenaType
import BattleReplay
import DestructiblesCache
import TriggersManager
import constants
import physics_shared
from AvatarInputHandler.aih_constants import ShakeReason
from TriggersManager import TRIGGER_TYPE
from VehicleEffects import DamageFromShotDecoder
from constants import SPT_MATKIND
from constants import VEHICLE_HIT_EFFECT, VEHICLE_SIEGE_STATE
from gui.battle_control.battle_constants import FEEDBACK_EVENT_ID as _GUI_EVENT_ID, VEHICLE_VIEW_STATE
from gun_rotation_shared import decodeGunAngles
from helpers import dependency
from helpers.EffectMaterialCalculation import calcSurfaceMaterialNearPoint
from helpers.EffectsList import SoundStartParam
from items import vehicles
from material_kinds import EFFECT_MATERIAL_INDEXES_BY_NAMES, EFFECT_MATERIALS
from skeletons.gui.battle_session import IBattleSessionProvider
from skeletons.gui.lobby_context import ILobbyContext
from soft_exception import SoftException
from special_sound import setSpecialVoice
from vehicle_systems import appearance_cache
from vehicle_systems.entity_components.battle_abilities_component import BattleAbilitiesComponent
from vehicle_systems.stricted_loading import loadingPriority
from vehicle_systems.tankStructure import TankPartNames, TankPartIndexes, TankSoundObjectsIndexes
_logger = logging.getLogger(__name__)
# Relative collision-damage thresholds used to classify impact severity.
LOW_ENERGY_COLLISION_D = 0.3
HIGH_ENERGY_COLLISION_D = 0.6
# Weakrefs to vehicles that may still receive respawn descriptors, keyed by vehicle id.
_g_waitingVehicle = dict()
class _Vector4Provider(object):
    """Provider of a constant zero Vector4 (used as a default vector source)."""
    __slots__ = ('_v',)

    def __init__(self):
        # BUGFIX: the decompiled source read "__int__", which would leave _v
        # unset and make the `value` property raise AttributeError; this is
        # clearly the constructor (compare _VehicleSpeedProvider below).
        self._v = Math.Vector4(0.0, 0.0, 0.0, 0.0)

    @property
    def value(self):
        return self._v
class _VehicleSpeedProvider(object):
    """Holds the latest vehicle speed vector received from the physics filter."""
    __slots__ = ('__value',)

    def __init__(self):
        self.__value = Math.Vector4Basic()

    @property
    def value(self):
        """Current speed vector value."""
        return self.__value.value

    def set(self, val):
        """Replace the underlying vector source (e.g. with filter.speedInfo)."""
        self.__value = val

    def reset(self):
        """Drop the source and fall back to a zero vector."""
        self.__value = Math.Vector4Basic()
# Extended result of a segment-vs-vehicle collision query.
SegmentCollisionResultExt = namedtuple('SegmentCollisionResultExt', ('dist',
 'hitAngleCos',
 'matInfo',
 'compName'))
# Stun timing snapshot (server-time based, seconds).
StunInfo = namedtuple('StunInfo', ('startTime',
 'endTime',
 'duration',
 'totalTime'))
# Mixin components initialized for every Vehicle entity (see Vehicle.__init__).
VEHICLE_COMPONENTS = {BattleAbilitiesComponent}
class Vehicle(BigWorld.Entity, BattleAbilitiesComponent):
    """Client-side vehicle entity: visuals, physics, effects and GUI feedback."""
    isEnteringWorld = property(lambda self: self.__isEnteringWorld)
    # Turret detachment is only "real" once confirmed by the client visual code.
    isTurretDetached = property(lambda self: constants.SPECIAL_VEHICLE_HEALTH.IS_TURRET_DETACHED(self.health) and self.__turretDetachmentConfirmed)
    isTurretMarkedForDetachment = property(lambda self: constants.SPECIAL_VEHICLE_HEALTH.IS_TURRET_DETACHED(self.health))
    isTurretDetachmentConfirmationNeeded = property(lambda self: not self.__turretDetachmentConfirmed)
    # Low two bits of engineMode[1] are the movement flags.
    hasMovingFlags = property(lambda self: self.engineMode is not None and self.engineMode[1] & 3)
    guiSessionProvider = dependency.descriptor(IBattleSessionProvider)
    lobbyContext = dependency.descriptor(ILobbyContext)
@property
def speedInfo(self):
    """Speed info provider (see _VehicleSpeedProvider)."""
    return self.__speedInfo

@property
def isWheeledTech(self):
    """True when the vehicle type is tagged as a wheeled vehicle."""
    return 'wheeledVehicle' in self.typeDescriptor.type.tags

@property
def wheelsScrollSmoothed(self):
    """Latency-smoothed wheel scroll values, or None before filters are created."""
    if self.__wheelsScrollFilter is not None:
        return [ scrollFilter.output(BigWorld.time()) for scrollFilter in self.__wheelsScrollFilter ]
    else:
        return

@property
def wheelsScrollFilters(self):
    """Raw per-wheel scroll filters (may be None)."""
    return self.__wheelsScrollFilter

@property
def wheelsSteeringSmoothed(self):
    """Latency-smoothed wheel steering angles, or None before filters are created."""
    if self.__wheelsSteeringFilter is not None:
        return [ steeringFilter.output(BigWorld.time()) for steeringFilter in self.__wheelsSteeringFilter ]
    else:
        return

@property
def wheelsSteeringFilters(self):
    """Raw per-wheel steering filters (may be None)."""
    return self.__wheelsSteeringFilter
def getBounds(self, partIdx):
    """Bounds of a tank part; zero vectors when no appearance is attached."""
    return self.appearance.getBounds(partIdx) if self.appearance is not None else (Math.Vector3(0.0, 0.0, 0.0), Math.Vector3(0.0, 0.0, 0.0), 0)

def getSpeed(self):
    """Current forward speed (first component of the speed info vector)."""
    return self.__speedInfo.value[0]
def __init__(self):
    """Initialize client-side state; visuals are started later in startVisual()."""
    global _g_waitingVehicle
    for comp in VEHICLE_COMPONENTS:
        comp.__init__(self)
    self.proxy = weakref.proxy(self)
    self.extras = {}
    self.typeDescriptor = None
    self.appearance = None
    self.isPlayerVehicle = False
    self.isStarted = False
    self.__isEnteringWorld = False
    self.__turretDetachmentConfirmed = False
    self.__speedInfo = _VehicleSpeedProvider()
    # Register so respawn descriptors arriving early can find this entity.
    _g_waitingVehicle[self.id] = weakref.ref(self)
    self.respawnCompactDescr = None
    self.respawnOutfitCompactDescr = None
    self.__cachedStunInfo = StunInfo(0.0, 0.0, 0.0, 0.0)
    self.__burnoutStarted = False
    self.__handbrakeFired = False
    self.__wheelsScrollFilter = None
    self.__wheelsSteeringFilter = None
    return
def __del__(self):
    """Remove this vehicle's weakref from the module-level respawn registry.

    pop() with a default replaces the Python-2-only, racy has_key()/del pair
    and is a no-op when the id was already removed.
    """
    _g_waitingVehicle.pop(self.id, None)
def reload(self):
    """Dev helper: reload vehicle item definitions and respawn this vehicle's visuals."""
    if self.isStarted:
        self.stopVisual()
    vehicles.reload()
    self.respawn(self.publicInfo.compDescr)
def prerequisites(self, respawnCompactDescr=None):
    """Build the appearance and return (loading priority, prerequisite resources).

    Returns None when the type descriptor is already set and no respawn
    descriptor forces a rebuild.
    """
    if self.respawnCompactDescr is not None:
        # A pending respawn descriptor overrides the argument and resets crew.
        respawnCompactDescr = self.respawnCompactDescr
        self.isCrewActive = True
        self.respawnCompactDescr = None
    if self.respawnOutfitCompactDescr is not None:
        outfitDescr = self.respawnOutfitCompactDescr
        self.respawnOutfitCompactDescr = None
    else:
        outfitDescr = self.publicInfo.outfit
    if respawnCompactDescr is None and self.typeDescriptor is not None:
        return
    else:
        self.typeDescriptor = self.getDescr(respawnCompactDescr)
        forceReloading = respawnCompactDescr is not None
        self.appearance, _, prereqs = appearance_cache.createAppearance(self.id, self.typeDescriptor, self.health, self.isCrewActive, self.isTurretDetached, outfitDescr, forceReloading)
        return (loadingPriority(self.id), prereqs)
def getDescr(self, respawnCompactDescr):
    """Return a VehicleDescr: from a respawn descriptor (health reset to max)
    or from public info, with customization stripped when roaming."""
    if respawnCompactDescr is not None:
        descr = vehicles.VehicleDescr(respawnCompactDescr)
        self.health = descr.maxHealth
        return descr
    else:
        return vehicles.VehicleDescr(compactDescr=_stripVehCompDescrIfRoaming(self.publicInfo.compDescr))
@staticmethod
def respawnVehicle(vID, compactDescr=None, outfitCompactDescr=None):
    """Ask a registered (still-alive) vehicle entity to respawn with the given descriptors."""
    vehicleRef = _g_waitingVehicle.get(vID, None)
    if vehicleRef is not None:
        vehicle = vehicleRef()
        if vehicle is not None:
            vehicle.respawnCompactDescr = compactDescr
            vehicle.respawnOutfitCompactDescr = outfitCompactDescr
            if not BigWorld.entities.get(vID):
                # The weakref outlived the BigWorld entity; nothing to respawn.
                _logger.error('respawn vehicle: Vehicle ref is not None but entity does not exist anymore. Skip wg_respawn')
            else:
                try:
                    vehicle.wg_respawn()
                except Exception:
                    _logger.error('respawn vehicle: Vehicle ref is not None but failed to call respawn: %s', vID)
    return
def __initAdditionalFilters(self):
    """Create latency-delaying filters for wheel scroll/steering channels.

    Both filter lists stay None for non-wheeled vehicles (no wheels config).
    The two identical construction loops of the original are factored into
    one local helper.
    """
    self.__wheelsScrollFilter = None
    self.__wheelsSteeringFilter = None
    wheelsConfig = self.typeDescriptor.chassis.generalWheelsAnimatorConfig
    if wheelsConfig is None:
        return

    def makeFilters(count):
        # One filter per wheel channel, primed with a zero sample "now".
        filters = []
        for _ in range(count):
            wheelFilter = NetworkFilters.FloatLatencyDelayingFilter()
            wheelFilter.input(BigWorld.time(), 0.0)
            filters.append(wheelFilter)
        return filters

    self.__wheelsScrollFilter = makeFilters(wheelsConfig.getWheelsCount())
    self.__wheelsSteeringFilter = makeFilters(wheelsConfig.getSteerableWheelsCount())
    return
def onEnterWorld(self, prereqs):
    """BigWorld callback: entity entered the world; wire it into the avatar."""
    self.__prereqs = prereqs
    self.__isEnteringWorld = True
    self.__prevDamageStickers = frozenset()
    self.__prevPublicStateModifiers = frozenset()
    self.targetFullBounds = True
    self.__initAdditionalFilters()
    player = BigWorld.player()
    player.vehicle_onEnterWorld(self)
    if self.isPlayerVehicle:
        self.cell.sendStateToOwnClient()
        player.initSpace()
    self.__isEnteringWorld = False
    if self.respawnCompactDescr:
        _logger.debug('respawn compact descr is still valid, request reloading of tank resources')
        # Defer the respawn; it must not run inside the enter-world callback.
        BigWorld.callback(0.0, lambda : Vehicle.respawnVehicle(self.id, self.respawnCompactDescr))
def onLeaveWorld(self):
    """BigWorld callback: stop extras and detach from the avatar."""
    self.__stopExtras()
    BigWorld.player().vehicle_onLeaveWorld(self)
def showShooting(self, burstCount, isPredictedShot=False):
    """Play the shooting effect for a burst; gated during siege transitions."""
    # Suppress visuals while the vehicle is switching siege modes.
    blockShooting = self.siegeState is not None and self.siegeState != VEHICLE_SIEGE_STATE.ENABLED and self.siegeState != VEHICLE_SIEGE_STATE.DISABLED
    if not self.isStarted or blockShooting:
        return
    else:
        if not isPredictedShot and self.isPlayerVehicle and not BigWorld.player().isWaitingForShot:
            # Outside replays, an unexpected server shot for the own vehicle is ignored.
            if not BattleReplay.g_replayCtrl.isPlaying:
                return
        extra = self.typeDescriptor.extrasDict['shoot']
        extra.stopFor(self)
        extra.startFor(self, burstCount)
        if not isPredictedShot and self.isPlayerVehicle:
            ctrl = self.guiSessionProvider.shared.feedback
            if ctrl is not None:
                ctrl.onShotDone()
            BigWorld.player().cancelWaitingForShot()
        return
def showDamageFromShot(self, attackerID, points, effectsIndex, damageFactor):
    """Play hit effects for a shot and route the hit result to the GUI.

    points is the packed list of hit points; effectsIndex selects the shot
    effect set from the item cache; damageFactor > 0 means damage was dealt.
    """
    if not self.isStarted:
        return
    else:
        effectsDescr = vehicles.g_cache.shotEffects[effectsIndex]
        maxComponentIdx = TankPartIndexes.ALL[-1]
        # Wheels extend the addressable component index range.
        wheelsConfig = self.appearance.typeDescriptor.chassis.generalWheelsAnimatorConfig
        if wheelsConfig:
            maxComponentIdx = maxComponentIdx + wheelsConfig.getWheelsCount()
        maxHitEffectCode, decodedPoints, maxDamagedComponent = DamageFromShotDecoder.decodeHitPoints(points, self.appearance.collisions, maxComponentIdx)
        hasPiercedHit = DamageFromShotDecoder.hasDamaged(maxHitEffectCode)
        firstHitDir = Math.Vector3(0)
        if decodedPoints:
            # Derive the world-space direction of the first hit for impulse/camera shake.
            firstHitPoint = decodedPoints[0]
            compoundModel = self.appearance.compoundModel
            compMatrix = Math.Matrix(compoundModel.node(firstHitPoint.componentName))
            firstHitDirLocal = firstHitPoint.matrix.applyToAxis(2)
            firstHitDir = compMatrix.applyVector(firstHitDirLocal)
            self.appearance.receiveShotImpulse(firstHitDir, effectsDescr['targetImpulse'])
            self.appearance.executeHitVibrations(maxHitEffectCode)
            player = BigWorld.player()
            player.inputHandler.onVehicleShaken(self, compMatrix.translation, firstHitDir, effectsDescr['caliber'], ShakeReason.HIT if hasPiercedHit else ShakeReason.HIT_NO_DAMAGE)
        sessionProvider = self.guiSessionProvider
        isAlly = sessionProvider.getArenaDP().isAlly(attackerID)
        showFriendlyFlashBang = sessionProvider.arenaVisitor.hasCustomAllyDamageEffect() and isAlly
        for shotPoint in decodedPoints:
            # Fullscreen effects (shockwave/flashbang) only for the own, living vehicle.
            showFullscreenEffs = self.isPlayerVehicle and self.isAlive()
            keyPoints, effects, _ = effectsDescr[shotPoint.hitEffectGroup]
            self.appearance.boundEffects.addNewToNode(shotPoint.componentName, shotPoint.matrix, effects, keyPoints, isPlayerVehicle=self.isPlayerVehicle, showShockWave=showFullscreenEffs, showFlashBang=showFullscreenEffs and not showFriendlyFlashBang, showFriendlyFlashBang=showFullscreenEffs and showFriendlyFlashBang, entity_id=self.id, damageFactor=damageFactor, attackerID=attackerID, hitdir=firstHitDir)
        if not self.isAlive():
            return
        if attackerID == BigWorld.player().playerVehicleID:
            # Translate the hit code into a GUI feedback event for the shooter.
            if maxHitEffectCode is not None and not self.isPlayerVehicle:
                if maxHitEffectCode in VEHICLE_HIT_EFFECT.RICOCHETS:
                    eventID = _GUI_EVENT_ID.VEHICLE_RICOCHET
                elif maxHitEffectCode == VEHICLE_HIT_EFFECT.CRITICAL_HIT:
                    if maxDamagedComponent == TankPartNames.CHASSIS:
                        if damageFactor:
                            eventID = _GUI_EVENT_ID.VEHICLE_CRITICAL_HIT_CHASSIS_PIERCED
                        else:
                            eventID = _GUI_EVENT_ID.VEHICLE_CRITICAL_HIT_CHASSIS
                    else:
                        eventID = _GUI_EVENT_ID.VEHICLE_CRITICAL_HIT
                elif hasPiercedHit:
                    eventID = _GUI_EVENT_ID.VEHICLE_ARMOR_PIERCED
                else:
                    eventID = _GUI_EVENT_ID.VEHICLE_HIT
                ctrl = self.guiSessionProvider.shared.feedback
                # Decompiled shorthand for: if ctrl is not None: ctrl.setVehicleState(...)
                ctrl is not None and ctrl.setVehicleState(self.id, eventID)
        return
def showDamageFromExplosion(self, attackerID, center, effectsIndex, damageFactor):
    """Play splash-damage effects for an explosion centered at `center`."""
    if not self.isStarted:
        return
    else:
        impulse = vehicles.g_cache.shotEffects[effectsIndex]['targetImpulse']
        direction = self.position - center
        direction.normalise()
        # Splash impulse is a quarter of the direct-hit target impulse.
        self.appearance.receiveShotImpulse(direction, impulse / 4.0)
        self.appearance.executeHitVibrations(VEHICLE_HIT_EFFECT.MAX_CODE + 1)
        if not self.isAlive():
            return
        self.showSplashHitEffect(effectsIndex, damageFactor)
        if self.id == attackerID:
            # No camera shake / feedback for self-inflicted splash.
            return
        player = BigWorld.player()
        player.inputHandler.onVehicleShaken(self, center, direction, vehicles.g_cache.shotEffects[effectsIndex]['caliber'], ShakeReason.SPLASH)
        if attackerID == BigWorld.player().playerVehicleID:
            ctrl = self.guiSessionProvider.shared.feedback
            if ctrl is not None:
                ctrl.setVehicleState(self.id, _GUI_EVENT_ID.VEHICLE_ARMOR_PIERCED)
        return
def showVehicleCollisionEffect(self, pos, delta_spd, energy=0):
    """Play a vehicle-vs-vehicle collision effect scaled by speed delta and mass."""
    if not self.isStarted:
        return
    else:
        if delta_spd >= 3:
            # Heavy impact: pick variant 1/2/3 by vehicle mass bracket.
            effectName = 'collisionVehicleHeavy2'
            mass = self.typeDescriptor.physics['weight']
            if mass < 18000:
                effectName = 'collisionVehicleHeavy1'
            elif mass > 46000:
                effectName = 'collisionVehicleHeavy3'
        else:
            effectName = 'collisionVehicleLight'
        self.showCollisionEffect(pos, effectName, None, False, 0, None, energy)
        self.appearance.executeRammingVibrations()
        return
def showCollisionEffect(self, hitPos, collisionEffectName='collisionVehicle', collisionNormal=None, isTracks=False, damageFactor=0, impulse=None, pcEnergy=None):
    """Spawn a collision effect bound to the hull node at a world-space hit position."""
    # Transform the world hit position into hull-local space.
    invWorldMatrix = Math.Matrix(self.matrix)
    invWorldMatrix.invert()
    rot = Math.Matrix()
    if collisionNormal is None:
        # No surface normal available: use a random orientation for variety.
        rot.setRotateYPR((random.uniform(-3.14, 3.14), random.uniform(-1.5, 1.5), 0.0))
    else:
        rot.setRotateYPR((0, 0, 0))
    mat = Math.Matrix()
    mat.setTranslate(hitPos)
    mat.preMultiply(rot)
    mat.postMultiply(invWorldMatrix)
    if pcEnergy is not None:
        # Feed the collision energy into the sound engine as an RTPC parameter.
        collisionEnergy = [SoundStartParam('RTPC_ext_collision_impulse_tank', pcEnergy)]
    else:
        collisionEnergy = []
    effectsList = self.typeDescriptor.type.effects.get(collisionEffectName, [])
    if effectsList:
        keyPoints, effects, _ = random.choice(effectsList)
        self.appearance.boundEffects.addNewToNode(TankPartNames.HULL, mat, effects, keyPoints, entity=self, surfaceNormal=collisionNormal, isTracks=isTracks, impulse=impulse, damageFactor=damageFactor, hitPoint=hitPos, soundParams=collisionEnergy)
    return
def showSplashHitEffect(self, effectsIndex, damageFactor):
    """Play the armor-splash effect at the hull origin, if the shot defines one."""
    effectsList = vehicles.g_cache.shotEffects[effectsIndex].get('armorSplashHit', None)
    if effectsList:
        mat = Math.Matrix()
        mat.setTranslate((0.0, 0.0, 0.0))
        # effectsList is (keyPoints, effects): same tuple layout as other effect sets.
        self.appearance.boundEffects.addNewToNode(TankPartNames.HULL, mat, effectsList[1], effectsList[0], entity=self, damageFactor=damageFactor)
    return
def set_burnoutLevel(self, prev):
    """Property-setter callback: react to burnout level changes (sound + GUI)."""
    attachedVehicle = BigWorld.player().getVehicleAttached()
    if attachedVehicle is None:
        return
    else:
        isAttachedVehicle = self.id == attachedVehicle.id
        if self.appearance.detailedEngineState is not None:
            self.appearance.detailedEngineState.throttle = 1 if self.burnoutLevel > 0.01 else 0
        if self.burnoutLevel > 0 and not self.__handbrakeFired:
            if self.getSpeed() > 0.5:
                # Start the burnout chassis sound once per burnout episode.
                if not self.__burnoutStarted:
                    soundObject = self.appearance.engineAudition.getSoundObject(TankSoundObjectsIndexes.CHASSIS)
                    soundObject.play('wheel_vehicle_burnout')
                    self.__burnoutStarted = True
        else:
            self.__burnoutStarted = False
        if isAttachedVehicle:
            self.guiSessionProvider.invalidateVehicleState(VEHICLE_VIEW_STATE.BURNOUT, self.burnoutLevel)
        return
def set_wheelsState(self, prev):
    """Property-setter callback: play destroy/repair sounds for changed wheels.

    wheelsState packs 2 bits of state per wheel (up to 8 wheels).
    """
    if self.appearance is None:
        return
    else:
        __WHEEL_DESTROYED = 3
        for i in xrange(0, 8):
            prevState = prev >> i * 2 & 3
            newState = self.wheelsState >> i * 2 & 3
            if prevState != newState:
                if newState == __WHEEL_DESTROYED:
                    self.appearance.onChassisDestroySound(False, True, i)
                elif prevState == __WHEEL_DESTROYED:
                    # Transition away from "destroyed" means the wheel was repaired.
                    self.appearance.onChassisDestroySound(False, False, i)
        return
def set_damageStickers(self, prev=None):
    """Property-setter callback: diff the sticker set and add/remove decals.

    `prev` is ignored; the previous set is tracked locally so that the method
    can also be invoked directly from startVisual().
    """
    if self.isStarted:
        prev = self.__prevDamageStickers
        curr = frozenset(self.damageStickers)
        self.__prevDamageStickers = curr
        for sticker in prev.difference(curr):
            self.appearance.removeDamageSticker(sticker)
        maxComponentIdx = TankPartIndexes.ALL[-1]
        wheelsConfig = self.appearance.typeDescriptor.chassis.generalWheelsAnimatorConfig
        if wheelsConfig:
            maxComponentIdx = maxComponentIdx + wheelsConfig.getWheelsCount()
        for sticker in curr.difference(prev):
            self.appearance.addDamageSticker(sticker, *DamageFromShotDecoder.decodeSegment(sticker, self.appearance.collisions, maxComponentIdx))
def set_publicStateModifiers(self, prev=None):
    """Property-setter callback: diff public state modifiers and start/stop extras."""
    if self.isStarted:
        prev = self.__prevPublicStateModifiers
        curr = frozenset(self.publicStateModifiers)
        self.__prevPublicStateModifiers = curr
        self.__updateModifiers(curr.difference(prev), prev.difference(curr))
        if not self.isPlayerVehicle:
            self.updateStunInfo()
def set_engineMode(self, prev):
    """Property-setter callback: forward engine mode changes to the appearance."""
    if self.isStarted and self.isAlive():
        self.appearance.changeEngineMode(self.engineMode, True)

def set_isStrafing(self, prev):
    """Property-setter callback: mirror the strafing flag into the filter, if supported."""
    if hasattr(self.filter, 'isStrafing'):
        self.filter.isStrafing = self.isStrafing

def set_gunAnglesPacked(self, prev):
    """Property-setter callback: decode packed gun angles and sync them to the filter."""
    syncGunAngles = getattr(self.filter, 'syncGunAngles', None)
    if syncGunAngles:
        yaw, pitch = decodeGunAngles(self.gunAnglesPacked, self.typeDescriptor.gun.pitchLimits['absolute'])
        syncGunAngles(yaw, pitch)
    return
def set_health(self, prev):
    # Intentionally empty — health changes appear to be handled through
    # onHealthChanged() instead; confirm before adding logic here.
    pass
def set_isCrewActive(self, prev):
    """Property-setter callback: a fully knocked-out crew kills the vehicle visually."""
    if self.isStarted:
        self.appearance.onVehicleHealthChanged()
        if not self.isPlayerVehicle:
            ctrl = self.guiSessionProvider.shared.feedback
            if ctrl is not None:
                ctrl.setVehicleNewHealth(self.id, self.health)
        if not self.isCrewActive and self.health > 0:
            # Vehicle is intact but crew is dead -> treat as vehicle death.
            self.__onVehicleDeath()
    return
def set_siegeState(self, prev):
    """Property-setter callback: non-player vehicles switch siege mode instantly."""
    if not self.isPlayerVehicle:
        self.onSiegeStateUpdated(self.siegeState, 0.0)
def set_isSpeedCapturing(self, prev=None):
    """Property-setter callback: notify GUI of the speed-capture modifier state."""
    _logger.debug('set_isSpeedCapturing %s', self.isSpeedCapturing)
    if not self.isPlayerVehicle:
        ctrl = self.guiSessionProvider.shared.feedback
        if ctrl is not None:
            # First tuple element True = "speed capturing" channel.
            ctrl.invalidatePassiveEngineering(self.id, (True, self.isSpeedCapturing))
    return

def set_isBlockingCapture(self, prev=None):
    """Property-setter callback: notify GUI of the capture-blocking modifier state."""
    _logger.debug('set_isBlockingCapture %s', self.isBlockingCapture)
    if not self.isPlayerVehicle:
        ctrl = self.guiSessionProvider.shared.feedback
        if ctrl is not None:
            # First tuple element False = "blocking capture" channel.
            ctrl.invalidatePassiveEngineering(self.id, (False, self.isBlockingCapture))
    return
def set_steeringAngles(self, prev=None):
    """Property-setter callback: feed unpacked wheel steering samples into the filters."""
    if self.__wheelsSteeringFilter is not None:
        for packedValue, steeringFilter in zip(self.steeringAngles, self.__wheelsSteeringFilter):
            unpackedValue = WoT.unpackWheelSteering(packedValue)
            steeringFilter.input(BigWorld.time(), unpackedValue)
    return

def set_wheelsScroll(self, prev=None):
    """Property-setter callback: feed unpacked wheel scroll samples into the filters."""
    if self.__wheelsScrollFilter is not None:
        for packedValue, scrollFilter in zip(self.wheelsScroll, self.__wheelsScrollFilter):
            unpackedValue = WoT.unpackWheelScroll(packedValue)
            scrollFilter.input(BigWorld.time(), unpackedValue)
    return
def onHealthChanged(self, newHealth, attackerID, attackReasonID):
    """Server notification of a health change; updates GUI, visuals and death state."""
    if newHealth > 0 and self.health <= 0:
        # Respawn/repair transition: just adopt the new health silently.
        self.health = newHealth
        return
    if not self.isStarted:
        return
    self.guiSessionProvider.setVehicleHealth(self.isPlayerVehicle, self.id, newHealth, attackerID, attackReasonID)
    if not self.appearance.damageState.isCurrentModelDamaged:
        self.appearance.onVehicleHealthChanged()
    if self.health <= 0 and self.isCrewActive:
        self.__onVehicleDeath()
    if self.isPlayerVehicle:
        TriggersManager.g_manager.activateTrigger(TRIGGER_TYPE.PLAYER_RECEIVE_DAMAGE, attackerId=attackerID)
def set_stunInfo(self, prev):
    """Property-setter callback: recompute and broadcast the stun state."""
    _logger.debug('Set stun info(curr,~ prev): %s, %s', self.stunInfo, prev)
    self.updateStunInfo()
def __updateCachedStunInfo(self, endTime):
    """Rebuild the cached StunInfo from a server stun end time (0 clears it)."""
    if endTime:
        # Keep the original start time across repeated updates of the same stun.
        cachedStartTime = self.__cachedStunInfo.startTime
        startTime = cachedStartTime if cachedStartTime > 0.0 else BigWorld.serverTime()
        totalTime = max(self.__cachedStunInfo.duration, endTime - startTime)
        duration = endTime - BigWorld.serverTime() if endTime > 0.0 else 0.0
        self.__cachedStunInfo = StunInfo(startTime, endTime, duration, totalTime)
    else:
        self.__cachedStunInfo = StunInfo(0.0, 0.0, 0.0, 0.0)
def updateStunInfo(self):
    """Recompute the stun cache and push it to the own-vehicle GUI and feedback ctrl."""
    attachedVehicle = BigWorld.player().getVehicleAttached()
    if attachedVehicle is None:
        return
    else:
        self.__updateCachedStunInfo(self.stunInfo)
        if self.lobbyContext.getServerSettings().spgRedesignFeatures.isStunEnabled():
            isAttachedVehicle = self.id == attachedVehicle.id
            if isAttachedVehicle:
                self.guiSessionProvider.invalidateVehicleState(VEHICLE_VIEW_STATE.STUN, self.__cachedStunInfo)
            if not self.isPlayerVehicle:
                ctrl = self.guiSessionProvider.shared.feedback
                if ctrl is not None:
                    ctrl.invalidateStun(self.id, self.__cachedStunInfo)
        else:
            _logger.warning('Stun features is disabled!')
        return
def showAmmoBayEffect(self, mode, fireballVolume, projectedTurretSpeed):
    """Play the ammo-bay explosion/fire effect (turret speed is unused client-side)."""
    if self.isStarted:
        self.appearance.showAmmoBayEffect(mode, fireballVolume)

def onPushed(self, x, z):
    """Server push correction: snap position only when far (>40m) from the player."""
    try:
        distSqr = BigWorld.player().position.distSqrTo(self.position)
        if distSqr > 1600.0:
            self.filter.setPosition(x, z)
    except Exception:
        # Best-effort: ignore failures (e.g. player/filter not available yet).
        pass

def showRammingEffect(self, energy, point):
    """Play a ramming collision effect, light or heavy depending on energy."""
    if not self.isStarted:
        return
    if energy < 600:
        self.showCollisionEffect(point, 'rammingCollisionLight')
    else:
        self.showCollisionEffect(point, 'rammingCollisionHeavy')
def onStaticCollision(self, energy, point, normal, miscFlags, damage, destrEffectIdx, destrMaxHealth):
    """Server notification of a collision with static world geometry or a destructible.

    miscFlags packs: bit0 = track collision, bit1 = speed-tree collision,
    bit2 = speed-tree already destroyed.
    """
    if not self.isStarted:
        return
    self.appearance.stopSwinging()
    BigWorld.player().inputHandler.onVehicleCollision(self, self.getSpeed())
    isTrackCollision = bool(miscFlags & 1)
    isSptCollision = bool(miscFlags >> 1 & 1)
    isSptDestroyed = bool(miscFlags >> 2 & 1)
    if isSptDestroyed:
        # Hitting an already-destroyed tree: no effect to play.
        return
    hitPoint = point
    surfNormal = normal
    matKind = SPT_MATKIND.SOLID
    if destrEffectIdx < 0:
        # No destructible effect: resolve the surface material near the hit.
        if not isSptCollision:
            surfaceMaterial = calcSurfaceMaterialNearPoint(hitPoint, normal, self.spaceID)
            hitPoint, surfNormal, matKind, effectIdx = surfaceMaterial
        else:
            effectIdx = EFFECT_MATERIAL_INDEXES_BY_NAMES['wood']
        if matKind != 0:
            self.__showStaticCollisionEffect(energy, effectIdx, hitPoint, surfNormal, isTrackCollision, damage * 100.0)
    else:
        self.__showDynamicCollisionEffect(energy, destrMaxHealth, hitPoint, surfNormal)
    if self.isPlayerVehicle:
        self.appearance.executeRammingVibrations(matKind)
def getAimParams(self):
    """Return (turretYaw, gunPitch) from the visual matrices; zeros without a visual."""
    if self.appearance is not None:
        turretYaw = Math.Matrix(self.appearance.turretMatrix).yaw
        gunPitch = Math.Matrix(self.appearance.gunMatrix).pitch
        return (turretYaw, gunPitch)
    else:
        return (0.0, 0.0)
def onSiegeStateUpdated(self, newState, timeToNextMode):
    """Propagate a siege-state change to descriptor, appearance and input handler."""
    if self.typeDescriptor is not None and self.typeDescriptor.hasSiegeMode:
        self.typeDescriptor.onSiegeStateChanged(newState)
        if self.isStarted:
            self.appearance.onSiegeStateChanged(newState)
            if self.isPlayerVehicle:
                inputHandler = BigWorld.player().inputHandler
                inputHandler.siegeModeControl.notifySiegeModeChanged(self, newState, timeToNextMode)
    else:
        _logger.error('Wrong usage! Should be called only on vehicle with valid typeDescriptor and siege mode')
    return
def collideSegmentExt(self, startPoint, endPoint):
    """Collide a world-space segment against this vehicle.

    Returns a list of SegmentCollisionResultExt, or None when there is no
    collision geometry or no hits.
    """
    if self.appearance.collisions is not None:
        collisions = self.appearance.collisions.collideAllWorld(startPoint, endPoint)
        if collisions:
            res = []
            for collision in collisions:
                # collision = (dist, hitAngleCos, matKind, partIndex)
                matInfo = self.getMatinfo(collision[3], collision[2])
                res.append(SegmentCollisionResultExt(collision[0], collision[1], matInfo, collision[3]))
            return res
    return
def getMatinfo(self, parIndex, matKind):
    """Armor material info for a tank part index and material kind, or None."""
    partAttrByIndex = {TankPartIndexes.CHASSIS: 'chassis',
     TankPartIndexes.HULL: 'hull',
     TankPartIndexes.TURRET: 'turret',
     TankPartIndexes.GUN: 'gun'}
    attrName = partAttrByIndex.get(parIndex)
    if attrName is None:
        return None
    # Resolve the part descriptor lazily, only for the matching part.
    return getattr(self.typeDescriptor, attrName).materials.get(matKind)
def isAlive(self):
    """A vehicle is alive while its crew is active and health is positive."""
    return self.isCrewActive and self.health > 0

def isPitchHullAimingAvailable(self):
    """Whether the vehicle type supports hull pitch aiming."""
    return self.typeDescriptor is not None and self.typeDescriptor.isPitchHullAimingAvailable

def getServerGunAngles(self):
    """Decode the server-packed gun angles into (yaw, pitch)."""
    return decodeGunAngles(self.gunAnglesPacked, self.typeDescriptor.gun.pitchLimits['absolute'])
def startVisual(self):
    """Activate the vehicle visual: appearance, physics, GUI hooks and state sync.

    Raises SoftException if the visual is already started.
    """
    if self.isStarted:
        raise SoftException('Vehicle is already started')
    avatar = BigWorld.player()
    self.appearance = appearance_cache.getAppearance(self.id, self.__prereqs)
    self.appearance.setVehicle(self)
    self.appearance.activate()
    self.appearance.changeEngineMode(self.engineMode)
    self.appearance.onVehicleHealthChanged(self.isPlayerVehicle)
    if self.isPlayerVehicle:
        if self.isAlive():
            self.appearance.setupGunMatrixTargets(avatar.gunRotator)
    if hasattr(self.filter, 'allowStrafeCompensation'):
        # Strafe compensation only for remote vehicles.
        self.filter.allowStrafeCompensation = not self.isPlayerVehicle
    self.isStarted = True
    if not self.appearance.isObserver:
        self.show(True)
    # Re-run the state-diff setters now that the appearance exists.
    self.set_publicStateModifiers()
    self.set_damageStickers()
    if TriggersManager.g_manager:
        TriggersManager.g_manager.activateTrigger(TriggersManager.TRIGGER_TYPE.VEHICLE_VISUAL_VISIBILITY_CHANGED, vehicleId=self.id, isVisible=True)
    self.guiSessionProvider.startVehicleVisual(self.proxy, True)
    if self.stunInfo > 0.0:
        self.updateStunInfo()
    self.set_inspiringEffect()
    self.set_inspired()
    if self.isSpeedCapturing:
        self.set_isSpeedCapturing()
    if self.isBlockingCapture:
        self.set_isBlockingCapture()
    if not self.isAlive():
        self.__onVehicleDeath(True)
    if self.isTurretMarkedForDetachment:
        self.confirmTurretDetachment()
    self.__startWGPhysics()
    self.refreshNationalVoice()
    self.__prereqs = None
    self.appearance.highlighter.setVehicleOwnership()
    if self.respawnCompactDescr:
        _logger.debug('respawn compact descr is still valid, request reloading of tank resources %s', self.id)
        # Defer the respawn out of the current callback.
        BigWorld.callback(0.0, lambda : Vehicle.respawnVehicle(self.id, self.respawnCompactDescr))
    return
def refreshNationalVoice(self):
    """Apply the special/national crew voice, but only for the vehicle the camera is attached to."""
    player = BigWorld.player()
    if self is not player.getVehicleAttached():
        return
    commanderSkinID = self.publicInfo.commanderSkinID
    vehicleType = self.typeDescriptor.type
    setSpecialVoice(self.publicInfo.crewGroup, commanderSkinID, vehicleType, self.id == player.playerVehicleID)
def stopVisual(self, showStipple=False):
    """Tear down the vehicle visual: extras, triggers, GUI hooks and appearance.

    Returns the stipple placeholder model (always None here — see note below).
    Raises SoftException if the visual was never started.
    """
    if not self.isStarted:
        raise SoftException('Vehicle is already stopped')
    stippleModel = None
    # NOTE: deliberately overrides the argument, so assembleStipple() below is
    # unreachable — the stipple effect is disabled in this build.
    showStipple = False
    if showStipple:
        self.appearance.assembleStipple()
    self.__stopExtras()
    if TriggersManager.g_manager:
        TriggersManager.g_manager.activateTrigger(TriggersManager.TRIGGER_TYPE.VEHICLE_VISUAL_VISIBILITY_CHANGED, vehicleId=self.id, isVisible=False)
    self.guiSessionProvider.stopVehicleVisual(self.id, self.isPlayerVehicle)
    self.appearance.deactivate()
    self.appearance = None
    self.isStarted = False
    self.__speedInfo.reset()
    return stippleModel
def show(self, show):
if show:
drawFlags = BigWorld.DrawAll
else:
drawFlags = BigWorld.ShadowPassBit
if self.isStarted:
va = self.appearance
va.changeDrawPassVisibility(drawFlags)
va.showStickers(show)
def addCameraCollider(self):
if self.appearance is not None:
self.appearance.addCameraCollider()
return
def removeCameraCollider(self):
if self.appearance is not None:
self.appearance.removeCameraCollider()
return
    def _isDestructibleMayBeBroken(self, chunkID, itemIndex, matKind, itemFilename, itemScale, vehSpeed):
        """Estimate whether ramming a destructible at *vehSpeed* would break it.

        Returns True if the destructible is already broken, or if the vehicle's
        kinetic ram damage exceeds the destructible's (scaled) health; False when
        the destructible or its chunk controller cannot be resolved.

        :param chunkID: terrain chunk containing the destructible.
        :param itemIndex: index of the destructible item within the chunk.
        :param matKind: material kind hit (selects the module for structures).
        :param itemFilename: destructible resource name used for the cache lookup.
        :param itemScale: world scale applied to the destructible's health.
        :param vehSpeed: vehicle speed at the moment of impact.
        """
        desc = AreaDestructibles.g_cache.getDescByFilename(itemFilename)
        if desc is None:
            return False
        else:
            ctrl = AreaDestructibles.g_destructiblesManager.getController(chunkID)
            if ctrl is None:
                return False
            if ctrl.isDestructibleBroken(itemIndex, matKind, desc['type']):
                return True
            mass = self.typeDescriptor.physics['weight']
            # Kinetic energy (0.5*m*v^2) scaled by an empirical damage coefficient.
            instantDamage = 0.5 * mass * vehSpeed * vehSpeed * 0.00015
            if desc['type'] == DestructiblesCache.DESTR_TYPE_STRUCTURE:
                # Structures track health per module, keyed by the material hit.
                moduleDesc = desc['modules'].get(matKind)
                if moduleDesc is None:
                    return False
                refHealth = moduleDesc['health']
            else:
                # Non-structures scale the damage by vehicle mass relative to a
                # reference unit mass, with a per-destructible exponent.
                unitMass = AreaDestructibles.g_cache.unitVehicleMass
                instantDamage *= math.pow(mass / unitMass, desc['kineticDamageCorrection'])
                refHealth = desc['health']
            return DestructiblesCache.scaledDestructibleHealth(itemScale, refHealth) < instantDamage
    def __showStaticCollisionEffect(self, energy, effectIdx, hitPoint, normal, isTrackCollision, damageFactor):
        """Play a light/heavy collision effect for hitting static geometry.

        The light vs. heavy choice compares the impact energy against a
        per-vehicle-type threshold derived from a configured velocity
        (0.5*v^2, i.e. specific kinetic energy).

        :param energy: impact energy of the collision.
        :param effectIdx: index into EFFECT_MATERIALS selecting the surface material.
        :param hitPoint: world-space contact point.
        :param normal: surface normal at the contact point.
        :param isTrackCollision: True if the track (rather than the hull) hit.
        :param damageFactor: damage factor forwarded to showCollisionEffect.
        """
        heavyVelocities = self.typeDescriptor.type.heavyCollisionEffectVelocities
        heavyEnergy = heavyVelocities['track'] if isTrackCollision else heavyVelocities['hull']
        heavyEnergy = 0.5 * heavyEnergy * heavyEnergy
        postfix = '%sCollisionLight' if energy < heavyEnergy else '%sCollisionHeavy'
        # Out-of-range material indices fall back to an empty material name,
        # producing e.g. 'CollisionLight', which simply won't match any effect.
        effectName = ''
        if effectIdx < len(EFFECT_MATERIALS):
            effectName = EFFECT_MATERIALS[effectIdx]
        effectName = postfix % effectName
        if effectName in self.typeDescriptor.type.effects:
            self.showCollisionEffect(hitPoint, effectName, normal, isTrackCollision, damageFactor, self.__getImpulse(self.getSpeed()))
def __showDynamicCollisionEffect(self, energy, destrMaxHealth, hitPoint, surfNormal):
effectName = 'dynamicCollision'
if effectName in self.typeDescriptor.type.effects:
self.showCollisionEffect(hitPoint, effectName, surfNormal, False, 0, self.__getDynamicImpulse(self.getSpeed(), destrMaxHealth))
    def __startWGPhysics(self):
        """Create and attach a client-side WG physics body to the entity filter.

        No-op for filters that do not support vehicle physics.
        """
        if not hasattr(self.filter, 'setVehiclePhysics'):
            return
        typeDescr = self.typeDescriptor
        isWheeled = 'wheeledVehicle' in self.typeDescriptor.type.tags
        physics = BigWorld.WGWheeledPhysics() if isWheeled else BigWorld.WGTankPhysics()
        physics_shared.initVehiclePhysicsClient(physics, typeDescr)
        # Effectively unbounded arena for the client-side simulation.
        arenaMinBound, arenaMaxBound = (-10000, -10000), (10000, 10000)
        physics.setArenaBounds(arenaMinBound, arenaMaxBound)
        # Weak reference avoids a physics <-> vehicle reference cycle.
        physics.owner = weakref.ref(self)
        physics.staticMode = False
        physics.movementSignals = 0
        self.filter.setVehiclePhysics(physics)
        # High 16 bits of arenaTypeID select the arena type for the visibility mask.
        physics.visibilityMask = ArenaType.getVisibilityMask(BigWorld.player().arenaTypeID >> 16)
        yaw, pitch = decodeGunAngles(self.gunAnglesPacked, typeDescr.gun.pitchLimits['absolute'])
        self.filter.syncGunAngles(yaw, pitch)
        self.__speedInfo.set(self.filter.speedInfo)
    def __stopWGPhysics(self):
        """Counterpart of __startWGPhysics: drop the cached speed information."""
        self.__speedInfo.reset()
    def __getImpulse(self, speed):
        """Return the collision impulse factor for the current speed.

        NOTE(review): 'mass' cancels out algebraically
        (speed*mass / (maxSpeed*mass) == speed/maxSpeed), so the result only
        depends on the speed ratio — presumably leftover from an older formula;
        confirm before simplifying (exact float results could differ).
        """
        mass = self.typeDescriptor.physics['weight']
        maxSpeed = self.typeDescriptor.physics['speedLimits'][0]
        return math.fabs(speed * mass / (maxSpeed * mass))
def __getDynamicImpulse(self, speed, maxHealth):
maxSpeed = 20.0
relSpeed = min(math.fabs(speed / maxSpeed), 1.0)
relSpeed *= relSpeed
relHeath = min(min(maxHealth, 90.0) / 90.0, 1.0)
return 0.5 * (relSpeed + relHeath)
    def __stopExtras(self):
        """Stop every active vehicle extra during visual teardown.

        Stopping an extra is expected to remove its entry from self.extras —
        the warning below fires if anything is still left afterwards.
        """
        extraTypes = self.typeDescriptor.extras
        # Python 2 dict.items() returns a list copy, so entries may be removed
        # from self.extras while this loop runs.
        for index, data in self.extras.items():
            extraTypes[index].stop(data)
        if self.extras:
            _logger.warning('this code point should have never been reached')
def __updateModifiers(self, addedExtras, removedExtras):
extraTypes = self.typeDescriptor.extras
for idx in removedExtras:
extraTypes[idx].stopFor(self)
for idx in addedExtras:
try:
extraTypes[idx].startFor(self)
except Exception:
_logger.exception('Update modifiers')
    def __onVehicleDeath(self, isDeadStarted=False):
        """Handle client-side death of this vehicle.

        :param isDeadStarted: True when the vehicle was already dead at the
            moment its visuals were (re)started.
        """
        if not self.isPlayerVehicle:
            # Player vehicle death is reported through a different path; here we
            # only notify the GUI/trigger systems for other vehicles.
            ctrl = self.guiSessionProvider.shared.feedback
            if ctrl is not None:
                ctrl.setVehicleState(self.id, _GUI_EVENT_ID.VEHICLE_DEAD, isDeadStarted)
            TriggersManager.g_manager.fireTrigger(TRIGGER_TYPE.VEHICLE_DESTROYED, vehicleId=self.id)
        self._removeInspire()
        bwfilter = self.filter
        if hasattr(bwfilter, 'velocityErrorCompensation'):
            # NOTE(review): presumably loosens position-error smoothing for a
            # wreck, where precise interpolation no longer matters — confirm.
            bwfilter.velocityErrorCompensation = 100.0
        return
def confirmTurretDetachment(self):
self.__turretDetachmentConfirmed = True
if not self.isTurretDetached:
_logger.error('Vehicle::confirmTurretDetachment: Confirming turret detachment, though the turret is not detached')
self.appearance.updateTurretVisibility()
def drawEdge(self, forceSimpleEdge=False):
self.appearance.highlighter.highlight(True, forceSimpleEdge)
def removeEdge(self, forceSimpleEdge=False):
self.appearance.highlighter.highlight(False, forceSimpleEdge)
def addModel(self, model):
super(Vehicle, self).addModel(model)
highlighter = self.appearance.highlighter
if highlighter.enabled:
highlighter.highlight(True)
def delModel(self, model):
highlighter = self.appearance.highlighter
hlEnabled = highlighter.enabled
hlSimpleEdge = highlighter.isSimpleEdge
if hlEnabled:
highlighter.removeHighlight()
super(Vehicle, self).delModel(model)
if hlEnabled:
highlighter.highlight(True, hlSimpleEdge)
    def notifyInputKeysDown(self, movementDir, rotationDir, handbrakeFired):
        """Forward the player's movement input to the physics filter and engine model.

        :param movementDir: forward/backward input direction.
        :param rotationDir: hull rotation input direction.
        :param handbrakeFired: whether the handbrake input is active; cached on self.
        """
        self.filter.notifyInputKeysDown(movementDir, rotationDir)
        self.__handbrakeFired = handbrakeFired
        if self.appearance.detailedEngineState is not None:
            # NOTE(review): 'or' yields the first truthy operand, not a boolean,
            # so throttle gets the raw direction value (e.g. -1/0/1) —
            # presumably intentional; verify against detailedEngineState users.
            self.appearance.detailedEngineState.throttle = movementDir or rotationDir
        return
def turnoffThrottle(self):
if self.appearance.detailedEngineState is not None:
self.appearance.detailedEngineState.throttle = 0
return
@dependency.replace_none_kwargs(lobbyContext=ILobbyContext)
def _stripVehCompDescrIfRoaming(vehCompDescr, lobbyContext=None):
    """Strip customization data from a vehicle compact descriptor while roaming.

    Returns the descriptor unchanged when server settings are unavailable or
    the account is not in roaming.
    """
    if lobbyContext is None:
        return vehCompDescr
    serverSettings = lobbyContext.getServerSettings()
    if serverSettings is None or not serverSettings.roaming.isInRoaming():
        return vehCompDescr
    return vehicles.stripCustomizationFromVehicleCompactDescr(vehCompDescr, True, True, False)[0]
| en | 0.715995 | # Python bytecode 2.7 (decompiled from Python 2.7) # Embedded file name: scripts/client/Vehicle.py | 1.868516 | 2 |
mirumon/api/devices/http_endpoints/models/create_device_request.py | mirumon/mirumon-backend | 19 | 6617168 | from mirumon.api.api_model import APIModel
class CreateDeviceRequest(APIModel):
name: str
| from mirumon.api.api_model import APIModel
class CreateDeviceRequest(APIModel):
name: str
| none | 1 | 1.474526 | 1 | |
blog-server/blog/resolvers/admin.py | rob-blackbourn/blog-engine | 1 | 6617169 | <gh_stars>1-10
from ..utils.casing import dict_to_snakecase_dict
async def register_user(root, info, *args, **kwargs):
return await info.context.repositories.admin.register_user(
info.context,
**dict_to_snakecase_dict(kwargs))
async def authenticate_user(root, info, *args, **kwargs):
return await info.context.repositories.admin.authenticate_user(
info.context,
**dict_to_snakecase_dict(kwargs))
async def update_roles(root, info, *args, **kwargs):
return await info.context.repositories.admin.update_roles(
info.context,
**dict_to_snakecase_dict(kwargs))
| from ..utils.casing import dict_to_snakecase_dict
async def register_user(root, info, *args, **kwargs):
return await info.context.repositories.admin.register_user(
info.context,
**dict_to_snakecase_dict(kwargs))
async def authenticate_user(root, info, *args, **kwargs):
return await info.context.repositories.admin.authenticate_user(
info.context,
**dict_to_snakecase_dict(kwargs))
async def update_roles(root, info, *args, **kwargs):
return await info.context.repositories.admin.update_roles(
info.context,
**dict_to_snakecase_dict(kwargs)) | none | 1 | 2.125598 | 2 | |
unit_tests/test_cloud_add.py | hep-gc/cloud-scheduler-2 | 3 | 6617170 | from unit_test_common import execute_csv2_request, initialize_csv2_request, ut_id, sanity_requests, parameters_requests
from sys import argv
# lno: CV - error code identifier.
def main(gvar):
if not gvar:
gvar = {}
if len(argv) > 1:
initialize_csv2_request(gvar, selections=argv[1])
else:
initialize_csv2_request(gvar)
# Bad requests.
# 01 - 05
sanity_requests(gvar, '/cloud/add/', ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctu1'), ut_id(gvar, 'ctg2'), ut_id(gvar, 'ctu2'))
parameters = {
# 06 Send a GET request.
# 07 Give an invalid parameter.
# 08 Omit cloud_name.
# 09 Give two cloud_names.
'cloud_name': {'valid': 'invalid-unit-test', 'test_cases': {
# 10
'': 'cloud add value specified for "cloud_name" must not be the empty string.',
# 11
'Invalid-Unit-Test': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 12
'invalid-unit--test': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 13
'-invalid-unit-test': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 14
'invalid-unit-test!': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 15
'cloud-name-that-is-too-long-for-the-database': 'Data too long for column \'cloud_name\' at row 1',
# 16 Attempt to create a cloud that already exists.
ut_id(gvar, 'ctc2'): 'Duplicate entry \'{}-{}\' for key \'PRIMARY\''.format(ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctc2'))
}, 'mandatory': True},
# 17 Omit cloud_type.
# 18 Give two cloud_types.
# 19
'cloud_type': {'valid': 'local', 'test_cases': {'invalid-unit-test': 'cloud add value specified for "cloud_type" must be one of the following options: [\'amazon\', \'local\', \'openstack\'].'}, 'mandatory': True},
# 20 Omit authurl.
# 21 Give two authurls.
# 22
'authurl': {'valid': gvar['cloud_credentials']['authurl'], 'test_cases': {'': 'cloud add parameter "authurl" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 23 Omit username.
# 24 Give two usernames.
# 25
'username': {'valid': gvar['cloud_credentials']['username'], 'test_cases': {'': 'cloud add parameter "username" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 26 Omit password.
# 27 Give two passwords.
# 28
'password': {'valid': gvar['cloud_credentials']['password'], 'test_cases': {'': 'cloud add parameter "password" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 29 Omit project.
# 30 Give two projects.
# 31
'project': {'valid': gvar['cloud_credentials']['project'], 'test_cases': {'': 'cloud add parameter "project" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 32 Omit region.
# 33 Give two regions.
# 34
'region': {'valid': gvar['cloud_credentials']['region'], 'test_cases': {'': 'cloud add parameter "region" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 35 Give two ram_ctls.
# 36
'ram_ctl': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "ram_ctl" must be an integer value.'}},
# 37 Give two cores_ctls.
# 38
'cores_ctl': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "cores_ctl" must be an integer value.'}},
# 39 Give two vm_keep_alives.
# 40
'vm_keep_alive': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "vm_keep_alive" must be an integer value.'}},
# 41 Give two enableds.
# 42
'enabled': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add boolean value specified for "enabled" must be one of the following: true, false, yes, no, 1, or 0.'}},
# 43 Give two spot_prices.
# 44
'spot_price': {'valid': 0.0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "spot_price" must be a floating point value.'}},
# 45 Give metadata_name and metadata_name.1.
# 46
'metadata_name': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified metadata_name "invalid-unit-test" does not exist.'}, 'array_field': True},
# 47 Give two vm_images.
# 48
'vm_image': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_image=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}},
# 49 Give two vm_flavors.
# 50
'vm_flavor': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_flavor=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}},
# 51 Give two vm_networks.
# 52
'vm_network': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_network=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}},
# 53 Give two vm_keynames.
# 54
'vm_keyname': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_keyname=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}}
}
parameters_requests(gvar, '/cloud/add/', ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctu1'), parameters)
# Parameter combinations that do not fit well into the above format.
# 55 Known to fail if run twice without setup or cleanup in between.
execute_csv2_request(
gvar, 0, None, 'cloud "{}::{}" successfully added.'.format(ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctc5')),
'/cloud/add/', group=ut_id(gvar, 'ctg1'), form_data={
'cloud_name': ut_id(gvar, 'ctc5'),
'cloud_type': 'openstack',
'priority': '31',
'cacertificate': None,
'user_domain_name': 'Default',
'project_domain_name': 'Default',
'enabled': 1,
'vm_keep_alive': -31,
'metadata_name': ut_id(gvar, 'cty1'),
'spot_price': 10,
'cores_softmax': -1,
**gvar['cloud_credentials']
},
server_user=ut_id(gvar, 'ctu1')
)
# 56 Ensure that 40 actually created a cloud.
execute_csv2_request(
gvar, 0, None, None,
'/cloud/list/', group=ut_id(gvar, 'ctg1'),
expected_list='cloud_list', list_filter={'cloud_name': ut_id(gvar, 'ctc5')},
values={
'group_name': ut_id(gvar, 'ctg1'),
'cloud_name': ut_id(gvar, 'ctc5'),
'authurl': gvar['cloud_credentials']['authurl'],
'username': gvar['cloud_credentials']['username'],
'region': gvar['cloud_credentials']['region'],
'project': gvar['cloud_credentials']['project'],
'cloud_type': 'openstack',
'cloud_priority': 31,
'cacertificate': None,
'user_domain_name': 'Default',
'project_domain_name': 'Default',
'enabled': 1,
'vm_keep_alive': -31,
'group_exclusions': ut_id(gvar, 'cty1'),
'spot_price': 10,
},
server_user=ut_id(gvar, 'ctu1')
)
# 57 Known to fail if run twice without setup or cleanup in between.
execute_csv2_request(
gvar, 0, None, 'cloud "{}::{}" successfully added.'.format(ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctc6')),
'/cloud/add/', group=ut_id(gvar, 'ctg1'), form_data={
'cloud_name': ut_id(gvar, 'ctc6'),
'cloud_type': 'openstack',
'metadata_name.1': ut_id(gvar, 'cty1'),
'metadata_name.2': ut_id(gvar, 'cty2'),
'metadata_name.3': ut_id(gvar, 'cty3'),
**gvar['cloud_credentials']
},
server_user=ut_id(gvar, 'ctu1')
)
# 58 Ensure that 42 actually created a cloud.
execute_csv2_request(
gvar, 0, None, None,
'/cloud/list/', group=ut_id(gvar, 'ctg1'),
expected_list='cloud_list', list_filter={'cloud_name': ut_id(gvar, 'ctc6')},
values={
'group_name': ut_id(gvar, 'ctg1'),
'cloud_name': ut_id(gvar, 'ctc6'),
'group_exclusions': ut_id(gvar, 'cty1,cty2,cty3')
},
server_user=ut_id(gvar, 'ctu1')
)
if __name__ == "__main__":
main(None)
| from unit_test_common import execute_csv2_request, initialize_csv2_request, ut_id, sanity_requests, parameters_requests
from sys import argv
# lno: CV - error code identifier.
def main(gvar):
if not gvar:
gvar = {}
if len(argv) > 1:
initialize_csv2_request(gvar, selections=argv[1])
else:
initialize_csv2_request(gvar)
# Bad requests.
# 01 - 05
sanity_requests(gvar, '/cloud/add/', ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctu1'), ut_id(gvar, 'ctg2'), ut_id(gvar, 'ctu2'))
parameters = {
# 06 Send a GET request.
# 07 Give an invalid parameter.
# 08 Omit cloud_name.
# 09 Give two cloud_names.
'cloud_name': {'valid': 'invalid-unit-test', 'test_cases': {
# 10
'': 'cloud add value specified for "cloud_name" must not be the empty string.',
# 11
'Invalid-Unit-Test': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 12
'invalid-unit--test': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 13
'-invalid-unit-test': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 14
'invalid-unit-test!': 'cloud add value specified for "cloud_name" must be all lowercase letters, digits, dashes, underscores, periods, and colons, and cannot contain more than one consecutive dash or start or end with a dash.',
# 15
'cloud-name-that-is-too-long-for-the-database': 'Data too long for column \'cloud_name\' at row 1',
# 16 Attempt to create a cloud that already exists.
ut_id(gvar, 'ctc2'): 'Duplicate entry \'{}-{}\' for key \'PRIMARY\''.format(ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctc2'))
}, 'mandatory': True},
# 17 Omit cloud_type.
# 18 Give two cloud_types.
# 19
'cloud_type': {'valid': 'local', 'test_cases': {'invalid-unit-test': 'cloud add value specified for "cloud_type" must be one of the following options: [\'amazon\', \'local\', \'openstack\'].'}, 'mandatory': True},
# 20 Omit authurl.
# 21 Give two authurls.
# 22
'authurl': {'valid': gvar['cloud_credentials']['authurl'], 'test_cases': {'': 'cloud add parameter "authurl" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 23 Omit username.
# 24 Give two usernames.
# 25
'username': {'valid': gvar['cloud_credentials']['username'], 'test_cases': {'': 'cloud add parameter "username" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 26 Omit password.
# 27 Give two passwords.
# 28
'password': {'valid': gvar['cloud_credentials']['password'], 'test_cases': {'': 'cloud add parameter "password" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 29 Omit project.
# 30 Give two projects.
# 31
'project': {'valid': gvar['cloud_credentials']['project'], 'test_cases': {'': 'cloud add parameter "project" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 32 Omit region.
# 33 Give two regions.
# 34
'region': {'valid': gvar['cloud_credentials']['region'], 'test_cases': {'': 'cloud add parameter "region" contains an empty string which is specifically disallowed.'}, 'mandatory': True},
# 35 Give two ram_ctls.
# 36
'ram_ctl': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "ram_ctl" must be an integer value.'}},
# 37 Give two cores_ctls.
# 38
'cores_ctl': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "cores_ctl" must be an integer value.'}},
# 39 Give two vm_keep_alives.
# 40
'vm_keep_alive': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "vm_keep_alive" must be an integer value.'}},
# 41 Give two enableds.
# 42
'enabled': {'valid': 0, 'test_cases': {'invalid-unit-test': 'cloud add boolean value specified for "enabled" must be one of the following: true, false, yes, no, 1, or 0.'}},
# 43 Give two spot_prices.
# 44
'spot_price': {'valid': 0.0, 'test_cases': {'invalid-unit-test': 'cloud add value specified for "spot_price" must be a floating point value.'}},
# 45 Give metadata_name and metadata_name.1.
# 46
'metadata_name': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified metadata_name "invalid-unit-test" does not exist.'}, 'array_field': True},
# 47 Give two vm_images.
# 48
'vm_image': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_image=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}},
# 49 Give two vm_flavors.
# 50
'vm_flavor': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_flavor=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}},
# 51 Give two vm_networks.
# 52
'vm_network': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_network=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}},
# 53 Give two vm_keynames.
# 54
'vm_keyname': {'valid': '', 'test_cases': {'invalid-unit-test': 'cloud add, "invalid-unit-test" failed - specified item does not exist: vm_keyname=invalid-unit-test, group_name={}, cloud_name=invalid-unit-test.'.format(ut_id(gvar, 'ctg1'))}}
}
parameters_requests(gvar, '/cloud/add/', ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctu1'), parameters)
# Parameter combinations that do not fit well into the above format.
# 55 Known to fail if run twice without setup or cleanup in between.
execute_csv2_request(
gvar, 0, None, 'cloud "{}::{}" successfully added.'.format(ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctc5')),
'/cloud/add/', group=ut_id(gvar, 'ctg1'), form_data={
'cloud_name': ut_id(gvar, 'ctc5'),
'cloud_type': 'openstack',
'priority': '31',
'cacertificate': None,
'user_domain_name': 'Default',
'project_domain_name': 'Default',
'enabled': 1,
'vm_keep_alive': -31,
'metadata_name': ut_id(gvar, 'cty1'),
'spot_price': 10,
'cores_softmax': -1,
**gvar['cloud_credentials']
},
server_user=ut_id(gvar, 'ctu1')
)
# 56 Ensure that 40 actually created a cloud.
execute_csv2_request(
gvar, 0, None, None,
'/cloud/list/', group=ut_id(gvar, 'ctg1'),
expected_list='cloud_list', list_filter={'cloud_name': ut_id(gvar, 'ctc5')},
values={
'group_name': ut_id(gvar, 'ctg1'),
'cloud_name': ut_id(gvar, 'ctc5'),
'authurl': gvar['cloud_credentials']['authurl'],
'username': gvar['cloud_credentials']['username'],
'region': gvar['cloud_credentials']['region'],
'project': gvar['cloud_credentials']['project'],
'cloud_type': 'openstack',
'cloud_priority': 31,
'cacertificate': None,
'user_domain_name': 'Default',
'project_domain_name': 'Default',
'enabled': 1,
'vm_keep_alive': -31,
'group_exclusions': ut_id(gvar, 'cty1'),
'spot_price': 10,
},
server_user=ut_id(gvar, 'ctu1')
)
# 57 Known to fail if run twice without setup or cleanup in between.
execute_csv2_request(
gvar, 0, None, 'cloud "{}::{}" successfully added.'.format(ut_id(gvar, 'ctg1'), ut_id(gvar, 'ctc6')),
'/cloud/add/', group=ut_id(gvar, 'ctg1'), form_data={
'cloud_name': ut_id(gvar, 'ctc6'),
'cloud_type': 'openstack',
'metadata_name.1': ut_id(gvar, 'cty1'),
'metadata_name.2': ut_id(gvar, 'cty2'),
'metadata_name.3': ut_id(gvar, 'cty3'),
**gvar['cloud_credentials']
},
server_user=ut_id(gvar, 'ctu1')
)
# 58 Ensure that 42 actually created a cloud.
execute_csv2_request(
gvar, 0, None, None,
'/cloud/list/', group=ut_id(gvar, 'ctg1'),
expected_list='cloud_list', list_filter={'cloud_name': ut_id(gvar, 'ctc6')},
values={
'group_name': ut_id(gvar, 'ctg1'),
'cloud_name': ut_id(gvar, 'ctc6'),
'group_exclusions': ut_id(gvar, 'cty1,cty2,cty3')
},
server_user=ut_id(gvar, 'ctu1')
)
if __name__ == "__main__":
main(None)
| en | 0.781033 | # lno: CV - error code identifier. # Bad requests. # 01 - 05 # 06 Send a GET request. # 07 Give an invalid parameter. # 08 Omit cloud_name. # 09 Give two cloud_names. # 10 # 11 # 12 # 13 # 14 # 15 # 16 Attempt to create a cloud that already exists. # 17 Omit cloud_type. # 18 Give two cloud_types. # 19 # 20 Omit authurl. # 21 Give two authurls. # 22 # 23 Omit username. # 24 Give two usernames. # 25 # 26 Omit password. # 27 Give two passwords. # 28 # 29 Omit project. # 30 Give two projects. # 31 # 32 Omit region. # 33 Give two regions. # 34 # 35 Give two ram_ctls. # 36 # 37 Give two cores_ctls. # 38 # 39 Give two vm_keep_alives. # 40 # 41 Give two enableds. # 42 # 43 Give two spot_prices. # 44 # 45 Give metadata_name and metadata_name.1. # 46 # 47 Give two vm_images. # 48 # 49 Give two vm_flavors. # 50 # 51 Give two vm_networks. # 52 # 53 Give two vm_keynames. # 54 # Parameter combinations that do not fit well into the above format. # 55 Known to fail if run twice without setup or cleanup in between. # 56 Ensure that 40 actually created a cloud. # 57 Known to fail if run twice without setup or cleanup in between. # 58 Ensure that 42 actually created a cloud. | 2.881071 | 3 |
template/service-name/app/util/logging/handler.py | endRuz/fastapi_build_template | 1 | 6617171 | <filename>template/service-name/app/util/logging/handler.py
#!/usr/bin/env python
# coding:utf-8
import os
import time
from logging.handlers import TimedRotatingFileHandler
class MultiProcessSafeTimedRotatingFileHandler(TimedRotatingFileHandler):
"""
该类是对 TimedRotatingFileHandler 的重写,已解决其多进程不安全,丢失日志的问题
TimedRotatingFileHandler 文件切割的逻辑为:
1. 判断准备切入的文件 error.log.2021-11-11 是否存在,如果存在,则删除该文件
2. 然后把 error.log 重命名为 error.log.2021-11-11
3. doRollover 结束,FileHandler.emit 将消息写入 error.log (新建)
修改后逻辑如下所示。
"""
def __init__(
self,
filename,
when="h",
interval=1,
backupCount=0,
encoding=None,
delay=False,
utc=False,
atTime=None,
):
super().__init__(
filename,
when=when,
interval=interval,
backupCount=backupCount,
encoding=encoding,
delay=delay,
utc=utc,
atTime=atTime,
)
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
if self.stream:
self.stream.close()
self.stream = None
# get the time that this sequence started at and make it a TimeTuple
currentTime = int(time.time())
dstNow = time.localtime(currentTime)[-1]
t = self.rolloverAt - self.interval
if self.utc:
timeTuple = time.gmtime(t)
else:
timeTuple = time.localtime(t)
dstThen = timeTuple[-1]
if dstNow != dstThen:
if dstNow:
addend = 3600
else:
addend = -3600
timeTuple = time.localtime(t + addend)
dfn = self.rotation_filename(
self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
)
# 修改代码
# 在多进程下,判断 dfn 是否存在:
# 若 dfn 已存在,则表示已经有其他进程将日志文件按时间切割了,只需重新打开新的日志文件,写入当前日志;
# 若 dfn 不存在,则将当前日志文件重命名,并打开新的日志文件。
if not os.path.exists(dfn):
try:
self.rotate(self.baseFilename, dfn)
except FileNotFoundError:
# 这里可能会报 FileNotFoundError
# 原因是其他进程对该日志文件重命名了
# pass 即可,当前日志内容不会丢失,还会输出到重命名后的文件中
pass
# 原代码
"""
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
"""
if self.backupCount > 0:
for s in self.getFilesToDelete():
os.remove(s)
if not self.delay:
self.stream = self._open()
newRolloverAt = self.computeRollover(currentTime)
while newRolloverAt <= currentTime:
newRolloverAt = newRolloverAt + self.interval
# If DST changes and midnight or weekly rollover, adjust for this.
if (self.when == "MIDNIGHT" or self.when.startswith("W")) and not self.utc:
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if (
not dstNow
): # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
self.rolloverAt = newRolloverAt
| <filename>template/service-name/app/util/logging/handler.py
#!/usr/bin/env python
# coding:utf-8
import os
import time
from logging.handlers import TimedRotatingFileHandler
class MultiProcessSafeTimedRotatingFileHandler(TimedRotatingFileHandler):
"""
该类是对 TimedRotatingFileHandler 的重写,已解决其多进程不安全,丢失日志的问题
TimedRotatingFileHandler 文件切割的逻辑为:
1. 判断准备切入的文件 error.log.2021-11-11 是否存在,如果存在,则删除该文件
2. 然后把 error.log 重命名为 error.log.2021-11-11
3. doRollover 结束,FileHandler.emit 将消息写入 error.log (新建)
修改后逻辑如下所示。
"""
def __init__(
self,
filename,
when="h",
interval=1,
backupCount=0,
encoding=None,
delay=False,
utc=False,
atTime=None,
):
super().__init__(
filename,
when=when,
interval=interval,
backupCount=backupCount,
encoding=encoding,
delay=delay,
utc=utc,
atTime=atTime,
)
def doRollover(self):
"""
do a rollover; in this case, a date/time stamp is appended to the filename
when the rollover happens. However, you want the file to be named for the
start of the interval, not the current time. If there is a backup count,
then we have to get a list of matching filenames, sort them and remove
the one with the oldest suffix.
"""
if self.stream:
self.stream.close()
self.stream = None
# get the time that this sequence started at and make it a TimeTuple
currentTime = int(time.time())
dstNow = time.localtime(currentTime)[-1]
t = self.rolloverAt - self.interval
if self.utc:
timeTuple = time.gmtime(t)
else:
timeTuple = time.localtime(t)
dstThen = timeTuple[-1]
if dstNow != dstThen:
if dstNow:
addend = 3600
else:
addend = -3600
timeTuple = time.localtime(t + addend)
dfn = self.rotation_filename(
self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
)
# 修改代码
# 在多进程下,判断 dfn 是否存在:
# 若 dfn 已存在,则表示已经有其他进程将日志文件按时间切割了,只需重新打开新的日志文件,写入当前日志;
# 若 dfn 不存在,则将当前日志文件重命名,并打开新的日志文件。
if not os.path.exists(dfn):
try:
self.rotate(self.baseFilename, dfn)
except FileNotFoundError:
# 这里可能会报 FileNotFoundError
# 原因是其他进程对该日志文件重命名了
# pass 即可,当前日志内容不会丢失,还会输出到重命名后的文件中
pass
# 原代码
"""
if os.path.exists(dfn):
os.remove(dfn)
self.rotate(self.baseFilename, dfn)
"""
if self.backupCount > 0:
for s in self.getFilesToDelete():
os.remove(s)
if not self.delay:
self.stream = self._open()
newRolloverAt = self.computeRollover(currentTime)
while newRolloverAt <= currentTime:
newRolloverAt = newRolloverAt + self.interval
# If DST changes and midnight or weekly rollover, adjust for this.
if (self.when == "MIDNIGHT" or self.when.startswith("W")) and not self.utc:
dstAtRollover = time.localtime(newRolloverAt)[-1]
if dstNow != dstAtRollover:
if (
not dstNow
): # DST kicks in before next rollover, so we need to deduct an hour
addend = -3600
else: # DST bows out before next rollover, so we need to add an hour
addend = 3600
newRolloverAt += addend
self.rolloverAt = newRolloverAt
| zh | 0.432984 | #!/usr/bin/env python # coding:utf-8 该类是对 TimedRotatingFileHandler 的重写,已解决其多进程不安全,丢失日志的问题 TimedRotatingFileHandler 文件切割的逻辑为: 1. 判断准备切入的文件 error.log.2021-11-11 是否存在,如果存在,则删除该文件 2. 然后把 error.log 重命名为 error.log.2021-11-11 3. doRollover 结束,FileHandler.emit 将消息写入 error.log (新建) 修改后逻辑如下所示。 do a rollover; in this case, a date/time stamp is appended to the filename when the rollover happens. However, you want the file to be named for the start of the interval, not the current time. If there is a backup count, then we have to get a list of matching filenames, sort them and remove the one with the oldest suffix. # get the time that this sequence started at and make it a TimeTuple # 修改代码 # 在多进程下,判断 dfn 是否存在: # 若 dfn 已存在,则表示已经有其他进程将日志文件按时间切割了,只需重新打开新的日志文件,写入当前日志; # 若 dfn 不存在,则将当前日志文件重命名,并打开新的日志文件。 # 这里可能会报 FileNotFoundError # 原因是其他进程对该日志文件重命名了 # pass 即可,当前日志内容不会丢失,还会输出到重命名后的文件中 # 原代码 if os.path.exists(dfn): os.remove(dfn) self.rotate(self.baseFilename, dfn) # If DST changes and midnight or weekly rollover, adjust for this. # DST kicks in before next rollover, so we need to deduct an hour # DST bows out before next rollover, so we need to add an hour | 2.779741 | 3 |
vor/backends/phone/base.py | ryankanno/vor | 0 | 6617172 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import abc
class PhoneNumberProvider(object):
    """Abstract base class for backends that supply phone numbers."""

    # Python 2 style ABC registration.
    # NOTE(review): under Python 3 ``__metaclass__`` is ignored, so the class
    # would not actually be abstract there -- confirm the target interpreter.
    __metaclass__ = abc.ABCMeta

    def __init__(self, *args, **kwargs):
        # Cooperative constructor: forwards everything to the next class in
        # the MRO so mixin-style subclassing keeps working.
        super(PhoneNumberProvider, self).__init__(*args, **kwargs)

    @abc.abstractmethod
    def get_phone_number(self):
        """Return a phone number; concrete backends must override."""
        raise NotImplementedError  # pragma: no cover
# vim: filetype=python
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import abc
class PhoneNumberProvider(object):
__metaclass__ = abc.ABCMeta
def __init__(self, *args, **kwargs):
super(PhoneNumberProvider, self).__init__(*args, **kwargs)
@abc.abstractmethod
def get_phone_number(self):
raise NotImplementedError # pragma: no cover
# vim: filetype=python
| en | 0.324972 | #!/usr/bin/env python # -*- coding: utf-8 -*- # pragma: no cover # vim: filetype=python | 3.026899 | 3 |
model.py | redprogrammer/tf-cartpolev1 | 0 | 6617173 | <gh_stars>0
import tensorflow as tf
import numpy as np
class Model:
    """Thin wrapper around a 3-layer dense network (e.g. a CartPole Q-net).

    The architecture is input -> Dense(128, relu) -> Dense(52, relu) ->
    Dense(output_size, linear), trained with MSE loss and Adam.
    """

    def __init__(self, input_size, output_size):
        # NOTE(review): Keras' ``input_shape`` expects a tuple such as (4,);
        # callers appear to pass the shape tuple directly -- confirm.
        self.model = tf.keras.Sequential([
            tf.keras.layers.Dense(input_shape=input_size, units=128, activation=tf.nn.relu),
            tf.keras.layers.Dense(52, activation=tf.nn.relu),
            tf.keras.layers.Dense(output_size, activation=tf.keras.activations.linear)
        ])
        self.model.compile(loss='mean_squared_error',
                           optimizer=tf.keras.optimizers.Adam())
        self.model.summary()

    def train_model(self, train_data):
        """Fit the network for 10 epochs.

        Parameters
        ----------
        train_data : sequence
            ``train_data[0]`` are the inputs, ``train_data[1]`` the targets.
        """
        # Renamed from ``input``/``output``: avoid shadowing the builtins.
        features = np.array(train_data[0])
        targets = np.array(train_data[1])
        self.model.fit(features, targets, epochs=10)

    def predict_model(self, input):
        """Return the network's predictions for *input*.

        The parameter name shadows the ``input`` builtin but is kept for
        backward compatibility with keyword callers.
        """
        return self.model.predict(input)
| import tensorflow as tf
import numpy as np
class Model:
def __init__(self, input_size, output_size):
self.model = tf.keras.Sequential([
tf.keras.layers.Dense(input_shape=input_size, units=128, activation=tf.nn.relu),
tf.keras.layers.Dense(52, activation=tf.nn.relu),
tf.keras.layers.Dense(output_size, activation=tf.keras.activations.linear)
])
self.model.compile(loss='mean_squared_error',
optimizer=tf.keras.optimizers.Adam())
self.model.summary()
def train_model(self, train_data):
input = np.array(train_data[0])
output = np.array(train_data[1])
self.model.fit(input, output, epochs=10)
def predict_model(self, input):
return self.model.predict(input) | none | 1 | 3.141027 | 3 | |
proc_name_gen.py | vjek/procedural_generation | 0 | 6617174 | #!/usr/bin/env python3
# This script produces a list of names based on either a fixed value seed, or a seed
# value provided on the command line as arg1. A name is chosen out of the list, procedurally
# and both the list of names and the one chosen will be the same, by seed value.
# Typically, the names created are roughly pronounceable in English.
#
# Sample output is:
#
# :./proc-name-gen.py 10
# ['Tubeso', 'Cuti', 'Pare', 'Rilohi', 'Sesu', 'Xak', 'Mupe', 'Rodunu', 'Reca', 'Yawag', 'Yefas', 'Poro', 'Guvef', 'Mezeho', 'Rebuc', 'Kaloq'] 16 Xak 5
#
# :./proc-name-gen.py 2
# ['Dih', 'Yeyax', 'Zoviwo', 'Bis', 'Ruhuhe', 'Hih', 'Vuhor', 'Pohos', 'Lovupo', 'Xusok', 'Yiti', 'Metupu', 'Befax', 'Xefug', 'Jara', 'Pekad', 'Bab', 'Gehub', 'Kec', 'Yamit', 'Suyal', 'Tedi', 'Sev', 'Vigili', 'Wec', 'Geh', 'Zevake', 'Lax'] 28 Befax 12
###
import random,sys,string
def generate_name(length):
    """Build a roughly pronounceable name of *length* letters.

    Consonants occupy even positions and vowels odd ones, each drawn from
    the shared ``rand1`` stream.
    """
    vowels = "aeiou"
    # sorted() keeps the consonant alphabet deterministic: a bare set
    # difference iterates in an arbitrary (hash-randomized) order and would
    # break reproducibility across runs.
    consonants = "".join(sorted(set(string.ascii_lowercase) - set(vowels)))
    letters = []
    for position in range(length):
        pool = consonants if position % 2 == 0 else vowels
        letters.append(rand1.choice(pool))
    return "".join(letters)
def create_name():
    """Draw a length in [3, 7] from the random stream and build a name."""
    shortest = 3   # names can be short
    longest = 7    # or long
    raw_byte = ord(get_next_rand())
    # Translate the 0-255 byte onto the allowed length range, then truncate.
    chosen_length = int(mapFromTo(raw_byte, 0, 255, shortest, longest))
    return generate_name(chosen_length)
def get_next_rand():
    """Return the next deterministic byte of the stream as a 1-char string."""
    # randrange(256) consumes the stream identically to randint(0, 255).
    return chr(rand1.randrange(256))
def mapFromTo(x, a, b, c, d):
    """Linearly translate *x* from the range [a, b] onto [c, d].

    Most often used here to map a 0-255 byte onto a smaller range; the
    result is a float, which callers truncate with int() when needed.
    """
    fraction = (x - a) / (b - a)
    return fraction * (d - c) + c
seed1 = 8675309  # default seed; output is fully determined by this value
if sys.argv[1:]:  # a seed supplied as the first command-line argument
    seed1 = int(sys.argv[1])  # overrides the default
rand1 = random.Random()  # dedicated Random instance used by the helpers above
rand1.seed(seed1)  # deterministic/procedural: same seed -> same names
name_storage = []
number_of_names = ord(get_next_rand())  # 0-255 names will be generated
for a in range(number_of_names):  # name generation loop
    name_storage += [(create_name().title())]  # store them; first letter capitalized
inval = ord(get_next_rand())
name_to_pick = int(mapFromTo(inval, 0, 255, 0, number_of_names))  # index of the showcased name
# Print the list of names, its length, the chosen name, and its index.
print(name_storage, len(name_storage), name_storage[name_to_pick], name_to_pick)
| #!/usr/bin/env python3
# This script produces a list of names based on either a fixed value seed, or a seed
# value provided on the command line as arg1. A name is chosen out of the list, procedurally
# and both the list of names and the one chosen will be the same, by seed value.
# Typically, the names created are roughly pronounceable in English.
#
# Sample output is:
#
# :./proc-name-gen.py 10
# ['Tubeso', 'Cuti', 'Pare', 'Rilohi', 'Sesu', 'Xak', 'Mupe', 'Rodunu', 'Reca', 'Yawag', 'Yefas', 'Poro', 'Guvef', 'Mezeho', 'Rebuc', 'Kaloq'] 16 Xak 5
#
# :./proc-name-gen.py 2
# ['Dih', 'Yeyax', 'Zoviwo', 'Bis', 'Ruhuhe', 'Hih', 'Vuhor', 'Pohos', 'Lovupo', 'Xusok', 'Yiti', 'Metupu', 'Befax', 'Xefug', 'Jara', 'Pekad', 'Bab', 'Gehub', 'Kec', 'Yamit', 'Suyal', 'Tedi', 'Sev', 'Vigili', 'Wec', 'Geh', 'Zevake', 'Lax'] 28 Befax 12
###
import random,sys,string
def generate_name(length):
VOWELS = "aeiou"
#if you don't sort this join below , the set() - set() returns a random jumble.
CONSONANTS = "".join(sorted(set(string.ascii_lowercase) - set(VOWELS)))
name = ""
for i in range(length): #this length was chosen procedurally
if i % 2 == 0: #if even, place a consonant
name += rand1.choice(CONSONANTS)
else: #otherwise, place a vowel
name += rand1.choice(VOWELS)
return name
def create_name(): #function to pick name length
min_name_length=3 #names can be short
max_name_length=7 #or long.
inval=ord(get_next_rand())
name_length = int(mapFromTo(inval,0,255,min_name_length,max_name_length)) #pick a length from randint->range
rand_name = generate_name(name_length) #get the name from the generator
return rand_name
def get_next_rand(): #get the next deterministic value from the stream
return chr(rand1.randint(0,255)) #return the next byte as char
def mapFromTo(x,a,b,c,d): #inval,start1,end1,start2,end2 for range translation
y=(x-a)/(b-a)*(d-c)+c #most often used by me to translate 0,255 to a smaller range.
return y #as a float; could be an int, but we may use this function differently elsewhere
seed1=8675309 #a _completely_ random seed value..
if sys.argv[1:]: #but if there is one provided
seed1 = int(sys.argv[1]) #use that instead
rand1=random.Random() #create a new .Random intance
rand1.seed(seed1) #and make it deterministic/procedural, as in, the output will be the same
name_storage=[]
number_of_names=ord(get_next_rand()) #pick how many to generate
for a in range(number_of_names): #name generation loop
name_storage += [(create_name().title())] #store them;first letter capitalized.
inval=ord(get_next_rand())
name_to_pick=int(mapFromTo(inval,0,255,0,number_of_names)) #use rand range to pick # of names
#we're at the end, so print out the list of names, number of names, a random name, and it's index
print(name_storage,len(name_storage),name_storage[name_to_pick],name_to_pick)
| en | 0.718894 | #!/usr/bin/env python3 # This script produces a list of names based on either a fixed value seed, or a seed # value provided on the command line as arg1. A name is chosen out of the list, procedurally # and both the list of names and the one chosen will be the same, by seed value. # Typically, the names created are roughly pronounceable in English. # # Sample output is: # # :./proc-name-gen.py 10 # ['Tubeso', 'Cuti', 'Pare', 'Rilohi', 'Sesu', 'Xak', 'Mupe', 'Rodunu', 'Reca', 'Yawag', 'Yefas', 'Poro', 'Guvef', 'Mezeho', 'Rebuc', 'Kaloq'] 16 Xak 5 # # :./proc-name-gen.py 2 # ['Dih', 'Yeyax', 'Zoviwo', 'Bis', 'Ruhuhe', 'Hih', 'Vuhor', 'Pohos', 'Lovupo', 'Xusok', 'Yiti', 'Metupu', 'Befax', 'Xefug', 'Jara', 'Pekad', 'Bab', 'Gehub', 'Kec', 'Yamit', 'Suyal', 'Tedi', 'Sev', 'Vigili', 'Wec', 'Geh', 'Zevake', 'Lax'] 28 Befax 12 ### #if you don't sort this join below , the set() - set() returns a random jumble. #this length was chosen procedurally #if even, place a consonant #otherwise, place a vowel #function to pick name length #names can be short #or long. #pick a length from randint->range #get the name from the generator #get the next deterministic value from the stream #return the next byte as char #inval,start1,end1,start2,end2 for range translation #most often used by me to translate 0,255 to a smaller range. #as a float; could be an int, but we may use this function differently elsewhere #a _completely_ random seed value.. #but if there is one provided #use that instead #create a new .Random intance #and make it deterministic/procedural, as in, the output will be the same #pick how many to generate #name generation loop #store them;first letter capitalized. #use rand range to pick # of names #we're at the end, so print out the list of names, number of names, a random name, and it's index | 3.658877 | 4 |
src/gluonts/nursery/redsds/data/preprocess_bee.py | abdulfatir/gluon-ts | 1 | 6617175 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import numpy as np
def preprocess_seq(seq_path):
    """Load one bee sequence from its .btf files.

    Returns
    -------
    observations : ndarray, shape (T, 4)
        Columns are standardized x, standardized y, sin(theta), cos(theta).
    labels : ndarray, shape (T,)
        Integer-coded behaviors: waggle=0, turn_right=1, turn_left=2.
    """
    label_codes = {"waggle": 0, "turn_right": 1, "turn_left": 2}

    def _load(name):
        # All per-sequence tracks live under the btf/ subdirectory.
        return np.loadtxt(os.path.join(seq_path, "btf/" + name))

    def _standardize(track):
        return (track - track.mean(0)) / track.std(0)

    x = _standardize(_load("ximage.btf"))
    y = _standardize(_load("yimage.btf"))
    theta = _load("timage.btf")  # heading kept raw; encoded below as sin/cos
    raw_labels = np.loadtxt(
        os.path.join(seq_path, "btf/label0.btf"), dtype=str
    ).tolist()
    labels = np.array([label_codes[lab] for lab in raw_labels])
    observations = np.stack([x, y, np.sin(theta), np.cos(theta)], 1)
    return observations, labels
def preprocess(seq_paths, npz_path):
    """Slice each sequence into fixed-length windows and save them.

    For every sequence, windows of ``ctx`` time steps are taken starting at
    each label change-point (index 0 counts as one); change-points closer
    than ``ctx`` to the end are dropped so all windows are full length.
    The stacked arrays are written to *npz_path* under keys ``y``
    (observations) and ``z`` (labels).

    Improvements over the original: the unused ``count`` accumulator was
    removed, and each label window is sliced once instead of twice.
    """
    data_y = []
    data_z = []
    ctx = 120  # window length in time steps
    for seq_path in seq_paths:
        observations, labels = preprocess_seq(seq_path)
        # Indices where the label changes (np.diff != 0), plus index 0.
        cp = np.where(np.concatenate([[1], np.diff(labels)]) != 0)[0]
        for c in cp:
            window_labels = labels[c : c + ctx]
            if window_labels.shape[0] == ctx:
                data_y.append(observations[c : c + ctx])
                data_z.append(window_labels)
    data_y = np.stack(data_y, 0)
    data_z = np.stack(data_z, 0)
    np.savez(npz_path, y=data_y, z=data_z)
if __name__ == "__main__":
base_path = "./psslds/zips"
train_seqs = [
os.path.join(base_path, "data/sequence%d") % i for i in [1, 3, 4, 5, 6]
]
preprocess(train_seqs, "bee.npz")
test_seqs = [os.path.join(base_path, "data/sequence%d") % i for i in [2]]
preprocess(test_seqs, "bee_test.npz")
| # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import numpy as np
def preprocess_seq(seq_path):
mapping = {"waggle": 0, "turn_right": 1, "turn_left": 2}
x_btf = os.path.join(seq_path, "btf/ximage.btf")
x = np.loadtxt(x_btf)
x = (x - x.mean(0)) / x.std(0)
y_btf = os.path.join(seq_path, "btf/yimage.btf")
y = np.loadtxt(y_btf)
y = (y - y.mean(0)) / y.std(0)
theta_btf = os.path.join(seq_path, "btf/timage.btf")
theta = np.loadtxt(theta_btf)
label_btf = os.path.join(seq_path, "btf/label0.btf")
labels = np.loadtxt(label_btf, dtype=str).tolist()
labels = np.array([mapping[lab] for lab in labels])
observations = np.stack([x, y, np.sin(theta), np.cos(theta)], 1)
return observations, labels
def preprocess(seq_paths, npz_path):
data_y = []
data_z = []
ctx = 120
count = 0
for seq_path in seq_paths:
observations, labels = preprocess_seq(seq_path)
cp = np.where(np.concatenate([[1], np.diff(labels)]) != 0)[0]
for c in cp:
if labels[c : c + ctx].shape[0] == ctx:
data_y.append(observations[c : c + ctx])
data_z.append(labels[c : c + ctx])
count += 1
data_y = np.stack(data_y, 0)
data_z = np.stack(data_z, 0)
np.savez(npz_path, y=data_y, z=data_z)
if __name__ == "__main__":
base_path = "./psslds/zips"
train_seqs = [
os.path.join(base_path, "data/sequence%d") % i for i in [1, 3, 4, 5, 6]
]
preprocess(train_seqs, "bee.npz")
test_seqs = [os.path.join(base_path, "data/sequence%d") % i for i in [2]]
preprocess(test_seqs, "bee_test.npz")
| en | 0.875619 | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). # You may not use this file except in compliance with the License. # A copy of the License is located at # # http://www.apache.org/licenses/LICENSE-2.0 # # or in the "license" file accompanying this file. This file is distributed # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express or implied. See the License for the specific language governing # permissions and limitations under the License. | 1.829322 | 2 |
intactness/summary.py | BWH-Lichterfeld-Lab/Intactness-Pipeline | 1 | 6617176 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 8 00:09:49 2017
@author: <NAME>
@author: <NAME>
"""
import logging
from collections import Counter
# pylint: disable=C0103
# Invalid constant name
logger = logging.getLogger('pipe.summary')
# pylint: disable=R0902
# Disable too many return
# pylint: disable=R0911
def _call(calls):
    """Derive the final classification string from the individual QC calls.

    ``calls`` maps flag names ('is_hiv', 'deletion', 'inversion',
    'hypermut', 'psc', 'defect', 'primer') to 'Yes'/'No'; the first
    matching rule below wins.
    """
    if calls['is_hiv'] == 'No':
        return 'NonHIV'
    if calls['deletion'] == 'Yes':
        # A large deletion may carry one qualifying suffix.
        verdict = 'Large Deletion'
        if calls['inversion'] == 'Yes':
            verdict += ' with Internal Inversion'
        elif calls['hypermut'] == 'Yes':
            verdict += ' with Hypermut'
        return verdict
    if calls['inversion'] == 'Yes':
        return 'Internal Inversion'
    if calls['hypermut'] == 'Yes':
        return 'Hypermut'
    if calls['psc'] == 'Yes':
        return 'Premature Stop Codon'
    if calls['defect'] == 'Yes' and calls['primer'] == 'Yes':
        return "5' defect"
    if calls['primer'] == 'No':
        return 'Inferred Intact'
    return 'Intact'
def summary(configs, seqs):
    """Write the per-contig CSV report combining all intactness calls.

    Parameters
    ----------
    configs : dict
        Must provide 'file_out', the path of the CSV report to write.
    seqs : object
        Per-contig results; this function reads ``qids``, ``call``,
        ``info``, ``comments``, ``qry`` and ``ref_id``.
    """
    # Disable too many local variables
    # pylint: disable=R0914
    logger.info('Generating summaries')
    # Samples with more than one contig (sample id = prefix before '_').
    sample_ids = [qid.split('_')[0] for qid in seqs.qids]
    freq = Counter(sample_ids)
    sids_multi_seq = set(sid for sid, cnt in freq.items() if cnt > 1)
    # Samples with more than one HIV-positive contig.
    sample_ids = [qid.split('_')[0] for qid in seqs.qids
                  if seqs.call[qid]['is_hiv'] == 'Yes']
    freq = Counter(sample_ids)
    sids_multi_hiv = set(sid for sid, cnt in freq.items() if cnt > 1)
    with open(configs['file_out'], 'w') as fh_o:
        # Column headers; must stay in lock-step with the fmts list below.
        cols = ["Contig ID",
                "Sample ID",
                "Multi-Contig Sample?",
                "Multi-HIV Sample?",
                "Contig Length",
                "Aligned Length",
                "Aligned coverage of Contig",
                "Ref Seq ID",
                "Aligned Start at Ref",
                "Ref Strand",
                "Is HIV?",
                "Primer",
                "Primer Seq",
                "Large Deletion?",
                "Internal Inversion?",
                "Hypermut?",
                "Hypermut pval",
                "PSC?",
                # "PSC Type",
                "gag",
                "pol",
                "env",
                "5' Defect",
                "5' Gaps",
                "5' Inserts",
                "Gag Start Codon Missing?",
                "Gag Start Seq",
                "Final Call",
                "Comments",
                "Contig Sequence"]
        # One format specifier per column above.
        fmts = ["{}",  # Contig ID
                "{}",  # Sample ID
                "{}",  # Multi-Contig Sample?
                "{}",  # Multi-HIV Sample?
                "{}",  # Contig Length
                "{}",  # Aligned Length
                "{:.2f}%",  # Aligned coverage of Contig
                "{}",  # Ref Seq ID
                "{}",  # Aligned Start at Ref
                "{}",  # Ref Strand
                "{}",  # Is HIV?
                "{}",  # Primer
                "{}",  # Primer Seq
                "{}",  # Large Deletion?
                "{}",  # Internal Inversion?
                "{}",  # Hypermut?
                "{}",  # Hypermut pval
                "{}",  # PSC?
                # "{}",  # PSC Type
                "{}",  # gag
                "{}",  # pol
                "{}",  # env
                "{}",  # 5' Defect
                "{}",  # 5' Gaps
                "{}",  # 5' Inserts
                "{}",  # Gag Start Codon
                "{}",  # Gag Start Seq
                "{}",  # Final Call
                "{}",  # Comments
                "{}"]  # Contig sequence
        header = ','.join(cols)
        print(header, file=fh_o)
        fmt_str = ','.join(fmts)
        ref_id = seqs.ref_id
        for qid in seqs.qids:
            sample_id = qid.split('_')[0]
            multi_seq_sample = 'Yes' if sample_id in sids_multi_seq else 'No'
            multi_hiv_sample = 'Yes' if sample_id in sids_multi_hiv else 'No'
            call = seqs.call[qid]
            info = seqs.info[qid]
            aln_len, qlen, sstart, strand = info['blast']
            is_hiv = call['is_hiv']
            primer = call['primer']
            primer_seq = ';'.join(info['primer'])
            deletion = call['deletion']
            inversion = call['inversion']
            hypermut = call['hypermut']
            hypermut_p = info['hypermut'][-1]  # p-value is the last element
            psc = call['psc']
            psc_info = info['psc']
            gag = psc_info['Gag']
            pol = psc_info['Pol']
            env = psc_info['Env']
            defect = call['defect']
            defect_gaps, defect_inserts = info['defect']
            gag_codon = call['gag_codon']
            gag_codon_seq = info['gag_codon']
            comment = seqs.comments[qid]
            qseq = str(seqs.qry[qid].seq)
            # Positional arguments: order must match the fmts list above.
            line = fmt_str.format(qid,
                                  sample_id,
                                  multi_seq_sample,
                                  multi_hiv_sample,
                                  qlen,
                                  aln_len,
                                  aln_len/qlen*100,
                                  ref_id,
                                  sstart,
                                  strand,
                                  is_hiv,
                                  primer,
                                  primer_seq,
                                  deletion,
                                  inversion,
                                  hypermut,
                                  hypermut_p,
                                  psc,
                                  # psc_type,
                                  gag,
                                  pol,
                                  env,
                                  defect,
                                  defect_gaps,
                                  defect_inserts,
                                  gag_codon,
                                  gag_codon_seq,
                                  _call(call),
                                  comment,
                                  qseq)
            print(line, file=fh_o)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 8 00:09:49 2017
@author: <NAME>
@author: <NAME>
"""
import logging
from collections import Counter
# pylint: disable=C0103
# Invalid constant name
logger = logging.getLogger('pipe.summary')
# pylint: disable=R0902
# Disable too many return
# pylint: disable=R0911
def _call(calls):
"""Make final call"""
final_call = ''
if calls['is_hiv'] == 'No':
final_call = 'NonHIV'
return final_call
if calls['deletion'] == 'Yes':
final_call = 'Large Deletion'
if calls['inversion'] == 'Yes':
final_call += ' with Internal Inversion'
elif calls['hypermut'] == 'Yes':
final_call += ' with Hypermut'
return final_call
if calls['inversion'] == 'Yes':
final_call = 'Internal Inversion'
return final_call
if calls['hypermut'] == 'Yes':
final_call = 'Hypermut'
return final_call
if calls['psc'] == 'Yes':
final_call = 'Premature Stop Codon'
return final_call
if calls['defect'] == 'Yes' and calls['primer'] == 'Yes':
final_call = "5' defect"
return final_call
if calls['primer'] == 'No':
final_call = 'Inferred Intact'
return final_call
return 'Intact'
def summary(configs, seqs):
"""Final call"""
# Disable too many local variables
# pylint: disable=R0914
# Logging
logger.info('Generating summaries')
# Multiple contig sample
sample_ids = [qid.split('_')[0] for qid in seqs.qids]
freq = Counter(sample_ids)
sids_multi_seq = set(sid for sid, cnt in freq.items() if cnt > 1)
# Multiple contig sample
sample_ids = [qid.split('_')[0] for qid in seqs.qids
if seqs.call[qid]['is_hiv'] == 'Yes']
freq = Counter(sample_ids)
sids_multi_hiv = set(sid for sid, cnt in freq.items() if cnt > 1)
with open(configs['file_out'], 'w') as fh_o:
cols = ["Contig ID",
"Sample ID",
"Multi-Contig Sample?",
"Multi-HIV Sample?",
"Contig Length",
"Aligned Length",
"Aligned coverage of Contig",
"Ref Seq ID",
"Aligned Start at Ref",
"Ref Strand",
"Is HIV?",
"Primer",
"Primer Seq",
"Large Deletion?",
"Internal Inversion?",
"Hypermut?",
"Hypermut pval",
"PSC?",
# "PSC Type",
"gag",
"pol",
"env",
"5' Defect",
"5' Gaps",
"5' Inserts",
"Gag Start Codon Missing?",
"Gag Start Seq",
"Final Call",
"Comments",
"Contig Sequence"]
fmts = ["{}", # Contig ID
"{}", # Sample ID
"{}", # Multi-Contig Sample?
"{}", # Multi-HIV Sample?
"{}", # Contig Length
"{}", # Aligned Length
"{:.2f}%", # Aligned coverage of Contig
"{}", # Ref Seq ID
"{}", # Aligned Start at Ref
"{}", # Ref Strand
"{}", # Is HIV?
"{}", # Primer
"{}", # Primer Seq
"{}", # Large Deletion?
"{}", # Internal Inversion?
"{}", # Hypermut?
"{}", # Hypermut pval
"{}", # PSC?
# "{}", # PSC Type
"{}", # gag
"{}", # pol
"{}", # env
"{}", # 5' Defect
"{}", # 5' Gaps
"{}", # 5' Inserts
"{}", # Gag Start Codon
"{}", # Gag Start Seq
"{}", # Final Call
"{}", # Comments
"{}"] # Contig sequence
header = ','.join(cols)
print(header, file=fh_o)
fmt_str = ','.join(fmts)
ref_id = seqs.ref_id
for qid in seqs.qids:
sample_id = qid.split('_')[0]
multi_seq_sample = 'Yes' if sample_id in sids_multi_seq else 'No'
multi_hiv_sample = 'Yes' if sample_id in sids_multi_hiv else 'No'
call = seqs.call[qid]
info = seqs.info[qid]
aln_len, qlen, sstart, strand = info['blast']
is_hiv = call['is_hiv']
primer = call['primer']
primer_seq = ';'.join(info['primer'])
deletion = call['deletion']
inversion = call['inversion']
hypermut = call['hypermut']
hypermut_p = info['hypermut'][-1]
psc = call['psc']
psc_info = info['psc']
gag = psc_info['Gag']
pol = psc_info['Pol']
env = psc_info['Env']
defect = call['defect']
defect_gaps, defect_inserts = info['defect']
gag_codon = call['gag_codon']
gag_codon_seq = info['gag_codon']
comment = seqs.comments[qid]
qseq = str(seqs.qry[qid].seq)
line = fmt_str.format(qid,
sample_id,
multi_seq_sample,
multi_hiv_sample,
qlen,
aln_len,
aln_len/qlen*100,
ref_id,
sstart,
strand,
is_hiv,
primer,
primer_seq,
deletion,
inversion,
hypermut,
hypermut_p,
psc,
# psc_type,
gag,
pol,
env,
defect,
defect_gaps,
defect_inserts,
gag_codon,
gag_codon_seq,
_call(call),
comment,
qseq)
print(line, file=fh_o)
| en | 0.651357 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- Created on Sat Apr 8 00:09:49 2017 @author: <NAME> @author: <NAME> # pylint: disable=C0103 # Invalid constant name # pylint: disable=R0902 # Disable too many return # pylint: disable=R0911 Make final call Final call # Disable too many local variables # pylint: disable=R0914 # Logging # Multiple contig sample # Multiple contig sample # "PSC Type", # Contig ID # Sample ID # Multi-Contig Sample? # Multi-HIV Sample? # Contig Length # Aligned Length # Aligned coverage of Contig # Ref Seq ID # Aligned Start at Ref # Ref Strand # Is HIV? # Primer # Primer Seq # Large Deletion? # Internal Inversion? # Hypermut? # Hypermut pval # PSC? # "{}", # PSC Type # gag # pol # env # 5' Defect # 5' Gaps # 5' Inserts # Gag Start Codon # Gag Start Seq # Final Call # Comments # Contig sequence # psc_type, | 2.333875 | 2 |
binary/pyinstaller-bundle-script.py | ethanio12345/pymedphys | 207 | 6617177 | # Copyright (C) 2020 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import pathlib
import tarfile
import shutil
PYMEDPHYS_BAT_NAME = "pymedphys.bat"
def main():
    """Bootstrap entry point for the bundled PyMedPhysGUI executable.

    ``pymedphys.bat`` in the current working directory is the marker that
    the embedded Python distribution has already been provisioned; when it
    is absent, the distribution is extracted first. The PyMedPhys
    streamlit app is then started from the embedded install.

    Note
    ----
    This function executes under PyInstaller's own interpreter, which has
    no extra libraries installed; PyMedPhys lives inside
    ``python-embed/Lib/site-packages`` and runs under the separate Python
    in ``python-embed``.
    """
    working_dir = pathlib.Path(os.getcwd())
    embed_dir = working_dir / "python-embed"
    marker = working_dir / PYMEDPHYS_BAT_NAME
    if not marker.exists():
        _install(working_dir, embed_dir)
    _boot_streamlit_app(embed_dir)
def _install(cwd, installation_path):
    """Extract the embedded Python environment into the working directory.

    Note
    ----
    ``pymedphys.bat`` is copied last, as its presence is used to test
    whether the install completed.
    """
    bundle_dir = pathlib.Path(
        sys._MEIPASS  # pylint: disable = no-member, protected-access
    )
    data_dir = bundle_dir / "data"
    installation_path.mkdir()
    with tarfile.open(data_dir / "python-embed.tar.xz") as archive:
        archive.extractall(installation_path)
    for name in ("LICENSE", PYMEDPHYS_BAT_NAME):
        shutil.copy(data_dir / name, cwd / name)
def _boot_streamlit_app(python_embedded_directory):
    """Starts the PyMedPhys GUI within the Python embedded distribution.

    Parameters
    ----------
    python_embedded_directory
        The full path to the Python embedded distribution.
    """
    # NOTE(review): the command string is a fixed literal, so shell=True is
    # not an injection risk here; presumably the shell + cwd combination is
    # what resolves the embedded python.exe -- confirm on Windows before
    # switching to the safer list + shell=False idiom.
    subprocess.check_call(
        "python.exe -m pymedphys gui", cwd=python_embedded_directory, shell=True
    )
if __name__ == "__main__":
main()
| # Copyright (C) 2020 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
import pathlib
import tarfile
import shutil
PYMEDPHYS_BAT_NAME = "pymedphys.bat"
def main():
"""The script that boots when PyMedPhysGUI-vX.Y.Z.exe is run.
This script checks to see if the required PyMedPhys files have been
installed within the current working directory. If they have not
it extracts them.
Once the embedded Python distribution is provisioned this boots up
the PyMedPhys streamlit app.
Note
----
This script will run with pyinstaller's Python install. However, no
external libraries are installed within this Python instance.
PyMedPhys itself is stored within ``python-embed/Lib/site-packages``.
The Python within ``python-embed`` is not the same Python install
that pyinstaller is using to run this script.
"""
cwd = pathlib.Path(os.getcwd())
installation_path = cwd.joinpath("python-embed")
pymedphys_bat = cwd.joinpath(PYMEDPHYS_BAT_NAME)
if not pymedphys_bat.exists():
_install(cwd, installation_path)
_boot_streamlit_app(installation_path)
def _install(cwd, installation_path):
"""Extract the Python embedded environment to the current working directory.
Note
----
The ``pymedphys.bat`` is extracted last, as this is used to test
whether or not the install was completed.
"""
pyinstaller_temp_dir = pathlib.Path(
sys._MEIPASS # pylint: disable = no-member, protected-access
)
data_path = pyinstaller_temp_dir.joinpath("data")
python_xztar = data_path.joinpath("python-embed.tar.xz")
installation_path.mkdir()
with tarfile.open(python_xztar) as f:
f.extractall(installation_path)
for f in ["LICENSE", PYMEDPHYS_BAT_NAME]:
shutil.copy(data_path.joinpath(f), cwd.joinpath(f))
def _boot_streamlit_app(python_embedded_directory):
"""Starts the PyMedPhys GUI within the Python embedded distribution.
Parameters
----------
python_embedded_directory
The full path to the Python embedded distribution.
"""
subprocess.check_call(
"python.exe -m pymedphys gui", cwd=python_embedded_directory, shell=True
)
if __name__ == "__main__":
main()
| en | 0.858504 | # Copyright (C) 2020 <NAME> # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. The script that boots when PyMedPhysGUI-vX.Y.Z.exe is run. This script checks to see if the required PyMedPhys files have been installed within the current working directory. If they have not it extracts them. Once the embedded Python distribution is provisioned this boots up the PyMedPhys streamlit app. Note ---- This script will run with pyinstaller's Python install. However, no external libraries are installed within this Python instance. PyMedPhys itself is stored within ``python-embed/Lib/site-packages``. The Python within ``python-embed`` is not the same Python install that pyinstaller is using to run this script. Extract the Python embedded environment to the current working directory. Note ---- The ``pymedphys.bat`` is extracted last, as this is used to test whether or not the install was completed. # pylint: disable = no-member, protected-access Starts the PyMedPhys GUI within the Python embedded distribution. Parameters ---------- python_embedded_directory The full path to the Python embedded distribution. | 2.153594 | 2 |
benchmarks/code_splice/rf.py | PSSF23/SPDT | 3 | 6617178 | <filename>benchmarks/code_splice/rf.py
"""
Author: <NAME>
"""
import time
import numpy as np
import pandas as pd
from numpy.random import permutation
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
def write_result(filename, acc_ls):
    """Write one result per line to *filename*.

    Parameters
    ----------
    filename : str
        Path of the text file to (over)write.
    acc_ls : iterable
        Values to record; each is str()-converted onto its own line.
    """
    # The original left the handle open (resource leak); the context
    # manager guarantees the file is flushed and closed.
    with open(filename, "w") as output:
        for acc in acc_ls:
            output.write(str(acc) + "\n")
def prediction(classifier):
    """Return *classifier*'s accuracy on the module-level test split.

    Relies on the globals ``X_test`` and ``y_test`` defined by the script
    body below.
    """
    predictions = classifier.predict(X_test)
    # Vectorized comparison replaces the original O(n) Python loop; the
    # mean of the boolean matches equals correct / total exactly.
    return float(np.mean(predictions == y_test))
def experiment_rf():
    """Train/evaluate a random forest on growing prefixes of the data.

    Uses the module-level shuffled arrays ``X_r``/``y_r`` and returns three
    parallel lists (accuracy, train seconds, test seconds), one entry per
    training-set size 100, 200, ..., 2300.
    """
    accuracies = []
    fit_seconds = []
    eval_seconds = []
    forest = RandomForestClassifier()
    for step in range(23):
        limit = (step + 1) * 100
        X_t = X_r[:limit]
        y_t = y_r[:limit]
        # Fit on the current prefix and time it.
        started = time.perf_counter()
        forest.fit(X_t, y_t)
        fit_seconds.append(time.perf_counter() - started)
        # Score on the held-out split and time it.
        started = time.perf_counter()
        accuracies.append(prediction(forest))
        eval_seconds.append(time.perf_counter() - started)
    return accuracies, fit_seconds, eval_seconds
# Load the splice-junction DNA dataset; features are every column but "Label".
df = pd.read_csv("../dna.csv")
X = df.drop(["Label"], axis=1).values
y = df["Label"].values
X_train, X_test, y_train, y_test = train_test_split(X, y)
# Repeat the experiment over 10 random shuffles of the training split.
rf_acc_l = []
rf_train_t_l = []
rf_test_t_l = []
for i in range(10):
    p = permutation(X_train.shape[0])
    X_r = X_train[p]
    y_r = y_train[p]
    rf_acc, rf_train_t, rf_test_t = experiment_rf()
    rf_acc_l.append(rf_acc)
    rf_train_t_l.append(rf_train_t)
    rf_test_t_l.append(rf_test_t)
# Persist one line per repetition for downstream analysis.
write_result("../rf/splice_acc.txt", rf_acc_l)
write_result("../rf/splice_train_t.txt", rf_train_t_l)
write_result("../rf/splice_test_t.txt", rf_test_t_l)
| <filename>benchmarks/code_splice/rf.py
"""
Author: <NAME>
"""
import time
import numpy as np
import pandas as pd
from numpy.random import permutation
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
def write_result(filename, acc_ls):
"""Writes results to specified text file"""
output = open(filename, "w")
for acc in acc_ls:
output.write(str(acc) + "\n")
def prediction(classifier):
"""Generates predictions from model"""
predictions = classifier.predict(X_test)
p_t = 0
for i in range(X_test.shape[0]):
if predictions[i] == y_test[i]:
p_t += 1
return p_t / X_test.shape[0]
def experiment_rf():
"""Runs experiments for Random Forest"""
rf_l = []
train_time_l = []
test_time_l = []
rf = RandomForestClassifier()
for i in range(23):
X_t = X_r[: (i + 1) * 100]
y_t = y_r[: (i + 1) * 100]
# Train the model
start_time = time.perf_counter()
rf.fit(X_t, y_t)
end_time = time.perf_counter()
train_time_l.append(end_time - start_time)
# Test the model
start_time = time.perf_counter()
rf_l.append(prediction(rf))
end_time = time.perf_counter()
test_time_l.append(end_time - start_time)
return rf_l, train_time_l, test_time_l
# prepare splice DNA data
df = pd.read_csv("../dna.csv")
X = df.drop(["Label"], axis=1).values
y = df["Label"].values
X_train, X_test, y_train, y_test = train_test_split(X, y)
# Perform experiments
rf_acc_l = []
rf_train_t_l = []
rf_test_t_l = []
for i in range(10):
p = permutation(X_train.shape[0])
X_r = X_train[p]
y_r = y_train[p]
rf_acc, rf_train_t, rf_test_t = experiment_rf()
rf_acc_l.append(rf_acc)
rf_train_t_l.append(rf_train_t)
rf_test_t_l.append(rf_test_t)
write_result("../rf/splice_acc.txt", rf_acc_l)
write_result("../rf/splice_train_t.txt", rf_train_t_l)
write_result("../rf/splice_test_t.txt", rf_test_t_l)
| en | 0.74957 | Author: <NAME> Writes results to specified text file Generates predictions from model Runs experiments for Random Forest # Train the model # Test the model # prepare splice DNA data # Perform experiments | 2.633634 | 3 |
src/airflow_sync/utils.py | techalchemy/airflow-sync | 1 | 6617179 | <filename>src/airflow_sync/utils.py
# -*- coding: utf-8 -*-
from collections import OrderedDict, namedtuple
from itertools import chain, tee
from pathlib import Path
from typing import FrozenSet, List, NamedTuple, Optional, Set, Tuple, TypeVar, Union
import attr
from airflow.models import Variable
TRIGGER_TYPES = ("upsert", "delete", "insert", "update")
class Pipeline(NamedTuple):
    """Declarative description of one sync pipeline entry.

    Attributes mirror the (path, trigger_path, dependencies) tuples fed to
    :func:`get_upsert_mapping` / :meth:`SqlFile.from_tuple`.
    """

    # Defaults are None, so the annotations are Optional to match
    # (the original annotated them as plain str / List[str]).
    path: Optional[str] = None
    trigger_path: Optional[str] = None
    dependencies: Optional[List[str]] = None
def get_sql_dir():
    """Resolve the sql directory from the Airflow ``sql_dir`` Variable.

    Falls back to the in-repo ``airflow-core/sql`` directory (four levels
    above this module) when the configured directory does not exist.
    """
    configured = Path(Variable.get("sql_dir"))
    if configured.exists():
        return configured
    repo_root = Path(__file__).absolute().parent.parent.parent.parent
    return repo_root / "airflow-core/sql"
def convert_path(path: Optional[Path]) -> Optional[Path]:
    """Coerce *path* to an absolute :class:`Path`, or ``None`` if falsy.

    Strings are wrapped in :class:`Path`; relative paths are anchored at
    ``SqlFile.DEFAULT_SQL_DIR``.
    """
    if not path:
        return None
    resolved = path if isinstance(path, Path) else Path(path)
    if resolved.is_absolute():
        return resolved
    return SqlFile.DEFAULT_SQL_DIR / resolved
def dedup(iterable):
    """Return an iterator over *iterable* with duplicates dropped.

    First occurrence wins; order is preserved because the keys of an
    insertion-ordered mapping drive the iteration.
    """
    seen = OrderedDict.fromkeys(iterable)
    return iter(seen)
class SqlFile__MetaClass(type):
    """Metaclass giving :class:`SqlFile` a lazily computed class-level
    ``DEFAULT_SQL_DIR`` property (evaluated on access, not at import)."""

    @property
    def DEFAULT_SQL_DIR(cls):
        # Base sql directory from the Airflow "sql_dir" Variable; falls
        # back to the in-repo airflow-core/sql tree when it is absent.
        sql_dir = Path(Variable.get("sql_dir"))
        if not sql_dir.exists():
            PKG_PARENT = Path(__file__).absolute().parent.parent.parent.parent
            sql_dir = PKG_PARENT / "airflow-core/sql"
        # All relative SqlFile paths are anchored under salesforce/.
        return sql_dir / "salesforce"
@attr.s(frozen=True)
class SqlFile(metaclass=SqlFile__MetaClass):
    """Immutable node in a graph of sql files.

    Edge semantics: ``dependencies`` must run before this file,
    ``dependants`` wait on it, and ``triggers`` run after it. Instances
    are frozen; every builder method returns a new instance via
    :func:`attr.evolve`.
    """

    path = attr.ib(type=Path, converter=convert_path)
    sql = attr.ib(type=str, default=None)
    # Edge tuples are excluded from the hash so a node's identity stays
    # stable while its edges are being rebuilt.
    dependencies = attr.ib(factory=tuple, hash=False, type=Tuple["SqlFile"])
    dependants = attr.ib(factory=tuple, hash=False, type=Tuple["SqlFile"])
    triggers = attr.ib(factory=tuple, hash=False, type=Tuple["SqlFile"])

    @property
    def name(self) -> str:
        """Short identifier: the file stem, prefixed with its parent
        directory for files outside the salesforce/ root."""
        if self.path.parent.name == "salesforce":
            return self.path.stem
        return f"{self.path.parent.name}/{self.path.stem}"

    @property
    def is_trigger(self):
        """Indicates whether the current sql file represents a trigger.

        In this context a trigger is any sql that should be invoked as a
        result of another sql file running; by convention its name starts
        with one of ``TRIGGER_TYPES``.

        :return: Whether the current file is a trigger
        :rtype: bool
        """
        return any(self.name.startswith(trigger) for trigger in TRIGGER_TYPES)

    def merge(self, other: "SqlFile") -> "SqlFile":
        """Merge another metadata set for the same file into this one.

        Typically used to combine the dependency and trigger information
        of two instances describing the same sql file.

        :return: A new instance of the sql file with *other* merged in
        :rtype: SqlFile
        """
        # BUG FIX: the original added ``other.dependants`` into this
        # node's *dependencies*, silently turning downstream edges into
        # upstream ones; merge like with like instead.
        new_dependencies = tuple(self.dependencies) + tuple(other.dependencies)
        new_dependants = tuple(self.dependants) + tuple(other.dependants)
        new_triggers = tuple(self.triggers) + tuple(other.triggers)
        return attr.evolve(
            self,
            dependencies=tuple(dedup(new_dependencies)),
            dependants=tuple(dedup(new_dependants)),
            triggers=tuple(dedup(new_triggers)),
        )

    def load_sql(self):
        """Return a copy of this node with ``sql`` read from ``path``."""
        return attr.evolve(self, sql=self.path.read_text())

    @classmethod
    def merge_and_update(
        cls, original_list: List["SqlFile"], new_file: "SqlFile"
    ) -> List["SqlFile"]:
        """Insert *new_file* into *original_list* in place, merging with
        any equal entry already present (position preserved)."""
        if new_file in original_list:
            idx = original_list.index(new_file)
            new_file = new_file.merge(original_list.pop(idx))
            original_list.insert(idx, new_file)
        else:
            original_list.append(new_file)
        return original_list

    @classmethod
    def merge_from_list(
        cls, original_list: List["SqlFile"], new_list: List["SqlFile"]
    ) -> List["SqlFile"]:
        """Merge every entry of *new_list* into *original_list*."""
        for sql_file in new_list:
            original_list = cls.merge_and_update(original_list, sql_file)
        return original_list

    def depends_on(self, sql_file: "SqlFile") -> "SqlFile":
        """Indicate that this sql file has an upstream dependency on
        *sql_file*: it must be executed successfully first.

        :param SqlFile sql_file: A :class:`SqlFile` instance which must run first
        :return: An updated version of the current :class:`SqlFile` with new dependencies
        :rtype: SqlFile
        """
        dep_list: List["SqlFile"] = list(self.dependencies)
        new_deps = tuple(self.merge_and_update(dep_list, sql_file))
        return attr.evolve(self, dependencies=new_deps)

    def with_dependencies(self, dependencies: List["SqlFile"]) -> "SqlFile":
        """Indicate that this sql file has multiple upstream dependencies
        which must all execute successfully first.

        :return: An updated version of the current :class:`SqlFile` with new dependencies
        :rtype: SqlFile
        """
        dep_list: List["SqlFile"] = list(self.dependencies)
        new_dependencies = tuple(self.merge_from_list(dep_list, dependencies))
        return attr.evolve(self, dependencies=new_dependencies)

    def with_trigger(self, sql_file: "SqlFile") -> "SqlFile":
        """Indicate that this sql file triggers *sql_file*: after this
        file is executed, *sql_file* should be executed.

        :param SqlFile sql_file: A :class:`SqlFile` triggered by this file
        :return: An updated version of the current :class:`SqlFile` with new triggers
        :rtype: SqlFile
        """
        trigger_list: List["SqlFile"] = list(self.triggers)
        new_triggers = tuple(self.merge_and_update(trigger_list, sql_file))
        return attr.evolve(self, triggers=new_triggers)

    def with_triggers(self, triggers: List["SqlFile"]) -> "SqlFile":
        """Indicate that this sql file has multiple downstream triggers
        which must all be executed after it.

        :return: An updated version of the current :class:`SqlFile` with new triggers
        :rtype: SqlFile
        """
        trigger_list: List["SqlFile"] = list(self.triggers)
        new_triggers = tuple(self.merge_from_list(trigger_list, triggers))
        return attr.evolve(self, triggers=new_triggers)

    def with_child(self, sql_file: "SqlFile") -> "SqlFile":
        """Indicate that *sql_file* is a downstream dependant of this
        file: after this file is executed, *sql_file* should be executed.

        :param SqlFile sql_file: A :class:`SqlFile` instance which waits on this file
        :return: An updated version of the current :class:`SqlFile` with new children
        :rtype: SqlFile
        """
        dependant_list: List["SqlFile"] = list(self.dependants)
        new_dependants = tuple(self.merge_and_update(dependant_list, sql_file))
        return attr.evolve(self, dependants=new_dependants)

    def with_children(self, children: List["SqlFile"]) -> "SqlFile":
        """Indicate that this sql file has multiple downstream child
        dependants, all executed after it.

        :return: An updated version of the current :class:`SqlFile` with new dependants
        :rtype: SqlFile
        """
        dependant_list: List["SqlFile"] = list(self.dependants)
        new_dependants = tuple(self.merge_from_list(dependant_list, children))
        return attr.evolve(self, dependants=new_dependants)

    @classmethod
    def from_tuple(
        cls,
        pipeline: Tuple[
            Union[str, Path],
            Union[str, List[Union[str, "SqlFile"]]],
            List[Union[str, "SqlFile"]],
        ],
    ) -> Tuple["SqlFile", List["SqlFile"], Optional[List["SqlFile"]]]:
        """Create a :class:`SqlFile` from a (path, triggers, deps) tuple.

        A scalar ``triggers`` entry is normalised to a one-element list.

        :return: A new :class:`SqlFile` and its corresponding triggers and trigger deps
        :rtype: Tuple[`SqlFile`, List[`SqlFile`], Optional[List[`SqlFile`]]]
        """
        path, triggers, deps = pipeline
        trigger_list: List[Union[str, "SqlFile"]] = []
        if triggers and not isinstance(triggers, (list, tuple)):
            trigger_list = [triggers]
        elif triggers:
            trigger_list = list(triggers)
        return cls.create(path=path, triggers=trigger_list, dependencies=deps)

    @classmethod
    def build_trigger(
        cls,
        parent: "SqlFile",
        trigger: Union["SqlFile", str],
        deps: Optional[List["SqlFile"]],
    ) -> Tuple["SqlFile", "SqlFile", Optional[List["SqlFile"]]]:
        """Attach *trigger* (path or instance) to *parent*, wiring *deps*
        as the trigger's upstream dependencies and the trigger as each
        dep's child."""
        if isinstance(trigger, cls):
            trigger_instance: "SqlFile" = trigger
        elif isinstance(trigger, str):
            if not trigger.endswith(".sql"):
                trigger = f"{trigger}.sql"
            trigger_instance = cls(path=trigger).load_sql()  # type: ignore
        if deps:
            dep_list = [dep.with_child(trigger_instance) for dep in deps]
            trigger_instance = trigger_instance.with_dependencies(deps)
            return (
                parent.with_trigger(trigger_instance.with_dependencies(deps)),
                trigger_instance,
                dep_list,
            )
        return (parent.with_trigger(trigger_instance), trigger_instance, [])

    @classmethod
    def build_dependencies(
        cls, dependencies: List[Union["SqlFile", str]], parent: Optional["SqlFile"] = None
    ) -> Tuple[Optional["SqlFile"], List["SqlFile"]]:
        """Normalise *dependencies* (paths or instances) to instances and,
        when *parent* is given, attach them to it."""
        dep_instances: List["SqlFile"] = []
        if not dependencies:
            return parent, []
        for dep in dependencies:
            if not dep:
                continue
            if isinstance(dep, str):
                if not dep.endswith(".sql"):
                    dep = f"{dep}.sql"
                dep_instance: "SqlFile" = cls(path=dep).load_sql()  # type: ignore
            elif isinstance(dep, cls):
                dep_instance = dep
            # BUG FIX: the original tested ``dep`` (possibly a raw str)
            # against a list of SqlFile instances, so the membership check
            # never matched and duplicates slipped through.
            if dep_instance not in dep_instances:
                dep_instances.append(dep_instance)
        if parent:
            return parent.with_dependencies(dep_instances), dep_instances
        return parent, dep_instances

    @classmethod
    def build_triggers(
        cls,
        parent: "SqlFile",
        triggers: List[Union[str, "SqlFile"]],
        dependencies: List[Union[str, "SqlFile"]],
    ) -> Tuple["SqlFile", List["SqlFile"], Optional[List["SqlFile"]]]:
        """Wire every entry of *triggers* onto *parent*, sharing the
        instantiated *dependencies* between them."""
        deps: Optional[List["SqlFile"]] = None
        _, deps = cls.build_dependencies(dependencies)
        trigger_instances: List["SqlFile"] = []
        for trigger in triggers:
            if not trigger:
                continue
            parent, trigger, deps = cls.build_trigger(parent, trigger, deps)
            trigger_instances.append(trigger.depends_on(parent))
        return parent, trigger_instances, deps

    @classmethod
    def create(
        cls,
        path: Union[str, Path],
        triggers: List[Union[str, "SqlFile"]] = None,
        dependencies: List[Union[str, "SqlFile"]] = None,
    ) -> Tuple["SqlFile", List["SqlFile"], Optional[List["SqlFile"]]]:
        """Given a path and a list of triggered sql files, produces a SqlFile + triggers

        :param Union[str, Path] path: Path to the parent file
        :param List[Union[str, SqlFile]] triggers: A list of sql triggered by the parent
        :param List[Union[str, SqlFile]] dependencies: A list of trigger dependency files
        :return: A 2-tuple representing the parent sqlfile and its dependant triggers
        :rtype: Tuple[SqlFile, List[SqlFile], Optional[List[SqlFile]]]
        """
        if triggers is None:
            triggers = []
        if dependencies is None:
            dependencies = []
        if not path:
            # Message fixed: was an f-string with no placeholders and a typo.
            raise TypeError("Expected a value for *path*")
        if not isinstance(path, Path):
            path_sqlfile = cls(path=Path(path)).load_sql()
        else:
            path_sqlfile = cls(path=path).load_sql()
        return cls.build_triggers(path_sqlfile, triggers, dependencies)
def get_upsert_mapping(pipeline_list: List[Pipeline], allow_upsert: bool = False):
    """Build the dependency graphs for a list of pipelines.

    :param pipeline_list: A list of `namedtuple` instances with (file, trigger, deps)
    :type pipeline_list: List[`namedtuple`]
    :param allow_upsert: Whether to include triggers in the upsert tree, defaults to False
    :param allow_upsert: bool, optional
    :return: A mapping of names to `SqlFile` instances, list of roots, list of triggers
    :rtype: Tuple[Dict[str, SqlFile], List[SqlFile], List[SqlFile]]
    """
    path_map = {}
    roots = []
    trigger_files = []
    for pipeline in pipeline_list:
        root, pipeline_triggers, _deps = SqlFile.from_tuple(pipeline)
        if not allow_upsert:
            pipeline_triggers = []
        for trig in pipeline_triggers:
            # Merge with any previously-registered instance of this trigger
            # so its edges accumulate across pipelines.
            existing = path_map.get(trig.name, {}).get("sql_file")
            merged = trig if existing is None else trig.merge(existing)
            path_map[trig.name] = {"sql_file": merged}
            trigger_files.append(merged)
        path_map[root.name] = {"sql_file": root}
        roots.append(root)
    return path_map, roots, trigger_files
def pairwise(seq):
    """Yield consecutive overlapping pairs (s0, s1), (s1, s2), ... from *seq*."""
    first, second = tee(seq)
    # Advance the second iterator by one; empty input is tolerated.
    next(second, None)
    return zip(first, second)
def annotated_last(seq):
    """Yield ``(item, is_last)`` pairs for *seq*.

    ``is_last`` is True only for the final item of the sequence.
    """
    MISSING = object()
    # Append a sentinel and walk overlapping pairs: an item is last exactly
    # when its successor is the sentinel.
    current, lookahead = tee(chain(seq, [MISSING]))
    next(lookahead, None)
    for item, upcoming in zip(current, lookahead):
        yield item, upcoming is MISSING
| <filename>src/airflow_sync/utils.py
# -*- coding: utf-8 -*-
from collections import OrderedDict, namedtuple
from itertools import chain, tee
from pathlib import Path
from typing import FrozenSet, List, NamedTuple, Optional, Set, Tuple, TypeVar, Union
import attr
from airflow.models import Variable
TRIGGER_TYPES = ("upsert", "delete", "insert", "update")
class Pipeline(NamedTuple):
path: str = None
trigger_path: str = None
dependencies: List[str] = None
def get_sql_dir():
sql_dir = Path(Variable.get("sql_dir"))
if not sql_dir.exists():
PKG_PARENT = Path(__file__).absolute().parent.parent.parent.parent
sql_dir = PKG_PARENT / "airflow-core/sql"
return sql_dir
def convert_path(path: Optional[Path]) -> Optional[Path]:
if not path:
return None
if not isinstance(path, Path):
path = Path(path)
if not path.is_absolute():
path = SqlFile.DEFAULT_SQL_DIR / path
return path
def dedup(iterable):
return iter(OrderedDict.fromkeys(iterable))
class SqlFile__MetaClass(type):
@property
def DEFAULT_SQL_DIR(cls):
sql_dir = Path(Variable.get("sql_dir"))
if not sql_dir.exists():
PKG_PARENT = Path(__file__).absolute().parent.parent.parent.parent
sql_dir = PKG_PARENT / "airflow-core/sql"
return sql_dir / "salesforce"
@attr.s(frozen=True)
class SqlFile(metaclass=SqlFile__MetaClass):
path = attr.ib(type=Path, converter=convert_path)
sql = attr.ib(type=str, default=None)
dependencies = attr.ib(factory=tuple, hash=False, type=Tuple["SqlFile"])
dependants = attr.ib(factory=tuple, hash=False, type=Tuple["SqlFile"])
triggers = attr.ib(factory=tuple, hash=False, type=Tuple["SqlFile"])
@property
def name(self) -> str:
if self.path.parent.name == "salesforce":
return self.path.stem
return f"{self.path.parent.name}/{self.path.stem}"
@property
def is_trigger(self):
"""Indicates whether the current sql file represents a trigger
Triggers are the direct result of an upstream database execution, e.g. every time
an insert is performed, the given trigger is then performed. In this context, a
trigger simply represents any sql that should be invoked as a result of another
sql file running.
:return: Whether the current file is a trigger
:rtype: bool
"""
return any(self.name.startswith(trigger) for trigger in TRIGGER_TYPES)
def merge(self, other: "SqlFile") -> "SqlFile":
"""Merges the given dependency tree with another metadata set for the same file
This is typically used for updating the dependency and trigger information of a
given sql file instance.
:return: A new instance of the sql file with the given instance merged in
:rtype: SqlFile
"""
new_dependencies = tuple(self.dependencies) + other.dependants
new_triggers = tuple(self.triggers) + other.triggers
new_dependants = tuple(self.dependants) + other.dependants
new_dependants = tuple(list(dedup(new_dependants)))
new_dependencies = tuple(list(dedup(new_dependencies)))
new_triggers = tuple(list(dedup(new_triggers)))
return attr.evolve(
self,
dependencies=new_dependencies,
dependants=new_dependants,
triggers=new_triggers,
)
def load_sql(self):
return attr.evolve(self, sql=self.path.read_text())
@classmethod
def merge_and_update(
cls, original_list: List["SqlFile"], new_file: "SqlFile"
) -> List["SqlFile"]:
if new_file in original_list:
idx = original_list.index(new_file)
new_file = new_file.merge(original_list.pop(idx))
original_list.insert(idx, new_file)
else:
original_list.append(new_file)
return original_list
@classmethod
def merge_from_list(
cls, original_list: List["SqlFile"], new_list: List["SqlFile"]
) -> List["SqlFile"]:
for sql_file in new_list:
original_list = cls.merge_and_update(original_list, sql_file)
return original_list
def depends_on(self, sql_file: "SqlFile") -> "SqlFile":
"""Indicate that the current sql file has an upstream dependency on *sql_file*
This tells the task runner that before this file can be executed, *sql_file* must
be executed succesfully.
:param SqlFile sql_file: A :class:`SqlFile` instance which must run first
:return: An updated version of the current :class:`SqlFile` with new dependencies
:rtype: SqlFile
"""
dep_list: List["SqlFile"] = list(self.dependencies)
new_deps = tuple(self.merge_and_update(dep_list, sql_file))
return attr.evolve(self, dependencies=new_deps)
def with_dependencies(self, dependencies: List["SqlFile"]) -> "SqlFile":
"""Indicate that the current sql file has multiple upstream dependencies.
This tells the task runner that before this file can be executed, *dependencies*
must all be executed successfully.
:return: An updated version of the current :class:`SqlFile` with new dependencies
:rtype: SqlFile
"""
dep_list: List["SqlFile"] = list(self.dependencies)
new_dependencies = tuple(self.merge_from_list(dep_list, dependencies))
return attr.evolve(self, dependencies=new_dependencies)
def with_trigger(self, sql_file: "SqlFile") -> "SqlFile":
"""Indicate that the current sql file triggers *sql_file* to run
This tells the task runner that before after this file is executed, *sql_file*
should be executed.
:param SqlFile sql_file: A :class:`SqlFile` instance which must run first
:return: An updated version of the current :class:`SqlFile` with new triggers
:rtype: SqlFile
"""
trigger_list: List["SqlFile"] = list(self.triggers)
new_triggers = tuple(self.merge_and_update(trigger_list, sql_file))
return attr.evolve(self, triggers=new_triggers)
def with_triggers(self, triggers: List["SqlFile"]) -> "SqlFile":
"""Indicate that the current sql file has multiple downstream triggers.
This tells the task runner that after this file is executed, *triggers*
must all be executed.
:return: An updated version of the current :class:`SqlFile` with new triggers
:rtype: SqlFile
"""
trigger_list: List["SqlFile"] = list(self.triggers)
new_triggers = tuple(self.merge_from_list(trigger_list, triggers))
return attr.evolve(self, triggers=new_triggers)
def with_child(self, sql_file: "SqlFile") -> "SqlFile":
"""Indicate that the current sql file has a downstream dependant of *sql_file*
This tells the task runner that after this file is executed, *sql_file* should
be executed.
:param SqlFile sql_file: A :class:`SqlFile` instance which waits on this file
:return: An updated version of the current :class:`SqlFile` with new children
:rtype: SqlFile
"""
dependant_list: List["SqlFile"] = list(self.dependants)
new_dependants = tuple(self.merge_and_update(dependant_list, sql_file))
return attr.evolve(self, dependants=new_dependants)
def with_children(self, children: List["SqlFile"]) -> "SqlFile":
"""Indicate that the current sql file has multiple downstream child dependants.
This tells the task runner that after this file is executed, *dependencies*
should all be executed.
:return: An updated version of the current :class:`SqlFile` with new dependants
:rtype: SqlFile
"""
dependant_list: List["SqlFile"] = list(self.dependants)
new_dependants = tuple(self.merge_from_list(dependant_list, children))
return attr.evolve(self, dependants=new_dependants)
@classmethod
def from_tuple(
cls,
pipeline: Tuple[
Union[str, Path],
Union[str, List[Union[str, "SqlFile"]]],
List[Union[str, "SqlFile"]],
],
) -> Tuple["SqlFile", List["SqlFile"], Optional[List["SqlFile"]]]:
"""Creates a :class:`SqlFile` instance from a tuple of file paths.
:return: A new :class:`SqlFile` and its corresponding triggers and trigger deps
:rtype: Tuple[`SqlFile`, List[`SqlFile`], Optional[List[`SqlFile`]]]
"""
path, triggers, deps = pipeline
trigger_list: List[Union[str, "SqlFile"]] = []
if triggers and not isinstance(triggers, (list, tuple)):
trigger_list = [triggers]
elif triggers:
trigger_list = list(triggers)
return cls.create(path=path, triggers=trigger_list, dependencies=deps)
@classmethod
def build_trigger(
cls,
parent: "SqlFile",
trigger: Union["SqlFile", str],
deps: Optional[List["SqlFile"]],
) -> Tuple["SqlFile", "SqlFile", Optional[List["SqlFile"]]]:
if isinstance(trigger, cls):
trigger_instance: "SqlFile" = trigger
elif isinstance(trigger, str):
if not trigger.endswith(".sql"):
trigger = f"{trigger}.sql"
trigger_instance: "SqlFile" = cls(path=trigger).load_sql() # type: ignore
if deps:
dep_list = [dep.with_child(trigger_instance) for dep in deps]
trigger_instance = trigger_instance.with_dependencies(deps)
return (
parent.with_trigger(trigger_instance.with_dependencies(deps)),
trigger_instance,
dep_list,
)
return (parent.with_trigger(trigger_instance), trigger_instance, [])
@classmethod
def build_dependencies(
cls, dependencies: List[Union["SqlFile", str]], parent: Optional["SqlFile"] = None
) -> Tuple[Optional["SqlFile"], List["SqlFile"]]:
dep_instances: List["SqlFile"] = []
if not dependencies:
return parent, []
for dep in dependencies:
if not dep:
continue
if isinstance(dep, str):
if not dep.endswith(".sql"):
dep = f"{dep}.sql"
dep_instance: "SqlFile" = cls(path=dep).load_sql() # type: ignore
elif isinstance(dep, cls):
dep_instance = dep
if dep not in dep_instances:
dep_instances.append(dep_instance)
if parent:
return parent.with_dependencies(dep_instances), dep_instances
return parent, dep_instances
@classmethod
def build_triggers(
cls,
parent: "SqlFile",
triggers: List[Union[str, "SqlFile"]],
dependencies: List[Union[str, "SqlFile"]],
) -> Tuple["SqlFile", List["SqlFile"], Optional[List["SqlFile"]]]:
deps: Optional[List["SqlFile"]] = None
_, deps = cls.build_dependencies(dependencies)
trigger_instances: List["SqlFile"] = []
for trigger in triggers:
if not trigger:
continue
parent, trigger, deps = cls.build_trigger(parent, trigger, deps)
trigger_instances.append(trigger.depends_on(parent))
return parent, trigger_instances, deps
@classmethod
def create(
cls,
path: Union[str, Path],
triggers: List[Union[str, "SqlFile"]] = None,
dependencies: List[Union[str, "SqlFile"]] = None,
) -> Tuple["SqlFile", List["SqlFile"], Optional[List["SqlFile"]]]:
"""Given a path and a list of triggered sql files, produces a SqlFile + triggers
:param Union[str, Path] path: Path to the parent file
:param List[Union[str, SqlFile]] triggers: A list of sql triggered by the parent
:param List[Union[str, SqlFile]] dependencies: A list of trigger dependency files
:return: A 2-tuple representing the parent sqlfile and its dependant triggers
:rtype: Tuple[SqlFile, List[SqlFile], Optional[List[SqlFile]]]
"""
if triggers is None:
triggers = []
if dependencies is None:
dependencies = []
if not path:
raise TypeError(f"Expected an value for *path*")
if not isinstance(path, Path):
path_sqlfile = cls(path=Path(path)).load_sql()
else:
path_sqlfile = cls(path=path).load_sql()
return cls.build_triggers(path_sqlfile, triggers, dependencies)
def get_upsert_mapping(pipeline_list: List[Pipeline], allow_upsert: bool = False):
"""Given a list of **pipeline** instances, create dependency graphs and return them.
:param pipeline_list: A list of `namedtuple` instances with (file, trigger, deps)
:type pipeline_list: List[`namedtuple`]
:param allow_upsert: Whether to include `deps` in the upsert tree, defaults to False
:param allow_upsert: bool, optional
:return: A mapping of names to `SqlFile` instances, list of roots, list of triggers
:rtype: Tuple[Dict[str, SqlFile], List[SqlFile], List[SqlFile]]
"""
UPSERT_PATH = {}
UPSERT_SEQUENCE = []
TRIGGERS = []
for pipeline in pipeline_list:
insert, triggers, dependencies = SqlFile.from_tuple(pipeline)
if not allow_upsert:
triggers = []
for trigger in triggers:
path_value = UPSERT_PATH.get(trigger.name, {})
existing_file = path_value.get("sql_file", None)
if not existing_file:
UPSERT_PATH[trigger.name] = {"sql_file": trigger}
else:
UPSERT_PATH[trigger.name]["sql_file"] = trigger.merge(existing_file)
TRIGGERS.append(UPSERT_PATH[trigger.name]["sql_file"])
UPSERT_PATH[insert.name] = {"sql_file": insert}
UPSERT_SEQUENCE.append(insert)
return UPSERT_PATH, UPSERT_SEQUENCE, TRIGGERS
def pairwise(seq):
a, b = tee(seq)
next(b, None)
return zip(a, b)
def annotated_last(seq):
"""Returns an iterable of pairs of input item and a boolean that show if
the current item is the last item in the sequence."""
MISSING = object()
for current_item, next_item in pairwise(chain(seq, [MISSING])):
yield current_item, next_item is MISSING
| en | 0.78871 | # -*- coding: utf-8 -*- Indicates whether the current sql file represents a trigger Triggers are the direct result of an upstream database execution, e.g. every time an insert is performed, the given trigger is then performed. In this context, a trigger simply represents any sql that should be invoked as a result of another sql file running. :return: Whether the current file is a trigger :rtype: bool Merges the given dependency tree with another metadata set for the same file This is typically used for updating the dependency and trigger information of a given sql file instance. :return: A new instance of the sql file with the given instance merged in :rtype: SqlFile Indicate that the current sql file has an upstream dependency on *sql_file* This tells the task runner that before this file can be executed, *sql_file* must be executed succesfully. :param SqlFile sql_file: A :class:`SqlFile` instance which must run first :return: An updated version of the current :class:`SqlFile` with new dependencies :rtype: SqlFile Indicate that the current sql file has multiple upstream dependencies. This tells the task runner that before this file can be executed, *dependencies* must all be executed successfully. :return: An updated version of the current :class:`SqlFile` with new dependencies :rtype: SqlFile Indicate that the current sql file triggers *sql_file* to run This tells the task runner that before after this file is executed, *sql_file* should be executed. :param SqlFile sql_file: A :class:`SqlFile` instance which must run first :return: An updated version of the current :class:`SqlFile` with new triggers :rtype: SqlFile Indicate that the current sql file has multiple downstream triggers. This tells the task runner that after this file is executed, *triggers* must all be executed. 
:return: An updated version of the current :class:`SqlFile` with new triggers :rtype: SqlFile Indicate that the current sql file has a downstream dependant of *sql_file* This tells the task runner that after this file is executed, *sql_file* should be executed. :param SqlFile sql_file: A :class:`SqlFile` instance which waits on this file :return: An updated version of the current :class:`SqlFile` with new children :rtype: SqlFile Indicate that the current sql file has multiple downstream child dependants. This tells the task runner that after this file is executed, *dependencies* should all be executed. :return: An updated version of the current :class:`SqlFile` with new dependants :rtype: SqlFile Creates a :class:`SqlFile` instance from a tuple of file paths. :return: A new :class:`SqlFile` and its corresponding triggers and trigger deps :rtype: Tuple[`SqlFile`, List[`SqlFile`], Optional[List[`SqlFile`]]] # type: ignore # type: ignore Given a path and a list of triggered sql files, produces a SqlFile + triggers :param Union[str, Path] path: Path to the parent file :param List[Union[str, SqlFile]] triggers: A list of sql triggered by the parent :param List[Union[str, SqlFile]] dependencies: A list of trigger dependency files :return: A 2-tuple representing the parent sqlfile and its dependant triggers :rtype: Tuple[SqlFile, List[SqlFile], Optional[List[SqlFile]]] Given a list of **pipeline** instances, create dependency graphs and return them. :param pipeline_list: A list of `namedtuple` instances with (file, trigger, deps) :type pipeline_list: List[`namedtuple`] :param allow_upsert: Whether to include `deps` in the upsert tree, defaults to False :param allow_upsert: bool, optional :return: A mapping of names to `SqlFile` instances, list of roots, list of triggers :rtype: Tuple[Dict[str, SqlFile], List[SqlFile], List[SqlFile]] Returns an iterable of pairs of input item and a boolean that show if the current item is the last item in the sequence. | 2.165065 | 2 |
home/migrations/0001_initial.py | pacbac/georadio | 0 | 6617180 | # Generated by Django 2.1 on 2018-08-09 01:31
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: creates the Song and Playlist models and links them.

    Auto-generated by Django's makemigrations; edit with care.
    """

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Playlist',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Optional geographic anchor for the playlist.
                ('lat', models.FloatField(blank=True, null=True)),
                ('lng', models.FloatField(blank=True, null=True)),
                ('name', models.CharField(default='', max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Song',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=50)),
                # '#' defaults act as placeholders for missing URLs/art.
                ('url', models.CharField(default='#', max_length=200)),
                ('albumArt', models.CharField(default='#', max_length=200)),
                ('artist', models.CharField(default='Unknown Artist', max_length=50)),
            ],
        ),
        # Many-to-many added after both models exist.
        migrations.AddField(
            model_name='playlist',
            name='songs',
            field=models.ManyToManyField(to='home.Song'),
        ),
    ]
| # Generated by Django 2.1 on 2018-08-09 01:31
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Playlist',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('lat', models.FloatField(blank=True, null=True)),
('lng', models.FloatField(blank=True, null=True)),
('name', models.CharField(default='', max_length=50)),
],
),
migrations.CreateModel(
name='Song',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
('url', models.CharField(default='#', max_length=200)),
('albumArt', models.CharField(default='#', max_length=200)),
('artist', models.CharField(default='Unknown Artist', max_length=50)),
],
),
migrations.AddField(
model_name='playlist',
name='songs',
field=models.ManyToManyField(to='home.Song'),
),
]
| en | 0.750499 | # Generated by Django 2.1 on 2018-08-09 01:31 | 1.975718 | 2 |
hihunter/common/json2tree.py | a232319779/hihunter | 1 | 6617181 | # -*- coding: utf-8 -*-
# @Time : 2022/02/10 16:39:46
# @Author : ddvv
# @Site : https://ddvvmmzz.github.io
# @File : json2tree.py
# @Software : Visual Studio Code
# @WeChat : NextB
from io import StringIO
_branch_extend = '│ '
_branch_mid = '├─ '
_branch_last = '└─ '
_spacing = ' '
lang_map = {
'process': '启动',
'behavior': '操作',
'drop': '释放',
'net': '连接'
}
def _getHierarchy(graph, name='', file=None, _prefix='', _last=True):
""" Recursively parse json data to print data types """
if isinstance(graph, dict):
op_type = graph.get('type', '')
if op_type:
name = lang_map.get(op_type, op_type) + ' ' + graph.get('name')
print(_prefix, _branch_last if _last else _branch_mid, \
name, sep="", file=file)
_prefix += _spacing if _last else _branch_extend
length = len(graph)
for i, key in enumerate(graph.keys()):
_last = i == (length - 1)
_getHierarchy(graph[key], '"' + key + '"', file, _prefix, _last)
elif isinstance(graph, list):
for each_json in graph:
_getHierarchy(each_json, '', file, _prefix, _last=True)
else:
pass
def graph2tree(graph):
messageFile = StringIO()
_getHierarchy(graph, file=messageFile)
message = messageFile.getvalue()
messageFile.close()
return message
| # -*- coding: utf-8 -*-
# @Time : 2022/02/10 16:39:46
# @Author : ddvv
# @Site : https://ddvvmmzz.github.io
# @File : json2tree.py
# @Software : Visual Studio Code
# @WeChat : NextB
from io import StringIO
_branch_extend = '│ '
_branch_mid = '├─ '
_branch_last = '└─ '
_spacing = ' '
lang_map = {
'process': '启动',
'behavior': '操作',
'drop': '释放',
'net': '连接'
}
def _getHierarchy(graph, name='', file=None, _prefix='', _last=True):
""" Recursively parse json data to print data types """
if isinstance(graph, dict):
op_type = graph.get('type', '')
if op_type:
name = lang_map.get(op_type, op_type) + ' ' + graph.get('name')
print(_prefix, _branch_last if _last else _branch_mid, \
name, sep="", file=file)
_prefix += _spacing if _last else _branch_extend
length = len(graph)
for i, key in enumerate(graph.keys()):
_last = i == (length - 1)
_getHierarchy(graph[key], '"' + key + '"', file, _prefix, _last)
elif isinstance(graph, list):
for each_json in graph:
_getHierarchy(each_json, '', file, _prefix, _last=True)
else:
pass
def graph2tree(graph):
messageFile = StringIO()
_getHierarchy(graph, file=messageFile)
message = messageFile.getvalue()
messageFile.close()
return message
| en | 0.317863 | # -*- coding: utf-8 -*- # @Time : 2022/02/10 16:39:46 # @Author : ddvv # @Site : https://ddvvmmzz.github.io # @File : json2tree.py # @Software : Visual Studio Code # @WeChat : NextB Recursively parse json data to print data types | 2.710356 | 3 |
tests/query_test/test_parquet_stats.py | michaelhkw/impala | 0 | 6617182 | <gh_stars>0
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from tests.common.test_vector import ImpalaTestDimension
from tests.common.impala_test_suite import ImpalaTestSuite
MT_DOP_VALUES = [0, 1, 2, 8]
class TestParquetStats(ImpalaTestSuite):
"""
This suite tests runtime optimizations based on Parquet statistics.
"""
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestParquetStats, cls).add_test_dimensions()
cls.ImpalaTestMatrix.add_dimension(ImpalaTestDimension('mt_dop', *MT_DOP_VALUES))
cls.ImpalaTestMatrix.add_constraint(
lambda v: v.get_value('table_format').file_format == 'parquet')
def test_parquet_stats(self, vector, unique_database):
# The test makes assumptions about the number of row groups that are processed and
# skipped inside a fragment, so we ensure that the tests run in a single fragment.
vector.get_value('exec_option')['num_nodes'] = 1
self.run_test_case('QueryTest/parquet_stats', vector, use_db=unique_database)
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from tests.common.test_vector import ImpalaTestDimension
from tests.common.impala_test_suite import ImpalaTestSuite
MT_DOP_VALUES = [0, 1, 2, 8]
class TestParquetStats(ImpalaTestSuite):
"""
This suite tests runtime optimizations based on Parquet statistics.
"""
@classmethod
def get_workload(cls):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestParquetStats, cls).add_test_dimensions()
cls.ImpalaTestMatrix.add_dimension(ImpalaTestDimension('mt_dop', *MT_DOP_VALUES))
cls.ImpalaTestMatrix.add_constraint(
lambda v: v.get_value('table_format').file_format == 'parquet')
def test_parquet_stats(self, vector, unique_database):
# The test makes assumptions about the number of row groups that are processed and
# skipped inside a fragment, so we ensure that the tests run in a single fragment.
vector.get_value('exec_option')['num_nodes'] = 1
self.run_test_case('QueryTest/parquet_stats', vector, use_db=unique_database) | en | 0.879178 | # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. This suite tests runtime optimizations based on Parquet statistics. # The test makes assumptions about the number of row groups that are processed and # skipped inside a fragment, so we ensure that the tests run in a single fragment. | 1.934466 | 2 |
bisectriz.py | juanitopereza/Granulacion | 0 | 6617183 | <reponame>juanitopereza/Granulacion<filename>bisectriz.py
import numpy as np
import matplotlib.pyplot as plt
#%%
def find_nearest(array,value):
idx = np.argsort(np.abs(array-value))[0:2]
return idx
#%%
NIR = np.loadtxt("spnir.dat")
VIS = np.loadtxt("spvis.dat")
NIR[:,0] = (1.0/NIR[:,0] )*1e8
VIS[:,0] = (1.0/VIS[:,0] )*1e8
#%%
horiz = np.ones(len(NIR[:,0]))
# limites de la region de la linea en Angstroms
lambda_min = 5475.1
lambda_max = 5475.65
fig = plt.figure(figsize = (10,10))
plt.plot(NIR[:,0], NIR[:,1], c="r")
plt.plot(NIR[:,0], horiz)
plt.xlim(lambda_min,lambda_max)
plt.ticklabel_format(useOffset=False)
plt.title("Espectro corregido")
plt.xlabel(u"$Número\ de\ onda\ cm^{-1}$")
plt.show()
#%%
# porcentaje de desecho antes del continuo
des = 10.0
#NUmero de puntos para definir la bisectriz
N_partes = 40
region = NIR[(NIR[:,0] > lambda_min) & (NIR[:,0] < lambda_max)]
minimo = min(region[:,1])
L_min = region[:,0][region[:,1] == min(region[:,1])]
tope = 1.0 - (des/100.0*(1.00 - minimo))
region_izq = region[region[:,0] < L_min]
region_der = region[region[:,0] > L_min]
bisectriz = np.zeros((N_partes,2))
bisectriz[0,1] = minimo
bisectriz[0,0] = L_min
pasos = np.linspace(minimo,tope,N_partes)
bisectriz[:,1] = pasos
for i in range(1,N_partes):
izquierda = region_izq[:,0:2][find_nearest(region_izq[:,1],pasos[i])]
x_izq = izquierda[0,0]+(pasos[i]-izquierda[0,1])*(izquierda[1,0]-izquierda[0,0])/(izquierda[1,1]-izquierda[0,1])
derecha = region_der[:,0:2][find_nearest(region_der[:,1],pasos[i])]
x_der = derecha[0,0]+(pasos[i]-derecha[0,1])*(derecha[1,0]-derecha[0,0])/(derecha[1,1]-derecha[0,1])
bisectriz[i,0] = (x_izq + x_der)/2.0
n_points_extrapol = 5
m,b = np.polyfit(bisectriz[1:n_points_extrapol+1,0],bisectriz[1:n_points_extrapol+1,1],1)
core = np.array([0.0,0.0])
core[0] = (minimo-b)/m
core[1] = minimo
core
x = np.linspace(lambda_min,lambda_max)
#%%
line = np.ones(len(region[:,0]))*tope
fig = plt.figure(figsize = (15,15))
plt.plot(region[:,0], line, c="r")
plt.scatter(region[:,0], region[:,1], s=5)
plt.scatter(L_min,minimo)
plt.scatter(bisectriz[:,0], bisectriz[:,1], c="r")
plt.scatter(core[0],core[1])
plt.plot(x,m*x+b)
plt.ticklabel_format(useOffset=False)
plt.xlim(lambda_min,lambda_max)
#plt.xlim(bisectriz[0,0]-0.01,bisectriz[0,0]+0.01)
plt.ylim(minimo-0.1,1.1)
plt.xlabel(u"$Longitud\ de\ onda\ [\AA]$")
#plt.savefig("core_5puntos.pdf")
plt.show()
#%%
final = core
final = np.vstack((final,bisectriz))
np.savetxt("./lineas/bisec_5475.txt",final)
#%%
| import numpy as np
import matplotlib.pyplot as plt
#%%
def find_nearest(array,value):
idx = np.argsort(np.abs(array-value))[0:2]
return idx
#%%
NIR = np.loadtxt("spnir.dat")
VIS = np.loadtxt("spvis.dat")
NIR[:,0] = (1.0/NIR[:,0] )*1e8
VIS[:,0] = (1.0/VIS[:,0] )*1e8
#%%
horiz = np.ones(len(NIR[:,0]))
# limites de la region de la linea en Angstroms
lambda_min = 5475.1
lambda_max = 5475.65
fig = plt.figure(figsize = (10,10))
plt.plot(NIR[:,0], NIR[:,1], c="r")
plt.plot(NIR[:,0], horiz)
plt.xlim(lambda_min,lambda_max)
plt.ticklabel_format(useOffset=False)
plt.title("Espectro corregido")
plt.xlabel(u"$Número\ de\ onda\ cm^{-1}$")
plt.show()
#%%
# porcentaje de desecho antes del continuo
des = 10.0
#NUmero de puntos para definir la bisectriz
N_partes = 40
region = NIR[(NIR[:,0] > lambda_min) & (NIR[:,0] < lambda_max)]
minimo = min(region[:,1])
L_min = region[:,0][region[:,1] == min(region[:,1])]
tope = 1.0 - (des/100.0*(1.00 - minimo))
region_izq = region[region[:,0] < L_min]
region_der = region[region[:,0] > L_min]
bisectriz = np.zeros((N_partes,2))
bisectriz[0,1] = minimo
bisectriz[0,0] = L_min
pasos = np.linspace(minimo,tope,N_partes)
bisectriz[:,1] = pasos
for i in range(1,N_partes):
izquierda = region_izq[:,0:2][find_nearest(region_izq[:,1],pasos[i])]
x_izq = izquierda[0,0]+(pasos[i]-izquierda[0,1])*(izquierda[1,0]-izquierda[0,0])/(izquierda[1,1]-izquierda[0,1])
derecha = region_der[:,0:2][find_nearest(region_der[:,1],pasos[i])]
x_der = derecha[0,0]+(pasos[i]-derecha[0,1])*(derecha[1,0]-derecha[0,0])/(derecha[1,1]-derecha[0,1])
bisectriz[i,0] = (x_izq + x_der)/2.0
n_points_extrapol = 5
m,b = np.polyfit(bisectriz[1:n_points_extrapol+1,0],bisectriz[1:n_points_extrapol+1,1],1)
core = np.array([0.0,0.0])
core[0] = (minimo-b)/m
core[1] = minimo
core
x = np.linspace(lambda_min,lambda_max)
#%%
line = np.ones(len(region[:,0]))*tope
fig = plt.figure(figsize = (15,15))
plt.plot(region[:,0], line, c="r")
plt.scatter(region[:,0], region[:,1], s=5)
plt.scatter(L_min,minimo)
plt.scatter(bisectriz[:,0], bisectriz[:,1], c="r")
plt.scatter(core[0],core[1])
plt.plot(x,m*x+b)
plt.ticklabel_format(useOffset=False)
plt.xlim(lambda_min,lambda_max)
#plt.xlim(bisectriz[0,0]-0.01,bisectriz[0,0]+0.01)
plt.ylim(minimo-0.1,1.1)
plt.xlabel(u"$Longitud\ de\ onda\ [\AA]$")
#plt.savefig("core_5puntos.pdf")
plt.show()
#%%
final = core
final = np.vstack((final,bisectriz))
np.savetxt("./lineas/bisec_5475.txt",final)
#%% | es | 0.643945 | #%% #%% #%% # limites de la region de la linea en Angstroms #%% # porcentaje de desecho antes del continuo #NUmero de puntos para definir la bisectriz #%% #plt.xlim(bisectriz[0,0]-0.01,bisectriz[0,0]+0.01) #plt.savefig("core_5puntos.pdf") #%% #%% | 2.877176 | 3 |
script_event_generator.py | lumbermixalot/ScriptEvent_Transpiler | 3 | 6617184 | <filename>script_event_generator.py
# -*- coding: utf-8 -*-
"""
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
class ScriptEventGenerator():
"""This class generates a ScriptEvent lua file.
"""
def __init__(self, ebus_name, address_type):
self.ebusName = ebus_name
self.addressType = address_type
self.localBusName = ebus_name[0].lower() + ebus_name[1:]
def Generate(self, output_filename, functions_list):
with open(output_filename, 'wt') as file_obj:
self._WriteInfoHeader(file_obj)
file_obj.write("local {0} = ScriptEvent(\"{1}\"".format(self.localBusName, self.ebusName))
if self.addressType == "String":
file_obj.write(", typeid(\"\")")
elif self.addressType == "EntityId":
file_obj.write(", typeid(EntityId())")
elif self.addressType == "Crc32":
file_obj.write(", typeid(Crc32())")
file_obj.write(")\n\n")
for comment, funcName, params, retType in functions_list:
if len(comment) > 0:
file_obj.write("{}\n".format(comment))
self._WriteMethodAndParameters(file_obj, funcName, params, retType)
file_obj.write("{0}:Register()".format(self.localBusName))
def _WriteInfoHeader(self, file_obj):
file_obj.write(
"-- This ScriptEvent file was auto generated with ScriptEvent_Transpiler.\n"
"-- Source Available at https://github.com/lumbermixalot/ScriptEvent_Transpiler.git\n")
def _WriteMethodAndParameters(self, file_obj, funcName, params, retType):
file_obj.write("local method{0} = {1}:AddMethod(\"{0}\"".format(funcName, self.localBusName))
if retType is not None:
file_obj.write(", typeid({})".format(retType))
file_obj.write(")\n")
for name, value in params:
file_obj.write("method{0}:AddParameter(\"{1}\", typeid({2}))\n".format(funcName, name, value))
file_obj.write("\n") | <filename>script_event_generator.py
# -*- coding: utf-8 -*-
"""
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
class ScriptEventGenerator():
"""This class generates a ScriptEvent lua file.
"""
def __init__(self, ebus_name, address_type):
self.ebusName = ebus_name
self.addressType = address_type
self.localBusName = ebus_name[0].lower() + ebus_name[1:]
def Generate(self, output_filename, functions_list):
with open(output_filename, 'wt') as file_obj:
self._WriteInfoHeader(file_obj)
file_obj.write("local {0} = ScriptEvent(\"{1}\"".format(self.localBusName, self.ebusName))
if self.addressType == "String":
file_obj.write(", typeid(\"\")")
elif self.addressType == "EntityId":
file_obj.write(", typeid(EntityId())")
elif self.addressType == "Crc32":
file_obj.write(", typeid(Crc32())")
file_obj.write(")\n\n")
for comment, funcName, params, retType in functions_list:
if len(comment) > 0:
file_obj.write("{}\n".format(comment))
self._WriteMethodAndParameters(file_obj, funcName, params, retType)
file_obj.write("{0}:Register()".format(self.localBusName))
def _WriteInfoHeader(self, file_obj):
file_obj.write(
"-- This ScriptEvent file was auto generated with ScriptEvent_Transpiler.\n"
"-- Source Available at https://github.com/lumbermixalot/ScriptEvent_Transpiler.git\n")
def _WriteMethodAndParameters(self, file_obj, funcName, params, retType):
file_obj.write("local method{0} = {1}:AddMethod(\"{0}\"".format(funcName, self.localBusName))
if retType is not None:
file_obj.write(", typeid({})".format(retType))
file_obj.write(")\n")
for name, value in params:
file_obj.write("method{0}:AddParameter(\"{1}\", typeid({2}))\n".format(funcName, name, value))
file_obj.write("\n") | en | 0.767681 | # -*- coding: utf-8 -*- Copyright (c) 2020 <NAME> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. This class generates a ScriptEvent lua file. | 2.052623 | 2 |
ic.py | cs-fullstack-2019-fall/python-review2-cw-rachel-jack | 0 | 6617185 | <filename>ic.py
theArray = ["wash clothes", "wash dishes"]
listTask(theArray)
def deleteTask(array):
userIn = input("which task to delete ")
array.remove(userIn)
| <filename>ic.py
theArray = ["wash clothes", "wash dishes"]
listTask(theArray)
def deleteTask(array):
userIn = input("which task to delete ")
array.remove(userIn)
| none | 1 | 3.078432 | 3 | |
demo/kitchen_sink.py | DN6/gradio | 2 | 6617186 | <reponame>DN6/gradio<gh_stars>1-10
import gradio as gr
import numpy as np
from typing import List
CHOICES = ["foo", "bar", "baz"]
def generate_tone(note=0, octave=1, duration=3):
sr = 48000
a4_freq, tones_from_a4 = 440, 12 * (octave - 4) + (note - 9)
frequency = a4_freq * 2 ** (tones_from_a4 / 12)
duration = int(duration)
audio = np.linspace(0, duration, duration * sr)
audio = (20000 * np.sin(audio * (2 * np.pi * frequency))).astype(np.int16)
return sr, audio
def fn(
*args,
**kwargs,
):
return (
"Sample output text",
{
"positive": 0.287,
"negative": 0.517,
"neutral": 0.197,
},
generate_tone(),
np.ones((300, 300, 3)),
)
iface = gr.Interface(
fn,
inputs=[
gr.inputs.Textbox(label="Textbox"),
gr.inputs.Number(label="Number", default=42),
gr.inputs.Slider(label="Slider"),
gr.inputs.Checkbox(label="Checkbox"),
gr.inputs.CheckboxGroup(label="CheckboxGroup", choices=CHOICES),
gr.inputs.Radio(label="Radio", choices=CHOICES),
gr.inputs.Dropdown(label="Dropdown", choices=CHOICES),
gr.inputs.Image(label="Image"),
# TODO(Uncomment those when supported by the React version)
# gr.inputs.Video(),
# gr.inputs.Audio(),
# gr.inputs.File(),
# gr.inputs.Dataframe(),
],
outputs=[
gr.outputs.Textbox(),
gr.outputs.Label(),
gr.outputs.Audio(type="numpy"),
gr.outputs.Image(type="numpy"),
],
theme="huggingface",
)
iface.launch()
| import gradio as gr
import numpy as np
from typing import List
CHOICES = ["foo", "bar", "baz"]
def generate_tone(note=0, octave=1, duration=3):
sr = 48000
a4_freq, tones_from_a4 = 440, 12 * (octave - 4) + (note - 9)
frequency = a4_freq * 2 ** (tones_from_a4 / 12)
duration = int(duration)
audio = np.linspace(0, duration, duration * sr)
audio = (20000 * np.sin(audio * (2 * np.pi * frequency))).astype(np.int16)
return sr, audio
def fn(
*args,
**kwargs,
):
return (
"Sample output text",
{
"positive": 0.287,
"negative": 0.517,
"neutral": 0.197,
},
generate_tone(),
np.ones((300, 300, 3)),
)
iface = gr.Interface(
fn,
inputs=[
gr.inputs.Textbox(label="Textbox"),
gr.inputs.Number(label="Number", default=42),
gr.inputs.Slider(label="Slider"),
gr.inputs.Checkbox(label="Checkbox"),
gr.inputs.CheckboxGroup(label="CheckboxGroup", choices=CHOICES),
gr.inputs.Radio(label="Radio", choices=CHOICES),
gr.inputs.Dropdown(label="Dropdown", choices=CHOICES),
gr.inputs.Image(label="Image"),
# TODO(Uncomment those when supported by the React version)
# gr.inputs.Video(),
# gr.inputs.Audio(),
# gr.inputs.File(),
# gr.inputs.Dataframe(),
],
outputs=[
gr.outputs.Textbox(),
gr.outputs.Label(),
gr.outputs.Audio(type="numpy"),
gr.outputs.Image(type="numpy"),
],
theme="huggingface",
)
iface.launch() | en | 0.35308 | # TODO(Uncomment those when supported by the React version) # gr.inputs.Video(), # gr.inputs.Audio(), # gr.inputs.File(), # gr.inputs.Dataframe(), | 2.485103 | 2 |
regression.py | HAOYUatHZ/crypto-currency-ma-signal | 1 | 6617187 | <gh_stars>1-10
#!/usr/bin/env python
import settings
import urllib2
import json
import time
import threading
sample_nums = [5, 10, 20, 50, 100, 200] # in ASC order
coin = settings.regr_coin
def main(regr_time, file):
print "--------------------------"
print time.ctime(regr_time)
print "--------------------------"
try:
price_url = "https://marketapi.blockmeta.com/kline/%s/%s_usd/1min?count=%d&format_type=all&end_time=%d" % (settings.exchange, coin, 1, regr_time)
print "getting price from:", price_url
contents = urllib2.urlopen(price_url).read()
data = json.loads(contents)
regr_price = data[0]["close"]
print regr_price
file.write(time.ctime(regr_time) + ": regression_price "+ str(regr_price) + "\n")
file.flush()
kline_url = "https://marketapi.blockmeta.com/kline/%s/%s_usd/1hour?count=%d&format_type=all&end_time=%d" % (settings.exchange, coin, max(sample_nums), regr_time)
print "getting klines from:", kline_url
contents = urllib2.urlopen(kline_url).read()
data = json.loads(contents)
data = data[::-1] # date DESC
mas = {}
# calc MA
for i in range(len(sample_nums)):
sample_num = sample_nums[i]
ma = 0
for j in range(sample_num):
ma += data[j]["close"]
ma /= sample_num
ma_name = "ma%d" % sample_num
mas[ma_name] = ma
# calc EMA
for i in range(len(sample_nums)):
sample_num = sample_nums[i]
samples = data[0:sample_num][::-1] # ASC
if settings.sensitive:
alpha = 2/(sample_num+1)
else:
alpha = 1/sample_num
ema = samples[0]["close"]
for i in xrange(1,sample_num):
ema = alpha * samples[i]["close"] + (1-alpha) * ema
ema_name = "ema%d" % sample_num
mas[ema_name] = ema
# make decision
up_cnt = 0
down_cnt = 0
for ma_name in mas:
if regr_price > mas[ma_name]:
up_cnt += 1
if regr_price < mas[ma_name]:
down_cnt += 1
print "up: %d/%d, down: %d/%d" % (up_cnt, len(mas), down_cnt, len(mas))
if up_cnt > len(mas)*settings.strength_threshold:
print "%s: BUY BUY BUY %s at price: %f" % (time.ctime(regr_time), coin, regr_price)
file.write(time.ctime(regr_time) + ": BUY at "+ str(regr_price) + "\n")
file.flush()
if down_cnt > len(mas)*settings.strength_threshold:
print "%s: SELL SELL SELL %s at price: %f" % (time.ctime(regr_time), coin, regr_price)
file.write(time.ctime(regr_time) + ": SELL at "+ str(regr_price) + "\n")
file.flush()
return True
except Exception as e:
print "Error:", e
return False
if __name__ == "__main__":
print "Regression for", coin
regr_time = settings.regr_start_timestamp
filename = "regression_" + coin + "_" + str(regr_time) + "_strength_" + str(settings.strength_threshold) + "_sensitive_" + str(settings.sensitive) + ".log"
file = open(filename, "w")
file.flush()
now = int(time.time())
while regr_time <= now:
if main(regr_time, file):
regr_time += 60*60 # move forward 1h
file.close() | #!/usr/bin/env python
import settings
import urllib2
import json
import time
import threading
sample_nums = [5, 10, 20, 50, 100, 200] # in ASC order
coin = settings.regr_coin
def main(regr_time, file):
print "--------------------------"
print time.ctime(regr_time)
print "--------------------------"
try:
price_url = "https://marketapi.blockmeta.com/kline/%s/%s_usd/1min?count=%d&format_type=all&end_time=%d" % (settings.exchange, coin, 1, regr_time)
print "getting price from:", price_url
contents = urllib2.urlopen(price_url).read()
data = json.loads(contents)
regr_price = data[0]["close"]
print regr_price
file.write(time.ctime(regr_time) + ": regression_price "+ str(regr_price) + "\n")
file.flush()
kline_url = "https://marketapi.blockmeta.com/kline/%s/%s_usd/1hour?count=%d&format_type=all&end_time=%d" % (settings.exchange, coin, max(sample_nums), regr_time)
print "getting klines from:", kline_url
contents = urllib2.urlopen(kline_url).read()
data = json.loads(contents)
data = data[::-1] # date DESC
mas = {}
# calc MA
for i in range(len(sample_nums)):
sample_num = sample_nums[i]
ma = 0
for j in range(sample_num):
ma += data[j]["close"]
ma /= sample_num
ma_name = "ma%d" % sample_num
mas[ma_name] = ma
# calc EMA
for i in range(len(sample_nums)):
sample_num = sample_nums[i]
samples = data[0:sample_num][::-1] # ASC
if settings.sensitive:
alpha = 2/(sample_num+1)
else:
alpha = 1/sample_num
ema = samples[0]["close"]
for i in xrange(1,sample_num):
ema = alpha * samples[i]["close"] + (1-alpha) * ema
ema_name = "ema%d" % sample_num
mas[ema_name] = ema
# make decision
up_cnt = 0
down_cnt = 0
for ma_name in mas:
if regr_price > mas[ma_name]:
up_cnt += 1
if regr_price < mas[ma_name]:
down_cnt += 1
print "up: %d/%d, down: %d/%d" % (up_cnt, len(mas), down_cnt, len(mas))
if up_cnt > len(mas)*settings.strength_threshold:
print "%s: BUY BUY BUY %s at price: %f" % (time.ctime(regr_time), coin, regr_price)
file.write(time.ctime(regr_time) + ": BUY at "+ str(regr_price) + "\n")
file.flush()
if down_cnt > len(mas)*settings.strength_threshold:
print "%s: SELL SELL SELL %s at price: %f" % (time.ctime(regr_time), coin, regr_price)
file.write(time.ctime(regr_time) + ": SELL at "+ str(regr_price) + "\n")
file.flush()
return True
except Exception as e:
print "Error:", e
return False
if __name__ == "__main__":
print "Regression for", coin
regr_time = settings.regr_start_timestamp
filename = "regression_" + coin + "_" + str(regr_time) + "_strength_" + str(settings.strength_threshold) + "_sensitive_" + str(settings.sensitive) + ".log"
file = open(filename, "w")
file.flush()
now = int(time.time())
while regr_time <= now:
if main(regr_time, file):
regr_time += 60*60 # move forward 1h
file.close() | en | 0.504724 | #!/usr/bin/env python # in ASC order # date DESC # calc MA # calc EMA # ASC # make decision # move forward 1h | 2.851148 | 3 |
bmcs_beam/mxn/reinf_layout/reinf_layout_component.py | bmcs-group/bmcs_beam | 1 | 6617188 | '''
Created on Sep 4, 2012
@author: rch
'''
from traits.api import \
WeakRef
from bmcs_beam.mxn.matrix_cross_section import \
MatrixCrossSection
from bmcs_beam.mxn.cross_section_component import \
CrossSectionComponent
STATE_AND_GEOMETRY_CHANGE = 'eps_changed,+geo_input,matrix_cs.geo.changed'
STATE_LAW_AND_GEOMETRY_CHANGE = 'eps_changed,+geo_input,matrix_cs.geo.changed,material_changed,law_changed,material,material_law'
class ReinfLayoutComponent(CrossSectionComponent):
'''Cross section characteristics needed for tensile specimens
'''
matrix_cs = WeakRef(MatrixCrossSection, transient=True)
def __getstate__ (self):
'''Overriding __getstate__ because of WeakRef usage
'''
state = super(ReinfLayoutComponent, self).__getstate__()
for key in ['matrix_cs', 'matrix_cs_']:
if key in state:
del state[ key ]
return state
#===============================================================================
# Plotting functions
#===============================================================================
def plot_geometry(self, ax, clr):
'''Plot geometry'''
return
def plot_eps(self, ax):
return
def plot_sig(self, ax):
return
def plot(self, fig):
'''Plots the cross section - particular reinforcement component
plotted with distinctive color to others
'''
ax1 = fig.add_subplot(1, 2, 1)
self.state.plot_geometry(ax1)
self.plot_geometry(ax1, clr='red')
ax2 = fig.add_subplot(1, 2, 2)
self.material_law_.plot_ax(ax2)
| '''
Created on Sep 4, 2012
@author: rch
'''
from traits.api import \
WeakRef
from bmcs_beam.mxn.matrix_cross_section import \
MatrixCrossSection
from bmcs_beam.mxn.cross_section_component import \
CrossSectionComponent
STATE_AND_GEOMETRY_CHANGE = 'eps_changed,+geo_input,matrix_cs.geo.changed'
STATE_LAW_AND_GEOMETRY_CHANGE = 'eps_changed,+geo_input,matrix_cs.geo.changed,material_changed,law_changed,material,material_law'
class ReinfLayoutComponent(CrossSectionComponent):
'''Cross section characteristics needed for tensile specimens
'''
matrix_cs = WeakRef(MatrixCrossSection, transient=True)
def __getstate__ (self):
'''Overriding __getstate__ because of WeakRef usage
'''
state = super(ReinfLayoutComponent, self).__getstate__()
for key in ['matrix_cs', 'matrix_cs_']:
if key in state:
del state[ key ]
return state
#===============================================================================
# Plotting functions
#===============================================================================
def plot_geometry(self, ax, clr):
'''Plot geometry'''
return
def plot_eps(self, ax):
return
def plot_sig(self, ax):
return
def plot(self, fig):
'''Plots the cross section - particular reinforcement component
plotted with distinctive color to others
'''
ax1 = fig.add_subplot(1, 2, 1)
self.state.plot_geometry(ax1)
self.plot_geometry(ax1, clr='red')
ax2 = fig.add_subplot(1, 2, 2)
self.material_law_.plot_ax(ax2)
| en | 0.596143 | Created on Sep 4, 2012 @author: rch Cross section characteristics needed for tensile specimens Overriding __getstate__ because of WeakRef usage #=============================================================================== # Plotting functions #=============================================================================== Plot geometry Plots the cross section - particular reinforcement component plotted with distinctive color to others | 1.88166 | 2 |
teamtemp/tests/models/test_temp_model.py | travisjwarren/teamtemp | 0 | 6617189 | __author__ = 'traviswarren'
from django.test import TestCase
from teamtemp.tests.factories import TeamTemperatureFactory, TeamResponseFactory
class TeamTemperatureStatsTestCases(TestCase):
    """Unit tests for the aggregate values reported by TeamTemperature.stats()."""

    def test_stats_count(self):
        """stats()['count'] tracks the number of responses recorded."""
        survey = TeamTemperatureFactory()
        self.assertEqual(survey.stats()['count'], 0)
        TeamResponseFactory(request=survey)
        self.assertEqual(survey.stats()['count'], 1)
        TeamResponseFactory(request=survey)
        TeamResponseFactory(request=survey)
        self.assertEqual(survey.stats()['count'], 3)

    def test_stats_average(self):
        """stats()['average']['score__avg'] is None when empty, else the mean score."""
        survey = TeamTemperatureFactory()
        self.assertIsNone(survey.stats()['average']['score__avg'])
        TeamResponseFactory(request=survey, score=5)
        self.assertEqual(survey.stats()['average']['score__avg'], 5.0)
        TeamResponseFactory(request=survey, score=7)
        TeamResponseFactory(request=survey, score=6)
        self.assertEqual(survey.stats()['average']['score__avg'], 6.0)

    def test_stats_word(self):
        """stats()['words'] grows with the words supplied by responses."""
        survey = TeamTemperatureFactory()
        self.assertEqual(len(survey.stats()['words']), 0)
        TeamResponseFactory(request=survey, word='first')
        self.assertEqual(len(survey.stats()['words']), 1)
        TeamResponseFactory(request=survey, word='second')
        TeamResponseFactory(request=survey, word='third')
        self.assertEqual(len(survey.stats()['words']), 3)
| __author__ = 'traviswarren'
from django.test import TestCase
from teamtemp.tests.factories import TeamTemperatureFactory, TeamResponseFactory
class TeamTemperatureStatsTestCases(TestCase):
    """Unit tests for the aggregate values reported by TeamTemperature.stats()."""

    def test_stats_count(self):
        """stats()['count'] tracks the number of responses recorded."""
        survey = TeamTemperatureFactory()
        self.assertEqual(survey.stats()['count'], 0)
        TeamResponseFactory(request=survey)
        self.assertEqual(survey.stats()['count'], 1)
        TeamResponseFactory(request=survey)
        TeamResponseFactory(request=survey)
        self.assertEqual(survey.stats()['count'], 3)

    def test_stats_average(self):
        """stats()['average']['score__avg'] is None when empty, else the mean score."""
        survey = TeamTemperatureFactory()
        self.assertIsNone(survey.stats()['average']['score__avg'])
        TeamResponseFactory(request=survey, score=5)
        self.assertEqual(survey.stats()['average']['score__avg'], 5.0)
        TeamResponseFactory(request=survey, score=7)
        TeamResponseFactory(request=survey, score=6)
        self.assertEqual(survey.stats()['average']['score__avg'], 6.0)

    def test_stats_word(self):
        """stats()['words'] grows with the words supplied by responses."""
        survey = TeamTemperatureFactory()
        self.assertEqual(len(survey.stats()['words']), 0)
        TeamResponseFactory(request=survey, word='first')
        self.assertEqual(len(survey.stats()['words']), 1)
        TeamResponseFactory(request=survey, word='second')
        TeamResponseFactory(request=survey, word='third')
        self.assertEqual(len(survey.stats()['words']), 3)
| none | 1 | 2.384973 | 2 | |
src/collection/utils.py | adelmofilho/BeachWaterQuality | 0 | 6617190 | import os
import json
import yaml
def get_configs(file="config.yaml"):
    """
    Load the project configuration from a YAML file.

    Parameters
    ----------
    file: str
        Path of the configuration file; must end in ``.yaml`` or ``.yml``.

    Returns
    -------
    config: dict or None
        The parsed configuration, or ``None`` when the file does not
        exist (the error is printed rather than re-raised).

    Raises
    ------
    TypeError
        If *file* does not carry a YAML extension.
    """
    if not file.endswith(('.yaml', '.yml')):
        raise TypeError("Config file extension is not .yaml or .yml")
    try:
        with open(file, 'r') as stream:
            return yaml.load(stream, Loader=yaml.FullLoader)
    except FileNotFoundError as err:
        # Best effort: report the missing file and fall through (None).
        print(err, "File not found")
def init_logs(config):
    """
    Initialize the JSON logs file and its directory.

    Creates the logs directory and seeds an empty JSON logs file
    (``{}``) when they do not exist yet; existing files are left
    untouched.

    Parameters
    ----------
    config: dict
        Configuration with ``logs.dir`` (directory path) and
        ``logs.file`` (file name) entries.

    Returns
    -------
    filepath: str
        Path of the logs file.
    """
    # Define environment variables
    logs_dir = config.get("logs").get("dir")
    filename = config.get("logs").get("file")
    # Bug fix: the configured file name was read but never used -- the
    # path was previously built with a hard-coded "(unknown)" placeholder.
    filepath = os.path.join(logs_dir, filename)

    # Create directory if necessary
    os.makedirs(logs_dir, exist_ok=True)

    # Create logs file if necessary, seeded with an empty JSON object
    if not os.path.exists(filepath):
        with open(filepath, "w") as f:
            json.dump({}, f)

    # return logs filepath
    return filepath
def get_logs(logs_filepath):
    """
    Read the JSON logs file.

    Parameters
    ----------
    logs_filepath: str
        Path of the logs file.

    Returns
    -------
    logs: dict
        The deserialized logs content.
    """
    with open(logs_filepath, 'r') as stream:
        return json.load(stream)
| import os
import json
import yaml
def get_configs(file="config.yaml"):
    """
    Load the project configuration from a YAML file.

    Parameters
    ----------
    file: str
        Path of the configuration file; must end in ``.yaml`` or ``.yml``.

    Returns
    -------
    config: dict or None
        The parsed configuration, or ``None`` when the file does not
        exist (the error is printed rather than re-raised).

    Raises
    ------
    TypeError
        If *file* does not carry a YAML extension.
    """
    if not file.endswith(('.yaml', '.yml')):
        raise TypeError("Config file extension is not .yaml or .yml")
    try:
        with open(file, 'r') as stream:
            return yaml.load(stream, Loader=yaml.FullLoader)
    except FileNotFoundError as err:
        # Best effort: report the missing file and fall through (None).
        print(err, "File not found")
def init_logs(config):
    """
    Initialize the JSON logs file and its directory.

    Creates the logs directory and seeds an empty JSON logs file
    (``{}``) when they do not exist yet; existing files are left
    untouched.

    Parameters
    ----------
    config: dict
        Configuration with ``logs.dir`` (directory path) and
        ``logs.file`` (file name) entries.

    Returns
    -------
    filepath: str
        Path of the logs file.
    """
    # Define environment variables
    logs_dir = config.get("logs").get("dir")
    filename = config.get("logs").get("file")
    # Bug fix: the configured file name was read but never used -- the
    # path was previously built with a hard-coded "(unknown)" placeholder.
    filepath = os.path.join(logs_dir, filename)

    # Create directory if necessary
    os.makedirs(logs_dir, exist_ok=True)

    # Create logs file if necessary, seeded with an empty JSON object
    if not os.path.exists(filepath):
        with open(filepath, "w") as f:
            json.dump({}, f)

    # return logs filepath
    return filepath
def get_logs(logs_filepath):
    """
    Read the JSON logs file.

    Parameters
    ----------
    logs_filepath: str
        Path of the logs file.

    Returns
    -------
    logs: dict
        The deserialized logs content.
    """
    with open(logs_filepath, 'r') as stream:
        return json.load(stream)
| en | 0.425247 | Load project config file. Parameters ---------- file: str Filepath of config file Returns ------- config: dict A loaded config file Logs file inicialization. Parameters ---------- config: dict Dictionary containing configuration variables Returns ------- filepath: str Filepath of logs file # Define environment variables # Create directory if necessary # Create logs file if necessary # return logs filepath Load project config file. Parameters ---------- logs_filepath: str Filepath of logs file Returns ------- logs: dict A loaded logs file | 3.046647 | 3 |
tests/test_examples.py | belbio/schemas | 2 | 6617191 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Usage: program.py <customer>
"""
import jsonschema
from jsonschema import validate
from jsonschema.exceptions import ValidationError
import pytest
import yaml
import glob
import re
import os
# get the current path name
cur_path = os.path.dirname(os.path.realpath(__file__))
home_path = cur_path.replace('/tests', '')
def test_examples():
    """Validate every example document against its corresponding schema.

    Example files live in ``examples/``; the matching schema name is the
    example name with the middle dash-delimited token removed
    (e.g. ``foo-bar-baz.yaml`` -> ``foo-baz.yaml``).
    """
    examples = glob.glob(f'{home_path}/examples/*.yaml')
    for fn in examples:
        print('Example fn: ', fn)
        basename = os.path.basename(fn)
        with open(fn, 'r') as f:
            # safe_load: example files are plain data, full load is unsafe
            example = yaml.safe_load(f)
        # Raw string: the old '\-' escapes were invalid escape sequences.
        schema_basename = re.sub(r'-.*?-', '-', basename)
        schema_fn = f'{home_path}/schemas/{schema_basename}'
        print('Schema FN', schema_fn)
        with open(schema_fn, 'r') as f:
            schema = yaml.safe_load(f)

        v = jsonschema.Draft4Validator(schema)
        errors = sorted(v.iter_errors(example), key=lambda e: e.path)
        for error in errors:
            # error.context is None unless the error came from a
            # combinator keyword (anyOf/oneOf) -- guard before iterating.
            for suberror in sorted(error.context or [], key=lambda e: e.schema_path):
                print(list(suberror.schema_path), suberror.message, sep=", ")
        if errors:
            assert False, f'Example {fn} failed validation'
def test_instances():
    """Validate test instance documents against their schemas.

    Files whose name contains ``-good`` must validate cleanly; all other
    files are expected to raise a ``ValidationError``.
    """
    test_instances = glob.glob(f'{home_path}/tests/test_instances/*.yaml')
    for fn in test_instances:
        basename = os.path.basename(fn)
        print('Fn', fn)
        with open(fn, 'r') as f:
            # safe_load: instance files are plain data, full load is unsafe
            test_instance = yaml.safe_load(f)
        # Raw string: the old '\-' escapes were invalid escape sequences.
        schema_basename = re.sub(r'-.*?-', '-', basename)
        schema_fn = f'{home_path}/schemas/{schema_basename}'
        print('Schema FN', schema_fn)
        with open(schema_fn, 'r') as f:
            schema = yaml.safe_load(f)

        # File naming convention decides whether the instance should pass.
        good_flag = bool(re.search(r'-good.*?-', fn))

        if good_flag:
            v = jsonschema.Draft4Validator(schema)
            errors = sorted(v.iter_errors(test_instance), key=lambda e: e.path)
            for error in errors:
                for suberror in sorted(error.context or [], key=lambda e: e.schema_path):
                    print(list(suberror.schema_path), suberror.message, sep=", ")
            if errors:
                assert False, f'Good instance {fn} failed validation'
        else:
            # pytest >= 5 removed the ``message`` keyword from raises().
            with pytest.raises(ValidationError) as e:
                validate(test_instance, schema)
            print(e.value)
def main():
    """Entry point for running the example validation from the CLI.

    NOTE(review): everything that previously followed this call was
    unreachable (it came after a ``quit()``) and referenced an undefined
    ``example`` variable, so the dead code has been removed.
    """
    test_examples()


if __name__ == '__main__':
    main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Usage: program.py <customer>
"""
import jsonschema
from jsonschema import validate
from jsonschema.exceptions import ValidationError
import pytest
import yaml
import glob
import re
import os
# get the current path name
cur_path = os.path.dirname(os.path.realpath(__file__))
home_path = cur_path.replace('/tests', '')
def test_examples():
    """Validate every example document against its corresponding schema.

    Example files live in ``examples/``; the matching schema name is the
    example name with the middle dash-delimited token removed
    (e.g. ``foo-bar-baz.yaml`` -> ``foo-baz.yaml``).
    """
    examples = glob.glob(f'{home_path}/examples/*.yaml')
    for fn in examples:
        print('Example fn: ', fn)
        basename = os.path.basename(fn)
        with open(fn, 'r') as f:
            # safe_load: example files are plain data, full load is unsafe
            example = yaml.safe_load(f)
        # Raw string: the old '\-' escapes were invalid escape sequences.
        schema_basename = re.sub(r'-.*?-', '-', basename)
        schema_fn = f'{home_path}/schemas/{schema_basename}'
        print('Schema FN', schema_fn)
        with open(schema_fn, 'r') as f:
            schema = yaml.safe_load(f)

        v = jsonschema.Draft4Validator(schema)
        errors = sorted(v.iter_errors(example), key=lambda e: e.path)
        for error in errors:
            # error.context is None unless the error came from a
            # combinator keyword (anyOf/oneOf) -- guard before iterating.
            for suberror in sorted(error.context or [], key=lambda e: e.schema_path):
                print(list(suberror.schema_path), suberror.message, sep=", ")
        if errors:
            assert False, f'Example {fn} failed validation'
def test_instances():
    """Validate test instance documents against their schemas.

    Files whose name contains ``-good`` must validate cleanly; all other
    files are expected to raise a ``ValidationError``.
    """
    test_instances = glob.glob(f'{home_path}/tests/test_instances/*.yaml')
    for fn in test_instances:
        basename = os.path.basename(fn)
        print('Fn', fn)
        with open(fn, 'r') as f:
            # safe_load: instance files are plain data, full load is unsafe
            test_instance = yaml.safe_load(f)
        # Raw string: the old '\-' escapes were invalid escape sequences.
        schema_basename = re.sub(r'-.*?-', '-', basename)
        schema_fn = f'{home_path}/schemas/{schema_basename}'
        print('Schema FN', schema_fn)
        with open(schema_fn, 'r') as f:
            schema = yaml.safe_load(f)

        # File naming convention decides whether the instance should pass.
        good_flag = bool(re.search(r'-good.*?-', fn))

        if good_flag:
            v = jsonschema.Draft4Validator(schema)
            errors = sorted(v.iter_errors(test_instance), key=lambda e: e.path)
            for error in errors:
                for suberror in sorted(error.context or [], key=lambda e: e.schema_path):
                    print(list(suberror.schema_path), suberror.message, sep=", ")
            if errors:
                assert False, f'Good instance {fn} failed validation'
        else:
            # pytest >= 5 removed the ``message`` keyword from raises().
            with pytest.raises(ValidationError) as e:
                validate(test_instance, schema)
            print(e.value)
def main():
    """Entry point for running the example validation from the CLI.

    NOTE(review): everything that previously followed this call was
    unreachable (it came after a ``quit()``) and referenced an undefined
    ``example`` variable, so the dead code has been removed.
    """
    test_examples()
if __name__ == '__main__':
main() | en | 0.634555 | #!/usr/bin/env python # -*- coding: utf-8 -*- Usage: program.py <customer> # get the current path name Check that schemas are valid # except Exception as e: # print('Problem with example: ', fn) # print(e) # whether test instance should succeed or fail # try: # validate(test_instance, schema) # except Exception as e: # assert False # print('Problem with test instance: ', fn, ' against schema: ', schema_fn) # print(e) # validate(example, schema) | 2.619807 | 3 |
stage_0_Maurya.py | 1anj-pixel/team-greider | 2 | 6617192 | print("NAME: <NAME> \nE-MAIL: <EMAIL> \nSLACK USERNAME: @Maurya \nBIOSTACK: Drug Development")
| print("NAME: <NAME> \nE-MAIL: <EMAIL> \nSLACK USERNAME: @Maurya \nBIOSTACK: Drug Development")
| none | 1 | 1.243172 | 1 | |
bmkcompilers.py | darkf/bmk | 1 | 6617193 | <gh_stars>1-10
# License: zlib license
# see accompanying LICENSE file
from bmk_mingw import BMKC_MinGW
compilers = {"mingw": BMKC_MinGW
} | # License: zlib license
# see accompanying LICENSE file
from bmk_mingw import BMKC_MinGW
compilers = {"mingw": BMKC_MinGW
} | en | 0.78917 | # License: zlib license # see accompanying LICENSE file | 1.059806 | 1 |
src/tasks/dockerDeploy/acs-kubernetes/acsclient.py | Bhaskers-Blu-Org2/vsts-docker | 8 | 6617194 | import logging
import os
import socket
import subprocess
import time
from StringIO import StringIO
import paramiko
import requests
from sshtunnel import SSHTunnelForwarder
class ACSClient(object):
    """
    Client for the ACS cluster: issues HTTP requests either directly
    against the API endpoint or through an SSH tunnel to the master.
    """
    current_tunnel = ()

    # Max wait time (seconds) for tunnel to be established
    max_wait_time = 5 * 60

    def __init__(self, cluster_info):
        """
        :param cluster_info: Cluster connection settings (host, port,
                             credentials and optional direct API endpoint)
        """
        self.cluster_info = cluster_info
        self.tunnel_server = None
        self.is_direct = False
        self.is_running = False

        # If api_endpoint is provided, we have a direct connection
        if self.cluster_info.api_endpoint:
            logging.debug('Using Direct connection')
            self.is_direct = True
        else:
            logging.debug('Using SSH connection')

    def shutdown(self):
        """
        Stops the tunnel if it was started
        """
        if self.current_tunnel and self.is_running:
            logging.debug('Stopping SSH tunnel')
            self.current_tunnel[0].stop()
            self.is_running = False

    def _wait_for_tunnel(self, start_time, url):
        """
        Polls *url* until the SSH tunnel answers with HTTP 200, or until
        ``max_wait_time`` seconds have passed since *start_time*.

        :raises Exception: if the tunnel never became reachable
        """
        succeeded = False
        while time.time() - start_time <= self.max_wait_time:
            try:
                response = requests.get(url)
                if response.status_code == 200:
                    succeeded = True
                    self.is_running = True
                    break
            except requests.exceptions.RequestException:
                # Tunnel not reachable yet; retry after the delay below.
                # (Previously a bare ``except:`` that swallowed everything.)
                pass
            # Sleeping here (not only on exceptions) also avoids a
            # busy-loop when the endpoint is up but not yet returning 200.
            time.sleep(5)

        if not succeeded:
            raise Exception(
                'Could not establish connection to "{}".'.format(
                    self.cluster_info.host))

    def _get_private_key(self):
        """
        Creates a paramiko RSAKey instance from the configured private
        key string and password.

        :raises Exception: if no private key was configured
        """
        if not self.cluster_info.private_key:
            raise Exception('Private key was not provided')

        private_key_file = StringIO()
        private_key_file.write(self.cluster_info.private_key)
        private_key_file.seek(0)
        return paramiko.RSAKey.from_private_key(private_key_file, self.cluster_info.password)

    def _setup_tunnel_server(self):
        """
        Lazily starts the SSH tunnel and returns the local port that
        forwards to the cluster's API endpoint. The tunnel is created
        once and cached in ``current_tunnel``.
        """
        if not self.current_tunnel:
            logging.debug('Create a new SSH tunnel')
            local_port = self.get_available_local_port()
            log = logging.getLogger()
            # sshtunnel logs verbosely at DEBUG; raise the level while it runs.
            previous_log_level = log.level
            log.setLevel(logging.INFO)
            forwarder = SSHTunnelForwarder(
                ssh_address_or_host=(self.cluster_info.host,
                                     int(self.cluster_info.port)),
                ssh_username=self.cluster_info.username,
                ssh_pkey=self._get_private_key(),
                remote_bind_address=(
                    'localhost', self.cluster_info.get_api_endpoint_port()),
                local_bind_address=('0.0.0.0', int(local_port)),
                logger=log)
            forwarder.start()
            start_time = time.time()
            url = 'http://127.0.0.1:{}'.format(str(local_port))
            self._wait_for_tunnel(start_time, url)
            self.current_tunnel = (forwarder, int(local_port))
            log.setLevel(previous_log_level)
        return self.current_tunnel[1]

    def create_request_url(self, path):
        """
        Builds the full request URL for *path*, creating the SSH tunnel
        on first use when no direct connection is configured.
        """
        if self.is_direct:
            raise NotImplementedError("Direct connection is not implemented yet")
        local_port = self._setup_tunnel_server()
        return 'http://127.0.0.1:{}/{}'.format(str(local_port), path)

    def make_request(self, path, method, data=None, port=None, **kwargs):
        """
        Makes an HTTP request with the specified method.

        :param path: Path part of the URL to make the request to
        :param method: Name of a ``requests`` module function
                       ('get', 'post', 'put', 'delete', ...)
        :param data: Optional request body
        :raises Exception: if *method* is not a valid ``requests`` function
        """
        url = self.create_request_url(path)
        logging.debug('%s: %s (DATA=%s)', method, url, data)

        if not hasattr(requests, method):
            raise Exception('Invalid method {}'.format(method))

        method_to_call = getattr(requests, method)
        headers = {
            'Content-type': 'application/json',
        }
        # data=None matches requests' own default, so a single call site
        # covers both the with-body and without-body cases (previously
        # duplicated across two branches).
        return method_to_call(url, data=data, headers=headers, **kwargs)

    def get_request(self, path):
        """
        Makes a GET request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'get')

    def delete_request(self, path):
        """
        Makes a DELETE request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'delete')

    def post_request(self, path, post_data):
        """
        Makes a POST request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'post', data=post_data)

    def put_request(self, path, put_data=None, **kwargs):
        """
        Makes a PUT request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'put', data=put_data, **kwargs)

    def get_available_local_port(self):
        """
        Gets a random, available local port.

        NOTE(review): there is a small race window between closing the
        probe socket and the caller binding the returned port.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(('', 0))
        sock.listen(1)
        port = sock.getsockname()[1]
        sock.close()
        return port
| import logging
import os
import socket
import subprocess
import time
from StringIO import StringIO
import paramiko
import requests
from sshtunnel import SSHTunnelForwarder
class ACSClient(object):
    """
    Client for the ACS cluster: issues HTTP requests either directly
    against the API endpoint or through an SSH tunnel to the master.
    """
    current_tunnel = ()

    # Max wait time (seconds) for tunnel to be established
    max_wait_time = 5 * 60

    def __init__(self, cluster_info):
        """
        :param cluster_info: Cluster connection settings (host, port,
                             credentials and optional direct API endpoint)
        """
        self.cluster_info = cluster_info
        self.tunnel_server = None
        self.is_direct = False
        self.is_running = False

        # If api_endpoint is provided, we have a direct connection
        if self.cluster_info.api_endpoint:
            logging.debug('Using Direct connection')
            self.is_direct = True
        else:
            logging.debug('Using SSH connection')

    def shutdown(self):
        """
        Stops the tunnel if it was started
        """
        if self.current_tunnel and self.is_running:
            logging.debug('Stopping SSH tunnel')
            self.current_tunnel[0].stop()
            self.is_running = False

    def _wait_for_tunnel(self, start_time, url):
        """
        Polls *url* until the SSH tunnel answers with HTTP 200, or until
        ``max_wait_time`` seconds have passed since *start_time*.

        :raises Exception: if the tunnel never became reachable
        """
        succeeded = False
        while time.time() - start_time <= self.max_wait_time:
            try:
                response = requests.get(url)
                if response.status_code == 200:
                    succeeded = True
                    self.is_running = True
                    break
            except requests.exceptions.RequestException:
                # Tunnel not reachable yet; retry after the delay below.
                # (Previously a bare ``except:`` that swallowed everything.)
                pass
            # Sleeping here (not only on exceptions) also avoids a
            # busy-loop when the endpoint is up but not yet returning 200.
            time.sleep(5)

        if not succeeded:
            raise Exception(
                'Could not establish connection to "{}".'.format(
                    self.cluster_info.host))

    def _get_private_key(self):
        """
        Creates a paramiko RSAKey instance from the configured private
        key string and password.

        :raises Exception: if no private key was configured
        """
        if not self.cluster_info.private_key:
            raise Exception('Private key was not provided')

        private_key_file = StringIO()
        private_key_file.write(self.cluster_info.private_key)
        private_key_file.seek(0)
        return paramiko.RSAKey.from_private_key(private_key_file, self.cluster_info.password)

    def _setup_tunnel_server(self):
        """
        Lazily starts the SSH tunnel and returns the local port that
        forwards to the cluster's API endpoint. The tunnel is created
        once and cached in ``current_tunnel``.
        """
        if not self.current_tunnel:
            logging.debug('Create a new SSH tunnel')
            local_port = self.get_available_local_port()
            log = logging.getLogger()
            # sshtunnel logs verbosely at DEBUG; raise the level while it runs.
            previous_log_level = log.level
            log.setLevel(logging.INFO)
            forwarder = SSHTunnelForwarder(
                ssh_address_or_host=(self.cluster_info.host,
                                     int(self.cluster_info.port)),
                ssh_username=self.cluster_info.username,
                ssh_pkey=self._get_private_key(),
                remote_bind_address=(
                    'localhost', self.cluster_info.get_api_endpoint_port()),
                local_bind_address=('0.0.0.0', int(local_port)),
                logger=log)
            forwarder.start()
            start_time = time.time()
            url = 'http://127.0.0.1:{}'.format(str(local_port))
            self._wait_for_tunnel(start_time, url)
            self.current_tunnel = (forwarder, int(local_port))
            log.setLevel(previous_log_level)
        return self.current_tunnel[1]

    def create_request_url(self, path):
        """
        Builds the full request URL for *path*, creating the SSH tunnel
        on first use when no direct connection is configured.
        """
        if self.is_direct:
            raise NotImplementedError("Direct connection is not implemented yet")
        local_port = self._setup_tunnel_server()
        return 'http://127.0.0.1:{}/{}'.format(str(local_port), path)

    def make_request(self, path, method, data=None, port=None, **kwargs):
        """
        Makes an HTTP request with the specified method.

        :param path: Path part of the URL to make the request to
        :param method: Name of a ``requests`` module function
                       ('get', 'post', 'put', 'delete', ...)
        :param data: Optional request body
        :raises Exception: if *method* is not a valid ``requests`` function
        """
        url = self.create_request_url(path)
        logging.debug('%s: %s (DATA=%s)', method, url, data)

        if not hasattr(requests, method):
            raise Exception('Invalid method {}'.format(method))

        method_to_call = getattr(requests, method)
        headers = {
            'Content-type': 'application/json',
        }
        # data=None matches requests' own default, so a single call site
        # covers both the with-body and without-body cases (previously
        # duplicated across two branches).
        return method_to_call(url, data=data, headers=headers, **kwargs)

    def get_request(self, path):
        """
        Makes a GET request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'get')

    def delete_request(self, path):
        """
        Makes a DELETE request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'delete')

    def post_request(self, path, post_data):
        """
        Makes a POST request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'post', data=post_data)

    def put_request(self, path, put_data=None, **kwargs):
        """
        Makes a PUT request to an endpoint on the cluster

        :param path: Path part of the URL to make the request to
        :type path: String
        """
        return self.make_request(path, 'put', data=put_data, **kwargs)

    def get_available_local_port(self):
        """
        Gets a random, available local port.

        NOTE(review): there is a small race window between closing the
        probe socket and the caller binding the returned port.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(('', 0))
        sock.listen(1)
        port = sock.getsockname()[1]
        sock.close()
        return port
| en | 0.842927 | Class for connecting to the ACS cluster and making requests # Max wait time (seconds) for tunnel to be established # If master_url is provided, we have a direct connection Stops the tunnel if its started Waits until the SSH tunnel is available and we can start sending requests through it Creates an RSAKey instance from provided private key string and password Gets the local port to access the tunnel Creates the request URL from provided path. Depending on which connection type was picked, it will create an SSH tunnel Makes an HTTP request with specified method Makes a GET request to an endpoint on the cluster :param path: Path part of the URL to make the request to :type path: String Makes a DELETE request to an endpoint on the cluster :param path: Path part of the URL to make the request to :type path: String Makes a POST request to an endpoint on the cluster :param path: Path part of the URL to make the request to :type path: String Makes a POST request to an endpoint on the cluster) :param path: Path part of the URL to make the request to :type path: String Gets a random, available local port | 2.718508 | 3 |
PytorchRouting/DecisionLayers/ReinforcementLearning/WPL.py | mattriemer/RoutingNetworks | 2 | 6617195 | """
This file defines class REINFORCE.
@author: <NAME> :: <EMAIL>
@created: 6/7/18
"""
import copy
import torch
import torch.nn.functional as F
from ..Decision import Decision
class WPL(Decision):
    """
    Weighted Policy Learner (WPL) Multi-Agent Reinforcement Learning based decision making.
    """

    def __init__(self, *args, **kwargs):
        Decision.__init__(self, *args, **kwargs)
        # Per-agent state-value estimators, mirroring the policy storage.
        self._value_mem = self._construct_policy_storage(
            self._num_selections, self._pol_type, None, self._pol_hidden_dims)

    def _loss(self, sample):
        """WPL loss: gradient-weighted policy target plus a value regression.

        ``sample.state[:, a, 0]`` holds the action log-probabilities and
        ``sample.state[:, a, 1]`` the value estimates stacked in _forward.
        """
        # Advantage estimate of the taken action.
        grad_est = sample.cum_return - sample.state[:, sample.action, 1]
        # WPL projection: shrink the update near the estimated equilibrium.
        grad_projected = torch.where(grad_est < 0, 1. + grad_est, 2. - grad_est)
        prob_taken = sample.state[:, sample.action, 0]
        prob_target = (prob_taken * grad_projected).detach()
        act_loss = self.bellman_loss_func(prob_taken, prob_target)
        ret_loss = self.bellman_loss_func(
            sample.state[:, sample.action, 1], sample.cum_return.detach()).unsqueeze(-1)
        return act_loss + ret_loss

    def _forward(self, xs, mxs, agent):
        """Sample an action while training; take the argmax in evaluation."""
        policy = self._policy[agent](xs)
        # Shift scores to be strictly positive so they can serve as
        # unnormalized probabilities. Bug fix: keepdim=True keeps the
        # per-row minimum broadcastable for batch sizes > 1 (the old
        # ``policy.min(dim=1)[0]`` only broadcast correctly for batch 1).
        policy = policy - policy.min(dim=1, keepdim=True)[0] + 1e-6
        values = self._value_mem[agent](xs)
        distribution = torch.distributions.Categorical(probs=policy)
        if self.training:
            actions = distribution.sample()
        else:
            actions = distribution.logits.max(dim=1)[1]
        state = torch.stack([distribution.logits, values], 2)
        return xs, actions, state
# @staticmethod
# def _loss(sample):
# grad_est = sample.cum_return - sample.state[:, 0, 1]
# # ret_loss = F.smooth_l1_loss(sample.state[:, sample.action, 1], sample.cum_return).unsqueeze(-1)
# ret_loss = F.smooth_l1_loss(sample.state[:, 0, 1], sample.cum_return).unsqueeze(-1)
# grad_projected = grad_est * 1.3
# grad_projected = torch.pow(grad_projected, 3.)
# if grad_projected < 0:
# pol_update = 1. + grad_projected
# else:
# pol_update = 2. - grad_projected
# pol_update = sample.state[:, sample.action, 0] * pol_update
# act_loss = F.smooth_l1_loss(sample.state[:, sample.action, 0], pol_update.data)
# return act_loss + ret_loss
# # @staticmethod
# def _loss(self, sample):
# grad_est = sample.cum_return - sample.state[:, 0, 1]
# # ret_loss = F.smooth_l1_loss(sample.state[:, sample.action, 1], sample.cum_return).unsqueeze(-1)
# # ret_loss = F.smooth_l1_loss(sample.state[:, 0, 1], sample.cum_return).unsqueeze(-1)
# grad_projected = grad_est * 1.3
# grad_projected = torch.pow(grad_projected, 3.)
# if grad_projected < 0:
# pol_update = 1. + grad_projected
# else:
# pol_update = 2. - grad_projected
# pol_update = sample.state[:, sample.action, 0] * pol_update
# self._policy[sample.prior_action]._approx.data[0, sample.action] = pol_update.data
# self._value_mem[sample.prior_action]._approx.data[0, sample.action] = \
# 0.9 * self._value_mem[sample.prior_action]._approx.data[0, sample.action] + 0.1 * sample.cum_return
# # act_loss = F.smooth_l1_loss(sample.state[:, sample.action, 0], pol_update.data)
# return torch.zeros(1).to(sample.action.device)
| """
This file defines class REINFORCE.
@author: <NAME> :: <EMAIL>
@created: 6/7/18
"""
import copy
import torch
import torch.nn.functional as F
from ..Decision import Decision
class WPL(Decision):
    """
    Weighted Policy Learner (WPL) Multi-Agent Reinforcement Learning based decision making.
    """

    def __init__(self, *args, **kwargs):
        Decision.__init__(self, *args, **kwargs)
        # Per-agent state-value estimators, mirroring the policy storage.
        self._value_mem = self._construct_policy_storage(
            self._num_selections, self._pol_type, None, self._pol_hidden_dims)

    def _loss(self, sample):
        """WPL loss: gradient-weighted policy target plus a value regression.

        ``sample.state[:, a, 0]`` holds the action log-probabilities and
        ``sample.state[:, a, 1]`` the value estimates stacked in _forward.
        """
        # Advantage estimate of the taken action.
        grad_est = sample.cum_return - sample.state[:, sample.action, 1]
        # WPL projection: shrink the update near the estimated equilibrium.
        grad_projected = torch.where(grad_est < 0, 1. + grad_est, 2. - grad_est)
        prob_taken = sample.state[:, sample.action, 0]
        prob_target = (prob_taken * grad_projected).detach()
        act_loss = self.bellman_loss_func(prob_taken, prob_target)
        ret_loss = self.bellman_loss_func(
            sample.state[:, sample.action, 1], sample.cum_return.detach()).unsqueeze(-1)
        return act_loss + ret_loss

    def _forward(self, xs, mxs, agent):
        """Sample an action while training; take the argmax in evaluation."""
        policy = self._policy[agent](xs)
        # Shift scores to be strictly positive so they can serve as
        # unnormalized probabilities. Bug fix: keepdim=True keeps the
        # per-row minimum broadcastable for batch sizes > 1 (the old
        # ``policy.min(dim=1)[0]`` only broadcast correctly for batch 1).
        policy = policy - policy.min(dim=1, keepdim=True)[0] + 1e-6
        values = self._value_mem[agent](xs)
        distribution = torch.distributions.Categorical(probs=policy)
        if self.training:
            actions = distribution.sample()
        else:
            actions = distribution.logits.max(dim=1)[1]
        state = torch.stack([distribution.logits, values], 2)
        return xs, actions, state
# @staticmethod
# def _loss(sample):
# grad_est = sample.cum_return - sample.state[:, 0, 1]
# # ret_loss = F.smooth_l1_loss(sample.state[:, sample.action, 1], sample.cum_return).unsqueeze(-1)
# ret_loss = F.smooth_l1_loss(sample.state[:, 0, 1], sample.cum_return).unsqueeze(-1)
# grad_projected = grad_est * 1.3
# grad_projected = torch.pow(grad_projected, 3.)
# if grad_projected < 0:
# pol_update = 1. + grad_projected
# else:
# pol_update = 2. - grad_projected
# pol_update = sample.state[:, sample.action, 0] * pol_update
# act_loss = F.smooth_l1_loss(sample.state[:, sample.action, 0], pol_update.data)
# return act_loss + ret_loss
# # @staticmethod
# def _loss(self, sample):
# grad_est = sample.cum_return - sample.state[:, 0, 1]
# # ret_loss = F.smooth_l1_loss(sample.state[:, sample.action, 1], sample.cum_return).unsqueeze(-1)
# # ret_loss = F.smooth_l1_loss(sample.state[:, 0, 1], sample.cum_return).unsqueeze(-1)
# grad_projected = grad_est * 1.3
# grad_projected = torch.pow(grad_projected, 3.)
# if grad_projected < 0:
# pol_update = 1. + grad_projected
# else:
# pol_update = 2. - grad_projected
# pol_update = sample.state[:, sample.action, 0] * pol_update
# self._policy[sample.prior_action]._approx.data[0, sample.action] = pol_update.data
# self._value_mem[sample.prior_action]._approx.data[0, sample.action] = \
# 0.9 * self._value_mem[sample.prior_action]._approx.data[0, sample.action] + 0.1 * sample.cum_return
# # act_loss = F.smooth_l1_loss(sample.state[:, sample.action, 0], pol_update.data)
# return torch.zeros(1).to(sample.action.device)
| en | 0.40175 | This file defines class REINFORCE. @author: <NAME> :: <EMAIL> @created: 6/7/18 Weighted Policy Learner (WPL) Multi-Agent Reinforcement Learning based decision making. # policy = F.relu(policy) - F.relu(policy - 1.) + 1e-6 # policy = policy/policy.sum(dim=1) # @staticmethod # def _loss(sample): # grad_est = sample.cum_return - sample.state[:, 0, 1] # # ret_loss = F.smooth_l1_loss(sample.state[:, sample.action, 1], sample.cum_return).unsqueeze(-1) # ret_loss = F.smooth_l1_loss(sample.state[:, 0, 1], sample.cum_return).unsqueeze(-1) # grad_projected = grad_est * 1.3 # grad_projected = torch.pow(grad_projected, 3.) # if grad_projected < 0: # pol_update = 1. + grad_projected # else: # pol_update = 2. - grad_projected # pol_update = sample.state[:, sample.action, 0] * pol_update # act_loss = F.smooth_l1_loss(sample.state[:, sample.action, 0], pol_update.data) # return act_loss + ret_loss # # @staticmethod # def _loss(self, sample): # grad_est = sample.cum_return - sample.state[:, 0, 1] # # ret_loss = F.smooth_l1_loss(sample.state[:, sample.action, 1], sample.cum_return).unsqueeze(-1) # # ret_loss = F.smooth_l1_loss(sample.state[:, 0, 1], sample.cum_return).unsqueeze(-1) # grad_projected = grad_est * 1.3 # grad_projected = torch.pow(grad_projected, 3.) # if grad_projected < 0: # pol_update = 1. + grad_projected # else: # pol_update = 2. - grad_projected # pol_update = sample.state[:, sample.action, 0] * pol_update # self._policy[sample.prior_action]._approx.data[0, sample.action] = pol_update.data # self._value_mem[sample.prior_action]._approx.data[0, sample.action] = \ # 0.9 * self._value_mem[sample.prior_action]._approx.data[0, sample.action] + 0.1 * sample.cum_return # # act_loss = F.smooth_l1_loss(sample.state[:, sample.action, 0], pol_update.data) # return torch.zeros(1).to(sample.action.device) | 2.38514 | 2 |
demo_impl/webui/api_blueprint/routes/supplementary.py | JFF-Bohdan/vrc_t70_demo | 0 | 6617196 | from demo_impl.webui.__version__ import __version__
from flask import jsonify
from loguru import logger
from ..api_blueprint import api_blueprint
@api_blueprint.route("/version")
@api_blueprint.route("/liveness")
def version_endpoint():
    """Report the application version; also serves as the liveness probe."""
    res = {
        "version": __version__,
        "short_name": "VRC-T70 Demo Application",
        "long_name": "Flask based demo application for VRC-T70 Python package (SQLite db used)"
    }
    # Removed stray development-time debug logging ('test-2').
    return jsonify(res)
| from demo_impl.webui.__version__ import __version__
from flask import jsonify
from loguru import logger
from ..api_blueprint import api_blueprint
@api_blueprint.route("/version")
@api_blueprint.route("/liveness")
def version_endpoint():
    """Report the application version; also serves as the liveness probe."""
    res = {
        "version": __version__,
        "short_name": "VRC-T70 Demo Application",
        "long_name": "Flask based demo application for VRC-T70 Python package (SQLite db used)"
    }
    # Removed stray development-time debug logging ('test-2').
    return jsonify(res)
| none | 1 | 1.843107 | 2 | |
framework/oktopus/solver/__init__.py | charlee593/oktopus_framework | 1 | 6617197 | from .oktopus import OktopusSolver
from .mldp import MLDPSolver
from .rspv import RSVPSolver
from .mtrsa import MTRSASolver
from .msa import MSASolver
from .cplex import CPLEXMTESolver, CPLEXSCSolver
from technology import OFRoutingTechnology, SRMcastRoutingTechnology
__all__ = ['OktopusSolver', 'MLDPSolver', 'RSVPSolver',
'OFRoutingTechnology', 'SRMcastRoutingTechnology', 'MTRSASolver', 'MSASolver', 'CPLEXMTESolver', 'CPLEXSCSolver']
ALGO_MAP = {
'oktopus': OktopusSolver,
'mldp': MLDPSolver,
'rspv': RSVPSolver,
'mtrsa': MTRSASolver,
'msa' : MSASolver,
'cplex_mte': CPLEXMTESolver,
'cplex_sc': CPLEXSCSolver
} | from .oktopus import OktopusSolver
from .mldp import MLDPSolver
from .rspv import RSVPSolver
from .mtrsa import MTRSASolver
from .msa import MSASolver
from .cplex import CPLEXMTESolver, CPLEXSCSolver
from technology import OFRoutingTechnology, SRMcastRoutingTechnology
__all__ = ['OktopusSolver', 'MLDPSolver', 'RSVPSolver',
'OFRoutingTechnology', 'SRMcastRoutingTechnology', 'MTRSASolver', 'MSASolver', 'CPLEXMTESolver', 'CPLEXSCSolver']
ALGO_MAP = {
'oktopus': OktopusSolver,
'mldp': MLDPSolver,
'rspv': RSVPSolver,
'mtrsa': MTRSASolver,
'msa' : MSASolver,
'cplex_mte': CPLEXMTESolver,
'cplex_sc': CPLEXSCSolver
} | none | 1 | 1.441939 | 1 | |
cmdb-admin/settings.py | zjj1002/aws-cloud-cmdb-system | 0 | 6617198 | <reponame>zjj1002/aws-cloud-cmdb-system
#!/usr/bin/env python
# -*-coding:utf-8-*-
"""
Author : shenshuo
Date : 2017-10-11 12:58:26
Desc : 配置文件
"""
import os
from websdk.consts import const
ROOT_DIR = os.path.dirname(__file__)
debug = True
xsrf_cookies = False
expire_seconds = 365 * 24 * 60 * 60
ADMIN_SECRET_KEY = os.getenv('ADMIN_SECRET_KEY', '')
ADMIN_TOKEN_SECRET = os.getenv('ADMIN_TOKEN_SECRET', '')
ADMIN_COOKIES_SECRET = os.getenv('ADMIN_COOKIES_SECRET', '')
DEFAULT_DB_DBHOST = os.getenv('DEFAULT_DB_DBHOST', '172.16.0.223')
DEFAULT_DB_DBPORT = os.getenv('DEFAULT_DB_DBPORT', '3306')
DEFAULT_DB_DBUSER = os.getenv('DEFAULT_DB_DBUSER', 'root')
DEFAULT_DB_DBPWD = os.getenv('DEFAULT_DB_DBPWD', '<PASSWORD>')
DEFAULT_DB_DBNAME = os.getenv('DEFAULT_DB_DBNAME', 'cmdb_admin')
READONLY_DB_DBHOST = os.getenv('READONLY_DB_DBHOST', '172.16.0.223')
READONLY_DB_DBPORT = os.getenv('READONLY_DB_DBPORT', '3306')
READONLY_DB_DBUSER = os.getenv('READONLY_DB_DBUSER', 'root')
READONLY_DB_DBPWD = os.getenv('READONLY_DB_DBPWD', '<PASSWORD>')
READONLY_DB_DBNAME = os.getenv('READONLY_DB_DBNAME', 'cmdb_admin')
DEFAULT_REDIS_HOST = os.getenv('DEFAULT_REDIS_HOST', '172.16.0.223')
DEFAULT_REDIS_PORT = os.getenv('DEFAULT_REDIS_PORT', '6379')
DEFAULT_REDIS_DB = 8
DEFAULT_REDIS_AUTH = True
DEFAULT_REDIS_CHARSET = 'utf-8'
DEFAULT_REDIS_PASSWORD = os.getenv('DEFAULT_REDIS_PASSWORD', '<PASSWORD>')
SMTP_SERVER = os.getenv('SMTP_SERVER', '')
SMTP_USER = os.getenv('SMTP_USER', '')
SMTP_PASSWORD = os.getenv('SMTP_PASSWORD', '')
try:
from local_settings import *
except:
pass
settings = dict(
debug=debug,
xsrf_cookies=xsrf_cookies,
cookie_secret=ADMIN_COOKIES_SECRET,
token_secret=ADMIN_TOKEN_SECRET,
secret_key=ADMIN_SECRET_KEY,
expire_seconds=expire_seconds,
app_name='do_mg',
databases={
const.DEFAULT_DB_KEY: {
const.DBHOST_KEY: DEFAULT_DB_DBHOST,
const.DBPORT_KEY: DEFAULT_DB_DBPORT,
const.DBUSER_KEY: DEFAULT_DB_DBUSER,
const.DBPWD_KEY: DEFAULT_DB_DBPWD,
const.DBNAME_KEY: DEFAULT_DB_DBNAME,
},
const.READONLY_DB_KEY: {
const.DBHOST_KEY: READONLY_DB_DBHOST,
const.DBPORT_KEY: READONLY_DB_DBPORT,
const.DBUSER_KEY: READONLY_DB_DBUSER,
const.DBPWD_KEY: READONLY_DB_DBPWD,
const.DBNAME_KEY: READONLY_DB_DBNAME,
}
},
redises={
const.DEFAULT_RD_KEY: {
const.RD_HOST_KEY: DEFAULT_REDIS_HOST,
const.RD_PORT_KEY: DEFAULT_REDIS_PORT,
const.RD_DB_KEY: DEFAULT_REDIS_DB,
const.RD_AUTH_KEY: DEFAULT_REDIS_AUTH,
const.RD_CHARSET_KEY: DEFAULT_REDIS_CHARSET,
const.RD_PASSWORD_KEY: DEFAULT_REDIS_PASSWORD
}
},
smtp={
"mail_host": SMTP_SERVER,
"mail_user": SMTP_USER,
"mail_password": <PASSWORD>,
"mail_port": 465,
"mail_ssl": True
}
)
| #!/usr/bin/env python
# -*-coding:utf-8-*-
"""
Author : shenshuo
Date : 2017-10-11 12:58:26
Desc : 配置文件
"""
import os
from websdk.consts import const
ROOT_DIR = os.path.dirname(__file__)
debug = True
xsrf_cookies = False
expire_seconds = 365 * 24 * 60 * 60
ADMIN_SECRET_KEY = os.getenv('ADMIN_SECRET_KEY', '')
ADMIN_TOKEN_SECRET = os.getenv('ADMIN_TOKEN_SECRET', '')
ADMIN_COOKIES_SECRET = os.getenv('ADMIN_COOKIES_SECRET', '')
DEFAULT_DB_DBHOST = os.getenv('DEFAULT_DB_DBHOST', '172.16.0.223')
DEFAULT_DB_DBPORT = os.getenv('DEFAULT_DB_DBPORT', '3306')
DEFAULT_DB_DBUSER = os.getenv('DEFAULT_DB_DBUSER', 'root')
DEFAULT_DB_DBPWD = os.getenv('DEFAULT_DB_DBPWD', '<PASSWORD>')
DEFAULT_DB_DBNAME = os.getenv('DEFAULT_DB_DBNAME', 'cmdb_admin')
READONLY_DB_DBHOST = os.getenv('READONLY_DB_DBHOST', '172.16.0.223')
READONLY_DB_DBPORT = os.getenv('READONLY_DB_DBPORT', '3306')
READONLY_DB_DBUSER = os.getenv('READONLY_DB_DBUSER', 'root')
READONLY_DB_DBPWD = os.getenv('READONLY_DB_DBPWD', '<PASSWORD>')
READONLY_DB_DBNAME = os.getenv('READONLY_DB_DBNAME', 'cmdb_admin')
DEFAULT_REDIS_HOST = os.getenv('DEFAULT_REDIS_HOST', '172.16.0.223')
DEFAULT_REDIS_PORT = os.getenv('DEFAULT_REDIS_PORT', '6379')
DEFAULT_REDIS_DB = 8
DEFAULT_REDIS_AUTH = True
DEFAULT_REDIS_CHARSET = 'utf-8'
DEFAULT_REDIS_PASSWORD = os.getenv('DEFAULT_REDIS_PASSWORD', '<PASSWORD>')
SMTP_SERVER = os.getenv('SMTP_SERVER', '')
SMTP_USER = os.getenv('SMTP_USER', '')
SMTP_PASSWORD = os.getenv('SMTP_PASSWORD', '')
try:
from local_settings import *
except:
pass
settings = dict(
debug=debug,
xsrf_cookies=xsrf_cookies,
cookie_secret=ADMIN_COOKIES_SECRET,
token_secret=ADMIN_TOKEN_SECRET,
secret_key=ADMIN_SECRET_KEY,
expire_seconds=expire_seconds,
app_name='do_mg',
databases={
const.DEFAULT_DB_KEY: {
const.DBHOST_KEY: DEFAULT_DB_DBHOST,
const.DBPORT_KEY: DEFAULT_DB_DBPORT,
const.DBUSER_KEY: DEFAULT_DB_DBUSER,
const.DBPWD_KEY: DEFAULT_DB_DBPWD,
const.DBNAME_KEY: DEFAULT_DB_DBNAME,
},
const.READONLY_DB_KEY: {
const.DBHOST_KEY: READONLY_DB_DBHOST,
const.DBPORT_KEY: READONLY_DB_DBPORT,
const.DBUSER_KEY: READONLY_DB_DBUSER,
const.DBPWD_KEY: READONLY_DB_DBPWD,
const.DBNAME_KEY: READONLY_DB_DBNAME,
}
},
redises={
const.DEFAULT_RD_KEY: {
const.RD_HOST_KEY: DEFAULT_REDIS_HOST,
const.RD_PORT_KEY: DEFAULT_REDIS_PORT,
const.RD_DB_KEY: DEFAULT_REDIS_DB,
const.RD_AUTH_KEY: DEFAULT_REDIS_AUTH,
const.RD_CHARSET_KEY: DEFAULT_REDIS_CHARSET,
const.RD_PASSWORD_KEY: DEFAULT_REDIS_PASSWORD
}
},
smtp={
"mail_host": SMTP_SERVER,
"mail_user": SMTP_USER,
"mail_password": <PASSWORD>,
"mail_port": 465,
"mail_ssl": True
}
) | zh | 0.299537 | #!/usr/bin/env python # -*-coding:utf-8-*- Author : shenshuo Date : 2017-10-11 12:58:26 Desc : 配置文件 | 1.750005 | 2 |
Eye_Tracking_part1/main.py | AishwaryaVarma/Eyes-Position-Estimator-Mediapipe | 32 | 6617199 | <reponame>AishwaryaVarma/Eyes-Position-Estimator-Mediapipe
import cv2 as cv
import mediapipe as mp
import time
from numpy import greater
import utils
# variables
frame_counter =0
# constants
FONTS =cv.FONT_HERSHEY_COMPLEX
# face bounder indices
FACE_OVAL=[ 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288, 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136, 172, 58, 132, 93, 234, 127, 162, 21, 54, 103,67, 109]
# lips indices for Landmarks
LIPS=[ 61, 146, 91, 181, 84, 17, 314, 405, 321, 375,291, 308, 324, 318, 402, 317, 14, 87, 178, 88, 95,185, 40, 39, 37,0 ,267 ,269 ,270 ,409, 415, 310, 311, 312, 13, 82, 81, 42, 183, 78 ]
LOWER_LIPS =[61, 146, 91, 181, 84, 17, 314, 405, 321, 375, 291, 308, 324, 318, 402, 317, 14, 87, 178, 88, 95]
UPPER_LIPS=[ 185, 40, 39, 37,0 ,267 ,269 ,270 ,409, 415, 310, 311, 312, 13, 82, 81, 42, 183, 78]
# Left eyes indices
LEFT_EYE =[ 362, 382, 381, 380, 374, 373, 390, 249, 263, 466, 388, 387, 386, 385,384, 398 ]
LEFT_EYEBROW =[ 336, 296, 334, 293, 300, 276, 283, 282, 295, 285 ]
# right eyes indices
RIGHT_EYE=[ 33, 7, 163, 144, 145, 153, 154, 155, 133, 173, 157, 158, 159, 160, 161 , 246 ]
RIGHT_EYEBROW=[ 70, 63, 105, 66, 107, 55, 65, 52, 53, 46 ]
map_face_mesh = mp.solutions.face_mesh
# camera object
camera = cv.VideoCapture("VideoFile.mp4")
# landmark detection function
def landmarksDetection(img, results, draw=False):
img_height, img_width= img.shape[:2]
# list[(x,y), (x,y)....]
mesh_coord = [(int(point.x * img_width), int(point.y * img_height)) for point in results.multi_face_landmarks[0].landmark]
if draw :
[cv.circle(img, p, 2, utils.GREEN, -1) for p in mesh_coord]
# returning the list of tuples for each landmarks
return mesh_coord
with map_face_mesh.FaceMesh(min_detection_confidence =0.5, min_tracking_confidence=0.5) as face_mesh:
# starting time here
start_time = time.time()
# starting Video loop here.
while True:
frame_counter +=1 # frame counter
ret, frame = camera.read() # getting frame from camera
if not ret:
break # no more frames break
# resizing frame
# frame = cv.resize(frame, None, fx=2.0, fy=2.0, interpolation=cv.INTER_CUBIC)
# writing orginal image image thumbnail
# cv.imwrite(f'img/img_{frame_counter}.png', frame)
# print(frame_counter)
rgb_frame = cv.cvtColor(frame, cv.COLOR_RGB2BGR)
results = face_mesh.process(rgb_frame)
if results.multi_face_landmarks:
mesh_coords = landmarksDetection(frame, results, False)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in FACE_OVAL], utils.WHITE, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in LEFT_EYE], utils.GREEN, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in RIGHT_EYE], utils.GREEN, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in LEFT_EYEBROW], utils.ORANGE, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in RIGHT_EYEBROW], utils.ORANGE, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in LIPS], utils.BLACK, opacity=0.3 )
# Changes for Thumbnail of youtube Video
[cv.circle(frame,mesh_coords[p], 1, utils.GREEN , -1, cv.LINE_AA) for p in LIPS]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK ,- 1, cv.LINE_AA) for p in RIGHT_EYE]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK , -1, cv.LINE_AA) for p in LEFT_EYE]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK , -1, cv.LINE_AA) for p in RIGHT_EYEBROW]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK , -1, cv.LINE_AA) for p in LEFT_EYEBROW]
[cv.circle(frame,mesh_coords[p], 1, utils.RED , -1, cv.LINE_AA) for p in FACE_OVAL]
# calculating frame per seconds FPS
end_time = time.time()-start_time
fps = frame_counter/end_time
frame =utils.textWithBackground(frame,f'FPS: {round(fps,1)}',FONTS, 1.0, (20, 50), bgOpacity=0.9, textThickness=2)
# writing image for thumbnail drawing shape
# cv.imwrite(f'img/frame_{frame_counter}.png', frame)
cv.imshow('frame', frame)
key = cv.waitKey(1)
if key==ord('q') or key ==ord('Q'):
break
cv.destroyAllWindows()
camera.release()
| import cv2 as cv
import mediapipe as mp
import time
from numpy import greater
import utils
# variables
frame_counter =0
# constants
FONTS =cv.FONT_HERSHEY_COMPLEX
# face bounder indices
FACE_OVAL=[ 10, 338, 297, 332, 284, 251, 389, 356, 454, 323, 361, 288, 397, 365, 379, 378, 400, 377, 152, 148, 176, 149, 150, 136, 172, 58, 132, 93, 234, 127, 162, 21, 54, 103,67, 109]
# lips indices for Landmarks
LIPS=[ 61, 146, 91, 181, 84, 17, 314, 405, 321, 375,291, 308, 324, 318, 402, 317, 14, 87, 178, 88, 95,185, 40, 39, 37,0 ,267 ,269 ,270 ,409, 415, 310, 311, 312, 13, 82, 81, 42, 183, 78 ]
LOWER_LIPS =[61, 146, 91, 181, 84, 17, 314, 405, 321, 375, 291, 308, 324, 318, 402, 317, 14, 87, 178, 88, 95]
UPPER_LIPS=[ 185, 40, 39, 37,0 ,267 ,269 ,270 ,409, 415, 310, 311, 312, 13, 82, 81, 42, 183, 78]
# Left eyes indices
LEFT_EYE =[ 362, 382, 381, 380, 374, 373, 390, 249, 263, 466, 388, 387, 386, 385,384, 398 ]
LEFT_EYEBROW =[ 336, 296, 334, 293, 300, 276, 283, 282, 295, 285 ]
# right eyes indices
RIGHT_EYE=[ 33, 7, 163, 144, 145, 153, 154, 155, 133, 173, 157, 158, 159, 160, 161 , 246 ]
RIGHT_EYEBROW=[ 70, 63, 105, 66, 107, 55, 65, 52, 53, 46 ]
map_face_mesh = mp.solutions.face_mesh
# camera object
camera = cv.VideoCapture("VideoFile.mp4")
# landmark detection function
def landmarksDetection(img, results, draw=False):
img_height, img_width= img.shape[:2]
# list[(x,y), (x,y)....]
mesh_coord = [(int(point.x * img_width), int(point.y * img_height)) for point in results.multi_face_landmarks[0].landmark]
if draw :
[cv.circle(img, p, 2, utils.GREEN, -1) for p in mesh_coord]
# returning the list of tuples for each landmarks
return mesh_coord
with map_face_mesh.FaceMesh(min_detection_confidence =0.5, min_tracking_confidence=0.5) as face_mesh:
# starting time here
start_time = time.time()
# starting Video loop here.
while True:
frame_counter +=1 # frame counter
ret, frame = camera.read() # getting frame from camera
if not ret:
break # no more frames break
# resizing frame
# frame = cv.resize(frame, None, fx=2.0, fy=2.0, interpolation=cv.INTER_CUBIC)
# writing orginal image image thumbnail
# cv.imwrite(f'img/img_{frame_counter}.png', frame)
# print(frame_counter)
rgb_frame = cv.cvtColor(frame, cv.COLOR_RGB2BGR)
results = face_mesh.process(rgb_frame)
if results.multi_face_landmarks:
mesh_coords = landmarksDetection(frame, results, False)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in FACE_OVAL], utils.WHITE, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in LEFT_EYE], utils.GREEN, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in RIGHT_EYE], utils.GREEN, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in LEFT_EYEBROW], utils.ORANGE, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in RIGHT_EYEBROW], utils.ORANGE, opacity=0.4)
frame =utils.fillPolyTrans(frame, [mesh_coords[p] for p in LIPS], utils.BLACK, opacity=0.3 )
# Changes for Thumbnail of youtube Video
[cv.circle(frame,mesh_coords[p], 1, utils.GREEN , -1, cv.LINE_AA) for p in LIPS]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK ,- 1, cv.LINE_AA) for p in RIGHT_EYE]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK , -1, cv.LINE_AA) for p in LEFT_EYE]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK , -1, cv.LINE_AA) for p in RIGHT_EYEBROW]
[cv.circle(frame,mesh_coords[p], 1, utils.BLACK , -1, cv.LINE_AA) for p in LEFT_EYEBROW]
[cv.circle(frame,mesh_coords[p], 1, utils.RED , -1, cv.LINE_AA) for p in FACE_OVAL]
# calculating frame per seconds FPS
end_time = time.time()-start_time
fps = frame_counter/end_time
frame =utils.textWithBackground(frame,f'FPS: {round(fps,1)}',FONTS, 1.0, (20, 50), bgOpacity=0.9, textThickness=2)
# writing image for thumbnail drawing shape
# cv.imwrite(f'img/frame_{frame_counter}.png', frame)
cv.imshow('frame', frame)
key = cv.waitKey(1)
if key==ord('q') or key ==ord('Q'):
break
cv.destroyAllWindows()
camera.release() | en | 0.56329 | # variables # constants # face bounder indices # lips indices for Landmarks # Left eyes indices # right eyes indices # camera object # landmark detection function # list[(x,y), (x,y)....] # returning the list of tuples for each landmarks # starting time here # starting Video loop here. # frame counter # getting frame from camera # no more frames break # resizing frame # frame = cv.resize(frame, None, fx=2.0, fy=2.0, interpolation=cv.INTER_CUBIC) # writing orginal image image thumbnail # cv.imwrite(f'img/img_{frame_counter}.png', frame) # print(frame_counter) # Changes for Thumbnail of youtube Video # calculating frame per seconds FPS # writing image for thumbnail drawing shape # cv.imwrite(f'img/frame_{frame_counter}.png', frame) | 2.277474 | 2 |
project/apps/books/admin.py | cantoniazzi/book-review | 0 | 6617200 | from django.contrib import admin
from .models import Book, Cover
class CoverAdmin(admin.ModelAdmin):
search_fields = ('type', 'name')
list_display = ('type', 'name')
list_filter = ['name']
save_on_top = True
class BookAdmin(admin.ModelAdmin):
search_fields = ('name', 'author','isbn')
list_display = ('name', 'author','isbn','edition')
list_filter = ['name']
save_on_top = True
admin.site.register(Cover, CoverAdmin)
admin.site.register(Book, BookAdmin) | from django.contrib import admin
from .models import Book, Cover
class CoverAdmin(admin.ModelAdmin):
search_fields = ('type', 'name')
list_display = ('type', 'name')
list_filter = ['name']
save_on_top = True
class BookAdmin(admin.ModelAdmin):
search_fields = ('name', 'author','isbn')
list_display = ('name', 'author','isbn','edition')
list_filter = ['name']
save_on_top = True
admin.site.register(Cover, CoverAdmin)
admin.site.register(Book, BookAdmin) | none | 1 | 2.08415 | 2 | |
pages_builder/asset_compiler.py | quis/digitalmarketplace-frontend-toolkit | 13 | 6617201 | <gh_stars>10-100
# coding=utf-8
import os
import sass
import shutil
import codecs
class AssetCompiler(object):
def __init__(self):
self.repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
self.sass_src_root = os.path.join(self.repo_root, "toolkit/scss")
self.sass_dest_root = os.path.join(self.repo_root, "pages/public/stylesheets")
self.sass_default_options = {
"output_style": "nested",
"include_paths": [
os.path.join(self.repo_root, "pages_builder/govuk_frontend_toolkit/stylesheets"),
os.path.join(self.repo_root, "pages_builder/govuk-elements-sass/public/sass"),
os.path.join(self.repo_root, "toolkit/scss"),
os.path.join(self.repo_root, "pages_builder/assets/scss")
]
}
self.clean()
def compile_file(self, file, source_folder):
sass_options = self.sass_default_options
sass_options["filename"] = os.path.join(source_folder, file)
dest_path_abs = os.path.join(self.sass_dest_root, file)
dest_path_abs = self.__change_extension_to(dest_path_abs, "css")
result = sass.compile(**sass_options)
print(" " + sass_options["filename"])
print("▸ " + dest_path_abs)
print("")
with codecs.open(dest_path_abs, "w+", "utf-8") as file:
file.write(result)
def compile(self, folder):
for root, dirs, files in os.walk(folder):
for dir in dirs:
dest_dir = os.path.join(self.sass_dest_root, dir)
if os.path.isdir(dest_dir) is False:
print("★ Creating " + dest_dir)
os.mkdir(dest_dir)
else:
print("✔ Found " + dest_dir)
print("")
for file in files:
if self.__get_filename_parts(file)["extension"] == ".scss":
self.compile_file(os.path.join(root, file).replace(folder + "/", ""), folder)
def clean(self):
if os.path.isdir('pages/public') is False:
os.mkdir('pages/public')
if os.path.isdir(self.sass_dest_root) is True:
shutil.rmtree(self.sass_dest_root)
os.mkdir(self.sass_dest_root)
def __change_extension_to(self, filename, new_extension):
file = self.__get_filename_parts(filename)
return file['name'] + "." + new_extension
def __get_filename_parts(self, filename):
name, extension = os.path.splitext(filename)
file = {
'name': name,
'extension': extension
}
return file
| # coding=utf-8
import os
import sass
import shutil
import codecs
class AssetCompiler(object):
def __init__(self):
self.repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
self.sass_src_root = os.path.join(self.repo_root, "toolkit/scss")
self.sass_dest_root = os.path.join(self.repo_root, "pages/public/stylesheets")
self.sass_default_options = {
"output_style": "nested",
"include_paths": [
os.path.join(self.repo_root, "pages_builder/govuk_frontend_toolkit/stylesheets"),
os.path.join(self.repo_root, "pages_builder/govuk-elements-sass/public/sass"),
os.path.join(self.repo_root, "toolkit/scss"),
os.path.join(self.repo_root, "pages_builder/assets/scss")
]
}
self.clean()
def compile_file(self, file, source_folder):
sass_options = self.sass_default_options
sass_options["filename"] = os.path.join(source_folder, file)
dest_path_abs = os.path.join(self.sass_dest_root, file)
dest_path_abs = self.__change_extension_to(dest_path_abs, "css")
result = sass.compile(**sass_options)
print(" " + sass_options["filename"])
print("▸ " + dest_path_abs)
print("")
with codecs.open(dest_path_abs, "w+", "utf-8") as file:
file.write(result)
def compile(self, folder):
for root, dirs, files in os.walk(folder):
for dir in dirs:
dest_dir = os.path.join(self.sass_dest_root, dir)
if os.path.isdir(dest_dir) is False:
print("★ Creating " + dest_dir)
os.mkdir(dest_dir)
else:
print("✔ Found " + dest_dir)
print("")
for file in files:
if self.__get_filename_parts(file)["extension"] == ".scss":
self.compile_file(os.path.join(root, file).replace(folder + "/", ""), folder)
def clean(self):
if os.path.isdir('pages/public') is False:
os.mkdir('pages/public')
if os.path.isdir(self.sass_dest_root) is True:
shutil.rmtree(self.sass_dest_root)
os.mkdir(self.sass_dest_root)
def __change_extension_to(self, filename, new_extension):
file = self.__get_filename_parts(filename)
return file['name'] + "." + new_extension
def __get_filename_parts(self, filename):
name, extension = os.path.splitext(filename)
file = {
'name': name,
'extension': extension
}
return file | en | 0.644078 | # coding=utf-8 | 2.528903 | 3 |
pnuml/evaluator/prediction_evaluator.py | Snowapril/pnu-machine-learning-2021 | 2 | 6617202 | <gh_stars>1-10
import numpy as np
import matplotlib.pyplot as plt
def summary_difference(prediction, label):
"""
This summary only works in two-case classes prediction.
Summarize difference in prediction and label set into
dictionary with counts FP, FN, TP and TN
and return it.
"""
diff = {"TN": 0, "FN": 0, "FP": 0, "TP": 0}
diff_label = ["TN", "FN", "FP", "TP"]
for (y_hat, y) in zip(prediction, label):
idx = int(y_hat) * 2 + int(y)
diff[diff_label[idx]] += 1
return diff
| import numpy as np
import matplotlib.pyplot as plt
def summary_difference(prediction, label):
"""
This summary only works in two-case classes prediction.
Summarize difference in prediction and label set into
dictionary with counts FP, FN, TP and TN
and return it.
"""
diff = {"TN": 0, "FN": 0, "FP": 0, "TP": 0}
diff_label = ["TN", "FN", "FP", "TP"]
for (y_hat, y) in zip(prediction, label):
idx = int(y_hat) * 2 + int(y)
diff[diff_label[idx]] += 1
return diff | en | 0.905822 | This summary only works in two-case classes prediction. Summarize difference in prediction and label set into dictionary with counts FP, FN, TP and TN and return it. | 3.424788 | 3 |
_site/assets/python_scripts/cvFlip.py | weiSupreme/weiSupreme.github.io | 3 | 6617203 | <reponame>weiSupreme/weiSupreme.github.io
import os
import cv2
src_dir = "select2000/"
flag = 0
if flag == 1:
name_idx = 0
dest_img_dir = "horizontalFlip/image/"
dest_txt_dir = "horizontalFlip/txt/"
else:
name_idx = 3000
dest_img_dir = "verticalFlip/image/"
dest_txt_dir = "verticalFlip/txt/"
img_list = os.listdir(src_dir)
def write_label(old_name, new_name, flag, img_height, img_width):
old_obj = open(src_dir+old_name)
new_obj = open(dest_txt_dir+new_name, 'w')
old_txt = old_obj.read()
writeline_str = ''
gt_split = old_txt.split('\n')
for gt_line in gt_split:
gt_ind = gt_line.split(',')
cordiante_str = []
if len(gt_ind) > 8:
if flag == 1:
for i in range(0, 7, 2):
cordiante_str.append(str(img_width-1-float(gt_ind[i])))
#print len(cordiante_str)
writeline_str = cordiante_str[0] + ',' + gt_ind[1] + ',' + cordiante_str[1] + ',' + gt_ind[3] + ',' + cordiante_str[2] + ',' + gt_ind[5] + ',' + cordiante_str[3] + ',' + gt_ind[7] + ',' + gt_ind[8] + '\n'
else:
for i in range(1, 8, 2):
cordiante_str.append(str(img_height-1-float(gt_ind[i])))
writeline_str = gt_ind[0] + ',' + cordiante_str[0] + ',' + gt_ind[2] + ',' + cordiante_str[1] + ',' + gt_ind[4] + ',' + cordiante_str[2] + ',' + gt_ind[6] + ',' + cordiante_str[3] + ',' + gt_ind[8] + '\n'
new_obj.write(writeline_str)
old_obj.close()
new_obj.close()
for img_name in img_list:
if '.txt' in img_name:
continue
print img_name
txt_name = img_name.rstrip('jpg') + 'txt'
new_txt_name = str(name_idx).zfill(6) + '.txt'
img = cv2.imread(src_dir+img_name)
height, width, c = img.shape
#print height, width
#break
flip_img = []
if flag == 1:
flip_img = cv2.flip(img, 1)
else:
flip_img = cv2.flip(img, 0)
write_label(txt_name, new_txt_name, flag, height, width)
cv2.imwrite(dest_img_dir+str(name_idx).zfill(6)+'.jpg', flip_img)
name_idx += 1
| import os
import cv2
src_dir = "select2000/"
flag = 0
if flag == 1:
name_idx = 0
dest_img_dir = "horizontalFlip/image/"
dest_txt_dir = "horizontalFlip/txt/"
else:
name_idx = 3000
dest_img_dir = "verticalFlip/image/"
dest_txt_dir = "verticalFlip/txt/"
img_list = os.listdir(src_dir)
def write_label(old_name, new_name, flag, img_height, img_width):
old_obj = open(src_dir+old_name)
new_obj = open(dest_txt_dir+new_name, 'w')
old_txt = old_obj.read()
writeline_str = ''
gt_split = old_txt.split('\n')
for gt_line in gt_split:
gt_ind = gt_line.split(',')
cordiante_str = []
if len(gt_ind) > 8:
if flag == 1:
for i in range(0, 7, 2):
cordiante_str.append(str(img_width-1-float(gt_ind[i])))
#print len(cordiante_str)
writeline_str = cordiante_str[0] + ',' + gt_ind[1] + ',' + cordiante_str[1] + ',' + gt_ind[3] + ',' + cordiante_str[2] + ',' + gt_ind[5] + ',' + cordiante_str[3] + ',' + gt_ind[7] + ',' + gt_ind[8] + '\n'
else:
for i in range(1, 8, 2):
cordiante_str.append(str(img_height-1-float(gt_ind[i])))
writeline_str = gt_ind[0] + ',' + cordiante_str[0] + ',' + gt_ind[2] + ',' + cordiante_str[1] + ',' + gt_ind[4] + ',' + cordiante_str[2] + ',' + gt_ind[6] + ',' + cordiante_str[3] + ',' + gt_ind[8] + '\n'
new_obj.write(writeline_str)
old_obj.close()
new_obj.close()
for img_name in img_list:
if '.txt' in img_name:
continue
print img_name
txt_name = img_name.rstrip('jpg') + 'txt'
new_txt_name = str(name_idx).zfill(6) + '.txt'
img = cv2.imread(src_dir+img_name)
height, width, c = img.shape
#print height, width
#break
flip_img = []
if flag == 1:
flip_img = cv2.flip(img, 1)
else:
flip_img = cv2.flip(img, 0)
write_label(txt_name, new_txt_name, flag, height, width)
cv2.imwrite(dest_img_dir+str(name_idx).zfill(6)+'.jpg', flip_img)
name_idx += 1 | ru | 0.125202 | #print len(cordiante_str) #print height, width #break | 2.239904 | 2 |
defenses/neural_cleanse/neural_cleanse.py | Qinaty/input-aware-backdoor-attack-release | 67 | 6617204 | <reponame>Qinaty/input-aware-backdoor-attack-release<gh_stars>10-100
import sys
import numpy as np
from config import get_argument
from detecting import *
sys.path.insert(0, "../..")
def outlier_detection(l1_norm_list, idx_mapping, opt):
print("-" * 30)
print("Determining whether model is backdoor")
consistency_constant = 1.4826
median = torch.median(l1_norm_list)
mad = consistency_constant * torch.median(torch.abs(l1_norm_list - median))
min_mad = torch.abs(torch.min(l1_norm_list) - median) / mad
print("Median: {}, MAD: {}".format(median, mad))
print("Anomaly index: {}".format(min_mad))
if min_mad < 2:
print("Not a backdoor model")
else:
print("This is a backdoor model")
if opt.to_file:
result_path = os.path.join(opt.result, opt.dataset, opt.attack_mode)
output_path = os.path.join(result_path, "{}_{}_output.txt".format(opt.attack_mode, opt.dataset))
with open(output_path, "a+") as f:
f.write(
str(median.cpu().numpy()) + ", " + str(mad.cpu().numpy()) + ", " + str(min_mad.cpu().numpy()) + "\n"
)
l1_norm_list_to_save = [str(value) for value in l1_norm_list.cpu().numpy()]
f.write(", ".join(l1_norm_list_to_save) + "\n")
flag_list = []
for y_label in idx_mapping:
if l1_norm_list[idx_mapping[y_label]] > median:
continue
if torch.abs(l1_norm_list[idx_mapping[y_label]] - median) / mad > 2:
flag_list.append((y_label, l1_norm_list[idx_mapping[y_label]]))
if len(flag_list) > 0:
flag_list = sorted(flag_list, key=lambda x: x[1])
print(
"Flagged label list: {}".format(",".join(["{}: {}".format(y_label, l_norm) for y_label, l_norm in flag_list]))
)
def main():
opt = config.get_argument().parse_args()
if opt.dataset == "mnist" or opt.dataset == "cifar10":
opt.total_label = 10
elif opt.dataset == "gtsrb":
opt.total_label = 43
else:
raise Exception("Invalid Dataset")
if opt.dataset == "cifar10":
opt.input_height = 32
opt.input_width = 32
opt.input_channel = 3
elif opt.dataset == "gtsrb":
opt.input_height = 32
opt.input_width = 32
opt.input_channel = 3
elif opt.dataset == "mnist":
opt.input_height = 28
opt.input_width = 28
opt.input_channel = 1
else:
raise Exception("Invalid Dataset")
result_path = os.path.join(opt.result, opt.dataset, opt.attack_mode)
if not os.path.exists(result_path):
os.makedirs(result_path)
output_path = os.path.join(result_path, "{}_{}_output.txt".format(opt.attack_mode, opt.dataset))
if opt.to_file:
with open(output_path, "w+") as f:
f.write("Output for neural cleanse: {} - {}".format(opt.attack_mode, opt.dataset) + "\n")
# init_mask = np.random.randn(1, opt.input_height, opt.input_width).astype(np.float32)
# init_pattern = np.random.randn(opt.input_channel, opt.input_height, opt.input_width).astype(np.float32)
init_mask = np.ones((1, opt.input_height, opt.input_width)).astype(np.float32)
init_pattern = np.ones((opt.input_channel, opt.input_height, opt.input_width)).astype(np.float32)
for test in range(opt.n_times_test):
print("Test {}:".format(test))
if opt.to_file:
with open(output_path, "a+") as f:
f.write("-" * 30 + "\n")
f.write("Test {}:".format(str(test)) + "\n")
masks = []
idx_mapping = {}
for target_label in range(opt.total_label):
print("----------------- Analyzing label: {} -----------------".format(target_label))
opt.target_label = target_label
recorder, opt = train(opt, init_mask, init_pattern)
mask = recorder.mask_best
masks.append(mask)
idx_mapping[target_label] = len(masks) - 1
l1_norm_list = torch.stack([torch.sum(torch.abs(m)) for m in masks])
print("{} labels found".format(len(l1_norm_list)))
print("Norm values: {}".format(l1_norm_list))
outlier_detection(l1_norm_list, idx_mapping, opt)
if __name__ == "__main__":
main()
| import sys
import numpy as np
from config import get_argument
from detecting import *
sys.path.insert(0, "../..")
def outlier_detection(l1_norm_list, idx_mapping, opt):
print("-" * 30)
print("Determining whether model is backdoor")
consistency_constant = 1.4826
median = torch.median(l1_norm_list)
mad = consistency_constant * torch.median(torch.abs(l1_norm_list - median))
min_mad = torch.abs(torch.min(l1_norm_list) - median) / mad
print("Median: {}, MAD: {}".format(median, mad))
print("Anomaly index: {}".format(min_mad))
if min_mad < 2:
print("Not a backdoor model")
else:
print("This is a backdoor model")
if opt.to_file:
result_path = os.path.join(opt.result, opt.dataset, opt.attack_mode)
output_path = os.path.join(result_path, "{}_{}_output.txt".format(opt.attack_mode, opt.dataset))
with open(output_path, "a+") as f:
f.write(
str(median.cpu().numpy()) + ", " + str(mad.cpu().numpy()) + ", " + str(min_mad.cpu().numpy()) + "\n"
)
l1_norm_list_to_save = [str(value) for value in l1_norm_list.cpu().numpy()]
f.write(", ".join(l1_norm_list_to_save) + "\n")
flag_list = []
for y_label in idx_mapping:
if l1_norm_list[idx_mapping[y_label]] > median:
continue
if torch.abs(l1_norm_list[idx_mapping[y_label]] - median) / mad > 2:
flag_list.append((y_label, l1_norm_list[idx_mapping[y_label]]))
if len(flag_list) > 0:
flag_list = sorted(flag_list, key=lambda x: x[1])
print(
"Flagged label list: {}".format(",".join(["{}: {}".format(y_label, l_norm) for y_label, l_norm in flag_list]))
)
def main():
opt = config.get_argument().parse_args()
if opt.dataset == "mnist" or opt.dataset == "cifar10":
opt.total_label = 10
elif opt.dataset == "gtsrb":
opt.total_label = 43
else:
raise Exception("Invalid Dataset")
if opt.dataset == "cifar10":
opt.input_height = 32
opt.input_width = 32
opt.input_channel = 3
elif opt.dataset == "gtsrb":
opt.input_height = 32
opt.input_width = 32
opt.input_channel = 3
elif opt.dataset == "mnist":
opt.input_height = 28
opt.input_width = 28
opt.input_channel = 1
else:
raise Exception("Invalid Dataset")
result_path = os.path.join(opt.result, opt.dataset, opt.attack_mode)
if not os.path.exists(result_path):
os.makedirs(result_path)
output_path = os.path.join(result_path, "{}_{}_output.txt".format(opt.attack_mode, opt.dataset))
if opt.to_file:
with open(output_path, "w+") as f:
f.write("Output for neural cleanse: {} - {}".format(opt.attack_mode, opt.dataset) + "\n")
# init_mask = np.random.randn(1, opt.input_height, opt.input_width).astype(np.float32)
# init_pattern = np.random.randn(opt.input_channel, opt.input_height, opt.input_width).astype(np.float32)
init_mask = np.ones((1, opt.input_height, opt.input_width)).astype(np.float32)
init_pattern = np.ones((opt.input_channel, opt.input_height, opt.input_width)).astype(np.float32)
for test in range(opt.n_times_test):
print("Test {}:".format(test))
if opt.to_file:
with open(output_path, "a+") as f:
f.write("-" * 30 + "\n")
f.write("Test {}:".format(str(test)) + "\n")
masks = []
idx_mapping = {}
for target_label in range(opt.total_label):
print("----------------- Analyzing label: {} -----------------".format(target_label))
opt.target_label = target_label
recorder, opt = train(opt, init_mask, init_pattern)
mask = recorder.mask_best
masks.append(mask)
idx_mapping[target_label] = len(masks) - 1
l1_norm_list = torch.stack([torch.sum(torch.abs(m)) for m in masks])
print("{} labels found".format(len(l1_norm_list)))
print("Norm values: {}".format(l1_norm_list))
outlier_detection(l1_norm_list, idx_mapping, opt)
if __name__ == "__main__":
main() | ru | 0.074422 | # init_mask = np.random.randn(1, opt.input_height, opt.input_width).astype(np.float32) # init_pattern = np.random.randn(opt.input_channel, opt.input_height, opt.input_width).astype(np.float32) | 2.486603 | 2 |
conanfile.py | ZaMaZaN4iK/conan-openal | 0 | 6617205 | <filename>conanfile.py
from conans import CMake, ConanFile, tools
import os
class OpenALConan(ConanFile):
name = "openal"
version = "1.18.2"
md5 = "fa2cb3df766ab5976c86efbcc1d24d68"
description = "OpenAL Soft is a software implementation of the OpenAL 3D audio API."
url = "http://github.com/bincrafters/conan-openal"
homepage = "https://www.openal.org/"
author = "Bincrafters <<EMAIL>>"
license = "MIT"
exports = ["LICENSE.md"]
exports_sources = ["CMakeLists.txt"]
generators = "cmake"
source_subfolder = "source_subfolder"
settings = "os", "arch", "compiler", "build_type"
options = {"shared": [True, False], "fPIC": [True, False]}
default_options = "shared=False", "fPIC=True"
def configure(self):
if self.settings.os == 'Windows':
del self.options.fPIC
del self.settings.compiler.libcxx
def requirements(self):
if self.settings.os == "Linux":
self.requires("libalsa/1.1.5@conan/stable")
def source(self):
source_url = "https://github.com/kcat/openal-soft"
tools.get("{0}/archive/openal-soft-{1}.tar.gz".format(source_url, self.version), self.md5)
extracted_dir = "openal-soft-openal-soft-" + self.version
os.rename(extracted_dir, self.source_subfolder)
if self.settings.os == 'Windows':
tools.replace_in_file(os.path.join(self.source_subfolder, 'CMakeLists.txt'),
'CHECK_INCLUDE_FILES("windows.h;mmsystem.h" HAVE_MMSYSTEM_H -D_WIN32_WINNT=0x0502)',
'CHECK_INCLUDE_FILES("windows.h;mmsystem.h" HAVE_MMSYSTEM_H)')
def build(self):
cmake = CMake(self)
if self.settings.compiler != 'Visual Studio':
cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = self.options.fPIC
cmake.definitions['LIBTYPE'] = 'SHARED' if self.options.shared else 'STATIC'
cmake.configure()
cmake.build()
cmake.install()
def package(self):
self.copy("*COPYING", dst="licenses", keep_path=False, ignore_case=True)
def package_info(self):
if self.settings.os == "Windows":
self.cpp_info.libs = ["OpenAL32", 'winmm']
else:
self.cpp_info.libs = ["openal"]
if self.settings.os == 'Linux':
self.cpp_info.libs.extend(['dl', 'm'])
elif self.settings.os == 'Macos':
frameworks = ['AudioToolbox', 'CoreAudio']
for framework in frameworks:
self.cpp_info.exelinkflags.append("-framework %s" % framework)
self.cpp_info.sharedlinkflags = self.cpp_info.exelinkflags
self.cpp_info.includedirs = ["include", "include/AL"]
if not self.options.shared:
self.cpp_info.defines.append('AL_LIBTYPE_STATIC')
| <filename>conanfile.py
from conans import CMake, ConanFile, tools
import os
class OpenALConan(ConanFile):
name = "openal"
version = "1.18.2"
md5 = "fa2cb3df766ab5976c86efbcc1d24d68"
description = "OpenAL Soft is a software implementation of the OpenAL 3D audio API."
url = "http://github.com/bincrafters/conan-openal"
homepage = "https://www.openal.org/"
author = "Bincrafters <<EMAIL>>"
license = "MIT"
exports = ["LICENSE.md"]
exports_sources = ["CMakeLists.txt"]
generators = "cmake"
source_subfolder = "source_subfolder"
settings = "os", "arch", "compiler", "build_type"
options = {"shared": [True, False], "fPIC": [True, False]}
default_options = "shared=False", "fPIC=True"
def configure(self):
if self.settings.os == 'Windows':
del self.options.fPIC
del self.settings.compiler.libcxx
def requirements(self):
if self.settings.os == "Linux":
self.requires("libalsa/1.1.5@conan/stable")
def source(self):
source_url = "https://github.com/kcat/openal-soft"
tools.get("{0}/archive/openal-soft-{1}.tar.gz".format(source_url, self.version), self.md5)
extracted_dir = "openal-soft-openal-soft-" + self.version
os.rename(extracted_dir, self.source_subfolder)
if self.settings.os == 'Windows':
tools.replace_in_file(os.path.join(self.source_subfolder, 'CMakeLists.txt'),
'CHECK_INCLUDE_FILES("windows.h;mmsystem.h" HAVE_MMSYSTEM_H -D_WIN32_WINNT=0x0502)',
'CHECK_INCLUDE_FILES("windows.h;mmsystem.h" HAVE_MMSYSTEM_H)')
def build(self):
cmake = CMake(self)
if self.settings.compiler != 'Visual Studio':
cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = self.options.fPIC
cmake.definitions['LIBTYPE'] = 'SHARED' if self.options.shared else 'STATIC'
cmake.configure()
cmake.build()
cmake.install()
def package(self):
self.copy("*COPYING", dst="licenses", keep_path=False, ignore_case=True)
def package_info(self):
if self.settings.os == "Windows":
self.cpp_info.libs = ["OpenAL32", 'winmm']
else:
self.cpp_info.libs = ["openal"]
if self.settings.os == 'Linux':
self.cpp_info.libs.extend(['dl', 'm'])
elif self.settings.os == 'Macos':
frameworks = ['AudioToolbox', 'CoreAudio']
for framework in frameworks:
self.cpp_info.exelinkflags.append("-framework %s" % framework)
self.cpp_info.sharedlinkflags = self.cpp_info.exelinkflags
self.cpp_info.includedirs = ["include", "include/AL"]
if not self.options.shared:
self.cpp_info.defines.append('AL_LIBTYPE_STATIC')
| none | 1 | 2.160061 | 2 | |
src/harrastuspassi/harrastuspassi/views/auth.py | savilmik/harrastuspassi-backend | 2 | 6617206 |
# -*- coding: utf-8 -*-
from rest_framework.authentication import SessionAuthentication
from rest_framework.exceptions import NotAuthenticated
from rest_framework.response import Response
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework_simplejwt.views import TokenObtainPairView as BaseTokenObtainPairView
def get_harrastuspassi_token(user):
token = RefreshToken.for_user(user)
token['first_name'] = user.first_name
token['last_name'] = user.last_name
return token
class TokenPairSerializer(TokenObtainPairSerializer):
@classmethod
def get_token(cls, user):
return get_harrastuspassi_token(user)
class TokenObtainPairView(BaseTokenObtainPairView):
""" View for obtaining a pair of refresh and access tokens for API authentication.
Response will contain both:
{
"access":"<access token>",
"refresh":"<refresh token>"
}
The access token must be included as a header in subsequent API calls:
Authorization: Bearer <access token>
"""
authentication_classes = [SessionAuthentication]
serializer_class = TokenPairSerializer
def get(self, request, *args, **kwargs):
# User with valid session authentication can receive tokens via simple GET
if request.user.is_authenticated:
refresh_token = get_harrastuspassi_token(request.user)
data = {
'refresh': str(refresh_token),
'access': str(refresh_token.access_token),
}
return Response(data)
else:
raise NotAuthenticated
|
# -*- coding: utf-8 -*-
from rest_framework.authentication import SessionAuthentication
from rest_framework.exceptions import NotAuthenticated
from rest_framework.response import Response
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework_simplejwt.views import TokenObtainPairView as BaseTokenObtainPairView
def get_harrastuspassi_token(user):
token = RefreshToken.for_user(user)
token['first_name'] = user.first_name
token['last_name'] = user.last_name
return token
class TokenPairSerializer(TokenObtainPairSerializer):
@classmethod
def get_token(cls, user):
return get_harrastuspassi_token(user)
class TokenObtainPairView(BaseTokenObtainPairView):
""" View for obtaining a pair of refresh and access tokens for API authentication.
Response will contain both:
{
"access":"<access token>",
"refresh":"<refresh token>"
}
The access token must be included as a header in subsequent API calls:
Authorization: Bearer <access token>
"""
authentication_classes = [SessionAuthentication]
serializer_class = TokenPairSerializer
def get(self, request, *args, **kwargs):
# User with valid session authentication can receive tokens via simple GET
if request.user.is_authenticated:
refresh_token = get_harrastuspassi_token(request.user)
data = {
'refresh': str(refresh_token),
'access': str(refresh_token.access_token),
}
return Response(data)
else:
raise NotAuthenticated
| en | 0.79996 | # -*- coding: utf-8 -*- View for obtaining a pair of refresh and access tokens for API authentication. Response will contain both: { "access":"<access token>", "refresh":"<refresh token>" } The access token must be included as a header in subsequent API calls: Authorization: Bearer <access token> # User with valid session authentication can receive tokens via simple GET | 2.526594 | 3 |
tests/options/beam/test_beam_cylindrical.py | pymontecarlo/pymontecarlo | 5 | 6617207 | <reponame>pymontecarlo/pymontecarlo
#!/usr/bin/env python
""" """
# Standard library modules.
# Third party modules.
import pytest
# Local modules.
from pymontecarlo.options.beam.cylindrical import (
CylindricalBeam,
CylindricalBeamBuilder,
)
from pymontecarlo.options.particle import Particle
import pymontecarlo.util.testutil as testutil
# Globals and constants variables.
@pytest.fixture
def beam():
return CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.0, 2.0)
@pytest.fixture
def builder():
return CylindricalBeamBuilder()
def test_cylindricalbeam(beam):
assert beam.particle == Particle.POSITRON
assert beam.energy_eV == pytest.approx(15e3, abs=1e-4)
assert beam.energy_keV == pytest.approx(15.0, abs=1e-4)
assert beam.diameter_m == pytest.approx(123.456, abs=1e-4)
assert beam.x0_m == pytest.approx(1.0, abs=1e-4)
assert beam.y0_m == pytest.approx(2.0, abs=1e-4)
def test_cylindricalbeam_repr(beam):
assert repr(beam) == "<CylindricalBeam(POSITRON, 15000 eV, 123.456 m, (1, 2) m)>"
def test_cylindricalbeam_eq(beam):
assert beam == CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.0, 2.0)
def test_cylindricalbeam_ne(beam):
assert not beam == CylindricalBeam(14e3, 123.456, Particle.POSITRON, 1.0, 2.0)
assert not beam == CylindricalBeam(15e3, 124.456, Particle.POSITRON, 1.0, 2.0)
assert not beam == CylindricalBeam(15e3, 123.456, Particle.ELECTRON, 1.0, 2.0)
assert not beam == CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.1, 2.0)
assert not beam == CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.0, 2.1)
assert not beam == object()
def test_cylindricalbeam_hdf5(beam, tmp_path):
testutil.assert_convert_parse_hdf5(beam, tmp_path)
def test_cylindricalbeam_copy(beam):
testutil.assert_copy(beam)
def test_cylindricalbeam_pickle(beam):
testutil.assert_pickle(beam)
def test_cylindricalbeam_series(beam, seriesbuilder):
beam.convert_series(seriesbuilder)
assert len(seriesbuilder.build()) == 5
def test_cylindricalbeam_document(beam, documentbuilder):
beam.convert_document(documentbuilder)
document = documentbuilder.build()
assert testutil.count_document_nodes(document) == 4
def test_cylindricalbeambuilder(builder):
builder.add_energy_eV(10e3)
builder.add_energy_keV(10) # Not added
builder.add_diameter_m(0.0)
builder.add_diameter_m(0.1)
builder.add_position(0.0, 0.0)
builder.add_position(0.0, 0.1)
beams = builder.build()
assert len(beams) == 4
assert len(builder) == 4
for beam in beams:
assert beam.particle == Particle.ELECTRON
def test_cylindricalbeambuilder_nodiameter(builder):
builder.add_energy_eV(10e3)
builder.add_position(0.0, 0.0)
builder.add_position(0.0, 0.1)
builder.add_particle(Particle.ELECTRON)
beams = builder.build()
assert len(beams) == 0
assert len(builder) == 0
def test_cylindricalbeambuilder_noposition(builder):
builder.add_energy_eV(10e3)
builder.add_diameter_m(0.1)
builder.add_particle(Particle.ELECTRON)
beams = builder.build()
assert len(beams) == 0
assert len(builder) == 0
def test_cylindricalbeambuilder_linescan(builder):
builder.add_energy_eV(10e3)
builder.add_diameter_m(0.123)
builder.add_linescan_x(0.0, 5.0, 1.0, y0_m=0.456)
beams = builder.build()
assert len(beams) == 5
assert len(builder) == 5
for beam in beams:
assert beam.particle == Particle.ELECTRON
assert beam.diameter_m == pytest.approx(0.123, abs=1e-4)
assert beam.y0_m == pytest.approx(0.456, abs=1e-4)
| #!/usr/bin/env python
""" """
# Standard library modules.
# Third party modules.
import pytest
# Local modules.
from pymontecarlo.options.beam.cylindrical import (
CylindricalBeam,
CylindricalBeamBuilder,
)
from pymontecarlo.options.particle import Particle
import pymontecarlo.util.testutil as testutil
# Globals and constants variables.
@pytest.fixture
def beam():
return CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.0, 2.0)
@pytest.fixture
def builder():
return CylindricalBeamBuilder()
def test_cylindricalbeam(beam):
assert beam.particle == Particle.POSITRON
assert beam.energy_eV == pytest.approx(15e3, abs=1e-4)
assert beam.energy_keV == pytest.approx(15.0, abs=1e-4)
assert beam.diameter_m == pytest.approx(123.456, abs=1e-4)
assert beam.x0_m == pytest.approx(1.0, abs=1e-4)
assert beam.y0_m == pytest.approx(2.0, abs=1e-4)
def test_cylindricalbeam_repr(beam):
assert repr(beam) == "<CylindricalBeam(POSITRON, 15000 eV, 123.456 m, (1, 2) m)>"
def test_cylindricalbeam_eq(beam):
assert beam == CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.0, 2.0)
def test_cylindricalbeam_ne(beam):
assert not beam == CylindricalBeam(14e3, 123.456, Particle.POSITRON, 1.0, 2.0)
assert not beam == CylindricalBeam(15e3, 124.456, Particle.POSITRON, 1.0, 2.0)
assert not beam == CylindricalBeam(15e3, 123.456, Particle.ELECTRON, 1.0, 2.0)
assert not beam == CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.1, 2.0)
assert not beam == CylindricalBeam(15e3, 123.456, Particle.POSITRON, 1.0, 2.1)
assert not beam == object()
def test_cylindricalbeam_hdf5(beam, tmp_path):
testutil.assert_convert_parse_hdf5(beam, tmp_path)
def test_cylindricalbeam_copy(beam):
testutil.assert_copy(beam)
def test_cylindricalbeam_pickle(beam):
testutil.assert_pickle(beam)
def test_cylindricalbeam_series(beam, seriesbuilder):
beam.convert_series(seriesbuilder)
assert len(seriesbuilder.build()) == 5
def test_cylindricalbeam_document(beam, documentbuilder):
beam.convert_document(documentbuilder)
document = documentbuilder.build()
assert testutil.count_document_nodes(document) == 4
def test_cylindricalbeambuilder(builder):
builder.add_energy_eV(10e3)
builder.add_energy_keV(10) # Not added
builder.add_diameter_m(0.0)
builder.add_diameter_m(0.1)
builder.add_position(0.0, 0.0)
builder.add_position(0.0, 0.1)
beams = builder.build()
assert len(beams) == 4
assert len(builder) == 4
for beam in beams:
assert beam.particle == Particle.ELECTRON
def test_cylindricalbeambuilder_nodiameter(builder):
builder.add_energy_eV(10e3)
builder.add_position(0.0, 0.0)
builder.add_position(0.0, 0.1)
builder.add_particle(Particle.ELECTRON)
beams = builder.build()
assert len(beams) == 0
assert len(builder) == 0
def test_cylindricalbeambuilder_noposition(builder):
builder.add_energy_eV(10e3)
builder.add_diameter_m(0.1)
builder.add_particle(Particle.ELECTRON)
beams = builder.build()
assert len(beams) == 0
assert len(builder) == 0
def test_cylindricalbeambuilder_linescan(builder):
builder.add_energy_eV(10e3)
builder.add_diameter_m(0.123)
builder.add_linescan_x(0.0, 5.0, 1.0, y0_m=0.456)
beams = builder.build()
assert len(beams) == 5
assert len(builder) == 5
for beam in beams:
assert beam.particle == Particle.ELECTRON
assert beam.diameter_m == pytest.approx(0.123, abs=1e-4)
assert beam.y0_m == pytest.approx(0.456, abs=1e-4) | en | 0.480633 | #!/usr/bin/env python # Standard library modules. # Third party modules. # Local modules. # Globals and constants variables. # Not added | 2.284718 | 2 |
scraper.py | tderick/selenium-youtube-scraper | 0 | 6617208 | import pandas as pd
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
YOUTUBE_TRENDING_URL ='https://www.youtube.com/feed/trending'
def get_driver():
chrome_options = Options()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-dev-shm-usage')
driver = webdriver.Chrome(options=chrome_options)
return driver
def get_videos(driver):
VIDEO_DIV_TAG='ytd-video-renderer'
driver.get(YOUTUBE_TRENDING_URL)
driver.page_source # <=== This line is very for this function to work
videos = driver.find_elements(By.TAG_NAME, VIDEO_DIV_TAG)
return videos
def parse_video(video):
title_tag = video.find_element(By.ID, "video-title")
title = title_tag.text
url = title_tag.get_attribute('href')
channel = video.find_element(By.ID, 'channel-name').text
description = video.find_element(By.ID, 'description-text').text
thumbnail_url = video.find_element(By.TAG_NAME, 'img').get_attribute('src')
video_duration = video.find_element(By.ID, 'text').text
meta_data = video.find_element(By.ID, 'metadata-line').find_elements(By.TAG_NAME, 'span')
number_of_views = meta_data[0].text
publication_date = meta_data[1].text
return {
"title":title,
"url":url,
"video_duration":video_duration,
"publication_date":publication_date,
"number_of_views":number_of_views,
"channel":channel,
"thumbnail_url":thumbnail_url,
"description":description
}
if __name__=="__main__":
driver = get_driver()
print("Fetch data from Youtube")
videos = get_videos(driver)
print(f'========> We found {len(videos)} videos')
print('========> Parse top ten vidéo')
videos_parses = [parse_video(video) for video in videos[:10]]
data = pd.DataFrame(videos_parses)
print(data)
data.to_csv("trending.csv", index=None)
| import pandas as pd
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
YOUTUBE_TRENDING_URL ='https://www.youtube.com/feed/trending'
def get_driver():
chrome_options = Options()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-dev-shm-usage')
driver = webdriver.Chrome(options=chrome_options)
return driver
def get_videos(driver):
VIDEO_DIV_TAG='ytd-video-renderer'
driver.get(YOUTUBE_TRENDING_URL)
driver.page_source # <=== This line is very for this function to work
videos = driver.find_elements(By.TAG_NAME, VIDEO_DIV_TAG)
return videos
def parse_video(video):
title_tag = video.find_element(By.ID, "video-title")
title = title_tag.text
url = title_tag.get_attribute('href')
channel = video.find_element(By.ID, 'channel-name').text
description = video.find_element(By.ID, 'description-text').text
thumbnail_url = video.find_element(By.TAG_NAME, 'img').get_attribute('src')
video_duration = video.find_element(By.ID, 'text').text
meta_data = video.find_element(By.ID, 'metadata-line').find_elements(By.TAG_NAME, 'span')
number_of_views = meta_data[0].text
publication_date = meta_data[1].text
return {
"title":title,
"url":url,
"video_duration":video_duration,
"publication_date":publication_date,
"number_of_views":number_of_views,
"channel":channel,
"thumbnail_url":thumbnail_url,
"description":description
}
if __name__=="__main__":
driver = get_driver()
print("Fetch data from Youtube")
videos = get_videos(driver)
print(f'========> We found {len(videos)} videos')
print('========> Parse top ten vidéo')
videos_parses = [parse_video(video) for video in videos[:10]]
data = pd.DataFrame(videos_parses)
print(data)
data.to_csv("trending.csv", index=None)
| en | 0.976694 | # <=== This line is very for this function to work | 3.315799 | 3 |
Day12/Day12-1.py | remonedo/AdventOfCode2019 | 0 | 6617209 |
def update_velocity(velocity, moon, neighbour, axis):
result = list(velocity)
result[axis] += max(min(neighbour[axis]-moon[axis], 1), -1)
return tuple(result)
def apply_gravity(moons):
result = {}
for moon in moons:
velocity = moons[moon]
for neighbour in moons:
if moon == neighbour:
continue
velocity = update_velocity(velocity, moon, neighbour, 0)
velocity = update_velocity(velocity, moon, neighbour, 1)
velocity = update_velocity(velocity, moon, neighbour, 2)
result[moon] = velocity
return result
def apply_velocity(moons):
result = {}
for moon in moons:
location = list(moon)
location[0] += moons[moon][0]
location[1] += moons[moon][1]
location[2] += moons[moon][2]
result[tuple(location)] = moons[moon]
return result
def main():
moons = {(-9, -1, -1): (0, 0, 0), (2, 9, 5): (0, 0, 0), (10, 18, -12): (0, 0, 0), (-6, 15, -7): (0, 0, 0)}
for _ in range(0, 1000):
moons = apply_gravity(moons)
moons = apply_velocity(moons)
return sum([sum(abs(m) for m in list(moon)) * sum(abs(v) for v in list(moons[moon])) for moon in moons])
print(main())
|
def update_velocity(velocity, moon, neighbour, axis):
result = list(velocity)
result[axis] += max(min(neighbour[axis]-moon[axis], 1), -1)
return tuple(result)
def apply_gravity(moons):
result = {}
for moon in moons:
velocity = moons[moon]
for neighbour in moons:
if moon == neighbour:
continue
velocity = update_velocity(velocity, moon, neighbour, 0)
velocity = update_velocity(velocity, moon, neighbour, 1)
velocity = update_velocity(velocity, moon, neighbour, 2)
result[moon] = velocity
return result
def apply_velocity(moons):
result = {}
for moon in moons:
location = list(moon)
location[0] += moons[moon][0]
location[1] += moons[moon][1]
location[2] += moons[moon][2]
result[tuple(location)] = moons[moon]
return result
def main():
moons = {(-9, -1, -1): (0, 0, 0), (2, 9, 5): (0, 0, 0), (10, 18, -12): (0, 0, 0), (-6, 15, -7): (0, 0, 0)}
for _ in range(0, 1000):
moons = apply_gravity(moons)
moons = apply_velocity(moons)
return sum([sum(abs(m) for m in list(moon)) * sum(abs(v) for v in list(moons[moon])) for moon in moons])
print(main())
| none | 1 | 3.626023 | 4 | |
biggest_Devidor.py | bogdanevropin/euler_project_tasks | 0 | 6617210 | import math
def biggest_devidor(n):
Ans = []
d = 2
while d * d <= n:
if n % d ==0:
Ans.append(d)
n //= d
else:
d += 1
if n > 1:
Ans.append(n)
return Ans
print(biggest_devidor(600851475143))
print(max(biggest_devidor(600851475143))) | import math
def biggest_devidor(n):
Ans = []
d = 2
while d * d <= n:
if n % d ==0:
Ans.append(d)
n //= d
else:
d += 1
if n > 1:
Ans.append(n)
return Ans
print(biggest_devidor(600851475143))
print(max(biggest_devidor(600851475143))) | none | 1 | 3.536827 | 4 | |
src/tests/testApiWithRepetition.py | autumnjolitz/vbox | 1 | 6617211 | import functools
import os
import logging
import unittest
import time
FD_IMG=os.path.realpath(os.path.join(os.path.dirname(__file__), "fd.img"))
def with_new_vm(func):
@functools.wraps(func)
def _wrapper(self):
name = "{}_{}".format(self.__class__.__name__, func.__name__)
vm = self.api.vms.getOrCreate(name)
rv = func(self, vm)
vm.destroy()
return rv
return _wrapper
class TestBound(unittest.TestCase):
"""Ali-level tests than can cause repeated CLI commands to be issued.
Hence, the CLI duplicate command detection is disabled here.
"""
def setUp(self):
import vbox
logging.basicConfig(level=logging.DEBUG)
self.api = vbox.VBox(['C:\Program Files\Oracle\VirtualBox']).api
@with_new_vm
def testFdBoot(self, vm):
img = self.api.floppies.fromFile(FD_IMG)
vm.storageControllers.ensureExist("floppy")
controller = vm.storageControllers.floppy
controller.attach(img, bootable=True)
self.assertFalse(vm.state.running)
oldTime = vm.changeTime
vm.state.start()
self.assertTrue(vm.state.running)
vm.wait(timeout=2)
# this FD image simply stays turned on.
self.assertTrue(vm.state.running)
vm.state.pause()
self.assertFalse(vm.state.running)
self.assertTrue(vm.state.paused)
vm.state.resume()
self.assertTrue(vm.state.running)
# Reset
vm.state.reset()
self.assertTrue(vm.state.running)
vm.state.powerOff()
self.assertGreater(vm.changeTime, oldTime)
self.assertFalse(vm.state.running)
def testDoubleDestroy(self):
name = "{}_testDoubleDestroy".format(self.__class__.__name__)
vm = self.api.vms.new(name)
vm.destroy()
with self.assertRaises(vm.exceptions.VmNotFound):
vm.destroy() | import functools
import os
import logging
import unittest
import time
FD_IMG=os.path.realpath(os.path.join(os.path.dirname(__file__), "fd.img"))
def with_new_vm(func):
@functools.wraps(func)
def _wrapper(self):
name = "{}_{}".format(self.__class__.__name__, func.__name__)
vm = self.api.vms.getOrCreate(name)
rv = func(self, vm)
vm.destroy()
return rv
return _wrapper
class TestBound(unittest.TestCase):
"""Ali-level tests than can cause repeated CLI commands to be issued.
Hence, the CLI duplicate command detection is disabled here.
"""
def setUp(self):
import vbox
logging.basicConfig(level=logging.DEBUG)
self.api = vbox.VBox(['C:\Program Files\Oracle\VirtualBox']).api
@with_new_vm
def testFdBoot(self, vm):
img = self.api.floppies.fromFile(FD_IMG)
vm.storageControllers.ensureExist("floppy")
controller = vm.storageControllers.floppy
controller.attach(img, bootable=True)
self.assertFalse(vm.state.running)
oldTime = vm.changeTime
vm.state.start()
self.assertTrue(vm.state.running)
vm.wait(timeout=2)
# this FD image simply stays turned on.
self.assertTrue(vm.state.running)
vm.state.pause()
self.assertFalse(vm.state.running)
self.assertTrue(vm.state.paused)
vm.state.resume()
self.assertTrue(vm.state.running)
# Reset
vm.state.reset()
self.assertTrue(vm.state.running)
vm.state.powerOff()
self.assertGreater(vm.changeTime, oldTime)
self.assertFalse(vm.state.running)
def testDoubleDestroy(self):
name = "{}_testDoubleDestroy".format(self.__class__.__name__)
vm = self.api.vms.new(name)
vm.destroy()
with self.assertRaises(vm.exceptions.VmNotFound):
vm.destroy() | en | 0.953554 | Ali-level tests than can cause repeated CLI commands to be issued. Hence, the CLI duplicate command detection is disabled here. # this FD image simply stays turned on. # Reset | 2.281641 | 2 |
leetcode/contest/weekly-contest-291/A.py | RegiusQuant/leetcode | 0 | 6617212 | <gh_stars>0
class Solution:
def removeDigit(self, number: str, digit: str) -> str:
result = 0
for i, c in enumerate(number):
if c == digit:
result = max(result, int(number[:i] + number[i + 1:]))
return str(result)
if __name__ == "__main__":
solution = Solution()
print(solution.removeDigit("123", "3"))
print(solution.removeDigit("1231", "1"))
print(solution.removeDigit("551", "5"))
| class Solution:
def removeDigit(self, number: str, digit: str) -> str:
result = 0
for i, c in enumerate(number):
if c == digit:
result = max(result, int(number[:i] + number[i + 1:]))
return str(result)
if __name__ == "__main__":
solution = Solution()
print(solution.removeDigit("123", "3"))
print(solution.removeDigit("1231", "1"))
print(solution.removeDigit("551", "5")) | none | 1 | 3.307527 | 3 | |
src/wmle_pl.py | njanirudh/R-D-HBRS | 3 | 6617213 | <gh_stars>1-10
import numpy as np
import pandas
import pandas as pd
import matplotlib.pyplot as plt
from scipy.stats import norm
fig = plt.figure()
X_val = []
Y_val = []
Z_val = []
# File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - fridge.csv'
File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - drawer.csv'
# File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - door.csv'
# File = '/home/jayasimha/NJ/GitHub/Research-Development-HBRS/reports/drawer_handle_grasp - fridge.csv'
# with open() as csv_file:
with open(File) as csv_file:
csv_reader = pd.read_csv(csv_file, delimiter=',')
print(csv_reader.head())
# print(csv_reader['handle_x'].tolist())
X_val = csv_reader['handle_x'].tolist()
Y_val = csv_reader['handle_y'].tolist()
Z_val = csv_reader['handle_z'].tolist()
success = np.array(csv_reader['Success'].tolist())
# Run for FRIDGE
failure_x = csv_reader['X - failure'].tolist()
failure_y = csv_reader['Y - failure'].tolist()
failure_z = csv_reader['Z - failure'].tolist()
# # RUN for DRAWER
# Note = csv_reader['Notes'].tolist()
# for val in Note:
# print(val)
# # print(val.find('x - failure'))
grasp = csv_reader['Grasp'].tolist()
opening = csv_reader['Opening'].tolist()
# print(Note)
failure_x_weight = [2 if x == 'X' else 1 for x in failure_x]
failure_y_weight = [2 if x == 'Y' else 1 for x in failure_y]
failure_z_weight = [2 if x == 'Z' else 1 for x in failure_z]
grasp_weight = [3 if x=='X' else 1 for x in grasp]
opening_weight = [5 if x=='X' else 1 for x in opening]
failure_x_weighted = np.repeat(X_val, failure_x_weight)
failure_x_weighted = failure_x_weighted[np.isfinite(failure_x_weighted)]
failure_y_weighted = np.repeat(Y_val, failure_y_weight)
failure_y_weighted = failure_y_weighted[np.isfinite(failure_y_weighted)]
failure_z_weighted = np.repeat(Z_val, failure_z_weight)
failure_z_weighted = failure_z_weighted[np.isfinite(failure_z_weighted)]
# # print(np.count_nonzero(failure_x_weight))
# # print(np.count_nonzero(failure_y_weight))
# # print(np.count_nonzero(failure_z_weight))
#
mu_x,sigma_y = norm.fit(failure_x_weighted)
print(mu_x,sigma_y)
mu_y,sigma_y = norm.fit(failure_y_weighted)
print(mu_y,sigma_y )
mu_z,sigma_z = norm.fit(failure_z_weighted)
print(mu_z,sigma_z )
| import numpy as np
import pandas
import pandas as pd
import matplotlib.pyplot as plt
from scipy.stats import norm
fig = plt.figure()
X_val = []
Y_val = []
Z_val = []
# File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - fridge.csv'
File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - drawer.csv'
# File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - door.csv'
# File = '/home/jayasimha/NJ/GitHub/Research-Development-HBRS/reports/drawer_handle_grasp - fridge.csv'
# with open() as csv_file:
with open(File) as csv_file:
csv_reader = pd.read_csv(csv_file, delimiter=',')
print(csv_reader.head())
# print(csv_reader['handle_x'].tolist())
X_val = csv_reader['handle_x'].tolist()
Y_val = csv_reader['handle_y'].tolist()
Z_val = csv_reader['handle_z'].tolist()
success = np.array(csv_reader['Success'].tolist())
# Run for FRIDGE
failure_x = csv_reader['X - failure'].tolist()
failure_y = csv_reader['Y - failure'].tolist()
failure_z = csv_reader['Z - failure'].tolist()
# # RUN for DRAWER
# Note = csv_reader['Notes'].tolist()
# for val in Note:
# print(val)
# # print(val.find('x - failure'))
grasp = csv_reader['Grasp'].tolist()
opening = csv_reader['Opening'].tolist()
# print(Note)
failure_x_weight = [2 if x == 'X' else 1 for x in failure_x]
failure_y_weight = [2 if x == 'Y' else 1 for x in failure_y]
failure_z_weight = [2 if x == 'Z' else 1 for x in failure_z]
grasp_weight = [3 if x=='X' else 1 for x in grasp]
opening_weight = [5 if x=='X' else 1 for x in opening]
failure_x_weighted = np.repeat(X_val, failure_x_weight)
failure_x_weighted = failure_x_weighted[np.isfinite(failure_x_weighted)]
failure_y_weighted = np.repeat(Y_val, failure_y_weight)
failure_y_weighted = failure_y_weighted[np.isfinite(failure_y_weighted)]
failure_z_weighted = np.repeat(Z_val, failure_z_weight)
failure_z_weighted = failure_z_weighted[np.isfinite(failure_z_weighted)]
# # print(np.count_nonzero(failure_x_weight))
# # print(np.count_nonzero(failure_y_weight))
# # print(np.count_nonzero(failure_z_weight))
#
mu_x,sigma_y = norm.fit(failure_x_weighted)
print(mu_x,sigma_y)
mu_y,sigma_y = norm.fit(failure_y_weighted)
print(mu_y,sigma_y )
mu_z,sigma_z = norm.fit(failure_z_weighted)
print(mu_z,sigma_z ) | en | 0.612751 | # File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - fridge.csv' # File = '/home/nj/HBRS/RnD/Research-Development-HBRS/reports/drawer_handle_grasp - door.csv' # File = '/home/jayasimha/NJ/GitHub/Research-Development-HBRS/reports/drawer_handle_grasp - fridge.csv' # with open() as csv_file: # print(csv_reader['handle_x'].tolist()) # Run for FRIDGE # # RUN for DRAWER # Note = csv_reader['Notes'].tolist() # for val in Note: # print(val) # # print(val.find('x - failure')) # print(Note) # # print(np.count_nonzero(failure_x_weight)) # # print(np.count_nonzero(failure_y_weight)) # # print(np.count_nonzero(failure_z_weight)) # | 2.963761 | 3 |
run_t2mapping.py | ypauchard/t2mapping_python | 2 | 6617214 | # Will call the ITK t2mapping executable
#
# Copyright (C) 2018 <NAME>
# License: BSD 3-clause (see LICENSE)
import os
import subprocess
import logging
import configparser
import argparse
import csv
# Path to the t2mapping executable, expected at ./bin/t2mapping relative
# to this script (not relative to the current working directory).
current_script_path = os.path.realpath(__file__)
current_script_dir = os.path.dirname(current_script_path)
exec_path = os.path.join(current_script_dir, 'bin', 't2mapping')
# Create and configure the module-wide logger (root logger, DEBUG level).
LOG_FORMAT = "%(levelname)s %(asctime)s - %(message)s" # see https://docs.python.org/2/library/logging.html#logrecord-attributes
logging.basicConfig(format=LOG_FORMAT, level=logging.DEBUG)
logger = logging.getLogger()
def is_ini_ok(parser):
"""Checks if ini file has the necessary contents.
[t2map]
experiments_to_run = experiment1, experiment2
[experiment1]
input_dir = norm/
image_list_csv = config/image_list.csv
# which images from csv list to use for mapping, 0-based index
images_to_use = 0, 2
output_dir = t2maps/
output_basename = MOJO_0054_3_SE_TR2100.0_1010
# --- optional
# default is _reg_norm
# input_filename_ending = _reg_norm
# 0 LIN, 1 NONLIN (default), 2 NONLIN w. constant
# method = 1
# default 0.0
# threshold = 30.5
"""
expected_sections = [ 't2map' ]
expected_options = ['experiments_to_run']
expected_experiment_options = [ 'input_dir', 'image_list_csv', 'images_to_use', 'output_dir' , 'output_basename']
is_ok = True
for section in expected_sections:
if not parser.has_section(section) :
print('Config section {} missing, please add.'.format(section))
is_ok = False
else:
for candidate in expected_options:
if not parser.has_option(section, candidate):
print( 'Option {}.{} missing, please add.'.format(section, candidate ))
is_ok = False
# if main section is OK, check experiment sections
if is_ok:
experiments = parser.get('t2map','experiments_to_run').replace(" ","").split(',')
for experiment in experiments:
if not parser.has_section(experiment) :
print('Config section {} missing, please add.'.format(experiment))
is_ok = False
else:
for candidate in expected_experiment_options:
if not parser.has_option(experiment, candidate):
print( 'Option {}.{} missing, please add.'.format(experiment, candidate ))
is_ok = False
return is_ok
def get_image_value_list(csv_file_name):
"""
Expects a csv file with header/structure:
filename, TE, mean_background
skips the first (header) line
returns a list of tuples with (filename, TE)
"""
image_value_list = []
with open(csv_file_name) as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
# skip the header (first) line
next(readCSV)
for row in readCSV:
image_value_list.append((row[0], row[1].strip()))
return image_value_list
# Argument parser
a_parser = argparse.ArgumentParser(
description='Calls t2mapping executable to perform t2mapping with given list of images.',
epilog='Example: python normalize_images.py path_to_ini_file \n Configuration is in the ini file.\n ')
a_parser.add_argument('path_to_ini_file', help='Path to configuration (ini) file')
# a_parser.add_argument("-v", "--verbose", help="increase output verbosity (more prints)", action="store_true")
# Parse arguments
args = a_parser.parse_args()
config = configparser.ConfigParser()
config.read(args.path_to_ini_file)
# Check that all parameters needed are in configuration
if not is_ini_ok(config):
exit(0)
# Get all experiments remove whitespces and split into list
experiments = config.get('t2map','experiments_to_run').replace(" ","").split(',')
logger.info("Experiemnts {} defined in {}".format(experiments, args.path_to_ini_file))
for experiment in experiments:
# get parameters
input_dir = config.get(experiment, 'input_dir')
image_and_values = get_image_value_list(config.get(experiment, 'image_list_csv'))
output_dir = config.get(experiment, 'output_dir')
# this is a comma separated string, so we have to split and convert to int
images_to_use = list(map(int, config.get(experiment, 'images_to_use').split(',')))
output_basename = config.get(experiment, 'output_basename')
# get optional parameters
if config.has_option(experiment,'input_filename_ending'):
filename_ending = config.get(experiment,'input_filename_ending')
else:
filename_ending = '_reg_norm'
if config.has_option(experiment, 'method'):
method = config.get(experiment, 'method')
else:
method = '1'
if config.has_option(experiment, 'threshold'):
threshold = config.get(experiment, 'threshold')
else:
threshold = '0.0'
logger.info("Parameters for {} found in {}".format(experiment, args.path_to_ini_file))
logger.info(input_dir)
logger.info(image_and_values)
logger.info(filename_ending)
logger.info(output_dir)
logger.info(images_to_use)
logger.info(output_basename)
logger.info(method)
logger.info(threshold)
# T2mapping needs full path for output
full_output_basename = os.path.join(output_dir, output_basename)
# creating subprocess call list
call_list = [exec_path,
full_output_basename,
method,
threshold]
for idx in images_to_use:
image, te = image_and_values[idx]
split_filename = os.path.splitext(image)
image = split_filename[0] + filename_ending + split_filename[1]
call_list.append(os.path.join(input_dir, image))
call_list.append(te)
#check if output_dir exists, create if not.
if not os.path.exists(output_dir):
logger.info("Creating output directory {}".format(output_dir))
os.makedirs(output_dir)
logger.info("starting process with: {}".format(call_list))
# call t2mapping executable
subprocess.check_call(call_list)
| # Will call the ITK t2mapping executable
#
# Copyright (C) 2018 <NAME>
# License: BSD 3-clause (see LICENSE)
import os
import subprocess
import logging
import configparser
import argparse
import csv
# path to t2mapping executable expected in ./bin/t2mapping
current_script_path = os.path.realpath(__file__)
current_script_dir = os.path.dirname(current_script_path)
exec_path = os.path.join(current_script_dir, 'bin', 't2mapping')
# Create and configure logger
LOG_FORMAT = "%(levelname)s %(asctime)s - %(message)s" # see https://docs.python.org/2/library/logging.html#logrecord-attributes
logging.basicConfig(format=LOG_FORMAT, level=logging.DEBUG)
logger = logging.getLogger()
def is_ini_ok(parser):
"""Checks if ini file has the necessary contents.
[t2map]
experiments_to_run = experiment1, experiment2
[experiment1]
input_dir = norm/
image_list_csv = config/image_list.csv
# which images from csv list to use for mapping, 0-based index
images_to_use = 0, 2
output_dir = t2maps/
output_basename = MOJO_0054_3_SE_TR2100.0_1010
# --- optional
# default is _reg_norm
# input_filename_ending = _reg_norm
# 0 LIN, 1 NONLIN (default), 2 NONLIN w. constant
# method = 1
# default 0.0
# threshold = 30.5
"""
expected_sections = [ 't2map' ]
expected_options = ['experiments_to_run']
expected_experiment_options = [ 'input_dir', 'image_list_csv', 'images_to_use', 'output_dir' , 'output_basename']
is_ok = True
for section in expected_sections:
if not parser.has_section(section) :
print('Config section {} missing, please add.'.format(section))
is_ok = False
else:
for candidate in expected_options:
if not parser.has_option(section, candidate):
print( 'Option {}.{} missing, please add.'.format(section, candidate ))
is_ok = False
# if main section is OK, check experiment sections
if is_ok:
experiments = parser.get('t2map','experiments_to_run').replace(" ","").split(',')
for experiment in experiments:
if not parser.has_section(experiment) :
print('Config section {} missing, please add.'.format(experiment))
is_ok = False
else:
for candidate in expected_experiment_options:
if not parser.has_option(experiment, candidate):
print( 'Option {}.{} missing, please add.'.format(experiment, candidate ))
is_ok = False
return is_ok
def get_image_value_list(csv_file_name):
"""
Expects a csv file with header/structure:
filename, TE, mean_background
skips the first (header) line
returns a list of tuples with (filename, TE)
"""
image_value_list = []
with open(csv_file_name) as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
# skip the header (first) line
next(readCSV)
for row in readCSV:
image_value_list.append((row[0], row[1].strip()))
return image_value_list
# Argument parser
a_parser = argparse.ArgumentParser(
description='Calls t2mapping executable to perform t2mapping with given list of images.',
epilog='Example: python normalize_images.py path_to_ini_file \n Configuration is in the ini file.\n ')
a_parser.add_argument('path_to_ini_file', help='Path to configuration (ini) file')
# a_parser.add_argument("-v", "--verbose", help="increase output verbosity (more prints)", action="store_true")
# Parse arguments
args = a_parser.parse_args()
config = configparser.ConfigParser()
config.read(args.path_to_ini_file)
# Check that all parameters needed are in configuration
if not is_ini_ok(config):
exit(0)
# Get all experiments remove whitespces and split into list
experiments = config.get('t2map','experiments_to_run').replace(" ","").split(',')
logger.info("Experiemnts {} defined in {}".format(experiments, args.path_to_ini_file))
for experiment in experiments:
# get parameters
input_dir = config.get(experiment, 'input_dir')
image_and_values = get_image_value_list(config.get(experiment, 'image_list_csv'))
output_dir = config.get(experiment, 'output_dir')
# this is a comma separated string, so we have to split and convert to int
images_to_use = list(map(int, config.get(experiment, 'images_to_use').split(',')))
output_basename = config.get(experiment, 'output_basename')
# get optional parameters
if config.has_option(experiment,'input_filename_ending'):
filename_ending = config.get(experiment,'input_filename_ending')
else:
filename_ending = '_reg_norm'
if config.has_option(experiment, 'method'):
method = config.get(experiment, 'method')
else:
method = '1'
if config.has_option(experiment, 'threshold'):
threshold = config.get(experiment, 'threshold')
else:
threshold = '0.0'
logger.info("Parameters for {} found in {}".format(experiment, args.path_to_ini_file))
logger.info(input_dir)
logger.info(image_and_values)
logger.info(filename_ending)
logger.info(output_dir)
logger.info(images_to_use)
logger.info(output_basename)
logger.info(method)
logger.info(threshold)
# T2mapping needs full path for output
full_output_basename = os.path.join(output_dir, output_basename)
# creating subprocess call list
call_list = [exec_path,
full_output_basename,
method,
threshold]
for idx in images_to_use:
image, te = image_and_values[idx]
split_filename = os.path.splitext(image)
image = split_filename[0] + filename_ending + split_filename[1]
call_list.append(os.path.join(input_dir, image))
call_list.append(te)
#check if output_dir exists, create if not.
if not os.path.exists(output_dir):
logger.info("Creating output directory {}".format(output_dir))
os.makedirs(output_dir)
logger.info("starting process with: {}".format(call_list))
# call t2mapping executable
subprocess.check_call(call_list)
| en | 0.565346 | # Will call the ITK t2mapping executable # # Copyright (C) 2018 <NAME> # License: BSD 3-clause (see LICENSE) # path to t2mapping executable expected in ./bin/t2mapping # Create and configure logger # see https://docs.python.org/2/library/logging.html#logrecord-attributes Checks if ini file has the necessary contents. [t2map] experiments_to_run = experiment1, experiment2 [experiment1] input_dir = norm/ image_list_csv = config/image_list.csv # which images from csv list to use for mapping, 0-based index images_to_use = 0, 2 output_dir = t2maps/ output_basename = MOJO_0054_3_SE_TR2100.0_1010 # --- optional # default is _reg_norm # input_filename_ending = _reg_norm # 0 LIN, 1 NONLIN (default), 2 NONLIN w. constant # method = 1 # default 0.0 # threshold = 30.5 # if main section is OK, check experiment sections Expects a csv file with header/structure: filename, TE, mean_background skips the first (header) line returns a list of tuples with (filename, TE) # skip the header (first) line # Argument parser # a_parser.add_argument("-v", "--verbose", help="increase output verbosity (more prints)", action="store_true") # Parse arguments # Check that all parameters needed are in configuration # Get all experiments remove whitespces and split into list # get parameters # this is a comma separated string, so we have to split and convert to int # get optional parameters # T2mapping needs full path for output # creating subprocess call list #check if output_dir exists, create if not. # call t2mapping executable | 2.296883 | 2 |
zisan/Seg/davisinteractive/storage/local_test.py | JintuZheng/zisan | 40 | 6617215 | <filename>zisan/Seg/davisinteractive/storage/local_test.py<gh_stars>10-100
import unittest
import numpy as np
import pytest
from davisinteractive.storage import LocalStorage
class TestLocalStorage(unittest.TestCase):
def test_init(self):
storage = LocalStorage()
for c in storage.COLUMNS:
assert c in storage.report
def test_store_operation(self):
user_id = 'empty'
session_id = '12345'
sequence = 'test'
scribble_idx = 1
interaction = 1
timing = 10.34
objects_idx = [1, 2, 3]
frames = [0, 0, 0]
jaccard = [.2, .3, .4]
contour = [.8, .6, .4]
storage = LocalStorage()
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, [.1, .2, 1.0001], contour)
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, [-.1, .2, 1], contour)
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, [1, 1], jaccard, contour)
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, jaccard, [-0.01, 1.0, .4])
assert storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction, timing,
objects_idx, frames, [.1, .000, 1.000], contour)
with pytest.raises(RuntimeError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, jaccard, contour)
with pytest.raises(RuntimeError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction + 2,
timing, objects_idx, frames, jaccard, contour)
assert storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction + 1,
timing, objects_idx, frames, jaccard, contour)
def test_annotated_frames(self):
session_id = 'unused'
sequence = 'bmx-trees'
scribble_idx = 1
storage = LocalStorage()
storage.store_annotated_frame(session_id, sequence, scribble_idx, 1,
False)
annotated_frames = storage.get_annotated_frames(session_id, sequence,
scribble_idx)
self.assertEqual(annotated_frames, (1,))
def test_annotated_frames_full(self):
session_id = 'unused'
sequence = 'bmx-trees'
scribble_idx = 1
nb_frames = 80
storage = LocalStorage()
for i in range(nb_frames):
storage.store_annotated_frame(session_id, sequence, scribble_idx, i,
False)
annotated_frames = storage.get_annotated_frames(session_id, sequence,
scribble_idx)
self.assertEqual(annotated_frames, tuple())
| <filename>zisan/Seg/davisinteractive/storage/local_test.py<gh_stars>10-100
import unittest
import numpy as np
import pytest
from davisinteractive.storage import LocalStorage
class TestLocalStorage(unittest.TestCase):
def test_init(self):
storage = LocalStorage()
for c in storage.COLUMNS:
assert c in storage.report
def test_store_operation(self):
user_id = 'empty'
session_id = '12345'
sequence = 'test'
scribble_idx = 1
interaction = 1
timing = 10.34
objects_idx = [1, 2, 3]
frames = [0, 0, 0]
jaccard = [.2, .3, .4]
contour = [.8, .6, .4]
storage = LocalStorage()
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, [.1, .2, 1.0001], contour)
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, [-.1, .2, 1], contour)
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, [1, 1], jaccard, contour)
with pytest.raises(ValueError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, jaccard, [-0.01, 1.0, .4])
assert storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction, timing,
objects_idx, frames, [.1, .000, 1.000], contour)
with pytest.raises(RuntimeError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction,
timing, objects_idx, frames, jaccard, contour)
with pytest.raises(RuntimeError):
storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction + 2,
timing, objects_idx, frames, jaccard, contour)
assert storage.store_interactions_results(
user_id, session_id, sequence, scribble_idx, interaction + 1,
timing, objects_idx, frames, jaccard, contour)
def test_annotated_frames(self):
session_id = 'unused'
sequence = 'bmx-trees'
scribble_idx = 1
storage = LocalStorage()
storage.store_annotated_frame(session_id, sequence, scribble_idx, 1,
False)
annotated_frames = storage.get_annotated_frames(session_id, sequence,
scribble_idx)
self.assertEqual(annotated_frames, (1,))
def test_annotated_frames_full(self):
session_id = 'unused'
sequence = 'bmx-trees'
scribble_idx = 1
nb_frames = 80
storage = LocalStorage()
for i in range(nb_frames):
storage.store_annotated_frame(session_id, sequence, scribble_idx, i,
False)
annotated_frames = storage.get_annotated_frames(session_id, sequence,
scribble_idx)
self.assertEqual(annotated_frames, tuple())
| none | 1 | 2.040806 | 2 | |
cymepy/export_manager/hooks/csv_writer.py | GMLC-TDC/cymepy | 1 | 6617216 | from cymepy.export_manager.base_definations import ExportManager
import os
class Writer(ExportManager):
def __init__(self, sim_instance, solver, options, logger, **kwargs):
super(Writer, self).__init__(sim_instance, solver, options, logger, **kwargs)
self.results = ["Timestamp,Class,Element,Property,Value\n"]
self.path = os.path.join(
self.settings["project"]['project_path'],
'exports',
)
self.results = []
self.step=0
return
def update(self):
results = super().update()
self.results.append(results)
return
def export(self):
FileHandles = {}
for results in self.results:
for timestamp, cInfo in results.items():
for cName, eInfo in cInfo.items():
if cName not in FileHandles:
FileHandles[cName] = open(
os.path.join(
self.path, f"{cName}.csv"
),
"w"
)
self.writeHeader = True
else:
self.writeHeader = False
headers = ["Timestamp"]
values = [timestamp]
for eName, pptyInfo in eInfo.items():
for ppty, value in pptyInfo.items():
headers.append(f"{eName}.{ppty}")
values.append(value)
if self.writeHeader:
headers = ",".join(headers)
FileHandles[cName].write(headers + "\n")
values = ",".join(values)
FileHandles[cName].write(values + "\n")
for cNama, handler in FileHandles.items():
handler.flush()
handler.close()
| from cymepy.export_manager.base_definations import ExportManager
import os
class Writer(ExportManager):
def __init__(self, sim_instance, solver, options, logger, **kwargs):
super(Writer, self).__init__(sim_instance, solver, options, logger, **kwargs)
self.results = ["Timestamp,Class,Element,Property,Value\n"]
self.path = os.path.join(
self.settings["project"]['project_path'],
'exports',
)
self.results = []
self.step=0
return
def update(self):
results = super().update()
self.results.append(results)
return
def export(self):
FileHandles = {}
for results in self.results:
for timestamp, cInfo in results.items():
for cName, eInfo in cInfo.items():
if cName not in FileHandles:
FileHandles[cName] = open(
os.path.join(
self.path, f"{cName}.csv"
),
"w"
)
self.writeHeader = True
else:
self.writeHeader = False
headers = ["Timestamp"]
values = [timestamp]
for eName, pptyInfo in eInfo.items():
for ppty, value in pptyInfo.items():
headers.append(f"{eName}.{ppty}")
values.append(value)
if self.writeHeader:
headers = ",".join(headers)
FileHandles[cName].write(headers + "\n")
values = ",".join(values)
FileHandles[cName].write(values + "\n")
for cNama, handler in FileHandles.items():
handler.flush()
handler.close()
| none | 1 | 2.369902 | 2 | |
ds18b20_functions.py | e-tinkers/DS18B20 | 1 | 6617217 | import numpy as np
import pandas as pd
import datetime as dt
import matplotlib.pyplot as plt
def get_data(file_name):
# Get data from ds18b20.log file
df = pd.read_csv(file_name, names=['Date', 'Temp'], header=None, sep=' ')
df['Date'] = [dt.datetime.strptime(datestr, '%Y-%m-%d_%H:%M:%S') for datestr in df['Date']]
df['Temp'] = df['Temp'] / 1000.
df.index=df['Date']
return df
def clean_up_data(df):
# Clean up data with error reading of 85.0
temps = np.array(df['Temp'])
if temps[0] == 85.:
temps[0] = temps[1]
if temps[-1] == 85.:
temps[-1] = temps[-2]
for i in range(len(temps)):
if temps[i] == 85.:
temps[i] = np.mean([temps[i - 1], temps[i + 1]])
df['Temp'] = temps
| import numpy as np
import pandas as pd
import datetime as dt
import matplotlib.pyplot as plt
def get_data(file_name):
# Get data from ds18b20.log file
df = pd.read_csv(file_name, names=['Date', 'Temp'], header=None, sep=' ')
df['Date'] = [dt.datetime.strptime(datestr, '%Y-%m-%d_%H:%M:%S') for datestr in df['Date']]
df['Temp'] = df['Temp'] / 1000.
df.index=df['Date']
return df
def clean_up_data(df):
# Clean up data with error reading of 85.0
temps = np.array(df['Temp'])
if temps[0] == 85.:
temps[0] = temps[1]
if temps[-1] == 85.:
temps[-1] = temps[-2]
for i in range(len(temps)):
if temps[i] == 85.:
temps[i] = np.mean([temps[i - 1], temps[i + 1]])
df['Temp'] = temps
| en | 0.719711 | # Get data from ds18b20.log file # Clean up data with error reading of 85.0 | 3.223142 | 3 |
fig03_terminology2D.py | 0todd0000/nonuniform1d | 0 | 6617218 | <filename>fig03_terminology2D.py<gh_stars>0
from math import pi
import numpy as np
from scipy import signal
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot,cm
import spm1d #www.spm1d.org
import nonuniform1d #(in this repository)
def fn_mypulse2D(x, y, F, lamx, lamy):
return (float(F)/(lamx*lamy)) * (1+ np.cos(2*pi/lamx*x)) * (1+ np.cos(2*pi/lamy*y))
def gen_mypulse2D(F, lamx, lamy, dt=0.1):
tx,ty = np.arange(-lamx/2, lamx/2+dt, dt), np.arange(-lamy/2, lamy/2+dt, dt)
X,Y = np.meshgrid(tx,ty)
Z = fn_mypulse2D(X, Y, F, lamx, lamy)
return X,Y,Z #N/mm2
#(0) Isotropic 2D data:
np.random.seed(0)
m,n = 128,128
I = np.random.randn(m,n)
lam0x,lam0y = 35,35
dt = 1.0
Z0 = gen_mypulse2D(15, lam0x, lam0y, dt)[-1]
I0 = 1.2*signal.convolve2d(I, Z0, boundary='symm', mode='same')
#(1) Nonisotropic 2D data:
np.random.seed(2)
I1 = np.random.randn(m,n)
np.random.seed(1)
I2 = np.random.randn(m,n)
lam1x,lam1y = 80,10
lam2x,lam2y = 10,80
dt = 1.0
Z1 = gen_mypulse2D(15, lam1x, lam1y, dt)[-1]
Z2 = gen_mypulse2D(15, lam2x, lam2y, dt)[-1]
I1 = signal.convolve2d(I1, Z1, boundary='symm', mode='same')
I2 = signal.convolve2d(I2, Z2, boundary='symm', mode='same')
#(2) Plot:
# pyplot.close('all')
fontname = 'Times New Roman'
vmin,vmax = -2, 2
### create figure and axes:
axx = [0.069,0.40]
axy = np.linspace(0.71,0.07,3)
axw = [0.25, 0.55]
axh = [0.25, 0.3]
fig = pyplot.figure(figsize=(7,7))
fig.canvas.set_window_title('Figure 3')
ax1 = [pyplot.axes([axx[1],yy,axw[1],axh[1]], projection='3d') for yy in axy-0.04]
ax0 = [pyplot.axes([axx[0],yy,axw[0],axh[0]]) for yy in axy]
AX = np.array([ax0,ax1]).T
### set fonts and sizes:
[pyplot.setp(ax.get_xticklabels()+ax.get_yticklabels(), name=fontname, size=8) for ax in AX[:,0]]
[pyplot.setp(ax.get_xticklabels()+ax.get_yticklabels()+ax.get_zticklabels(), name=fontname, size=8) for ax in AX[:,1]]
### plot images:
ticks = [0, 32, 64, 96, 128]
ticklabels = ['0', '', '0.5', '', '1']
for ax,I in zip(AX[:,0],[I0,I1,I2]):
ax.imshow(I, cmap='gray', vmin=vmin, vmax=vmax, origin='lower')
ax.set_xticks(ticks)
ax.set_yticks(ticks)
ax.set_xticklabels(ticklabels)
ax.set_yticklabels(ticklabels)
ax.set_xlabel('X', name=fontname, size=14)
ax.set_ylabel('Y', name=fontname, size=14)
cbs = [pyplot.colorbar(cax=pyplot.axes([0.33,yy,0.025,axh[0]]), mappable=AX[0,0].images[0]) for yy in axy]
[pyplot.setp(cb.ax.get_yticklabels(), name=fontname, size=8) for cb in cbs]
[cb.ax.set_ylabel('DV value', name=fontname, size=14) for cb in cbs]
### plot surfaces:
X = np.linspace(0, 1, m)
Y = np.linspace(0, 1, n)
X, Y = np.meshgrid(Y, X)
ticks = [0, 0.25, 0.5, 0.75, 1]
ticklabels = ['0', '', '0.5', '', '1']
for ax,I in zip(AX[:,1],[I0,I1,I2]):
surf = ax.plot_surface(X, Y, I, rstride=3, cstride=3, cmap=cm.gray_r, linewidth=0.2, edgecolor='0.7', antialiased=True)
pyplot.setp(ax, xticks=ticks, yticks=ticks, xticklabels=ticklabels, yticklabels=ticklabels)
pyplot.setp(ax, xlim=(0,1), ylim=(0,1), zlim=(-15,15))
ax.set_xlabel('X', name=fontname, size=14)
ax.set_ylabel('Y', name=fontname, size=14)
ax.set_zlabel('DV value', name=fontname, size=14)
### add panel labels:
labels = 'Isotropic', 'Nonisotriopic (X smoother)', 'Nonisotriopic (Y smoother)'
yloc = [1.14, 1.00, 1.00]
for i,(ax,label,yy) in enumerate(zip(AX[:,0], labels, yloc)):
ax.text(1.32, yy, '(%s) %s' %(chr(97+i), label), name=fontname, size=14, transform=ax.transAxes, va='top', bbox=dict(color='w', alpha=0.5))
### annotate:
yloc = [0.65, 0.33]
for yy in yloc:
AX[0,0].annotate("", xy=(0, yy), xycoords='figure fraction', xytext=(1, yy), textcoords='figure fraction', arrowprops=dict(arrowstyle="-", color='0.7') )
pyplot.show()
| <filename>fig03_terminology2D.py<gh_stars>0
from math import pi
import numpy as np
from scipy import signal
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot,cm
import spm1d #www.spm1d.org
import nonuniform1d #(in this repository)
def fn_mypulse2D(x, y, F, lamx, lamy):
return (float(F)/(lamx*lamy)) * (1+ np.cos(2*pi/lamx*x)) * (1+ np.cos(2*pi/lamy*y))
def gen_mypulse2D(F, lamx, lamy, dt=0.1):
tx,ty = np.arange(-lamx/2, lamx/2+dt, dt), np.arange(-lamy/2, lamy/2+dt, dt)
X,Y = np.meshgrid(tx,ty)
Z = fn_mypulse2D(X, Y, F, lamx, lamy)
return X,Y,Z #N/mm2
#(0) Isotropic 2D data:
np.random.seed(0)
m,n = 128,128
I = np.random.randn(m,n)
lam0x,lam0y = 35,35
dt = 1.0
Z0 = gen_mypulse2D(15, lam0x, lam0y, dt)[-1]
I0 = 1.2*signal.convolve2d(I, Z0, boundary='symm', mode='same')
#(1) Nonisotropic 2D data:
np.random.seed(2)
I1 = np.random.randn(m,n)
np.random.seed(1)
I2 = np.random.randn(m,n)
lam1x,lam1y = 80,10
lam2x,lam2y = 10,80
dt = 1.0
Z1 = gen_mypulse2D(15, lam1x, lam1y, dt)[-1]
Z2 = gen_mypulse2D(15, lam2x, lam2y, dt)[-1]
I1 = signal.convolve2d(I1, Z1, boundary='symm', mode='same')
I2 = signal.convolve2d(I2, Z2, boundary='symm', mode='same')
#(2) Plot:
# pyplot.close('all')
fontname = 'Times New Roman'
vmin,vmax = -2, 2
### create figure and axes:
axx = [0.069,0.40]
axy = np.linspace(0.71,0.07,3)
axw = [0.25, 0.55]
axh = [0.25, 0.3]
fig = pyplot.figure(figsize=(7,7))
fig.canvas.set_window_title('Figure 3')
ax1 = [pyplot.axes([axx[1],yy,axw[1],axh[1]], projection='3d') for yy in axy-0.04]
ax0 = [pyplot.axes([axx[0],yy,axw[0],axh[0]]) for yy in axy]
AX = np.array([ax0,ax1]).T
### set fonts and sizes:
[pyplot.setp(ax.get_xticklabels()+ax.get_yticklabels(), name=fontname, size=8) for ax in AX[:,0]]
[pyplot.setp(ax.get_xticklabels()+ax.get_yticklabels()+ax.get_zticklabels(), name=fontname, size=8) for ax in AX[:,1]]
### plot images:
ticks = [0, 32, 64, 96, 128]
ticklabels = ['0', '', '0.5', '', '1']
for ax,I in zip(AX[:,0],[I0,I1,I2]):
ax.imshow(I, cmap='gray', vmin=vmin, vmax=vmax, origin='lower')
ax.set_xticks(ticks)
ax.set_yticks(ticks)
ax.set_xticklabels(ticklabels)
ax.set_yticklabels(ticklabels)
ax.set_xlabel('X', name=fontname, size=14)
ax.set_ylabel('Y', name=fontname, size=14)
cbs = [pyplot.colorbar(cax=pyplot.axes([0.33,yy,0.025,axh[0]]), mappable=AX[0,0].images[0]) for yy in axy]
[pyplot.setp(cb.ax.get_yticklabels(), name=fontname, size=8) for cb in cbs]
[cb.ax.set_ylabel('DV value', name=fontname, size=14) for cb in cbs]
### plot surfaces:
X = np.linspace(0, 1, m)
Y = np.linspace(0, 1, n)
X, Y = np.meshgrid(Y, X)
ticks = [0, 0.25, 0.5, 0.75, 1]
ticklabels = ['0', '', '0.5', '', '1']
for ax,I in zip(AX[:,1],[I0,I1,I2]):
surf = ax.plot_surface(X, Y, I, rstride=3, cstride=3, cmap=cm.gray_r, linewidth=0.2, edgecolor='0.7', antialiased=True)
pyplot.setp(ax, xticks=ticks, yticks=ticks, xticklabels=ticklabels, yticklabels=ticklabels)
pyplot.setp(ax, xlim=(0,1), ylim=(0,1), zlim=(-15,15))
ax.set_xlabel('X', name=fontname, size=14)
ax.set_ylabel('Y', name=fontname, size=14)
ax.set_zlabel('DV value', name=fontname, size=14)
### add panel labels:
labels = 'Isotropic', 'Nonisotriopic (X smoother)', 'Nonisotriopic (Y smoother)'
yloc = [1.14, 1.00, 1.00]
for i,(ax,label,yy) in enumerate(zip(AX[:,0], labels, yloc)):
ax.text(1.32, yy, '(%s) %s' %(chr(97+i), label), name=fontname, size=14, transform=ax.transAxes, va='top', bbox=dict(color='w', alpha=0.5))
### annotate:
yloc = [0.65, 0.33]
for yy in yloc:
AX[0,0].annotate("", xy=(0, yy), xycoords='figure fraction', xytext=(1, yy), textcoords='figure fraction', arrowprops=dict(arrowstyle="-", color='0.7') )
pyplot.show()
| en | 0.364297 | #www.spm1d.org #(in this repository) #N/mm2 #(0) Isotropic 2D data: #(1) Nonisotropic 2D data: #(2) Plot: # pyplot.close('all') ### create figure and axes: ### set fonts and sizes: ### plot images: ### plot surfaces: ### add panel labels: ### annotate: | 2.275906 | 2 |
matchMissing.py | RaquelVasquez/ENTSOE_API_GenerationLoad | 0 | 6617219 | <gh_stars>0
# this code makes magic
# converts quartely hour or half-hour timestamps into standard hourly timestamps
# corrupted data are assigned with the identifier = 0.00000123456789
import sys
import pandas as pd
import numpy as np
def matchData(data, country, samplesPerYear, start, end):
data_Period = knowSampleTime(samplesPerYear, len(data), country)
# reviewing data downloaded
if data_Period[1] == 1: # if downloaded data is OK
if data_Period[0] == 4:
return( np.add.reduceat(data, np.arange(0, len(data), 4)) ) # scales up from quarter hour to hour
elif data_Period[0] == 2:
return( np.add.reduceat(data, np.arange(0, len(data), 2)) ) # scales up from half hour to hour
elif data_Period[0] == 1:
return(data)
else: # if downloaded data is NO OK (missing data)
data = missingDataFunc(data, data_Period, start, end)
return(data)
def knowSampleTime(samplesPerYear, length, country):
'''
input:
samplesPerYear: days of the year * 24
length: lenght of DataFrame
factor:
factor to know of there is data missing
return:
[(1 for hourly 2 for halfhour or 4 quarterhour), (1 for exact data 0 for missing data)]
'''
#import pdb; pdb.set_trace()
factor = 0.90 # porcentage of length data
if length > (samplesPerYear * 4 * factor):
if length == (samplesPerYear * 4):return([4,1])
else:return([4,0])
elif length > (samplesPerYear * 2 * factor): # for halfhour (samplesPerYear *2)
if length == (samplesPerYear * 2):return([2,1])
else:return([2,0])
elif length > (samplesPerYear * factor): # hourly (samplesPerYear)
if length == (samplesPerYear):return([1,1])
else:return([1,0])
else:
print('_ Country: {}, data vector length: {}, '.format(country, length))
sys.exit('Error in the frequency (length) of the data')
def missingDataFunc(data, data_Period, start, end):
if data_Period[0] == 4:
freq = '15min'
data = discardMissingData(freq, data, start, end)
data = np.add.reduceat(data, np.arange(0, len(data), 4)) # scales up from quarter hour to hour
elif data_Period[0] == 2:
freq = '30min'
data = discardMissingData(freq, data, start, end)
data = np.add.reduceat(data, np.arange(0, len(data), 2)) # scales up from half hour to hour
elif data_Period[0] == 1:
freq = 'H'
data = discardMissingData(freq, data, start, end)
return(data)
def discardMissingData(f, data, start, end):
dates_downloaded = pd.to_datetime(data.index.values, 'datetime64[ns]')
dates_downloaded = dates_downloaded.tz_localize(None)
original_dates = pd.to_datetime(pd.date_range(start, end, freq=f), 'datetime64[ns]')
original_dates = original_dates.tz_localize(None)
count = 0
cols = 1
identifier = 0.00000123456789
flagSeries = False
try:
(rows,cols) = data.shape
except(ValueError):
flagSeries = True
rows = (len(original_dates) -1)
dataNew = np.zeros((rows,cols)) # create matrix with the size of data
for row in range(len(original_dates)-1):
try:
if original_dates[row] == dates_downloaded[row - count]:
if flagSeries:
dataNew[row] = data[(row - count)]
else:
for col in range(cols):
dataNew[row,col] = data.iloc[(row - count),col]
else:
if flagSeries:
dataNew[row] = identifier
else:
for col in range(cols):
dataNew[row,col] = identifier
count = count + 1
except(IndexError):
dataNew[row] = identifier
return(dataNew)
| # this code makes magic
# converts quartely hour or half-hour timestamps into standard hourly timestamps
# corrupted data are assigned with the identifier = 0.00000123456789
import sys
import pandas as pd
import numpy as np
def matchData(data, country, samplesPerYear, start, end):
    """Normalise a downloaded data vector to hourly resolution.

    Detects the sampling frequency with knowSampleTime. Complete
    quarter-hourly / half-hourly vectors are summed up to hourly totals;
    hourly vectors pass through; incomplete vectors are repaired via
    missingDataFunc (missing slots marked with its sentinel value).
    """
    data_Period = knowSampleTime(samplesPerYear, len(data), country)
    # reviewing data downloaded
    if data_Period[1] == 1: # if downloaded data is OK
        if data_Period[0] == 4:
            return( np.add.reduceat(data, np.arange(0, len(data), 4)) ) # scales up from quarter hour to hour
        elif data_Period[0] == 2:
            return( np.add.reduceat(data, np.arange(0, len(data), 2)) ) # scales up from half hour to hour
        elif data_Period[0] == 1:
            return(data)
    else: # if downloaded data is NO OK (missing data)
        data = missingDataFunc(data, data_Period, start, end)
        return(data)
def knowSampleTime(samplesPerYear, length, country):
    """Infer the sampling period of a data vector and whether it is complete.

    Parameters
    ----------
    samplesPerYear : int
        Expected number of hourly samples (days of the year * 24).
    length : int
        Actual length of the downloaded data vector.
    country : str
        Country name, used only in the error message.

    Returns
    -------
    list
        [period, complete], where period is 4 (quarter-hourly), 2
        (half-hourly) or 1 (hourly) and complete is 1 when the length
        matches the expected count exactly, 0 when samples are missing.
        Exits the program when the length matches no known frequency.
    """
    threshold = 0.90  # tolerated fraction of the expected length
    # Try the finest frequency first so a near-complete quarter-hourly
    # vector is not mistaken for an over-long half-hourly one.
    for period in (4, 2, 1):
        expected = samplesPerYear * period
        if length > expected * threshold:
            return [period, 1 if length == expected else 0]
    print('_ Country: {}, data vector length: {}, '.format(country, length))
    sys.exit('Error in the frequency (length) of the data')
def missingDataFunc(data, data_Period, start, end):
    """Fill gaps in *data* and aggregate it up to hourly resolution.

    data_Period[0] (from knowSampleTime) selects the source frequency:
    4 -> 15min, 2 -> 30min, 1 -> hourly. Missing slots are replaced by
    discardMissingData's sentinel before summing to hourly totals.
    """
    if data_Period[0] == 4:
        freq = '15min'
        data = discardMissingData(freq, data, start, end)
        data = np.add.reduceat(data, np.arange(0, len(data), 4)) # scales up from quarter hour to hour
    elif data_Period[0] == 2:
        freq = '30min'
        data = discardMissingData(freq, data, start, end)
        data = np.add.reduceat(data, np.arange(0, len(data), 2)) # scales up from half hour to hour
    elif data_Period[0] == 1:
        freq = 'H'
        data = discardMissingData(freq, data, start, end)
    return(data)
def discardMissingData(f, data, start, end):
    """Project downloaded samples onto the complete expected time grid.

    Walks the full [start, end] range at frequency *f* alongside the
    timestamps present in data.index; grid slots with no downloaded
    sample are filled with the sentinel ``identifier`` so downstream code
    can detect them. Returns a (len(grid) - 1, cols) numpy array.
    """
    dates_downloaded = pd.to_datetime(data.index.values, 'datetime64[ns]')
    dates_downloaded = dates_downloaded.tz_localize(None)
    original_dates = pd.to_datetime(pd.date_range(start, end, freq=f), 'datetime64[ns]')
    original_dates = original_dates.tz_localize(None)
    count = 0          # number of grid slots found missing so far
    cols = 1
    identifier = 0.00000123456789  # marks missing/corrupt samples
    flagSeries = False  # True when data is 1-D and shape unpacking fails
    try:
        (rows,cols) = data.shape
    except(ValueError):
        flagSeries = True
    # Output is sized by the expected grid (rows from data.shape is unused).
    rows = (len(original_dates) -1)
    dataNew = np.zeros((rows,cols)) # create matrix with the size of data
    for row in range(len(original_dates)-1):
        try:
            # Offset by `count` to skip grid slots with no downloaded data.
            if original_dates[row] == dates_downloaded[row - count]:
                if flagSeries:
                    dataNew[row] = data[(row - count)]
                else:
                    for col in range(cols):
                        dataNew[row,col] = data.iloc[(row - count),col]
            else:
                if flagSeries:
                    dataNew[row] = identifier
                else:
                    for col in range(cols):
                        dataNew[row,col] = identifier
                count = count + 1
        except(IndexError):
            # Past the end of the download: remaining slots are missing.
            dataNew[row] = identifier
return(dataNew) | en | 0.809728 | # this code makes magic # converts quartely hour or half-hour timestamps into standard hourly timestamps # corrupted data are assigned with the identifier = 0.00000123456789 # reviewing data downloaded # if downloaded data is OK # scales up from quarter hour to hour # scales up from half hour to hour # if downloaded data is NO OK (missing data) input: samplesPerYear: days of the year * 24 length: lenght of DataFrame factor: factor to know of there is data missing return: [(1 for hourly 2 for halfhour or 4 quarterhour), (1 for exact data 0 for missing data)] #import pdb; pdb.set_trace() # porcentage of length data # for halfhour (samplesPerYear *2) # hourly (samplesPerYear) # scales up from quarter hour to hour # scales up from half hour to hour # create matrix with the size of data | 3.221586 | 3 |
fbvoting/db/users.py | corradomonti/fbvoting | 2 | 6617220 | import logging
from collections import defaultdict
from operator import itemgetter
from fbvoting.rediscache import redis_cached
from fbvoting.lib import ignore_errors
import fbvoting.conf as conf
from fbvoting.apis.fb import get_graph_api
from db import mongodb, exists
from categories import categories
from fbvoting.mylogging import report
logger = logging.getLogger(__name__)
@ignore_errors
def store_people_info(user_id, store_his_friends=conf.STORE_NAMES_OF_FRIENDS):
    """Fetch a Facebook user's profile and upsert it into MongoDB.

    Also stores the user's friend list (once per user) and, when
    *store_his_friends* is true, recursively stores each friend's profile
    (one level deep only). Errors are logged, never raised.

    NOTE: Python 2 code (`long`, `exc.message`).
    """
    assert type(user_id) in (int, long)
    try:
        api = get_graph_api()
        user = api[user_id]()
        doc = {'_id': user_id, 'name': user.name, 'link': user.link}
        mongodb.users.save(doc) # save = upsert
        try:
            # Only fetch the friend list if we never stored it before.
            if not exists(mongodb.friends, {'_id': user_id }):
                try:
                    friends = user.friends()
                except:
                    logger.info('I cannot get a friend list from user '+str(user_id)+'.')
                    return
                friends = [long(friend['id']) for friend in friends['data']]
                mongodb.friends.insert({'_id': user_id, 'friends': friends})
                report.mark('saved-friend-list')
                if store_his_friends:
                    logger.info('I am storing also friend information')
                    # Recurse exactly one level: friends-of-friends are not fetched.
                    for friend in friends:
                        store_people_info(friend, store_his_friends=False)
        except Exception as exc:
            logger.error('Was not able to save a friend list. ' + exc.message)
    except Exception as exc:
        logger.error('Was not able to store user info. ' + exc.message)
def userid_to_name(user_id):
    """Resolve a Facebook user id to a display name.

    Tries the local `users` collection first; on a miss it stores the
    profile via store_people_info and reads the name from the Graph API.
    Returns "Unknown" when every lookup fails.
    """
    assert type(user_id) in (int, long)
    db_results = list(mongodb.users.find(
        {'_id': user_id},
        limit=1,
        fields=('name',)
    ))
    if db_results:
        return db_results[0]['name']
    else:
        try:
            api = get_graph_api()
            store_people_info(user_id)
            user = api[user_id]()
            return user['name']
        except:
            logger.exception('Was not able to get name for id ' + str(user_id))
            return "Unknown"
# Projection for userrank queries: include every category field, hide _id.
_category_fields = dict([(category, 1) for category in categories()] + [('_id', 0)])
def get_rank_and_percentiles(user_id):
    """
    Return a dict made of
    {"Jazz": {"score": <points>, "perc": <rounded percentile>}, ...}

    Returns an empty dict (and logs a warning) when the user has no
    entry in the userrank collection.
    """
    assert type(user_id) in (int, long)
    results = list(mongodb.userrank.find(
        {'_id': user_id},
        limit=1, fields=_category_fields
    ))
    if results:
        return results[0]
    else:
        logger.warn("We were asked for ranking of user %i, but I dunno him!", user_id)
        return {}
# NOTE(review): this rebinds the imported `categories` function to its
# result — any later call of `categories()` in this module will operate on
# the rebound object; confirm that is intended.
categories = categories()
@redis_cached
def get_guru_friends(user_id):
    """ Return a list of tuples like (category, friend_id, percentile) of the
    friends of the given user, with at most one per category, ranked by their
    score in their respective category.

    Returns [] when the Graph API call fails or no friend has a rank.
    Result is cached in Redis via @redis_cached.
    """
    try:
        user = get_graph_api()[user_id]()
        friends = user.friends()
    except:
        return []
    friends = [long(friend['id']) for friend in friends['data']]
    ranks = list(mongodb.userrank.find(
        {'_id': {'$in': friends}}
    ))
    if ranks:
        best_friends = []
        for cat in categories:
            # Default score 1 means "no meaningful rank"; only keep friends
            # strictly above it.
            best_friend = max(ranks, key=lambda rank: rank.get(cat, {}).get('score', 1))
            if best_friend.get(cat, {}).get('score') > 1:
                best_friends.append( (cat, best_friend['_id'], best_friend[cat]['perc']) )
        return sorted(best_friends, key=itemgetter(2))
    else:
        return []
def users_with_missing_votes():
    """Yield (user_id, missing_categories) for users who voted in some
    but not all categories.

    Votes are collected from both the `graph` and `advices` collections,
    skipping soft-deleted documents (rm == True).

    NOTE(review): the module rebinds `categories` to `categories()` at
    module level; confirm the rebound object is still callable here,
    otherwise `set(categories())` below will fail at runtime.
    """
    all_categories = set(categories())
    n_categories = len(all_categories)
    query = mongodb.graph.aggregate([ {'$match': {"rm": {'$ne': True}}} , { '$group': {'_id': "$from", 'votes': {'$addToSet': "$category"}} } ])['result']
    user2votes = defaultdict(set, [(item['_id'], set(item['votes'])) for item in query])
    query = mongodb.advices.aggregate([ {'$match': {"rm": {'$ne': True}}} , { '$group': {'_id': "$user", 'votes': {'$addToSet': "$category"}} } ])['result']
    for item in query:
        user2votes[item['_id']] |= set(item['votes'])
    for user, votes in user2votes.items():
        # Users with zero votes or a full set of votes are not reported.
        if 0 < len(votes) < n_categories:
            yield user, all_categories - votes
| import logging
from collections import defaultdict
from operator import itemgetter
from fbvoting.rediscache import redis_cached
from fbvoting.lib import ignore_errors
import fbvoting.conf as conf
from fbvoting.apis.fb import get_graph_api
from db import mongodb, exists
from categories import categories
from fbvoting.mylogging import report
logger = logging.getLogger(__name__)
@ignore_errors
def store_people_info(user_id, store_his_friends=conf.STORE_NAMES_OF_FRIENDS):
    """Upsert a Facebook user's profile (and optionally his friends') into
    MongoDB; the friend list itself is stored only once per user.

    All failures are logged and swallowed. Python 2 code (`long`,
    `exc.message`).
    """
    assert type(user_id) in (int, long)
    try:
        api = get_graph_api()
        user = api[user_id]()
        doc = {'_id': user_id, 'name': user.name, 'link': user.link}
        mongodb.users.save(doc) # save = upsert
        try:
            if not exists(mongodb.friends, {'_id': user_id }):
                try:
                    friends = user.friends()
                except:
                    logger.info('I cannot get a friend list from user '+str(user_id)+'.')
                    return
                friends = [long(friend['id']) for friend in friends['data']]
                mongodb.friends.insert({'_id': user_id, 'friends': friends})
                report.mark('saved-friend-list')
                if store_his_friends:
                    logger.info('I am storing also friend information')
                    # Only one level of recursion: friends are stored with
                    # store_his_friends=False.
                    for friend in friends:
                        store_people_info(friend, store_his_friends=False)
        except Exception as exc:
            logger.error('Was not able to save a friend list. ' + exc.message)
    except Exception as exc:
        logger.error('Was not able to store user info. ' + exc.message)
def userid_to_name(user_id):
    """Return the display name for a user id, preferring the local cache.

    Falls back to the Graph API (storing the profile as a side effect)
    and returns "Unknown" on any failure.
    """
    assert type(user_id) in (int, long)
    db_results = list(mongodb.users.find(
        {'_id': user_id},
        limit=1,
        fields=('name',)
    ))
    if db_results:
        return db_results[0]['name']
    else:
        try:
            api = get_graph_api()
            store_people_info(user_id)
            user = api[user_id]()
            return user['name']
        except:
            logger.exception('Was not able to get name for id ' + str(user_id))
            return "Unknown"
# MongoDB projection selecting all category fields and suppressing _id.
_category_fields = dict([(category, 1) for category in categories()] + [('_id', 0)])
def get_rank_and_percentiles(user_id):
    """
    Return a dict made of
    {"Jazz": {"score": <points>, "perc": <rounded percentile>}, ...}

    An unknown user yields {} and a warning in the log.
    """
    assert type(user_id) in (int, long)
    results = list(mongodb.userrank.find(
        {'_id': user_id},
        limit=1, fields=_category_fields
    ))
    if results:
        return results[0]
    else:
        logger.warn("We were asked for ranking of user %i, but I dunno him!", user_id)
        return {}
# NOTE(review): shadows the imported `categories` function with its
# result; later calls of `categories()` in this module act on this object.
categories = categories()
@redis_cached
def get_guru_friends(user_id):
    """ Return a list of tuples like (category, friend_id, percentile) of the
    friends of the given user, with at most one per category, ranked by their
    score in their respective category.

    Cached in Redis; returns [] on Graph API failure or when no friend
    has a score above the default.
    """
    try:
        user = get_graph_api()[user_id]()
        friends = user.friends()
    except:
        return []
    friends = [long(friend['id']) for friend in friends['data']]
    ranks = list(mongodb.userrank.find(
        {'_id': {'$in': friends}}
    ))
    if ranks:
        best_friends = []
        for cat in categories:
            # Score defaults to 1 ("unranked"); only strictly-better friends count.
            best_friend = max(ranks, key=lambda rank: rank.get(cat, {}).get('score', 1))
            if best_friend.get(cat, {}).get('score') > 1:
                best_friends.append( (cat, best_friend['_id'], best_friend[cat]['perc']) )
        return sorted(best_friends, key=itemgetter(2))
    else:
        return []
def users_with_missing_votes():
    """Generator of (user, missing_category_set) for partial voters.

    Aggregates per-user voted categories from the `graph` and `advices`
    collections (excluding rm == True documents) and yields users whose
    vote count is strictly between 0 and the number of categories.

    NOTE(review): `categories` is rebound at module level — verify
    `categories()` is still callable here.
    """
    all_categories = set(categories())
    n_categories = len(all_categories)
    query = mongodb.graph.aggregate([ {'$match': {"rm": {'$ne': True}}} , { '$group': {'_id': "$from", 'votes': {'$addToSet': "$category"}} } ])['result']
    user2votes = defaultdict(set, [(item['_id'], set(item['votes'])) for item in query])
    query = mongodb.advices.aggregate([ {'$match': {"rm": {'$ne': True}}} , { '$group': {'_id': "$user", 'votes': {'$addToSet': "$category"}} } ])['result']
    for item in query:
        user2votes[item['_id']] |= set(item['votes'])
    for user, votes in user2votes.items():
        if 0 < len(votes) < n_categories:
            yield user, all_categories - votes
| en | 0.888907 | # save = upsert Return a dict made of {"Jazz": {"score": <points>, "perc": <rounded percentile>}, ...} Return a list of tuples like (category, friend_id, percentile) of the friends of the given user, with at most one per category, ranked by their score in their respective category. | 2.231117 | 2 |
ml-api-public/model/ml_service.py | ser0090/deployment-pm | 0 | 6617221 | # -*- coding: utf-8 -*-
import json
import time
import redis
import settings
from classifier import SentimentClassifier
########################################################################
########################################################################
# COMPLETAR AQUI: Crear conexion a redis y asignarla a la variable "db".
########################################################################
# db=0 indice para la tabla dentro de la base de datos.
db = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB_ID)
########################################################################
########################################################################
# COMPLETAR AQUI: Instanciar modelo de análisis de sentimientos.
# Use classifier.SentimentClassifier de la libreria
# spanish_sentiment_analysis ya instalada
########################################################################
model = SentimentClassifier()
########################################################################
def sentiment_from_score(score):
    """Map a positivity score onto a discrete sentiment label.

    Thresholds: scores below 0.45 are "Negativo", scores from 0.45 up to
    (but not including) 0.55 are "Neutral", and 0.55 or above are
    "Positivo".

    Attributes
    ----------
    score : float
        Positivity percentage produced by the classifier.

    Returns
    -------
    sentiment : str
        One of "Negativo", "Neutral" or "Positivo".
    """
    if score >= 0.55:
        return 'Positivo'
    if score >= 0.45:
        return 'Neutral'
    return 'Negativo'
def predict(text: str):
    """Classify the sentiment of a sentence.

    Parameters
    ----------
    text : str
        Sentence to analyse.

    Returns
    -------
    (sentiment, score) : (str, float)
        The label produced by sentiment_from_score ("Negativo",
        "Neutral" or "Positivo") and the raw positivity score from the
        module-level SentimentClassifier.
    """
    # Score with the shared classifier instance, then map the continuous
    # score onto a discrete label.
    score = model.predict(text)
    sentiment = sentiment_from_score(score)
    return sentiment, score
def classify_process():
    """Worker loop: consume queued jobs from Redis, predict, publish results.

    All communication happens through Redis, so this function takes no
    arguments and never returns (infinite loop):
    - reads batches of up to 10 jobs from the "service_queue" list;
    - each job is JSON-encoded bytes carrying the text and a job id;
    - writes the prediction back to Redis under the job id as key;
    - trims the processed jobs off the queue and sleeps before polling.
    """
    while True:
        # Fetch a batch of up to 10 pending jobs (list indices 0..9).
        queue = db.lrange(name='service_queue', start=0, end=9)
        for item in queue:
            # Jobs arrive as encoded JSON, e.g. {'text': '...', 'id': '2'}.
            item_raw = json.loads(item.decode(settings.CODE))
            job_id = item_raw[settings.KEY_ID]
            sentiment, score = predict(item_raw[settings.KEY_TEXT])
            response = {'prediction': sentiment, 'score': score}
            # The client polls Redis on job_id to retrieve this answer.
            db.set(name=job_id, value=json.dumps(response))
        # Drop only the jobs we just processed, then back off briefly.
        db.ltrim(name='service_queue', start=len(queue), end=-1)
        time.sleep(2)
# Script entry point: start the blocking worker loop.
if __name__ == "__main__":
    print('Launching ML service...')
    classify_process()
| # -*- coding: utf-8 -*-
import json
import time
import redis
import settings
from classifier import SentimentClassifier
########################################################################
########################################################################
# COMPLETAR AQUI: Crear conexion a redis y asignarla a la variable "db".
########################################################################
# db=0 indice para la tabla dentro de la base de datos.
db = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT,
db=settings.REDIS_DB_ID)
########################################################################
########################################################################
# COMPLETAR AQUI: Instanciar modelo de análisis de sentimientos.
# Use classifier.SentimentClassifier de la libreria
# spanish_sentiment_analysis ya instalada
########################################################################
model = SentimentClassifier()
########################################################################
def sentiment_from_score(score):
    """Translate a positivity score into a sentiment label.

    Attributes
    ----------
    score : float
        Positivity percentage.

    Returns
    -------
    str
        "Negativo" when score < 0.45, "Neutral" when
        0.45 <= score < 0.55, "Positivo" otherwise.
    """
    label = 'Negativo' if score < 0.45 else ('Neutral' if score < 0.55 else 'Positivo')
    return label
def predict(text: str):
    """Return the sentiment label and positivity score for *text*.

    Parameters
    ----------
    text : str
        Sentence to analyse.

    Returns
    -------
    (sentiment, score) : (str, float)
        Label from sentiment_from_score plus the raw score from the
        module-level classifier.
    """
    # Use the shared SentimentClassifier, then discretise the score.
    score = model.predict(text)
    sentiment = sentiment_from_score(score)
    return sentiment, score
def classify_process():
    """Infinite worker loop servicing sentiment jobs via Redis.

    No parameters and no return value: jobs are pulled in batches of up
    to 10 from the "service_queue" list, each JSON payload is decoded,
    classified with predict(), and the result is stored back in Redis
    keyed by the job id; processed entries are then trimmed off the
    queue before a short sleep.
    """
    while True:
        # Up to 10 queued jobs per iteration.
        queue = db.lrange(name='service_queue', start=0, end=9)
        for item in queue:
            # Payloads are JSON bytes, e.g. {'text': '...', 'id': '2'}.
            item_raw = json.loads(item.decode(settings.CODE))
            job_id = item_raw[settings.KEY_ID]
            sentiment, score = predict(item_raw[settings.KEY_TEXT])
            response = {'prediction': sentiment, 'score': score}
            # job_id is the key the client polls for the answer.
            db.set(name=job_id, value=json.dumps(response))
        # Remove exactly the jobs handled above, then back off.
        db.ltrim(name='service_queue', start=len(queue), end=-1)
        time.sleep(2)
# Entry point: run the worker loop forever.
if __name__ == "__main__":
    print('Launching ML service...')
    classify_process()
| es | 0.763113 | # -*- coding: utf-8 -*- ######################################################################## ######################################################################## # COMPLETAR AQUI: Crear conexion a redis y asignarla a la variable "db". ######################################################################## # db=0 indice para la tabla dentro de la base de datos. ######################################################################## ######################################################################## # COMPLETAR AQUI: Instanciar modelo de análisis de sentimientos. # Use classifier.SentimentClassifier de la libreria # spanish_sentiment_analysis ya instalada ######################################################################## ######################################################################## Esta función recibe como entrada el score de positividad de nuestra sentencia y dependiendo su valor devuelve uno de las siguientes clases: - "Positivo": Cuando el score es mayor a 0.55. - "Neutral": Cuando el score se encuentra entre 0.45 y 0.55. - "Negativo": Cuando el score es menor a 0.45. Attributes ---------- score : float Porcentaje de positividad. Returns ------- sentiment : str Una de las siguientes etiquetas: "Negativo", "Neutral" o "Positivo". #################################################################### # COMPLETAR AQUI #################################################################### #################################################################### Esta función recibe como entrada una oración y devuelve una predicción de su sentimiento acompañado del score de positividad. Attributes ---------- text : str Sentencia para analizar Returns ------- sentiment : str Una de las siguientes etiquetas: "Negativo", "Neutral" o "Positivo". score : float Porcentaje de positividad. 
#################################################################### # COMPLETAR AQUI: Utilice el clasificador instanciado previamente # ("model") para obtener el score de positividad. # Luego utilice la función "sentiment_from_score" de este módulo # para obtener el sentimiento ("sentiment") a partir del score. #################################################################### #################################################################### Obtiene trabajos encolados por el cliente desde Redis. Los procesa y devuelve resultados. Toda la comunicación se realiza a travez de Redis, por ello esta función no posee atributos de entrada ni salida. # Iteramos intentando obtener trabajos para procesar # Worker que se encuentra escuchando tod el dia. ################################################################## # COMPLETAR AQUI: Obtenga un batch de trabajos encolados, use # lrange de Redis. Almacene los trabajos en la variable "queue". # Servidor de procesamiento obtiene 10 tareas encoladas a la base # de datos ################################################################## ################################################################## # Iteramos por cada trabajo obtenido ############################################################## # COMPLETAR AQUI: # - Utilice nuestra función "predict" para procesar la # sentencia enviada en el trabajo. # - Cree un diccionario con dos entradas: "prediction" y # "score" donde almacenara los resultados obtenidos. # - Utilice la funcion "set" de Redis para enviar la # respuesta. Recuerde usar como "key" el "job_id". # ############################################################## # item = {'text': 'hoy es un lindo dia', 'id': '2'} # el item se encuentra codificado, ya que el envio de bytes tiene # menos carga que enviar texto plano. # el job_id es el idenficador de respuesta. 
############################################################## ################################################################## # COMPLETAR AQUI: Use ltrim de Redis para borrar los trabajos ya # procesados. Luego duerma durante unos milisengundos antes de # pedir por mas trabajos. ################################################################## ################################################################## # se borran los mensaje de la fifo. # service_queue, es similar al topic de kafka ################################################################## | 2.140521 | 2 |
demo/image-classification/predict.py | Austendeng/PaddleHub | 0 | 6617222 | import argparse
import os
import paddle.fluid as fluid
import paddlehub as hub
import numpy as np
# yapf: disable
parser = argparse.ArgumentParser(__doc__)
parser.add_argument("--use_gpu", type=bool, default=False, help="Whether use GPU for predict.")
parser.add_argument("--checkpoint_dir", type=str, default="paddlehub_finetune_ckpt", help="Path to save log data.")
parser.add_argument("--module", type=str, default="resnet50", help="Module used as a feature extractor.")
parser.add_argument("--dataset", type=str, default="flowers", help="Dataset to finetune.")
# yapf: enable.
module_map = {
"resnet50": "resnet_v2_50_imagenet",
"resnet101": "resnet_v2_101_imagenet",
"resnet152": "resnet_v2_152_imagenet",
"mobilenet": "mobilenet_v2_imagenet",
"nasnet": "nasnet_imagenet",
"pnasnet": "pnasnet_imagenet"
}
def predict(args):
    """Run image-classification inference with a fine-tuned PaddleHub model.

    Selects the dataset named by args.dataset (for its label map),
    builds the feature-extractor module named by args.module, loads the
    best fine-tuned checkpoint from args.checkpoint_dir/best_model and
    prints the predicted label for two hard-coded test images.

    Raises ValueError for an unknown dataset name; exits(1) when the
    checkpoint directory is missing.
    """
    if args.dataset.lower() == "flowers":
        dataset = hub.dataset.Flowers()
    elif args.dataset.lower() == "dogcat":
        dataset = hub.dataset.DogCat()
    elif args.dataset.lower() == "indoor67":
        dataset = hub.dataset.Indoor67()
    elif args.dataset.lower() == "food101":
        dataset = hub.dataset.Food101()
    elif args.dataset.lower() == "stanforddogs":
        dataset = hub.dataset.StanfordDogs()
    else:
        raise ValueError("%s dataset is not defined" % args.dataset)
    label_map = dataset.label_dict()
    num_labels = len(label_map)
    module = hub.Module(name=args.module)
    input_dict, output_dict, program = module.context()
    # Reader configured from the module's expected input geometry/stats;
    # dataset=None because we feed explicit file paths below.
    data_reader = hub.reader.ImageClassificationReader(
        image_width=module.get_expected_image_width(),
        image_height=module.get_expected_image_height(),
        images_mean=module.get_pretrained_images_mean(),
        images_std=module.get_pretrained_images_std(),
        dataset=None)
    img = input_dict["image"]
    feature_map = output_dict["feature_map"]
    task = hub.create_img_cls_task(feature=feature_map, num_classes=num_labels)
    # NOTE(review): this assignment duplicates the one above.
    img = input_dict["image"]
    feed_list = [img.name]
    with fluid.program_guard(task.inference_program()):
        place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
        exe = fluid.Executor(place)
        pretrained_model_dir = os.path.join(args.checkpoint_dir, "best_model")
        if not os.path.exists(pretrained_model_dir):
            hub.logger.error(
                "pretrained model dir %s didn't exist" % pretrained_model_dir)
            exit(1)
        fluid.io.load_persistables(exe, pretrained_model_dir)
        feeder = fluid.DataFeeder(feed_list=feed_list, place=place)
        data = ["test/test_img_roses.jpg", "test/test_img_daisy.jpg"]
        predict_reader = data_reader.data_generator(
            phase="predict", batch_size=1, data=data)
        for index, batch in enumerate(predict_reader()):
            result, = exe.run(
                feed=feeder.feed(batch), fetch_list=[task.variable('probs')])
            # Highest-probability class index -> human-readable label.
            predict_result = label_map[np.argsort(result[0])[::-1][0]]
            print("input %i is %s, and the predict result is %s" %
                  (index, data[index], predict_result))
# Entry point: validate the module short-name, map it to the full
# PaddleHub module name and run inference.
if __name__ == "__main__":
    args = parser.parse_args()
    if not args.module in module_map:
        hub.logger.error("module should in %s" % module_map.keys())
        exit(1)
    args.module = module_map[args.module]
    predict(args)
| import argparse
import os
import paddle.fluid as fluid
import paddlehub as hub
import numpy as np
# yapf: disable
parser = argparse.ArgumentParser(__doc__)
parser.add_argument("--use_gpu", type=bool, default=False, help="Whether use GPU for predict.")
parser.add_argument("--checkpoint_dir", type=str, default="paddlehub_finetune_ckpt", help="Path to save log data.")
parser.add_argument("--module", type=str, default="resnet50", help="Module used as a feature extractor.")
parser.add_argument("--dataset", type=str, default="flowers", help="Dataset to finetune.")
# yapf: enable.
module_map = {
"resnet50": "resnet_v2_50_imagenet",
"resnet101": "resnet_v2_101_imagenet",
"resnet152": "resnet_v2_152_imagenet",
"mobilenet": "mobilenet_v2_imagenet",
"nasnet": "nasnet_imagenet",
"pnasnet": "pnasnet_imagenet"
}
def predict(args):
    """Predict labels for two bundled test images using a fine-tuned
    PaddleHub image classifier.

    The dataset choice only supplies the label dictionary; the actual
    weights are restored from args.checkpoint_dir/best_model. Unknown
    dataset names raise ValueError; a missing checkpoint exits(1).
    """
    if args.dataset.lower() == "flowers":
        dataset = hub.dataset.Flowers()
    elif args.dataset.lower() == "dogcat":
        dataset = hub.dataset.DogCat()
    elif args.dataset.lower() == "indoor67":
        dataset = hub.dataset.Indoor67()
    elif args.dataset.lower() == "food101":
        dataset = hub.dataset.Food101()
    elif args.dataset.lower() == "stanforddogs":
        dataset = hub.dataset.StanfordDogs()
    else:
        raise ValueError("%s dataset is not defined" % args.dataset)
    label_map = dataset.label_dict()
    num_labels = len(label_map)
    module = hub.Module(name=args.module)
    input_dict, output_dict, program = module.context()
    # dataset=None: input file paths are supplied explicitly below.
    data_reader = hub.reader.ImageClassificationReader(
        image_width=module.get_expected_image_width(),
        image_height=module.get_expected_image_height(),
        images_mean=module.get_pretrained_images_mean(),
        images_std=module.get_pretrained_images_std(),
        dataset=None)
    img = input_dict["image"]
    feature_map = output_dict["feature_map"]
    task = hub.create_img_cls_task(feature=feature_map, num_classes=num_labels)
    # NOTE(review): duplicate of the assignment a few lines above.
    img = input_dict["image"]
    feed_list = [img.name]
    with fluid.program_guard(task.inference_program()):
        place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
        exe = fluid.Executor(place)
        pretrained_model_dir = os.path.join(args.checkpoint_dir, "best_model")
        if not os.path.exists(pretrained_model_dir):
            hub.logger.error(
                "pretrained model dir %s didn't exist" % pretrained_model_dir)
            exit(1)
        fluid.io.load_persistables(exe, pretrained_model_dir)
        feeder = fluid.DataFeeder(feed_list=feed_list, place=place)
        data = ["test/test_img_roses.jpg", "test/test_img_daisy.jpg"]
        predict_reader = data_reader.data_generator(
            phase="predict", batch_size=1, data=data)
        for index, batch in enumerate(predict_reader()):
            result, = exe.run(
                feed=feeder.feed(batch), fetch_list=[task.variable('probs')])
            # argsort descending: take the most probable class.
            predict_result = label_map[np.argsort(result[0])[::-1][0]]
            print("input %i is %s, and the predict result is %s" %
                  (index, data[index], predict_result))
# Entry point: resolve the module alias and run prediction.
if __name__ == "__main__":
    args = parser.parse_args()
    if not args.module in module_map:
        hub.logger.error("module should in %s" % module_map.keys())
        exit(1)
    args.module = module_map[args.module]
    predict(args)
| en | 0.590897 | # yapf: disable # yapf: enable. | 2.462949 | 2 |
toss/responses/api/viewsets.py | toss-app/toss-backend | 0 | 6617223 | from ..models import Response
from .serializers import ResponseSerializer
from helpers.api.viewsets import AuthenticatedModelViewSet
class ResponseViewSet(AuthenticatedModelViewSet):
serializer_class = ResponseSerializer
queryset = Response.objects.all()
| from ..models import Response
from .serializers import ResponseSerializer
from helpers.api.viewsets import AuthenticatedModelViewSet
class ResponseViewSet(AuthenticatedModelViewSet):
serializer_class = ResponseSerializer
queryset = Response.objects.all()
| none | 1 | 1.852216 | 2 | |
Classes/Base.py | eddo888/ClassyData | 0 | 6617224 | <gh_stars>0
#!/usr/bin/env python3
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
| #!/usr/bin/env python3
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base() | fr | 0.221828 | #!/usr/bin/env python3 | 1.441163 | 1 |
app/database/config.py | ylgnerbecton/api_luiza_labs | 0 | 6617225 | <filename>app/database/config.py<gh_stars>0
import pymongo
from pymongo import MongoClient
client = MongoClient('localhost', 27017)
db = client.clients_db
def dropDB():
client.drop_database('clients_db')
| <filename>app/database/config.py<gh_stars>0
import pymongo
from pymongo import MongoClient
client = MongoClient('localhost', 27017)
db = client.clients_db
def dropDB():
client.drop_database('clients_db')
| none | 1 | 1.896913 | 2 | |
sample.py | ksboy/ccks3 | 1 | 6617226 | import numpy as np
from numpy import mean
import random
np.random.seed(0)
random.seed(0)
def get_class_name(rawtag):
# get (finegrained) class name
if rawtag.startswith('B-') or rawtag.startswith('I-'):
return rawtag[2:]
else:
return rawtag
class Sample:
def __init__(self, example):
self.words, self.tags = example.words, example.labels
# strip B-, I-
self.normalized_tags = list(map(get_class_name, self.tags))
self.entity_count = {}
def __count_entities__(self):
current_tag = self.normalized_tags[0]
for tag in self.normalized_tags[1:]:
if tag == current_tag:
continue
else:
if current_tag != 'O':
if current_tag in self.entity_count:
self.entity_count[current_tag] += 1
else:
self.entity_count[current_tag] = 1
current_tag = tag
if current_tag != 'O':
if current_tag in self.entity_count:
self.entity_count[current_tag] += 1
else:
self.entity_count[current_tag] = 1
def get_entity_count(self):
if self.entity_count:
return self.entity_count
else:
self.__count_entities__()
return self.entity_count
def get_tag_class(self):
# strip 'B' 'I'
tag_class = list(set(self.normalized_tags))
if 'O' in tag_class:
tag_class.remove('O')
return tag_class
def valid(self, target_classes):
return True
# return set(self.get_entity_count().keys()).intersection(set(target_classes)) and not set(self.get_entity_count().keys()).difference(set(target_classes))
def __str__(self):
newlines = zip(self.words, self.tags)
return '\n'.join(['\t'.join(line) for line in newlines])
class BatchSampler:
def __init__(self, N, K, Q, samples, classes):
self.K = K
self.N = N
self.Q = Q
self.samples = samples
self.classes= classes
def __additem__(self, index, set_class):
entity_count = self.samples[index].get_entity_count()
for class_name in entity_count:
if class_name in set_class:
set_class[class_name] += entity_count[class_name]
else:
set_class[class_name] = entity_count[class_name]
def __popitem__(self, index, set_class):
entity_count = self.samples[index].get_entity_count()
for class_name in entity_count:
if class_name in set_class:
set_class[class_name] -= entity_count[class_name]
else:
assert(0)
def __valid_add_sample__(self, sample, set_class, target_classes):
threshold = 3 * set_class['k']
entity_count = sample.get_entity_count()
if not entity_count:
return False
isvalid = False
for class_name in entity_count:
if class_name not in target_classes:
return False
elif class_name not in set_class:
isvalid = True
elif set_class[class_name] + entity_count[class_name] > threshold:
return False
elif set_class[class_name] < set_class['k']:
isvalid = True
return isvalid
def __valid_pop_sample__(self, sample, set_class, target_classes):
threshold = 10000 * set_class['k']
entity_count = sample.get_entity_count()
if not entity_count:
return False
isvalid = False
for class_name in entity_count:
if class_name not in target_classes:
return False
elif class_name not in set_class:
isvalid = True
elif set_class[class_name] > threshold:
return False
elif set_class[class_name] - entity_count[class_name] < set_class['k']:
isvalid = True
return isvalid
def __finish__(self, set_class):
if len(set_class) < self.N+1:
return False
for k in set_class:
if set_class[k] < set_class['k']:
return False
return True
def __get_candidates__(self, target_classes):
return [idx for idx, sample in enumerate(self.samples) if sample.valid(target_classes)]
def sample(self):
target_classes = random.sample(self.classes, self.N)
support_class = {'k':self.K}
# ['收购-number'] ['收购-sub-per'] ['签署合同-obj-per'] ['收购-proportion']
support_idx = [180-1, 228-1] + [117-1, 220-1] + \
[516-1, 531-1, 748-1, 768-1] + [37-1, 57-1]
for index in support_idx:
self.__additem__(index, support_class)
query_class = {'k':self.Q}
query_idx = []
candidates = self.__get_candidates__(target_classes)
# greedy search for support set
step = 0
while not self.__finish__(support_class) and step < 100000:
step += 1
index = random.sample(candidates, 1)[0]
if index not in support_idx:
if self.__valid_add_sample__(self.samples[index], support_class, target_classes):
self.__additem__(index, support_class)
support_idx.append(index)
for index in support_idx:
if not self.__valid_pop_sample__(self.samples[index], support_class, target_classes):
self.__popitem__(index, support_class)
support_idx.remove(index)
return target_classes, support_idx
if __name__ == '__main__':
from utils_ner_bio import read_examples_from_file
from utils import get_labels
data_dir = "./data/FewFC-main/rearranged/trans/"
examples = read_examples_from_file(data_dir, mode='train', task="role", dataset="ccks")
samples = [Sample(example) for example in examples]
classes = get_labels(path="./data/FewFC-main/event_schema/trans.json", task='role', mode="classification")
sampler = BatchSampler(31, 5, 5, samples, classes)
target_classes, support_idx = sampler.sample()
print(target_classes, support_idx)
# support_idx = [179, 227, 116, 219, 515, 530, 747, 767, 36, 56, 323, 625, 655, 565, 488, 453, 533, 561, 14, 408, 727, 640, 626, 505, 249, 720, 581, 244, 556, 93, 520, 111, 560, 553, 818, 617, 601, 394, 297, 188, 191, 91, 695, 39, 716, 537, 603, 224, 587, 59, 80, 319, 158, 8, 304]
# lines = open(data_dir+"train.json", encoding='utf-8').read().splitlines()
# res =[lines[index]+"\n" for index in support_idx]
# outf = open(data_dir+"support.json", "w")
# outf.writelines(res)
| import numpy as np
from numpy import mean
import random
np.random.seed(0)
random.seed(0)
def get_class_name(rawtag):
# get (finegrained) class name
if rawtag.startswith('B-') or rawtag.startswith('I-'):
return rawtag[2:]
else:
return rawtag
class Sample:
def __init__(self, example):
self.words, self.tags = example.words, example.labels
# strip B-, I-
self.normalized_tags = list(map(get_class_name, self.tags))
self.entity_count = {}
def __count_entities__(self):
current_tag = self.normalized_tags[0]
for tag in self.normalized_tags[1:]:
if tag == current_tag:
continue
else:
if current_tag != 'O':
if current_tag in self.entity_count:
self.entity_count[current_tag] += 1
else:
self.entity_count[current_tag] = 1
current_tag = tag
if current_tag != 'O':
if current_tag in self.entity_count:
self.entity_count[current_tag] += 1
else:
self.entity_count[current_tag] = 1
def get_entity_count(self):
if self.entity_count:
return self.entity_count
else:
self.__count_entities__()
return self.entity_count
def get_tag_class(self):
# strip 'B' 'I'
tag_class = list(set(self.normalized_tags))
if 'O' in tag_class:
tag_class.remove('O')
return tag_class
def valid(self, target_classes):
return True
# return set(self.get_entity_count().keys()).intersection(set(target_classes)) and not set(self.get_entity_count().keys()).difference(set(target_classes))
def __str__(self):
newlines = zip(self.words, self.tags)
return '\n'.join(['\t'.join(line) for line in newlines])
class BatchSampler:
def __init__(self, N, K, Q, samples, classes):
self.K = K
self.N = N
self.Q = Q
self.samples = samples
self.classes= classes
def __additem__(self, index, set_class):
entity_count = self.samples[index].get_entity_count()
for class_name in entity_count:
if class_name in set_class:
set_class[class_name] += entity_count[class_name]
else:
set_class[class_name] = entity_count[class_name]
def __popitem__(self, index, set_class):
entity_count = self.samples[index].get_entity_count()
for class_name in entity_count:
if class_name in set_class:
set_class[class_name] -= entity_count[class_name]
else:
assert(0)
def __valid_add_sample__(self, sample, set_class, target_classes):
threshold = 3 * set_class['k']
entity_count = sample.get_entity_count()
if not entity_count:
return False
isvalid = False
for class_name in entity_count:
if class_name not in target_classes:
return False
elif class_name not in set_class:
isvalid = True
elif set_class[class_name] + entity_count[class_name] > threshold:
return False
elif set_class[class_name] < set_class['k']:
isvalid = True
return isvalid
def __valid_pop_sample__(self, sample, set_class, target_classes):
threshold = 10000 * set_class['k']
entity_count = sample.get_entity_count()
if not entity_count:
return False
isvalid = False
for class_name in entity_count:
if class_name not in target_classes:
return False
elif class_name not in set_class:
isvalid = True
elif set_class[class_name] > threshold:
return False
elif set_class[class_name] - entity_count[class_name] < set_class['k']:
isvalid = True
return isvalid
def __finish__(self, set_class):
if len(set_class) < self.N+1:
return False
for k in set_class:
if set_class[k] < set_class['k']:
return False
return True
def __get_candidates__(self, target_classes):
return [idx for idx, sample in enumerate(self.samples) if sample.valid(target_classes)]
def sample(self):
target_classes = random.sample(self.classes, self.N)
support_class = {'k':self.K}
# ['收购-number'] ['收购-sub-per'] ['签署合同-obj-per'] ['收购-proportion']
support_idx = [180-1, 228-1] + [117-1, 220-1] + \
[516-1, 531-1, 748-1, 768-1] + [37-1, 57-1]
for index in support_idx:
self.__additem__(index, support_class)
query_class = {'k':self.Q}
query_idx = []
candidates = self.__get_candidates__(target_classes)
# greedy search for support set
step = 0
while not self.__finish__(support_class) and step < 100000:
step += 1
index = random.sample(candidates, 1)[0]
if index not in support_idx:
if self.__valid_add_sample__(self.samples[index], support_class, target_classes):
self.__additem__(index, support_class)
support_idx.append(index)
for index in support_idx:
if not self.__valid_pop_sample__(self.samples[index], support_class, target_classes):
self.__popitem__(index, support_class)
support_idx.remove(index)
return target_classes, support_idx
if __name__ == '__main__':
from utils_ner_bio import read_examples_from_file
from utils import get_labels
data_dir = "./data/FewFC-main/rearranged/trans/"
examples = read_examples_from_file(data_dir, mode='train', task="role", dataset="ccks")
samples = [Sample(example) for example in examples]
classes = get_labels(path="./data/FewFC-main/event_schema/trans.json", task='role', mode="classification")
sampler = BatchSampler(31, 5, 5, samples, classes)
target_classes, support_idx = sampler.sample()
print(target_classes, support_idx)
# support_idx = [179, 227, 116, 219, 515, 530, 747, 767, 36, 56, 323, 625, 655, 565, 488, 453, 533, 561, 14, 408, 727, 640, 626, 505, 249, 720, 581, 244, 556, 93, 520, 111, 560, 553, 818, 617, 601, 394, 297, 188, 191, 91, 695, 39, 716, 537, 603, 224, 587, 59, 80, 319, 158, 8, 304]
# lines = open(data_dir+"train.json", encoding='utf-8').read().splitlines()
# res =[lines[index]+"\n" for index in support_idx]
# outf = open(data_dir+"support.json", "w")
# outf.writelines(res)
| en | 0.425014 | # get (finegrained) class name # strip B-, I- # strip 'B' 'I' # return set(self.get_entity_count().keys()).intersection(set(target_classes)) and not set(self.get_entity_count().keys()).difference(set(target_classes)) # ['收购-number'] ['收购-sub-per'] ['签署合同-obj-per'] ['收购-proportion'] # greedy search for support set # support_idx = [179, 227, 116, 219, 515, 530, 747, 767, 36, 56, 323, 625, 655, 565, 488, 453, 533, 561, 14, 408, 727, 640, 626, 505, 249, 720, 581, 244, 556, 93, 520, 111, 560, 553, 818, 617, 601, 394, 297, 188, 191, 91, 695, 39, 716, 537, 603, 224, 587, 59, 80, 319, 158, 8, 304] # lines = open(data_dir+"train.json", encoding='utf-8').read().splitlines() # res =[lines[index]+"\n" for index in support_idx] # outf = open(data_dir+"support.json", "w") # outf.writelines(res) | 2.895761 | 3 |
mundo2/ex039.py | dilsonm/CeV | 0 | 6617227 | <gh_stars>0
"""
Faça um programa que leia o ano de nascimento de um jovem e informe, de acordo com sua idade:
- Se ele ainda vai se alistar ao serviço militar
- Se e a hora de se alistar ao servico militar
- Se já passou do tempo de alistamento ao serviço militar
(*) Seu programa também devera mostrar quanto tempo falta ou o que passou do tempo
"""
ano = int(input('Digite o ano de seu nascimento: '))
if (2020 - ano) < 18:
print('Você ainda tem {} anos para se alistar ao seviço militar.'.format( ((2020 - ano)-18)*-1 ))
elif (2020 - ano) == 18:
print('Você esta no ano para se alistar ao seviço militar.')
else:
print('Você já tem {} anos passados que deveria se apresentar ao seviço militar.'.format( (2020 - ano )-18 ))
| """
Faça um programa que leia o ano de nascimento de um jovem e informe, de acordo com sua idade:
- Se ele ainda vai se alistar ao serviço militar
- Se e a hora de se alistar ao servico militar
- Se já passou do tempo de alistamento ao serviço militar
(*) Seu programa também devera mostrar quanto tempo falta ou o que passou do tempo
"""
ano = int(input('Digite o ano de seu nascimento: '))
if (2020 - ano) < 18:
print('Você ainda tem {} anos para se alistar ao seviço militar.'.format( ((2020 - ano)-18)*-1 ))
elif (2020 - ano) == 18:
print('Você esta no ano para se alistar ao seviço militar.')
else:
print('Você já tem {} anos passados que deveria se apresentar ao seviço militar.'.format( (2020 - ano )-18 )) | pt | 0.990182 | Faça um programa que leia o ano de nascimento de um jovem e informe, de acordo com sua idade: - Se ele ainda vai se alistar ao serviço militar - Se e a hora de se alistar ao servico militar - Se já passou do tempo de alistamento ao serviço militar (*) Seu programa também devera mostrar quanto tempo falta ou o que passou do tempo | 4.155873 | 4 |
tests/ops/test_dtype.py | wesselb/matrix | 3 | 6617228 | <reponame>wesselb/matrix
import lab as B
import numpy as np
from matrix import Diagonal, Kronecker, LowRank, TiledBlocks
# noinspection PyUnresolvedReferences
from ..util import (
approx,
check_un_op,
const1,
dense1,
diag1,
kron1,
lr1,
lt1,
tb1,
tb_axis,
ut1,
wb1,
zero1,
)
def test_dtype_zero(zero1):
assert B.dtype(zero1) == B.default_dtype
def test_dtype_dense(dense1):
assert B.dtype(dense1) == B.default_dtype
def test_dtype_diag(diag1):
assert B.dtype(diag1) == B.default_dtype
def test_dtype_const(const1):
assert B.dtype(const1) == B.default_dtype
def test_dtype_lt1(lt1):
assert B.dtype(lt1) == B.default_dtype
def test_dtype_ut1(ut1):
assert B.dtype(ut1) == B.default_dtype
def test_dtype_lr(lr1):
assert B.dtype(lr1) == B.default_dtype
def test_dtype_lr_promotion():
lr = LowRank(B.ones(int, 5, 2), B.ones(int, 5, 2), B.ones(int, 2, 2))
assert B.dtype(lr) == np.int64
lr = LowRank(B.ones(float, 5, 2), B.ones(int, 5, 2), B.ones(int, 2, 2))
assert B.dtype(lr) == np.float64
lr = LowRank(B.ones(int, 5, 2), B.ones(float, 5, 2), B.ones(int, 2, 2))
assert B.dtype(lr) == np.float64
lr = LowRank(B.ones(int, 5, 2), B.ones(int, 5, 2), B.ones(float, 2, 2))
assert B.dtype(lr) == np.float64
def test_dtype_wb(wb1):
assert B.dtype(wb1) == B.default_dtype
def test_dtype_wb_promotion():
wb = LowRank(B.ones(int, 5, 5)) + Diagonal(B.ones(int, 5))
assert B.dtype(wb) == np.int64
wb = LowRank(B.ones(float, 5, 5)) + Diagonal(B.ones(int, 5))
assert B.dtype(wb) == np.float64
wb = LowRank(B.ones(int, 5, 5)) + Diagonal(B.ones(float, 5))
assert B.dtype(wb) == np.float64
def test_dtype_kron(kron1):
assert B.dtype(kron1) == B.default_dtype
def test_dtype_kron_promotion():
kron = Kronecker(B.ones(int, 5, 5), B.ones(int, 5, 5))
assert B.dtype(kron) == np.int64
kron = Kronecker(B.ones(float, 5, 5), B.ones(int, 5, 5))
assert B.dtype(kron) == np.float64
kron = Kronecker(B.ones(int, 5, 5), B.ones(float, 5, 5))
assert B.dtype(kron) == np.float64
def test_dtype_tb(tb1):
assert B.dtype(tb1) == B.default_dtype
def test_dtype_tb_promotion():
tb = TiledBlocks((B.ones(int, 5, 5), 2), (B.ones(int, 5, 5), 2))
assert B.dtype(tb) == np.int64
tb = TiledBlocks((B.ones(float, 5, 5), 2), (B.ones(int, 5, 5), 2))
assert B.dtype(tb) == np.float64
tb = TiledBlocks((B.ones(int, 5, 5), 2), (B.ones(float, 5, 5), 2))
assert B.dtype(tb) == np.float64
| import lab as B
import numpy as np
from matrix import Diagonal, Kronecker, LowRank, TiledBlocks
# noinspection PyUnresolvedReferences
from ..util import (
approx,
check_un_op,
const1,
dense1,
diag1,
kron1,
lr1,
lt1,
tb1,
tb_axis,
ut1,
wb1,
zero1,
)
def test_dtype_zero(zero1):
assert B.dtype(zero1) == B.default_dtype
def test_dtype_dense(dense1):
assert B.dtype(dense1) == B.default_dtype
def test_dtype_diag(diag1):
assert B.dtype(diag1) == B.default_dtype
def test_dtype_const(const1):
assert B.dtype(const1) == B.default_dtype
def test_dtype_lt1(lt1):
assert B.dtype(lt1) == B.default_dtype
def test_dtype_ut1(ut1):
assert B.dtype(ut1) == B.default_dtype
def test_dtype_lr(lr1):
assert B.dtype(lr1) == B.default_dtype
def test_dtype_lr_promotion():
lr = LowRank(B.ones(int, 5, 2), B.ones(int, 5, 2), B.ones(int, 2, 2))
assert B.dtype(lr) == np.int64
lr = LowRank(B.ones(float, 5, 2), B.ones(int, 5, 2), B.ones(int, 2, 2))
assert B.dtype(lr) == np.float64
lr = LowRank(B.ones(int, 5, 2), B.ones(float, 5, 2), B.ones(int, 2, 2))
assert B.dtype(lr) == np.float64
lr = LowRank(B.ones(int, 5, 2), B.ones(int, 5, 2), B.ones(float, 2, 2))
assert B.dtype(lr) == np.float64
def test_dtype_wb(wb1):
assert B.dtype(wb1) == B.default_dtype
def test_dtype_wb_promotion():
wb = LowRank(B.ones(int, 5, 5)) + Diagonal(B.ones(int, 5))
assert B.dtype(wb) == np.int64
wb = LowRank(B.ones(float, 5, 5)) + Diagonal(B.ones(int, 5))
assert B.dtype(wb) == np.float64
wb = LowRank(B.ones(int, 5, 5)) + Diagonal(B.ones(float, 5))
assert B.dtype(wb) == np.float64
def test_dtype_kron(kron1):
assert B.dtype(kron1) == B.default_dtype
def test_dtype_kron_promotion():
kron = Kronecker(B.ones(int, 5, 5), B.ones(int, 5, 5))
assert B.dtype(kron) == np.int64
kron = Kronecker(B.ones(float, 5, 5), B.ones(int, 5, 5))
assert B.dtype(kron) == np.float64
kron = Kronecker(B.ones(int, 5, 5), B.ones(float, 5, 5))
assert B.dtype(kron) == np.float64
def test_dtype_tb(tb1):
assert B.dtype(tb1) == B.default_dtype
def test_dtype_tb_promotion():
tb = TiledBlocks((B.ones(int, 5, 5), 2), (B.ones(int, 5, 5), 2))
assert B.dtype(tb) == np.int64
tb = TiledBlocks((B.ones(float, 5, 5), 2), (B.ones(int, 5, 5), 2))
assert B.dtype(tb) == np.float64
tb = TiledBlocks((B.ones(int, 5, 5), 2), (B.ones(float, 5, 5), 2))
assert B.dtype(tb) == np.float64 | en | 0.460624 | # noinspection PyUnresolvedReferences | 1.937524 | 2 |
mmdet/models/backbones/__init__.py | samarthramesh/CDeC-Net | 104 | 6617229 | from .hrnet import HRNet
from .resnet import ResNet, ResNetV1d
from .resnext import ResNeXt
from .ssd_vgg import SSDVGG
from .db_resnet import DB_ResNet, DB_ResNetV1d
from .db_resnext import DB_ResNeXt
__all__ = ['ResNet', 'ResNetV1d', 'ResNeXt', 'SSDVGG', 'HRNet','DB_ResNet','DB_ResNetV1d','DB_ResNeXt']
| from .hrnet import HRNet
from .resnet import ResNet, ResNetV1d
from .resnext import ResNeXt
from .ssd_vgg import SSDVGG
from .db_resnet import DB_ResNet, DB_ResNetV1d
from .db_resnext import DB_ResNeXt
__all__ = ['ResNet', 'ResNetV1d', 'ResNeXt', 'SSDVGG', 'HRNet','DB_ResNet','DB_ResNetV1d','DB_ResNeXt']
| none | 1 | 1.269091 | 1 | |
src/qrl/Qtable3.py | Blackdevil132/machineLearning | 1 | 6617230 | <gh_stars>1-10
import numpy as np
from src.qrl.Qtable import Qtable
# Qtable for 3-dim storing
class Qtable3(Qtable):
def __init__(self, action_space, observation_space_1, observation_space_2, observation_space_3):
Qtable.__init__(self)
self.action_space = action_space
self.observation_space = (observation_space_1, observation_space_2, observation_space_3)
self.table = [{j: {k: np.zeros(action_space) for k in [e2 for e2 in range(observation_space_3)]} for j in [e1 for e1 in range(observation_space_2)]} for i in range(observation_space_1)]
def get(self, state, action=None):
#print(state[0], state[1], state[2])
if action is None:
try:
return self.table[state[0]][state[1]][state[2]][:]
except KeyError:
print("Error at ", state[0], state[1], state[2])
return self.table[state[0]][state[1]][state[2]][action]
def update(self, state, action, newValue):
self.table[state[0]][state[1]][state[2]][action] = newValue
def show(self):
for dim1 in range(self.observation_space[0]):
print("%i " % dim1, end='')
for dim2 in self.table[dim1].keys():
print("%i " % dim2, end='')
for key in self.table[dim1][dim2].keys():
print("\t%i: " % key, end='')
for action in self.table[dim1][dim2][key]:
print("\t%.3f, " % action, end='')
print()
| import numpy as np
from src.qrl.Qtable import Qtable
# Qtable for 3-dim storing
class Qtable3(Qtable):
def __init__(self, action_space, observation_space_1, observation_space_2, observation_space_3):
Qtable.__init__(self)
self.action_space = action_space
self.observation_space = (observation_space_1, observation_space_2, observation_space_3)
self.table = [{j: {k: np.zeros(action_space) for k in [e2 for e2 in range(observation_space_3)]} for j in [e1 for e1 in range(observation_space_2)]} for i in range(observation_space_1)]
def get(self, state, action=None):
#print(state[0], state[1], state[2])
if action is None:
try:
return self.table[state[0]][state[1]][state[2]][:]
except KeyError:
print("Error at ", state[0], state[1], state[2])
return self.table[state[0]][state[1]][state[2]][action]
def update(self, state, action, newValue):
self.table[state[0]][state[1]][state[2]][action] = newValue
def show(self):
for dim1 in range(self.observation_space[0]):
print("%i " % dim1, end='')
for dim2 in self.table[dim1].keys():
print("%i " % dim2, end='')
for key in self.table[dim1][dim2].keys():
print("\t%i: " % key, end='')
for action in self.table[dim1][dim2][key]:
print("\t%.3f, " % action, end='')
print() | en | 0.501408 | # Qtable for 3-dim storing #print(state[0], state[1], state[2]) | 2.746758 | 3 |
SC101_Projects/SC101_Assignment2/breakout.py | TobyCCC/MystanCodeProJects | 0 | 6617231 | """
stanCode Breakout Project
Adapted from <NAME>'s Breakout by
<NAME>, <NAME>, <NAME>,
and <NAME>.
YOUR DESCRIPTION HERE
"""
from campy.gui.events.timer import pause
from breakoutgraphics import BreakoutGraphics
from campy.graphics.gobjects import GLabel
FRAME_RATE = 1000 / 120 # 120 frames per second
NUM_LIVES = 3 # Number of attempts
def main():
graphics = BreakoutGraphics()
while not graphics.game_start(): # 點擊啟動的開關
pause(FRAME_RATE)
dx, dy = graphics.get_speed() # 速度
death_times = 0
win = 10*graphics.brick_rows*graphics.brick_cols # 全部磚塊打完的分數
# Add animation loop here!
while death_times < NUM_LIVES and graphics.score < win:
# 移動
graphics.ball.move(dx, dy)
# 判斷條件
if graphics.hit_block(): # 打到磚塊
dx, dy = graphics.get_speed()
graphics.score_board.text = f"Score : {graphics.score}"
if graphics.ball.x <= 0 or graphics.ball.x >= graphics.window.width-graphics.ball.width: # 打到左右牆壁
graphics.rebound_x()
dx, dy = graphics.get_speed()
if graphics.hit_paddle(): # 打到paddle
if dy > 0: # 確保球不會卡在paddle中
graphics.rebound_y()
dx, dy = graphics.get_speed()
else:
pass
elif graphics.ball.y <= 0: # 打到最上面
graphics.rebound_y()
dx, dy = graphics.get_speed()
elif graphics.ball.y >= graphics.window.height-graphics.ball.height: # 打到最下面而死亡
graphics.reset_ball_position()
dx, dy = graphics.get_speed()
death_times += 1
pause(FRAME_RATE*120) # 使死亡後能暫停一下再開始
# 執行暫停
pause(FRAME_RATE)
# 印出結束字樣
if death_times == NUM_LIVES:
final_label = GLabel("You Lose! ¯\_(ツ)_/¯")
else:
final_label = GLabel("You Win! ¯\_(ツ)_/¯")
final_label.color = "navy"
final_label.font = "Comic Sans MS-40"
graphics.window.remove(graphics.ball)
graphics.window.add(final_label, (graphics.window.width-final_label.width)/2, (graphics.window.height-final_label.height)/2+60)
if __name__ == '__main__':
main()
| """
stanCode Breakout Project
Adapted from <NAME>'s Breakout by
<NAME>, <NAME>, <NAME>,
and <NAME>.
YOUR DESCRIPTION HERE
"""
from campy.gui.events.timer import pause
from breakoutgraphics import BreakoutGraphics
from campy.graphics.gobjects import GLabel
FRAME_RATE = 1000 / 120 # 120 frames per second
NUM_LIVES = 3 # Number of attempts
def main():
graphics = BreakoutGraphics()
while not graphics.game_start(): # 點擊啟動的開關
pause(FRAME_RATE)
dx, dy = graphics.get_speed() # 速度
death_times = 0
win = 10*graphics.brick_rows*graphics.brick_cols # 全部磚塊打完的分數
# Add animation loop here!
while death_times < NUM_LIVES and graphics.score < win:
# 移動
graphics.ball.move(dx, dy)
# 判斷條件
if graphics.hit_block(): # 打到磚塊
dx, dy = graphics.get_speed()
graphics.score_board.text = f"Score : {graphics.score}"
if graphics.ball.x <= 0 or graphics.ball.x >= graphics.window.width-graphics.ball.width: # 打到左右牆壁
graphics.rebound_x()
dx, dy = graphics.get_speed()
if graphics.hit_paddle(): # 打到paddle
if dy > 0: # 確保球不會卡在paddle中
graphics.rebound_y()
dx, dy = graphics.get_speed()
else:
pass
elif graphics.ball.y <= 0: # 打到最上面
graphics.rebound_y()
dx, dy = graphics.get_speed()
elif graphics.ball.y >= graphics.window.height-graphics.ball.height: # 打到最下面而死亡
graphics.reset_ball_position()
dx, dy = graphics.get_speed()
death_times += 1
pause(FRAME_RATE*120) # 使死亡後能暫停一下再開始
# 執行暫停
pause(FRAME_RATE)
# 印出結束字樣
if death_times == NUM_LIVES:
final_label = GLabel("You Lose! ¯\_(ツ)_/¯")
else:
final_label = GLabel("You Win! ¯\_(ツ)_/¯")
final_label.color = "navy"
final_label.font = "Comic Sans MS-40"
graphics.window.remove(graphics.ball)
graphics.window.add(final_label, (graphics.window.width-final_label.width)/2, (graphics.window.height-final_label.height)/2+60)
if __name__ == '__main__':
main()
| zh | 0.432835 | stanCode Breakout Project Adapted from <NAME>'s Breakout by <NAME>, <NAME>, <NAME>, and <NAME>. YOUR DESCRIPTION HERE # 120 frames per second # Number of attempts # 點擊啟動的開關 # 速度 # 全部磚塊打完的分數 # Add animation loop here! # 移動 # 判斷條件 # 打到磚塊 # 打到左右牆壁 # 打到paddle # 確保球不會卡在paddle中 # 打到最上面 # 打到最下面而死亡 # 使死亡後能暫停一下再開始 # 執行暫停 # 印出結束字樣 | 3.29318 | 3 |
cfdi/utils/export.py | joules457/cfdi-iva-snippet | 0 | 6617232 | <reponame>joules457/cfdi-iva-snippet<filename>cfdi/utils/export.py<gh_stars>0
"""
Export Utils
"""
import os
import csv
def csv_file(dict_list: list, filename: str):
"""write a CSV File with given filename and data.
Parameters
----------
dict_list : list
List of dicts.
filename : str
FIlename.
Returns
-------
dict
Result {status, csv_filename}.
"""
result = {
'status': 0,
'csv_filename': None
}
try:
fieldnames = dict_list[0].keys() if dict_list else []
with open(filename, 'w', newline='') as csvfile:
writer = csv.DictWriter(
csvfile,
fieldnames=fieldnames,
quoting=csv.QUOTE_MINIMAL
)
writer.writeheader()
for row in dict_list:
writer.writerow(row)
result['csv_filename'] = filename
except Exception as err:
result['status'] = 1
result['error'] = err
return result
| """
Export Utils
"""
import os
import csv
def csv_file(dict_list: list, filename: str):
"""write a CSV File with given filename and data.
Parameters
----------
dict_list : list
List of dicts.
filename : str
FIlename.
Returns
-------
dict
Result {status, csv_filename}.
"""
result = {
'status': 0,
'csv_filename': None
}
try:
fieldnames = dict_list[0].keys() if dict_list else []
with open(filename, 'w', newline='') as csvfile:
writer = csv.DictWriter(
csvfile,
fieldnames=fieldnames,
quoting=csv.QUOTE_MINIMAL
)
writer.writeheader()
for row in dict_list:
writer.writerow(row)
result['csv_filename'] = filename
except Exception as err:
result['status'] = 1
result['error'] = err
return result | en | 0.511563 | Export Utils write a CSV File with given filename and data. Parameters ---------- dict_list : list List of dicts. filename : str FIlename. Returns ------- dict Result {status, csv_filename}. | 3.355289 | 3 |
pages/flight_payment_page.py | tomekwszelaki/page-object-pattern-python | 0 | 6617233 | from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.expected_conditions import element_to_be_clickable
from selenium.webdriver.support.expected_conditions import visibility_of_element_located
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.select import Select
from pages.log_in_page import LogInPage
from config import users
import string
import random
class FlightPaymentPage(object):
def __init__(self, driver):
self.driver = driver
self.wait = WebDriverWait(self.driver, 10)
self.log_in_btn = (By.CSS_SELECTOR, 'button[ui-sref="login"]')
self.passenger_rows = (By.CSS_SELECTOR, 'div[passengers-form] .row')
self.passenger_title = (By.CSS_SELECTOR, '.payment-passenger-title select')
self.passenger_first_name = (By.CSS_SELECTOR, '.payment-passenger-first-name input')
self.passenger_last_name = (By.CSS_SELECTOR, '.payment-passenger-last-name input')
self.title_select = None
self.terms_tick = (By.CSS_SELECTOR, '.terms input[name="acceptPolicy"]')
self.pay_now_btn = (By.CSS_SELECTOR, '.cta button')
# card details
self.card_number = (By.CSS_SELECTOR, 'payment-method-card input[name="cardNumber"]')
self.card_type = (By.CSS_SELECTOR, 'payment-method-card select[name="cardType"]')
self.card_expiry_month = (By.CSS_SELECTOR, 'payment-method-card select[name="expiryMonth"]')
self.card_expiry_year = (By.CSS_SELECTOR, 'payment-method-card select[name="expiryYear"]')
self.card_cvv = (By.CSS_SELECTOR, 'payment-method-card input[name="securityCode"]')
self.cardholder = (By.CSS_SELECTOR, 'payment-method-card input[name="cardHolderName"]')
# billing address
self.address_line_1 = (By.CSS_SELECTOR, 'div[name="sa.name"] input[name="sa.nameAddressLine1"]')
self.address_city = (By.CSS_SELECTOR, 'div[name="sa.name"] input[name="sa.nameCity"]')
# error prompt
self.error_section = (By.CSS_SELECTOR, 'prompt[text-title="common.components.payment_forms.error_title"]')
self.error_section_msg = (By.CSS_SELECTOR, 'prompt[text-title="common.components.payment_forms.error_title"] .info-text')
def element(self, locator):
return self.driver.find_element(*locator)
def is_ready(self):
# using errors for branching is an anti pattern, but with selenium I don't think we can do anything else
try:
self.wait.until(element_to_be_clickable(self.log_in_btn))
except NoSuchElementException as e:
# we are logged in, so let's do nothing
pass
else:
self.element(self.log_in_btn).click()
return self.log_in_if_necessary()
def log_in_if_necessary(self):
log_in_page = LogInPage(self.driver)
log_in_page.is_ready()
log_in_page.log_in(users["test_user_1"], remember_me=False)
return self
def is_logged_in(self):
self.wait.until(visibility_of_element_located(self.passenger_first_name))
def _fill_in_older_passegers_details(self, row):
self.surname = "Doe" + ''.join(random.sample(string.ascii_lowercase, 3))
Select(row.find_element(*self.passenger_title)).select_by_visible_text('Mr')
row.find_element(*self.passenger_first_name).clear()
row.find_element(*self.passenger_first_name).send_keys('John')
row.find_element(*self.passenger_last_name).clear()
row.find_element(*self.passenger_last_name).send_keys(self.surname)
# TODO: extend the functionality of this method to deal with children and infant passengers as well
def fill_in_passenger_details(self, passengers):
counter = 0
passengers_rows = self.driver.find_elements(*self.passenger_rows)
for i in range(passengers.get("adults", 1)):
self._fill_in_older_passegers_details(passengers_rows[counter])
counter += 1
for i in range(passengers.get("teens", 0)):
self._fill_in_older_passegers_details(passengers_rows[counter])
counter += 1
return self
def fill_in_payment_details(self, card_number, card_type, expiry="12/2020", cvv="123"):
# Debit/Credit card is selected by default
self.element(self.card_number).clear()
self.element(self.card_number).send_keys(card_number)
Select(self.element(self.card_type)).select_by_visible_text(card_type)
month, year = expiry.split('/')
Select(self.element(self.card_expiry_month)).select_by_visible_text(month)
Select(self.element(self.card_expiry_year)).select_by_visible_text(year)
self.element(self.card_cvv).clear()
self.element(self.card_cvv).send_keys(cvv)
self.element(self.cardholder).clear()
self.element(self.cardholder).send_keys('<NAME>')
self.element(self.address_line_1).clear()
self.element(self.address_line_1).send_keys("21 Sun Lane")
self.element(self.address_city).clear()
self.element(self.address_city).send_keys("Cork")
return self
def submit(self):
self.element(self.terms_tick).click()
self.element(self.pay_now_btn).click()
return self
def is_error_shown(self):
WebDriverWait(self.driver, 20).until(visibility_of_element_located(self.error_section))
return self.element(self.error_section).is_displayed()
| from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.expected_conditions import element_to_be_clickable
from selenium.webdriver.support.expected_conditions import visibility_of_element_located
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.select import Select
from pages.log_in_page import LogInPage
from config import users
import string
import random
class FlightPaymentPage(object):
def __init__(self, driver):
self.driver = driver
self.wait = WebDriverWait(self.driver, 10)
self.log_in_btn = (By.CSS_SELECTOR, 'button[ui-sref="login"]')
self.passenger_rows = (By.CSS_SELECTOR, 'div[passengers-form] .row')
self.passenger_title = (By.CSS_SELECTOR, '.payment-passenger-title select')
self.passenger_first_name = (By.CSS_SELECTOR, '.payment-passenger-first-name input')
self.passenger_last_name = (By.CSS_SELECTOR, '.payment-passenger-last-name input')
self.title_select = None
self.terms_tick = (By.CSS_SELECTOR, '.terms input[name="acceptPolicy"]')
self.pay_now_btn = (By.CSS_SELECTOR, '.cta button')
# card details
self.card_number = (By.CSS_SELECTOR, 'payment-method-card input[name="cardNumber"]')
self.card_type = (By.CSS_SELECTOR, 'payment-method-card select[name="cardType"]')
self.card_expiry_month = (By.CSS_SELECTOR, 'payment-method-card select[name="expiryMonth"]')
self.card_expiry_year = (By.CSS_SELECTOR, 'payment-method-card select[name="expiryYear"]')
self.card_cvv = (By.CSS_SELECTOR, 'payment-method-card input[name="securityCode"]')
self.cardholder = (By.CSS_SELECTOR, 'payment-method-card input[name="cardHolderName"]')
# billing address
self.address_line_1 = (By.CSS_SELECTOR, 'div[name="sa.name"] input[name="sa.nameAddressLine1"]')
self.address_city = (By.CSS_SELECTOR, 'div[name="sa.name"] input[name="sa.nameCity"]')
# error prompt
self.error_section = (By.CSS_SELECTOR, 'prompt[text-title="common.components.payment_forms.error_title"]')
self.error_section_msg = (By.CSS_SELECTOR, 'prompt[text-title="common.components.payment_forms.error_title"] .info-text')
def element(self, locator):
return self.driver.find_element(*locator)
def is_ready(self):
# using errors for branching is an anti pattern, but with selenium I don't think we can do anything else
try:
self.wait.until(element_to_be_clickable(self.log_in_btn))
except NoSuchElementException as e:
# we are logged in, so let's do nothing
pass
else:
self.element(self.log_in_btn).click()
return self.log_in_if_necessary()
def log_in_if_necessary(self):
log_in_page = LogInPage(self.driver)
log_in_page.is_ready()
log_in_page.log_in(users["test_user_1"], remember_me=False)
return self
def is_logged_in(self):
self.wait.until(visibility_of_element_located(self.passenger_first_name))
def _fill_in_older_passegers_details(self, row):
self.surname = "Doe" + ''.join(random.sample(string.ascii_lowercase, 3))
Select(row.find_element(*self.passenger_title)).select_by_visible_text('Mr')
row.find_element(*self.passenger_first_name).clear()
row.find_element(*self.passenger_first_name).send_keys('John')
row.find_element(*self.passenger_last_name).clear()
row.find_element(*self.passenger_last_name).send_keys(self.surname)
# TODO: extend the functionality of this method to deal with children and infant passengers as well
def fill_in_passenger_details(self, passengers):
counter = 0
passengers_rows = self.driver.find_elements(*self.passenger_rows)
for i in range(passengers.get("adults", 1)):
self._fill_in_older_passegers_details(passengers_rows[counter])
counter += 1
for i in range(passengers.get("teens", 0)):
self._fill_in_older_passegers_details(passengers_rows[counter])
counter += 1
return self
def fill_in_payment_details(self, card_number, card_type, expiry="12/2020", cvv="123"):
# Debit/Credit card is selected by default
self.element(self.card_number).clear()
self.element(self.card_number).send_keys(card_number)
Select(self.element(self.card_type)).select_by_visible_text(card_type)
month, year = expiry.split('/')
Select(self.element(self.card_expiry_month)).select_by_visible_text(month)
Select(self.element(self.card_expiry_year)).select_by_visible_text(year)
self.element(self.card_cvv).clear()
self.element(self.card_cvv).send_keys(cvv)
self.element(self.cardholder).clear()
self.element(self.cardholder).send_keys('<NAME>')
self.element(self.address_line_1).clear()
self.element(self.address_line_1).send_keys("21 Sun Lane")
self.element(self.address_city).clear()
self.element(self.address_city).send_keys("Cork")
return self
def submit(self):
self.element(self.terms_tick).click()
self.element(self.pay_now_btn).click()
return self
def is_error_shown(self):
WebDriverWait(self.driver, 20).until(visibility_of_element_located(self.error_section))
return self.element(self.error_section).is_displayed()
| en | 0.940393 | # card details # billing address # error prompt # using errors for branching is an anti pattern, but with selenium I don't think we can do anything else # we are logged in, so let's do nothing # TODO: extend the functionality of this method to deal with children and infant passengers as well # Debit/Credit card is selected by default | 2.840873 | 3 |
pydynamo_brain/pydynamo_brain/analysis/functions/puncta.py | ubcbraincircuits/pyDynamo | 4 | 6617234 | <reponame>ubcbraincircuits/pyDynamo
import math
import numpy as np
import pandas as pd
from typing import Any, Dict, List
from tqdm import tqdm
import pydynamo_brain.util as util
from pydynamo_brain.model import FullState, Point
_IMG_CACHE = util.ImageCache()
# Given r, return pi.r^2, the area of a circle
def _radiusToArea(r: float) -> float:
return math.pi * r * r
def _averageIntensity(point: Point, image: np.ndarray, channel: int) -> float:
x, y, z = point.location
zAt = int(round(z))
plane = image[channel][zAt]
r = point.radius
if r is None:
r = point.radiusFromAncestors()
# HACK - find a better way to do this?
intensitySum = 0.0
intensityCount = 0
for r in range(plane.shape[0]):
for c in range(plane.shape[1]):
d = util.deltaSz((c + 0.5, r + 0.5, 0), (x, y, 0))
if d <= r:
intensitySum += 1.0 * plane[r, c]
intensityCount += 1
if intensityCount == 0:
return np.nan
return intensitySum / intensityCount / 255.0
# Provide the size of each individual puncta across time.
def perPunctaSize(fullState: FullState, punctaIDs: List[str], **kwargs: Any) -> pd.DataFrame:
idToIndex = {}
for idx, id in enumerate(punctaIDs):
idToIndex[id] = idx
sizes = np.zeros((len(punctaIDs), len(fullState.puncta)))
for idx, punctaList in enumerate(fullState.puncta):
for puncta in punctaList:
radius = puncta.radius
if radius is None:
radius = puncta.radiusFromAncestors()
pID = puncta.id
if pID in idToIndex:
sizes[idToIndex[pID], idx] = _radiusToArea(radius)
colNames = [('area_%02d' % (i + 1)) for i in range(len(fullState.puncta))]
return pd.DataFrame(data=sizes, index=punctaIDs, columns=colNames)
# Provide the average intensity of puncta across time
def perPunctaIntensity(
fullState: FullState, punctaIDs: List[str], **kwargs: Any
) -> pd.DataFrame:
print ("Pre-loading images...")
for imgPath in tqdm(fullState.filePaths):
_IMG_CACHE.getVolume(imgPath, verbose=False)
print ("Loaded")
channel = 0
if 'channel' in kwargs:
channel = kwargs['channel']
idToIndex = {}
for idx, id in enumerate(punctaIDs):
idToIndex[id] = idx
intensities = np.zeros((len(punctaIDs), len(fullState.puncta)))
for idx, punctaList in enumerate(fullState.puncta):
assert idx < len(fullState.filePaths)
img = _IMG_CACHE.getVolume(fullState.filePaths[idx], verbose=False)
for puncta in punctaList:
pID = puncta.id
if pID in idToIndex:
intensities[idToIndex[pID], idx] = _averageIntensity(puncta, img, channel)
colNames = [('intensity_%02d' % (i + 1)) for i in range(len(fullState.puncta))]
return pd.DataFrame(data=intensities, index=punctaIDs, columns=colNames)
| import math
import numpy as np
import pandas as pd
from typing import Any, Dict, List
from tqdm import tqdm
import pydynamo_brain.util as util
from pydynamo_brain.model import FullState, Point
_IMG_CACHE = util.ImageCache()
# Given r, return pi.r^2, the area of a circle
def _radiusToArea(r: float) -> float:
return math.pi * r * r
def _averageIntensity(point: Point, image: np.ndarray, channel: int) -> float:
x, y, z = point.location
zAt = int(round(z))
plane = image[channel][zAt]
r = point.radius
if r is None:
r = point.radiusFromAncestors()
# HACK - find a better way to do this?
intensitySum = 0.0
intensityCount = 0
for r in range(plane.shape[0]):
for c in range(plane.shape[1]):
d = util.deltaSz((c + 0.5, r + 0.5, 0), (x, y, 0))
if d <= r:
intensitySum += 1.0 * plane[r, c]
intensityCount += 1
if intensityCount == 0:
return np.nan
return intensitySum / intensityCount / 255.0
# Provide the size of each individual puncta across time.
def perPunctaSize(fullState: FullState, punctaIDs: List[str], **kwargs: Any) -> pd.DataFrame:
idToIndex = {}
for idx, id in enumerate(punctaIDs):
idToIndex[id] = idx
sizes = np.zeros((len(punctaIDs), len(fullState.puncta)))
for idx, punctaList in enumerate(fullState.puncta):
for puncta in punctaList:
radius = puncta.radius
if radius is None:
radius = puncta.radiusFromAncestors()
pID = puncta.id
if pID in idToIndex:
sizes[idToIndex[pID], idx] = _radiusToArea(radius)
colNames = [('area_%02d' % (i + 1)) for i in range(len(fullState.puncta))]
return pd.DataFrame(data=sizes, index=punctaIDs, columns=colNames)
# Provide the average intensity of puncta across time
def perPunctaIntensity(
fullState: FullState, punctaIDs: List[str], **kwargs: Any
) -> pd.DataFrame:
print ("Pre-loading images...")
for imgPath in tqdm(fullState.filePaths):
_IMG_CACHE.getVolume(imgPath, verbose=False)
print ("Loaded")
channel = 0
if 'channel' in kwargs:
channel = kwargs['channel']
idToIndex = {}
for idx, id in enumerate(punctaIDs):
idToIndex[id] = idx
intensities = np.zeros((len(punctaIDs), len(fullState.puncta)))
for idx, punctaList in enumerate(fullState.puncta):
assert idx < len(fullState.filePaths)
img = _IMG_CACHE.getVolume(fullState.filePaths[idx], verbose=False)
for puncta in punctaList:
pID = puncta.id
if pID in idToIndex:
intensities[idToIndex[pID], idx] = _averageIntensity(puncta, img, channel)
colNames = [('intensity_%02d' % (i + 1)) for i in range(len(fullState.puncta))]
return pd.DataFrame(data=intensities, index=punctaIDs, columns=colNames) | en | 0.779863 | # Given r, return pi.r^2, the area of a circle # HACK - find a better way to do this? # Provide the size of each individual puncta across time. # Provide the average intensity of puncta across time | 2.215479 | 2 |
hexrd/ui/material_properties_editor.py | cjh1/hexrdgui | 0 | 6617235 | <reponame>cjh1/hexrdgui
import copy
import numpy as np
from hexrd.unitcell import _StiffnessDict
from hexrd.ui.hexrd_config import HexrdConfig
from hexrd.ui.matrix_editor import MatrixEditor
from hexrd.ui.ui_loader import UiLoader
from hexrd.ui.utils import compose
class MaterialPropertiesEditor:
stiffness_tensor_shape = (6, 6)
def __init__(self, parent=None):
loader = UiLoader()
self.ui = loader.load_file('material_properties_editor.ui', parent)
self.null_tensor = np.zeros(self.stiffness_tensor_shape)
self.stiffness_tensor_editor = MatrixEditor(self.null_tensor, self.ui)
self.ui.stiffness_tensor_editor_layout.addWidget(
self.stiffness_tensor_editor)
self.setup_connections()
self.update_gui()
def setup_connections(self):
self.stiffness_tensor_editor.data_modified.connect(
self.stiffness_tensor_edited)
@property
def material(self):
return HexrdConfig().active_material
def update_gui(self):
material = self.material
if hasattr(material.unitcell, 'stiffness'):
data = copy.deepcopy(material.unitcell.stiffness)
else:
# Just use zeros...
data = np.zeros(self.stiffness_tensor_shape)
enabled, constraints = _StiffnessDict[material.unitcell._laueGroup]
constraints_func = compose(apply_symmetric_constraint, constraints)
editor = self.stiffness_tensor_editor
editor.enabled_elements = enabled
editor.apply_constraints_func = constraints_func
editor.data = data
def stiffness_tensor_edited(self):
material = self.material
material.unitcell.stiffness = copy.deepcopy(
self.stiffness_tensor_editor.data)
def apply_symmetric_constraint(x):
# Copy values from upper triangle to lower triangle.
# Only works for square matrices.
for i in range(x.shape[0]):
for j in range(i):
x[i, j] = x[j, i]
return x
| import copy
import numpy as np
from hexrd.unitcell import _StiffnessDict
from hexrd.ui.hexrd_config import HexrdConfig
from hexrd.ui.matrix_editor import MatrixEditor
from hexrd.ui.ui_loader import UiLoader
from hexrd.ui.utils import compose
class MaterialPropertiesEditor:
stiffness_tensor_shape = (6, 6)
def __init__(self, parent=None):
loader = UiLoader()
self.ui = loader.load_file('material_properties_editor.ui', parent)
self.null_tensor = np.zeros(self.stiffness_tensor_shape)
self.stiffness_tensor_editor = MatrixEditor(self.null_tensor, self.ui)
self.ui.stiffness_tensor_editor_layout.addWidget(
self.stiffness_tensor_editor)
self.setup_connections()
self.update_gui()
def setup_connections(self):
self.stiffness_tensor_editor.data_modified.connect(
self.stiffness_tensor_edited)
@property
def material(self):
return HexrdConfig().active_material
def update_gui(self):
material = self.material
if hasattr(material.unitcell, 'stiffness'):
data = copy.deepcopy(material.unitcell.stiffness)
else:
# Just use zeros...
data = np.zeros(self.stiffness_tensor_shape)
enabled, constraints = _StiffnessDict[material.unitcell._laueGroup]
constraints_func = compose(apply_symmetric_constraint, constraints)
editor = self.stiffness_tensor_editor
editor.enabled_elements = enabled
editor.apply_constraints_func = constraints_func
editor.data = data
def stiffness_tensor_edited(self):
material = self.material
material.unitcell.stiffness = copy.deepcopy(
self.stiffness_tensor_editor.data)
def apply_symmetric_constraint(x):
# Copy values from upper triangle to lower triangle.
# Only works for square matrices.
for i in range(x.shape[0]):
for j in range(i):
x[i, j] = x[j, i]
return x | en | 0.625553 | # Just use zeros... # Copy values from upper triangle to lower triangle. # Only works for square matrices. | 2.162269 | 2 |
thenewboston_node/business_logic/tests/test_memory_blockchain/test_validations/test_blockchain_state_validations.py | nishp77/thenewboston-node | 30 | 6617236 | <gh_stars>10-100
import pytest
from thenewboston_node.business_logic.blockchain.memory_blockchain import MemoryBlockchain
from thenewboston_node.business_logic.exceptions import ValidationError
from thenewboston_node.core.utils.types import hexstr
def test_validate_blockchain_state_raises(memory_blockchain: MemoryBlockchain):
blockchain = memory_blockchain
assert blockchain.blockchain_states
for balance in blockchain.blockchain_states[0].account_states.values():
balance.balance_lock = hexstr()
with pytest.raises(ValidationError, match='Account state balance_lock must be not empty'):
blockchain.validate_blockchain_states()
blockchain.blockchain_states = []
with pytest.raises(ValidationError, match='Blockchain must contain at least one blockchain state'):
blockchain.validate_blockchain_states()
| import pytest
from thenewboston_node.business_logic.blockchain.memory_blockchain import MemoryBlockchain
from thenewboston_node.business_logic.exceptions import ValidationError
from thenewboston_node.core.utils.types import hexstr
def test_validate_blockchain_state_raises(memory_blockchain: MemoryBlockchain):
blockchain = memory_blockchain
assert blockchain.blockchain_states
for balance in blockchain.blockchain_states[0].account_states.values():
balance.balance_lock = hexstr()
with pytest.raises(ValidationError, match='Account state balance_lock must be not empty'):
blockchain.validate_blockchain_states()
blockchain.blockchain_states = []
with pytest.raises(ValidationError, match='Blockchain must contain at least one blockchain state'):
blockchain.validate_blockchain_states() | none | 1 | 2.466452 | 2 | |
auth/Editor.py | fmdallas/myTornadoWebApp | 0 | 6617237 | <reponame>fmdallas/myTornadoWebApp
__author__ = 'helloworld'
import Authenticator
import Authorizor
# set up a test user and permission
Authenticator.authenticator.add_user("joe","<PASSWORD>")
Authorizor.authorizor.add_permission("test program")
Authorizor.authorizor.add_permission("change program")
Authorizor.authorizor.permit_user("test program","joe")
class Editor:
def __init__(self):
self.username = None
self.menu_map = {
"login": self.login,
"test":self.test,
"change":self.change,
"quit":self.quit
}
def login(self):
logged_in = False
while not logged_in:
username = input("username: ")
password = input("password: ")
try:
logged_in = Authenticator.authenticator.login(username,password)
except Authenticator.InvalidUsername:
print "Sorry, that username does not exist"
except Authenticator.InvalidPassword:
print "Sorry, incorrect password"
else:
self.username = username
def is_permitted(self, permission):
try:
Authorizor.authorizor.check_permission(permission,self.username)
except Authorizor.NotLoginError as e:
print "{} is not logged in ".format(e.username)
return False
except Authorizor.NotPermittedError as e:
print "{} cannot {}".format(e.username,permission)
return False
else:
return True
def test(self):
if self.is_permitted("test program"):
print "testing program now..."
def change(self):
if self.is_permitted("change program"):
print "changing program now..."
def quit(self):
raise SystemExit()
def menu(self):
try:
answer = ""
while True:
print """Please enter a command:
\tlogin\tLogin
\ttest\tTest the program
\tchange\tChange the program
\tquit\tQuit
"""
answer = input("enter a command: ").lower()
try:
func = self.menu_map[answer]
except KeyError:
print "{} is not a valid option".format(answer)
else:
func()
finally:
print "Thank you for testing the auth module"
Editor().menu()
| __author__ = 'helloworld'
import Authenticator
import Authorizor
# set up a test user and permission
Authenticator.authenticator.add_user("joe","<PASSWORD>")
Authorizor.authorizor.add_permission("test program")
Authorizor.authorizor.add_permission("change program")
Authorizor.authorizor.permit_user("test program","joe")
class Editor:
def __init__(self):
self.username = None
self.menu_map = {
"login": self.login,
"test":self.test,
"change":self.change,
"quit":self.quit
}
def login(self):
logged_in = False
while not logged_in:
username = input("username: ")
password = input("password: ")
try:
logged_in = Authenticator.authenticator.login(username,password)
except Authenticator.InvalidUsername:
print "Sorry, that username does not exist"
except Authenticator.InvalidPassword:
print "Sorry, incorrect password"
else:
self.username = username
def is_permitted(self, permission):
try:
Authorizor.authorizor.check_permission(permission,self.username)
except Authorizor.NotLoginError as e:
print "{} is not logged in ".format(e.username)
return False
except Authorizor.NotPermittedError as e:
print "{} cannot {}".format(e.username,permission)
return False
else:
return True
def test(self):
if self.is_permitted("test program"):
print "testing program now..."
def change(self):
if self.is_permitted("change program"):
print "changing program now..."
def quit(self):
raise SystemExit()
def menu(self):
try:
answer = ""
while True:
print """Please enter a command:
\tlogin\tLogin
\ttest\tTest the program
\tchange\tChange the program
\tquit\tQuit
"""
answer = input("enter a command: ").lower()
try:
func = self.menu_map[answer]
except KeyError:
print "{} is not a valid option".format(answer)
else:
func()
finally:
print "Thank you for testing the auth module"
Editor().menu() | en | 0.559174 | # set up a test user and permission Please enter a command: \tlogin\tLogin \ttest\tTest the program \tchange\tChange the program \tquit\tQuit | 3.459337 | 3 |
split_dataset.py | nico-buehler/UrsoNet | 31 | 6617238 | import argparse
import pandas as pd
import random
import glob
import os
import numpy as np
# Parse command line arguments
parser = argparse.ArgumentParser(description='Split dataset.')
parser.add_argument('--dataset_dir',
required=True,
metavar='dir/to/dataset',
help='Relative path to dataset dir')
parser.add_argument('--test_percentage',
type=int,
default=10,
help='Percentage of images used as a test set')
parser.add_argument('--val_percentage',
type=int,
default=10,
help='Percentage of images used as a validation set')
args = parser.parse_args()
rgb_list = glob.glob(os.path.join(args.dataset_dir, '*rgb.png'))
nr_images = len(rgb_list)
poses = pd.read_csv(os.path.join(args.dataset_dir, 'gt.csv'))
assert nr_images == len(poses)
# Create random list for shuffling
shuffle_ids = np.arange(nr_images)
random.shuffle(shuffle_ids)
nr_testing_images = int(nr_images*args.test_percentage*0.01+0.5)
nr_nontraining_images = int(nr_images*(args.test_percentage+args.val_percentage)*0.01+0.5)
# Split poses according to shuffle
poses_test = poses.loc[shuffle_ids[0:nr_testing_images]]
poses_val = poses.loc[shuffle_ids[nr_testing_images:nr_nontraining_images]]
poses_train = poses.loc[shuffle_ids[nr_nontraining_images:nr_images]]
poses_test.to_csv(os.path.join(args.dataset_dir,'test_poses_gt.csv'), index=False)
poses_val.to_csv(os.path.join(args.dataset_dir,'val_poses_gt.csv'), index=False)
poses_train.to_csv(os.path.join(args.dataset_dir,'train_poses_gt.csv'), index=False)
# Split images according to shuffle
training_img_list = []
testing_img_list = []
val_img_list = []
for i in range(nr_images):
img_name = str(shuffle_ids[i]) + "_rgb.png"
if i<nr_testing_images:
testing_img_list.append(img_name)
elif i<nr_nontraining_images:
val_img_list.append(img_name)
else:
training_img_list.append(img_name)
# Writing img lists
with open(os.path.join(args.dataset_dir,'test_images.csv'), 'w') as f:
for img_name in testing_img_list:
f.write(img_name)
f.write('\n')
with open(os.path.join(args.dataset_dir,'train_images.csv'), 'w') as f:
for img_name in training_img_list:
f.write(img_name)
f.write('\n')
with open(os.path.join(args.dataset_dir,'val_images.csv'), 'w') as f:
for img_name in val_img_list:
f.write(img_name)
f.write('\n')
| import argparse
import pandas as pd
import random
import glob
import os
import numpy as np
# Parse command line arguments
parser = argparse.ArgumentParser(description='Split dataset.')
parser.add_argument('--dataset_dir',
required=True,
metavar='dir/to/dataset',
help='Relative path to dataset dir')
parser.add_argument('--test_percentage',
type=int,
default=10,
help='Percentage of images used as a test set')
parser.add_argument('--val_percentage',
type=int,
default=10,
help='Percentage of images used as a validation set')
args = parser.parse_args()
rgb_list = glob.glob(os.path.join(args.dataset_dir, '*rgb.png'))
nr_images = len(rgb_list)
poses = pd.read_csv(os.path.join(args.dataset_dir, 'gt.csv'))
assert nr_images == len(poses)
# Create random list for shuffling
shuffle_ids = np.arange(nr_images)
random.shuffle(shuffle_ids)
nr_testing_images = int(nr_images*args.test_percentage*0.01+0.5)
nr_nontraining_images = int(nr_images*(args.test_percentage+args.val_percentage)*0.01+0.5)
# Split poses according to shuffle
poses_test = poses.loc[shuffle_ids[0:nr_testing_images]]
poses_val = poses.loc[shuffle_ids[nr_testing_images:nr_nontraining_images]]
poses_train = poses.loc[shuffle_ids[nr_nontraining_images:nr_images]]
poses_test.to_csv(os.path.join(args.dataset_dir,'test_poses_gt.csv'), index=False)
poses_val.to_csv(os.path.join(args.dataset_dir,'val_poses_gt.csv'), index=False)
poses_train.to_csv(os.path.join(args.dataset_dir,'train_poses_gt.csv'), index=False)
# Split images according to shuffle
training_img_list = []
testing_img_list = []
val_img_list = []
for i in range(nr_images):
img_name = str(shuffle_ids[i]) + "_rgb.png"
if i<nr_testing_images:
testing_img_list.append(img_name)
elif i<nr_nontraining_images:
val_img_list.append(img_name)
else:
training_img_list.append(img_name)
# Writing img lists
with open(os.path.join(args.dataset_dir,'test_images.csv'), 'w') as f:
for img_name in testing_img_list:
f.write(img_name)
f.write('\n')
with open(os.path.join(args.dataset_dir,'train_images.csv'), 'w') as f:
for img_name in training_img_list:
f.write(img_name)
f.write('\n')
with open(os.path.join(args.dataset_dir,'val_images.csv'), 'w') as f:
for img_name in val_img_list:
f.write(img_name)
f.write('\n')
| en | 0.701978 | # Parse command line arguments # Create random list for shuffling # Split poses according to shuffle # Split images according to shuffle # Writing img lists | 2.497843 | 2 |
util/plot.py | margitantal68/sapimouse | 2 | 6617239 | import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from util.utils import create_userids, print_list
from util.normalization import normalize_rows
from sklearn import metrics
import util.settings as st
import warnings
from sklearn.preprocessing import LabelEncoder
warnings.filterwarnings("ignore")
def plot_scores(positive_scores, negative_scores, filename='scores.png', title='Score distribution'):
set_style()
plt.clf()
df = pd.DataFrame([positive_scores, negative_scores])
BINS = np.linspace(df.min(), df.max(), 31)
sns.distplot(positive_scores, norm_hist=True, color='green', bins=31)
sns.distplot(negative_scores, norm_hist=True, color='red', bins=31)
# plt.legend(loc='upper left')
plt.legend(['Genuine', 'Impostor'], loc='best')
plt.xlabel('Score')
plt.title(title)
plt.show()
# plt.savefig(filename + '.png')
def set_style():
# This sets reasonable defaults for font size for
# a figure that will go in a paper
sns.set_context("paper", font_scale = 2)
# Set the font to be serif, rather than sans
sns.set(font='serif')
# Make the background white, and specify the
# specific font family
sns.set_style("white", {
"font.family": "serif",
"font.serif": ["Times", "Palatino", "serif"]
})
sns.set_style("ticks")
sns.set_style("whitegrid")
def plot_ROC_single(ee_file, title = 'ROC curve'):
set_style()
ee_data = pd.read_csv(ee_file)
auc_ee = metrics.auc(ee_data['FPR'], ee_data['TPR'])
plt.clf()
plt.title(title)
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive rate')
plt.plot(ee_data['FPR'], ee_data['TPR'], '-', label = 'AUC_EE = %0.2f' % auc_ee)
label_ee = 'AUC = %0.2f' % auc_ee
legend_str = [label_ee]
plt.legend(legend_str)
plt.show()
# create a boxplot from a dataframe
#
def csv2boxplot(df, columns, title, ylabel, outputfilename):
myFig = plt.figure()
res = df.boxplot(column=columns, return_type='axes')
plt.title(title)
plt.xlabel('Type of features')
plt.ylabel(ylabel)
myFig.savefig('output_png/boxplot_sapimouse.png', format = 'png')
myFig.savefig(outputfilename + '.png', format='png')
# myFig.savefig(outputfilename + '.eps', format='eps')
# plt.show(res)
def plot_ROC_filelist(filelist, title = 'ROC curve', outputfilename='roc.png'):
set_style()
plt.clf()
counter = 1
labels = []
for file in filelist:
data = pd.read_csv(file)
auc = metrics.auc(data['FPR'], data['TPR'])
# label = 'blocks: %2d' % counter
label = 'AUC #blocks:%2d = %0.2f' % (counter, auc)
plt.plot(data['FPR'], data['TPR'], label = 'AUC %2d blocks = %0.2f' % (counter, auc))
labels.append( label )
counter = counter + 1
plt.title(title)
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive rate')
plt.legend(labels)
plt.show()
# plt.savefig(outputfilename + '.png', format='png')
# plt.savefig(outputfilename + '.eps', format='eps') | import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from util.utils import create_userids, print_list
from util.normalization import normalize_rows
from sklearn import metrics
import util.settings as st
import warnings
from sklearn.preprocessing import LabelEncoder
warnings.filterwarnings("ignore")
def plot_scores(positive_scores, negative_scores, filename='scores.png', title='Score distribution'):
set_style()
plt.clf()
df = pd.DataFrame([positive_scores, negative_scores])
BINS = np.linspace(df.min(), df.max(), 31)
sns.distplot(positive_scores, norm_hist=True, color='green', bins=31)
sns.distplot(negative_scores, norm_hist=True, color='red', bins=31)
# plt.legend(loc='upper left')
plt.legend(['Genuine', 'Impostor'], loc='best')
plt.xlabel('Score')
plt.title(title)
plt.show()
# plt.savefig(filename + '.png')
def set_style():
# This sets reasonable defaults for font size for
# a figure that will go in a paper
sns.set_context("paper", font_scale = 2)
# Set the font to be serif, rather than sans
sns.set(font='serif')
# Make the background white, and specify the
# specific font family
sns.set_style("white", {
"font.family": "serif",
"font.serif": ["Times", "Palatino", "serif"]
})
sns.set_style("ticks")
sns.set_style("whitegrid")
def plot_ROC_single(ee_file, title = 'ROC curve'):
set_style()
ee_data = pd.read_csv(ee_file)
auc_ee = metrics.auc(ee_data['FPR'], ee_data['TPR'])
plt.clf()
plt.title(title)
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive rate')
plt.plot(ee_data['FPR'], ee_data['TPR'], '-', label = 'AUC_EE = %0.2f' % auc_ee)
label_ee = 'AUC = %0.2f' % auc_ee
legend_str = [label_ee]
plt.legend(legend_str)
plt.show()
# create a boxplot from a dataframe
#
def csv2boxplot(df, columns, title, ylabel, outputfilename):
myFig = plt.figure()
res = df.boxplot(column=columns, return_type='axes')
plt.title(title)
plt.xlabel('Type of features')
plt.ylabel(ylabel)
myFig.savefig('output_png/boxplot_sapimouse.png', format = 'png')
myFig.savefig(outputfilename + '.png', format='png')
# myFig.savefig(outputfilename + '.eps', format='eps')
# plt.show(res)
def plot_ROC_filelist(filelist, title = 'ROC curve', outputfilename='roc.png'):
set_style()
plt.clf()
counter = 1
labels = []
for file in filelist:
data = pd.read_csv(file)
auc = metrics.auc(data['FPR'], data['TPR'])
# label = 'blocks: %2d' % counter
label = 'AUC #blocks:%2d = %0.2f' % (counter, auc)
plt.plot(data['FPR'], data['TPR'], label = 'AUC %2d blocks = %0.2f' % (counter, auc))
labels.append( label )
counter = counter + 1
plt.title(title)
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive rate')
plt.legend(labels)
plt.show()
# plt.savefig(outputfilename + '.png', format='png')
# plt.savefig(outputfilename + '.eps', format='eps') | en | 0.477408 | # plt.legend(loc='upper left') # plt.savefig(filename + '.png') # This sets reasonable defaults for font size for # a figure that will go in a paper # Set the font to be serif, rather than sans # Make the background white, and specify the # specific font family # create a boxplot from a dataframe # # myFig.savefig(outputfilename + '.eps', format='eps') # plt.show(res) # label = 'blocks: %2d' % counter #blocks:%2d = %0.2f' % (counter, auc) # plt.savefig(outputfilename + '.png', format='png') # plt.savefig(outputfilename + '.eps', format='eps') | 2.799357 | 3 |
programs/plotdi_e.py | yamamon75/PmagPy | 2 | 6617240 | #!/usr/bin/env python
import sys
import matplotlib
if matplotlib.get_backend() != "TKAgg":
matplotlib.use("TKAgg")
import pmagpy.pmag as pmag
import pmagpy.pmagplotlib as pmagplotlib
def _print_stats(stat_pars, mode=None):
    """Print one statistics dictionary in the historical output format.

    Float values are printed as '%7.1f'; the sample count 'n' as an integer.
    When *mode* is given, a "mode <m>" header line is printed first.
    """
    if mode is not None:
        print("mode ", mode)
    for key in list(stat_pars.keys()):
        if key != 'n':
            print(" ", key, '%7.1f' % (stat_pars[key]))
        if key == 'n':
            print(" ", key, ' %i' % (stat_pars[key]))


def _ellipse_pars(stat_pars):
    """Return [dec, inc, Zeta, Zdec, Zinc, Eta, Edec, Einc] from a
    Bingham/Kent statistics dictionary, in the order expected by
    pmagplotlib.plot_conf()."""
    return [stat_pars[key] for key in
            ('dec', 'inc', 'Zeta', 'Zdec', 'Zinc', 'Eta', 'Edec', 'Einc')]


def _fisher_pars(fpars):
    """Return the plot_conf() parameter list describing a Fisher
    confidence cone (alpha95 circle) around the mean direction.

    NOTE(review): fpars['inc'] == 0 raises ZeroDivisionError, exactly as in
    the original implementation — confirm whether that input is possible.
    """
    isign = abs(fpars['inc']) / fpars['inc']
    return [fpars['dec'],
            fpars['inc'],
            fpars['alpha95'],             # Beta
            fpars['dec'],
            fpars['inc'] - isign * 90.,   # Beta inc
            fpars['alpha95'],             # gamma
            fpars['dec'] + 90.,           # Beta dec
            0.]                           # Beta inc


def main():
    """
    NAME
        plotdi_e.py
    DESCRIPTION
        plots equal area projection from dec inc data and cones of confidence
        (Fisher, kent or Bingham or bootstrap).
    INPUT FORMAT
        takes dec/inc as first two columns in space delimited file
    SYNTAX
        plotdi_e.py [command line options]
    OPTIONS
        -h prints help message and quits
        -i for interactive parameter entry
        -f FILE, sets input filename on command line
        -Fish plots unit vector mean direction, alpha95
        -Bing plots Principal direction, Bingham confidence ellipse
        -Kent plots unit vector mean direction, confidence ellipse
        -Boot E plots unit vector mean direction, bootstrapped confidence ellipse
        -Boot V plots unit vector mean direction, distribution of bootstrapped means
    """
    dist = 'F'  # default distribution is Fisherian
    mode = 1
    title = ""
    EQ = {'eq': 1}  # maps figure names to matplotlib figure numbers
    if len(sys.argv) > 0:
        if '-h' in sys.argv:  # check if help is needed
            print(main.__doc__)
            sys.exit()  # graceful quit
        if '-i' in sys.argv:  # interactive parameter entry
            fname = input("Enter file name with dec, inc data: ")
            dist = input("Enter desired distrubution: [Fish]er, [Bing]ham, [Kent] [Boot] [default is Fisher]: ")
            if dist == "":
                dist = "F"
            if dist == "Bing":
                dist = "B"
            if dist == "Kent":
                dist = "K"
            if dist == "Boot":
                boot_type = input(" Ellipses or distribution of vectors? [E]/V ")
                if boot_type == "" or boot_type == "E":
                    dist = "BE"
                else:
                    # BUGFIX: both branches previously set dist="BE", making the
                    # interactive bootstrapped-vectors ('V') choice unreachable.
                    # Mirror the '-Boot V' command-line path, including the
                    # second figure needed for the eigenvector plot.
                    dist = "BV"
                    EQ['bdirs'] = 2
                    pmagplotlib.plot_init(EQ['bdirs'], 5, 5)
        else:
            if '-f' in sys.argv:
                ind = sys.argv.index('-f')
                fname = sys.argv[ind + 1]
            else:
                print('you must specify a file name')
                print(main.__doc__)
                sys.exit()
            if '-Bing' in sys.argv:
                dist = 'B'
            if '-Kent' in sys.argv:
                dist = 'K'
            if '-Boot' in sys.argv:
                ind = sys.argv.index('-Boot')
                boot_type = sys.argv[ind + 1]
                if boot_type == 'E':
                    dist = 'BE'
                elif boot_type == 'V':
                    dist = 'BV'
                    EQ['bdirs'] = 2
                    pmagplotlib.plot_init(EQ['bdirs'], 5, 5)
                else:
                    print(main.__doc__)
                    sys.exit()
    pmagplotlib.plot_init(EQ['eq'], 5, 5)
    # Read the dec/inc pairs. BUGFIX: close the input file via a context
    # manager (it was previously opened and never closed).
    with open(fname, 'r') as f:
        data = f.readlines()
    DIs = []     # (dec, inc, weight) tuples for the pmag routines
    DiRecs = []  # record dicts for pmagplotlib.plot_conf()
    nDIs, rDIs, npars, rpars = [], [], [], []
    mode = 1
    for line in data:
        rec = line.split()  # split each line on whitespace to get records
        DIs.append((float(rec[0]), float(rec[1]), 1.))
        DiRecs.append({'dec': rec[0], 'inc': rec[1], 'direction_type': 'l'})
    # Split the data into two modes around the principal direction.
    ppars = pmag.doprinc(DIs)
    for rec in DIs:
        angle = pmag.angle([rec[0], rec[1]], [ppars['dec'], ppars['inc']])
        if angle > 90.:
            rDIs.append(rec)
        else:
            nDIs.append(rec)
    if dist == 'B':  # Bingham statistics on the whole dataset
        title = "Bingham confidence ellipse"
        bpars = pmag.dobingham(DIs)
        _print_stats(bpars)
        npars = _ellipse_pars(bpars)
    if dist == 'F':  # Fisher statistics per mode
        title = "Fisher confidence cone"
        if len(nDIs) > 3:
            fpars = pmag.fisher_mean(nDIs)
            _print_stats(fpars, mode)
            mode += 1
            npars = _fisher_pars(fpars)
        if len(rDIs) > 3:
            fpars = pmag.fisher_mean(rDIs)
            _print_stats(fpars, mode)
            mode += 1
            rpars = _fisher_pars(fpars)
    if dist == 'K':  # Kent statistics per mode
        title = "Kent confidence ellipse"
        if len(nDIs) > 3:
            kpars = pmag.dokent(nDIs, len(nDIs))
            _print_stats(kpars, mode)
            mode += 1
            npars = _ellipse_pars(kpars)
        if len(rDIs) > 3:
            kpars = pmag.dokent(rDIs, len(rDIs))
            _print_stats(kpars, mode)
            mode += 1
            rpars = _ellipse_pars(kpars)
    else:  # assume bootstrap
        if dist == 'BE':  # bootstrapped (pseudo-Kent) confidence ellipses
            if len(nDIs) > 5:
                BnDIs = pmag.di_boot(nDIs)
                Bkpars = pmag.dokent(BnDIs, 1.)
                _print_stats(Bkpars, mode)
                mode += 1
                npars = _ellipse_pars(Bkpars)
            if len(rDIs) > 5:
                BrDIs = pmag.di_boot(rDIs)
                Bkpars = pmag.dokent(BrDIs, 1.)
                _print_stats(Bkpars, mode)
                mode += 1
                rpars = _ellipse_pars(Bkpars)
            title = "Bootstrapped confidence ellipse"
        elif dist == 'BV':  # plot the cloud of bootstrapped mean directions
            if len(nDIs) > 5:
                pmagplotlib.plot_eq(EQ['eq'], nDIs, 'Data')
                BnDIs = pmag.di_boot(nDIs)
                pmagplotlib.plot_eq(EQ['bdirs'], BnDIs, 'Bootstrapped Eigenvectors')
            if len(rDIs) > 5:
                BrDIs = pmag.di_boot(rDIs)
                if len(nDIs) > 5:  # plot on existing figures
                    pmagplotlib.plot_di(EQ['eq'], rDIs)
                    pmagplotlib.plot_di(EQ['bdirs'], BrDIs)
                else:
                    pmagplotlib.plot_eq(EQ['eq'], rDIs, 'Data')
                    pmagplotlib.plot_eq(EQ['bdirs'], BrDIs, 'Bootstrapped Eigenvectors')
            pmagplotlib.draw_figs(EQ)
            ans = input('s[a]ve, [q]uit ')
            if ans == 'q':
                sys.exit()
            if ans == 'a':
                files = {key: 'BE_' + key + '.svg' for key in EQ}
                pmagplotlib.save_plots(EQ, files)
            sys.exit()
    # Plot the data together with whichever confidence estimates were computed.
    if len(nDIs) > 5:
        pmagplotlib.plot_conf(EQ['eq'], title, DiRecs, npars, 1)
        if len(rDIs) > 5 and dist != 'B':
            pmagplotlib.plot_conf(EQ['eq'], title, [], rpars, 0)
    elif len(rDIs) > 5 and dist != 'B':
        pmagplotlib.plot_conf(EQ['eq'], title, DiRecs, rpars, 1)
    pmagplotlib.draw_figs(EQ)
    ans = input('s[a]ve, [q]uit ')
    if ans == 'q':
        sys.exit()
    if ans == 'a':
        files = {key: key + '.svg' for key in EQ}
        pmagplotlib.save_plots(EQ, files)
#
if __name__ == "__main__":
    # Script entry point: parse sys.argv (or prompt interactively) and plot.
    main()
| #!/usr/bin/env python
import sys
import matplotlib
if matplotlib.get_backend() != "TKAgg":
matplotlib.use("TKAgg")
import pmagpy.pmag as pmag
import pmagpy.pmagplotlib as pmagplotlib
def _print_stats(stat_pars, mode=None):
    """Print one statistics dictionary in the historical output format.

    Float values are printed as '%7.1f'; the sample count 'n' as an integer.
    When *mode* is given, a "mode <m>" header line is printed first.
    """
    if mode is not None:
        print("mode ", mode)
    for key in list(stat_pars.keys()):
        if key != 'n':
            print(" ", key, '%7.1f' % (stat_pars[key]))
        if key == 'n':
            print(" ", key, ' %i' % (stat_pars[key]))


def _ellipse_pars(stat_pars):
    """Return [dec, inc, Zeta, Zdec, Zinc, Eta, Edec, Einc] from a
    Bingham/Kent statistics dictionary, in the order expected by
    pmagplotlib.plot_conf()."""
    return [stat_pars[key] for key in
            ('dec', 'inc', 'Zeta', 'Zdec', 'Zinc', 'Eta', 'Edec', 'Einc')]


def _fisher_pars(fpars):
    """Return the plot_conf() parameter list describing a Fisher
    confidence cone (alpha95 circle) around the mean direction.

    NOTE(review): fpars['inc'] == 0 raises ZeroDivisionError, exactly as in
    the original implementation — confirm whether that input is possible.
    """
    isign = abs(fpars['inc']) / fpars['inc']
    return [fpars['dec'],
            fpars['inc'],
            fpars['alpha95'],             # Beta
            fpars['dec'],
            fpars['inc'] - isign * 90.,   # Beta inc
            fpars['alpha95'],             # gamma
            fpars['dec'] + 90.,           # Beta dec
            0.]                           # Beta inc


def main():
    """
    NAME
        plotdi_e.py
    DESCRIPTION
        plots equal area projection from dec inc data and cones of confidence
        (Fisher, kent or Bingham or bootstrap).
    INPUT FORMAT
        takes dec/inc as first two columns in space delimited file
    SYNTAX
        plotdi_e.py [command line options]
    OPTIONS
        -h prints help message and quits
        -i for interactive parameter entry
        -f FILE, sets input filename on command line
        -Fish plots unit vector mean direction, alpha95
        -Bing plots Principal direction, Bingham confidence ellipse
        -Kent plots unit vector mean direction, confidence ellipse
        -Boot E plots unit vector mean direction, bootstrapped confidence ellipse
        -Boot V plots unit vector mean direction, distribution of bootstrapped means
    """
    dist = 'F'  # default distribution is Fisherian
    mode = 1
    title = ""
    EQ = {'eq': 1}  # maps figure names to matplotlib figure numbers
    if len(sys.argv) > 0:
        if '-h' in sys.argv:  # check if help is needed
            print(main.__doc__)
            sys.exit()  # graceful quit
        if '-i' in sys.argv:  # interactive parameter entry
            fname = input("Enter file name with dec, inc data: ")
            dist = input("Enter desired distrubution: [Fish]er, [Bing]ham, [Kent] [Boot] [default is Fisher]: ")
            if dist == "":
                dist = "F"
            if dist == "Bing":
                dist = "B"
            if dist == "Kent":
                dist = "K"
            if dist == "Boot":
                boot_type = input(" Ellipses or distribution of vectors? [E]/V ")
                if boot_type == "" or boot_type == "E":
                    dist = "BE"
                else:
                    # BUGFIX: both branches previously set dist="BE", making the
                    # interactive bootstrapped-vectors ('V') choice unreachable.
                    # Mirror the '-Boot V' command-line path, including the
                    # second figure needed for the eigenvector plot.
                    dist = "BV"
                    EQ['bdirs'] = 2
                    pmagplotlib.plot_init(EQ['bdirs'], 5, 5)
        else:
            if '-f' in sys.argv:
                ind = sys.argv.index('-f')
                fname = sys.argv[ind + 1]
            else:
                print('you must specify a file name')
                print(main.__doc__)
                sys.exit()
            if '-Bing' in sys.argv:
                dist = 'B'
            if '-Kent' in sys.argv:
                dist = 'K'
            if '-Boot' in sys.argv:
                ind = sys.argv.index('-Boot')
                boot_type = sys.argv[ind + 1]
                if boot_type == 'E':
                    dist = 'BE'
                elif boot_type == 'V':
                    dist = 'BV'
                    EQ['bdirs'] = 2
                    pmagplotlib.plot_init(EQ['bdirs'], 5, 5)
                else:
                    print(main.__doc__)
                    sys.exit()
    pmagplotlib.plot_init(EQ['eq'], 5, 5)
    # Read the dec/inc pairs. BUGFIX: close the input file via a context
    # manager (it was previously opened and never closed).
    with open(fname, 'r') as f:
        data = f.readlines()
    DIs = []     # (dec, inc, weight) tuples for the pmag routines
    DiRecs = []  # record dicts for pmagplotlib.plot_conf()
    nDIs, rDIs, npars, rpars = [], [], [], []
    mode = 1
    for line in data:
        rec = line.split()  # split each line on whitespace to get records
        DIs.append((float(rec[0]), float(rec[1]), 1.))
        DiRecs.append({'dec': rec[0], 'inc': rec[1], 'direction_type': 'l'})
    # Split the data into two modes around the principal direction.
    ppars = pmag.doprinc(DIs)
    for rec in DIs:
        angle = pmag.angle([rec[0], rec[1]], [ppars['dec'], ppars['inc']])
        if angle > 90.:
            rDIs.append(rec)
        else:
            nDIs.append(rec)
    if dist == 'B':  # Bingham statistics on the whole dataset
        title = "Bingham confidence ellipse"
        bpars = pmag.dobingham(DIs)
        _print_stats(bpars)
        npars = _ellipse_pars(bpars)
    if dist == 'F':  # Fisher statistics per mode
        title = "Fisher confidence cone"
        if len(nDIs) > 3:
            fpars = pmag.fisher_mean(nDIs)
            _print_stats(fpars, mode)
            mode += 1
            npars = _fisher_pars(fpars)
        if len(rDIs) > 3:
            fpars = pmag.fisher_mean(rDIs)
            _print_stats(fpars, mode)
            mode += 1
            rpars = _fisher_pars(fpars)
    if dist == 'K':  # Kent statistics per mode
        title = "Kent confidence ellipse"
        if len(nDIs) > 3:
            kpars = pmag.dokent(nDIs, len(nDIs))
            _print_stats(kpars, mode)
            mode += 1
            npars = _ellipse_pars(kpars)
        if len(rDIs) > 3:
            kpars = pmag.dokent(rDIs, len(rDIs))
            _print_stats(kpars, mode)
            mode += 1
            rpars = _ellipse_pars(kpars)
    else:  # assume bootstrap
        if dist == 'BE':  # bootstrapped (pseudo-Kent) confidence ellipses
            if len(nDIs) > 5:
                BnDIs = pmag.di_boot(nDIs)
                Bkpars = pmag.dokent(BnDIs, 1.)
                _print_stats(Bkpars, mode)
                mode += 1
                npars = _ellipse_pars(Bkpars)
            if len(rDIs) > 5:
                BrDIs = pmag.di_boot(rDIs)
                Bkpars = pmag.dokent(BrDIs, 1.)
                _print_stats(Bkpars, mode)
                mode += 1
                rpars = _ellipse_pars(Bkpars)
            title = "Bootstrapped confidence ellipse"
        elif dist == 'BV':  # plot the cloud of bootstrapped mean directions
            if len(nDIs) > 5:
                pmagplotlib.plot_eq(EQ['eq'], nDIs, 'Data')
                BnDIs = pmag.di_boot(nDIs)
                pmagplotlib.plot_eq(EQ['bdirs'], BnDIs, 'Bootstrapped Eigenvectors')
            if len(rDIs) > 5:
                BrDIs = pmag.di_boot(rDIs)
                if len(nDIs) > 5:  # plot on existing figures
                    pmagplotlib.plot_di(EQ['eq'], rDIs)
                    pmagplotlib.plot_di(EQ['bdirs'], BrDIs)
                else:
                    pmagplotlib.plot_eq(EQ['eq'], rDIs, 'Data')
                    pmagplotlib.plot_eq(EQ['bdirs'], BrDIs, 'Bootstrapped Eigenvectors')
            pmagplotlib.draw_figs(EQ)
            ans = input('s[a]ve, [q]uit ')
            if ans == 'q':
                sys.exit()
            if ans == 'a':
                files = {key: 'BE_' + key + '.svg' for key in EQ}
                pmagplotlib.save_plots(EQ, files)
            sys.exit()
    # Plot the data together with whichever confidence estimates were computed.
    if len(nDIs) > 5:
        pmagplotlib.plot_conf(EQ['eq'], title, DiRecs, npars, 1)
        if len(rDIs) > 5 and dist != 'B':
            pmagplotlib.plot_conf(EQ['eq'], title, [], rpars, 0)
    elif len(rDIs) > 5 and dist != 'B':
        pmagplotlib.plot_conf(EQ['eq'], title, DiRecs, rpars, 1)
    pmagplotlib.draw_figs(EQ)
    ans = input('s[a]ve, [q]uit ')
    if ans == 'q':
        sys.exit()
    if ans == 'a':
        files = {key: key + '.svg' for key in EQ}
        pmagplotlib.save_plots(EQ, files)
#
if __name__ == "__main__":
    # Script entry point: parse sys.argv (or prompt interactively) and plot.
    main()
| en | 0.689247 | #!/usr/bin/env python NAME plotdi_e.py DESCRIPTION plots equal area projection from dec inc data and cones of confidence (Fisher, kent or Bingham or bootstrap). INPUT FORMAT takes dec/inc as first two columns in space delimited file SYNTAX plotdi_e.py [command line options] OPTIONS -h prints help message and quits -i for interactive parameter entry -f FILE, sets input filename on command line -Fish plots unit vector mean direction, alpha95 -Bing plots Principal direction, Bingham confidence ellipse -Kent plots unit vector mean direction, confidence ellipse -Boot E plots unit vector mean direction, bootstrapped confidence ellipse -Boot V plots unit vector mean direction, distribution of bootstrapped means # default distribution is Fisherian # check if help is needed # graceful quit # ask for filename # # # get to work # # set up list for dec inc data # read in the data from standard input # split each line on space to get records # split into two modes # get principal directions # do on whole dataset # Beta #Beta inc # gamma # Beta dec #Beta inc # Beta #Beta inc # gamma # Beta dec #Beta inc # assume bootstrap # plot on existing plots # | 2.904045 | 3 |
beartype/_data/mod/datamod.py | qiujiangkun/beartype | 0 | 6617241 | <gh_stars>0
#!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **module globals** (i.e., global constants describing various
well-known modules and packages).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
# See the "beartype.cave" submodule for further commentary.
__all__ = ['STAR_IMPORTS_CONSIDERED_HARMFUL']
# ....................{ NAMES }....................
BUILTINS_MODULE_NAME = 'builtins'
'''
Fully-qualified name of the **builtins module** (i.e., objects defined by the
standard :mod:`builtins` module and thus globally available by default
*without* requiring explicit importation).
'''
# ....................{ SETS }....................
TYPING_MODULE_NAMES = frozenset((
# Name of the official typing module bundled with the Python stdlib.
'typing',
# Name of the third-party "typing_extensions" module, backporting "typing"
# hints introduced in newer Python versions to older Python versions.
'typing_extensions',
))
'''
Frozen set of the fully-qualified names of all **typing modules** (i.e.,
modules officially declaring attributes usable for creating PEP-compliant type
hints accepted by both static and runtime type checkers).
'''
TYPING_MODULE_NAMES_DOTTED = frozenset(
f'{typing_module_name}.' for typing_module_name in TYPING_MODULE_NAMES)
'''
Frozen set of the fully-qualified ``.``-suffixed names of all typing modules.
This set is a negligible optimization enabling callers to perform slightly more
efficient testing of string prefixes against items of this specialized set than
those of the more general-purpose :data:`TYPING_MODULE_NAMES` set.
See Also
----------
:data:`TYPING_MODULE_NAMES`
Further details.
'''
| #!/usr/bin/env python3
# --------------------( LICENSE )--------------------
# Copyright (c) 2014-2022 Beartype authors.
# See "LICENSE" for further details.
'''
Project-wide **module globals** (i.e., global constants describing various
well-known modules and packages).
This private submodule is *not* intended for importation by downstream callers.
'''
# ....................{ IMPORTS }....................
# See the "beartype.cave" submodule for further commentary.
# Deliberately discourage star-imports from this private submodule.
__all__ = ['STAR_IMPORTS_CONSIDERED_HARMFUL']
# ....................{ NAMES }....................
BUILTINS_MODULE_NAME = 'builtins'
'''
Fully-qualified name of the **builtins module** (i.e., objects defined by the
standard :mod:`builtins` module and thus globally available by default
*without* requiring explicit importation).
'''
# ....................{ SETS }....................
TYPING_MODULE_NAMES = frozenset((
    # Name of the official typing module bundled with the Python stdlib.
    'typing',
    # Name of the third-party "typing_extensions" module, backporting "typing"
    # hints introduced in newer Python versions to older Python versions.
    'typing_extensions',
))
'''
Frozen set of the fully-qualified names of all **typing modules** (i.e.,
modules officially declaring attributes usable for creating PEP-compliant type
hints accepted by both static and runtime type checkers).
'''
# Precomputed once at import time so that callers testing string prefixes
# need not re-concatenate the trailing '.' on every lookup.
TYPING_MODULE_NAMES_DOTTED = frozenset(
    f'{typing_module_name}.' for typing_module_name in TYPING_MODULE_NAMES)
'''
Frozen set of the fully-qualified ``.``-suffixed names of all typing modules.
This set is a negligible optimization enabling callers to perform slightly more
efficient testing of string prefixes against items of this specialized set than
those of the more general-purpose :data:`TYPING_MODULE_NAMES` set.
See Also
----------
:data:`TYPING_MODULE_NAMES`
    Further details.
'''
odpttraininfo/__init__.py | friuli-jokyo/python-odpt-train-info | 0 | 6617242 | from . import config
from .cache import fetch_info, refresh_cache
from .odpt_components import Distributor, TrainInformation, to_json_default
# Semantic version of the odpttraininfo package.
# NOTE(review): presumably mirrored in the packaging metadata — keep in sync.
__version__ = "0.1.3"
__all__ = ["config","fetch_info","refresh_cache","Distributor","TrainInformation","to_json_default"] | from . import config
from .cache import fetch_info, refresh_cache
from .odpt_components import Distributor, TrainInformation, to_json_default
# Semantic version of the odpttraininfo package.
# NOTE(review): presumably mirrored in the packaging metadata — keep in sync.
__version__ = "0.1.3"
__all__ = ["config","fetch_info","refresh_cache","Distributor","TrainInformation","to_json_default"] | none | 1 | 1.357197 | 1 | |
genutil/__main__.py | MZH-bust/genutil | 0 | 6617243 | """ __main__.py """
def main():
    """Show the link to the repository on the console."""
    # NOTE: the printed message is user-facing and intentionally German;
    # runtime output is left untranslated.
    print("Dieses Tool stellt übergreifende, grundlegende Funktionalitäten zur Verfügung.\n"
          "Für weitergehende Informationen: https://github.com/MZH-bust/general_helpers")
if __name__ == "__main__":
    # Allows execution via `python -m genutil`.
    main()
| """ __main__.py """
def main():
    """Show the link to the repository on the console."""
    # NOTE: the printed message is user-facing and intentionally German;
    # runtime output is left untranslated.
    print("Dieses Tool stellt übergreifende, grundlegende Funktionalitäten zur Verfügung.\n"
          "Für weitergehende Informationen: https://github.com/MZH-bust/general_helpers")
if __name__ == "__main__":
    # Allows execution via `python -m genutil`.
    main()
| de | 0.897321 | __main__.py Zeigt den Link zum Repository in der Konsole an. | 1.53637 | 2 |
application/esdl/esh.py | ESDLMapEditorESSIM/energysystem_statistics | 0 | 6617244 | <gh_stars>0
# This work is based on original code developed and copyrighted by TNO 2020.
# Subsequent contributions are licensed to you by the developers of such code and are
# made available to the Project under one or several contributor license agreements.
#
# This work is licensed to you under the Apache License, Version 2.0.
# You may obtain a copy of the license at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Contributors:
# TNO - Initial implementation
# Manager:
# TNO
from pyecore.resources import ResourceSet, URI
from pyecore.resources import ResourceSet, URI
from pyecore.utils import alias
from application.esdl.resources.xmlresource import XMLResource
from application.esdl import esdl
import uuid
import logging
from io import BytesIO
log = logging.getLogger(__name__)
class EnergySystemHandler:
    """Load and manage ESDL energy-system models via pyecore resources.

    A fresh :class:`ResourceSet` is created for each loaded model so that
    previously loaded resources cannot leak into a new parse.
    """

    def __init__(self):
        # Populated by _new_resource_set() / load_from_string().
        self.rset = None
        self.resource = None
        self.energy_system = None
        self._new_resource_set()
        # ESDL's XML attribute is literally named 'from', a Python keyword,
        # so the pyecore-generated metamodel exposes it as 'from_'. The lines
        # below rename each such structural feature back to 'from' and add a
        # 'start' alias for more readable access from Python code.
        # NOTE(review): order matters — this must run before any (de)serialization.
        esdl.ProfileElement.from_.name = 'from'
        setattr(esdl.ProfileElement, 'from', esdl.ProfileElement.from_)
        alias('start', esdl.ProfileElement.from_)
        esdl.FromToIntItem.from_.name = 'from'
        setattr(esdl.FromToIntItem, 'from', esdl.FromToIntItem.from_)
        alias('start', esdl.FromToIntItem.from_)
        esdl.FromToDoubleItem.from_.name = 'from'
        setattr(esdl.FromToDoubleItem, 'from', esdl.FromToDoubleItem.from_)
        alias('start', esdl.FromToDoubleItem.from_)
    def _new_resource_set(self):
        """Replace the current ResourceSet with a fresh, configured one."""
        self.rset = ResourceSet()
        self._set_resource_factories()
    def _set_resource_factories(self):
        # Assign files with the .esdl extension to the XMLResource instead of default XMI
        self.rset.resource_factory['esdl'] = XMLResource
        self.rset.resource_factory['*'] = XMLResource
    def load_from_string(self, esdl_string: str, name: str = 'from_string'):
        """Loads an energy system from a string and adds it to a *new* resourceSet
        :returns the loaded EnergySystem """
        uri = StringURI(name+'.esdl', esdl_string)
        self._new_resource_set()
        self.resource = self.rset.create_resource(uri)
        try:
            self.resource.load()
            # The EnergySystem is the root (first) element of the resource.
            self.energy_system = self.resource.contents[0]
            return self.energy_system
        except Exception as e:
            # Log with the logical name for traceability, then re-raise so
            # callers can handle the parse failure themselves.
            log.error("Exception when loading resource: {}: {}".format(name, e))
            raise
    def get_energy_system(self):
        """Return the most recently loaded EnergySystem (or None)."""
        return self.energy_system
class StringURI(URI):
    """A pyecore URI backed by an in-memory byte buffer, so resources can be
    loaded from (and serialized to) plain Python strings."""

    def __init__(self, uri, text=None):
        """When *text* is given, pre-fill the input stream with its UTF-8 bytes."""
        super(StringURI, self).__init__(uri)
        if text is not None:
            self.__stream = BytesIO(text.encode('UTF-8'))

    def getvalue(self):
        """Return the buffered bytes decoded back into a str."""
        # A BytesIO is used throughout because StringIO did not work with
        # pyecore's writer (which emits bytes).
        return self.__stream.getvalue().decode('UTF-8')

    def create_instream(self):
        return self.__stream

    def create_outstream(self):
        # Start a fresh buffer for serialization output.
        self.__stream = BytesIO()
        return self.__stream

    def get_stream(self):
        return self.__stream
# Subsequent contributions are licensed to you by the developers of such code and are
# made available to the Project under one or several contributor license agreements.
#
# This work is licensed to you under the Apache License, Version 2.0.
# You may obtain a copy of the license at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Contributors:
# TNO - Initial implementation
# Manager:
# TNO
from pyecore.resources import ResourceSet, URI
from pyecore.resources import ResourceSet, URI
from pyecore.utils import alias
from application.esdl.resources.xmlresource import XMLResource
from application.esdl import esdl
import uuid
import logging
from io import BytesIO
log = logging.getLogger(__name__)
class EnergySystemHandler:
    """Load and manage ESDL energy-system models via pyecore resources.

    A fresh :class:`ResourceSet` is created for each loaded model so that
    previously loaded resources cannot leak into a new parse.
    """

    def __init__(self):
        # Populated by _new_resource_set() / load_from_string().
        self.rset = None
        self.resource = None
        self.energy_system = None
        self._new_resource_set()
        # ESDL's XML attribute is literally named 'from', a Python keyword,
        # so the pyecore-generated metamodel exposes it as 'from_'. The lines
        # below rename each such structural feature back to 'from' and add a
        # 'start' alias for more readable access from Python code.
        # NOTE(review): order matters — this must run before any (de)serialization.
        esdl.ProfileElement.from_.name = 'from'
        setattr(esdl.ProfileElement, 'from', esdl.ProfileElement.from_)
        alias('start', esdl.ProfileElement.from_)
        esdl.FromToIntItem.from_.name = 'from'
        setattr(esdl.FromToIntItem, 'from', esdl.FromToIntItem.from_)
        alias('start', esdl.FromToIntItem.from_)
        esdl.FromToDoubleItem.from_.name = 'from'
        setattr(esdl.FromToDoubleItem, 'from', esdl.FromToDoubleItem.from_)
        alias('start', esdl.FromToDoubleItem.from_)
    def _new_resource_set(self):
        """Replace the current ResourceSet with a fresh, configured one."""
        self.rset = ResourceSet()
        self._set_resource_factories()
    def _set_resource_factories(self):
        # Assign files with the .esdl extension to the XMLResource instead of default XMI
        self.rset.resource_factory['esdl'] = XMLResource
        self.rset.resource_factory['*'] = XMLResource
    def load_from_string(self, esdl_string: str, name: str = 'from_string'):
        """Loads an energy system from a string and adds it to a *new* resourceSet
        :returns the loaded EnergySystem """
        uri = StringURI(name+'.esdl', esdl_string)
        self._new_resource_set()
        self.resource = self.rset.create_resource(uri)
        try:
            self.resource.load()
            # The EnergySystem is the root (first) element of the resource.
            self.energy_system = self.resource.contents[0]
            return self.energy_system
        except Exception as e:
            # Log with the logical name for traceability, then re-raise so
            # callers can handle the parse failure themselves.
            log.error("Exception when loading resource: {}: {}".format(name, e))
            raise
    def get_energy_system(self):
        """Return the most recently loaded EnergySystem (or None)."""
        return self.energy_system
class StringURI(URI):
    """A pyecore URI backed by an in-memory byte buffer, so resources can be
    loaded from (and serialized to) plain Python strings."""

    def __init__(self, uri, text=None):
        """When *text* is given, pre-fill the input stream with its UTF-8 bytes."""
        super(StringURI, self).__init__(uri)
        if text is not None:
            self.__stream = BytesIO(text.encode('UTF-8'))

    def getvalue(self):
        """Return the buffered bytes decoded back into a str."""
        # A BytesIO is used throughout because StringIO did not work with
        # pyecore's writer (which emits bytes).
        return self.__stream.getvalue().decode('UTF-8')

    def create_instream(self):
        return self.__stream

    def create_outstream(self):
        # Start a fresh buffer for serialization output.
        self.__stream = BytesIO()
        return self.__stream

    def get_stream(self):
        return self.__stream
resolwe_bio/processes/variant_calling/genotype_gvcfs.py | romunov/resolwe-bio | 0 | 6617245 | <reponame>romunov/resolwe-bio<gh_stars>0
"""Run GATK GenotypeGVCFs tool."""
from plumbum import TEE
from resolwe.process import (
BooleanField,
Cmd,
DataField,
FileField,
GroupField,
IntegerField,
ListField,
Process,
SchedulingClass,
StringField,
)
class GatkGenotypeGVCFs(Process):
"""Consolidate GVCFs and run joint calling using GenotypeGVCFs tool."""
slug = "gatk-genotype-gvcfs"
name = "GATK GenotypeGVCFs"
category = "GATK"
process_type = "data:variants:vcf:genotypegvcfs"
version = "1.0.1"
scheduling_class = SchedulingClass.BATCH
requirements = {
"expression-engine": "jinja",
"executor": {
"docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/dnaseq:6.0.0"}
},
"resources": {
"cores": 4,
"memory": 32768,
},
}
data_name = "Cohort variants"
class Input:
"""Input fields for GatkGenotypeGVCFs."""
gvcfs = ListField(
DataField("variants:gvcf"),
label="Input data (GVCF)",
)
ref_seq = DataField("seq:nucleotide", label="Reference sequence")
intervals = DataField(
"bed",
label="Intervals file (.bed)",
)
dbsnp = DataField("variants:vcf", label="dbSNP file")
advanced = BooleanField(
label="Show advanced options",
description="Inspect and modify parameters.",
default=False,
)
class AdvancedOptions:
"""Advanced options."""
batch_size = IntegerField(
label="Batch size",
default=0,
description="Batch size controls the number of samples "
"for which readers are open at once and therefore provides "
"a way to minimize memory consumption. However, it can "
"take longer to complete. Use the consolidate flag if more "
"than a hundred batches were used. This will improve feature "
"read time. batchSize=0 means no batching "
"(i.e. readers for all samples will be opened at once).",
)
consolidate = BooleanField(
label="Consolidate",
default=False,
description="Boolean flag to enable consolidation. If "
"importing data in batches, a new fragment is created for "
"each batch. In case thousands of fragments are created, "
"GenomicsDB feature readers will try to open ~20x as many "
"files. Also, internally GenomicsDB would consume more "
"memory to maintain bookkeeping data from all fragments. "
"Use this flag to merge all fragments into one. Merging "
"can potentially improve read performance, however overall "
"benefit might not be noticeable as the top Java layers "
"have significantly higher overheads. This flag has no "
"effect if only one batch is used.",
)
advanced_options = GroupField(
AdvancedOptions, label="Advanced options", hidden="!advanced"
)
class Output:
"""Output fields for GatkGenotypeGVCFs."""
vcf = FileField(label="GVCF file")
tbi = FileField(label="Tabix index")
species = StringField(label="Species")
build = StringField(label="Build")
def run(self, inputs, outputs):
    """Run analysis.

    Consolidates the per-sample GVCFs into a GenomicsDB workspace
    (GATK GenomicsDBImport), then joint-calls the cohort with GATK
    GenotypeGVCFs. The result is bgzip-compressed and tabix-indexed.
    """
    variants = "cohort_variants.vcf"
    variants_gz = variants + ".gz"
    variants_index = variants_gz + ".tbi"
    sample_map_file = "sample_map.txt"

    # Joint calling across different species or genome builds would be
    # meaningless, so every input must agree with the first sample.
    species = inputs.gvcfs[0].output.species
    if not all(gvcf.output.species == species for gvcf in inputs.gvcfs):
        self.error("Not all of the input samples are of the same species.")
    build = inputs.gvcfs[0].output.build
    if not all(gvcf.output.build == build for gvcf in inputs.gvcfs):
        self.error("Not all of the input samples have the same genome build.")

    # GenomicsDBImport consumes a TSV sample map: <sample name>\t<GVCF path>.
    with open(sample_map_file, "w") as sample_map:
        for gvcf in inputs.gvcfs:
            sample_map.write(f"{gvcf.entity_name}\t{gvcf.output.vcf.path}\n")

    db_import_args = [
        "--genomicsdb-workspace-path",
        "database",
        "-L",
        inputs.intervals.output.bed.path,
        "--sample-name-map",
        sample_map_file,
        "--batch-size",
        inputs.advanced_options.batch_size,
        "--reader-threads",
        # Capped at 5 -- TODO confirm recommended cap for the GATK
        # version shipped in the container image.
        min(self.requirements.resources.cores, 5),
    ]
    if inputs.advanced_options.consolidate:
        # Bug fix: GenomicsDBImport's flag is --consolidate; the original
        # appended "--seqBias", which is a Salmon option GATK rejects.
        db_import_args.append("--consolidate")
    return_code, _, _ = Cmd["gatk"]["GenomicsDBImport"][db_import_args] & TEE(
        retcode=None
    )
    if return_code:
        self.error("GATK GenomicsDBImport tool failed.")

    genotype_gvcfs_inputs = [
        "-R",
        inputs.ref_seq.output.fasta.path,
        "-V",
        "gendb://database",
        "-O",
        variants,
        "-L",
        inputs.intervals.output.bed.path,
        "-D",
        inputs.dbsnp.output.vcf.path,
        "-G",
        "StandardAnnotation",
        "-G",
        "AS_StandardAnnotation",
        "--only-output-calls-starting-in-intervals",
    ]
    return_code, _, _ = Cmd["gatk"]["GenotypeGVCFs"][genotype_gvcfs_inputs] & TEE(
        retcode=None
    )
    if return_code:
        self.error("GATK GenotypeGVCFs tool failed.")

    # Compress and index the output variants file
    (Cmd["bgzip"]["-c", variants] > variants_gz)()
    Cmd["tabix"]["-p", "vcf", variants_gz]()

    outputs.vcf = variants_gz
    outputs.tbi = variants_index
    outputs.species = species
    outputs.build = build
| """Run GATK GenotypeGVCFs tool."""
from plumbum import TEE
from resolwe.process import (
BooleanField,
Cmd,
DataField,
FileField,
GroupField,
IntegerField,
ListField,
Process,
SchedulingClass,
StringField,
)
class GatkGenotypeGVCFs(Process):
"""Consolidate GVCFs and run joint calling using GenotypeGVCFs tool."""
# Process metadata consumed by the Resolwe framework.
slug = "gatk-genotype-gvcfs"
name = "GATK GenotypeGVCFs"
category = "GATK"
process_type = "data:variants:vcf:genotypegvcfs"
version = "1.0.1"
scheduling_class = SchedulingClass.BATCH
requirements = {
"expression-engine": "jinja",
"executor": {
# Container image providing the gatk/bgzip/tabix tools used in run().
"docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/dnaseq:6.0.0"}
},
"resources": {
# cores also caps GenomicsDBImport reader threads (see run()).
"cores": 4,
# presumably MiB (i.e. 32 GiB) -- TODO confirm framework units.
"memory": 32768,
},
}
# Display name of the produced data object.
data_name = "Cohort variants"
class Input:
"""Input fields for GatkGenotypeGVCFs."""
# Per-sample GVCFs; run() checks they all share one species and build.
gvcfs = ListField(
DataField("variants:gvcf"),
label="Input data (GVCF)",
)
ref_seq = DataField("seq:nucleotide", label="Reference sequence")
# Passed as -L to both GATK commands (see run()).
intervals = DataField(
"bed",
label="Intervals file (.bed)",
)
# Passed to GenotypeGVCFs as -D (see run()).
dbsnp = DataField("variants:vcf", label="dbSNP file")
# UI toggle: reveals the advanced_options group below (hidden="!advanced").
advanced = BooleanField(
label="Show advanced options",
description="Inspect and modify parameters.",
default=False,
)
class AdvancedOptions:
"""Advanced options."""
# Forwarded to GenomicsDBImport as --batch-size; 0 disables batching.
batch_size = IntegerField(
label="Batch size",
default=0,
description="Batch size controls the number of samples "
"for which readers are open at once and therefore provides "
"a way to minimize memory consumption. However, it can "
"take longer to complete. Use the consolidate flag if more "
"than a hundred batches were used. This will improve feature "
"read time. batchSize=0 means no batching "
"(i.e. readers for all samples will be opened at once).",
)
# When True, run() appends GenomicsDBImport's consolidation flag.
consolidate = BooleanField(
label="Consolidate",
default=False,
description="Boolean flag to enable consolidation. If "
"importing data in batches, a new fragment is created for "
"each batch. In case thousands of fragments are created, "
"GenomicsDB feature readers will try to open ~20x as many "
"files. Also, internally GenomicsDB would consume more "
"memory to maintain bookkeeping data from all fragments. "
"Use this flag to merge all fragments into one. Merging "
"can potentially improve read performance, however overall "
"benefit might not be noticeable as the top Java layers "
"have significantly higher overheads. This flag has no "
"effect if only one batch is used.",
)
advanced_options = GroupField(
AdvancedOptions, label="Advanced options", hidden="!advanced"
)
class Output:
"""Output fields for GatkGenotypeGVCFs."""
# NOTE(review): label says "GVCF file", but run() stores the
# bgzip-compressed joint-genotyped VCF here -- label looks stale.
vcf = FileField(label="GVCF file")
tbi = FileField(label="Tabix index")
species = StringField(label="Species")
build = StringField(label="Build")
def run(self, inputs, outputs):
    """Run analysis.

    Consolidates the per-sample GVCFs into a GenomicsDB workspace
    (GATK GenomicsDBImport), then joint-calls the cohort with GATK
    GenotypeGVCFs. The result is bgzip-compressed and tabix-indexed.
    """
    variants = "cohort_variants.vcf"
    variants_gz = variants + ".gz"
    variants_index = variants_gz + ".tbi"
    sample_map_file = "sample_map.txt"

    # Joint calling across different species or genome builds would be
    # meaningless, so every input must agree with the first sample.
    species = inputs.gvcfs[0].output.species
    if not all(gvcf.output.species == species for gvcf in inputs.gvcfs):
        self.error("Not all of the input samples are of the same species.")
    build = inputs.gvcfs[0].output.build
    if not all(gvcf.output.build == build for gvcf in inputs.gvcfs):
        self.error("Not all of the input samples have the same genome build.")

    # GenomicsDBImport consumes a TSV sample map: <sample name>\t<GVCF path>.
    with open(sample_map_file, "w") as sample_map:
        for gvcf in inputs.gvcfs:
            sample_map.write(f"{gvcf.entity_name}\t{gvcf.output.vcf.path}\n")

    db_import_args = [
        "--genomicsdb-workspace-path",
        "database",
        "-L",
        inputs.intervals.output.bed.path,
        "--sample-name-map",
        sample_map_file,
        "--batch-size",
        inputs.advanced_options.batch_size,
        "--reader-threads",
        # Capped at 5 -- TODO confirm recommended cap for the GATK
        # version shipped in the container image.
        min(self.requirements.resources.cores, 5),
    ]
    if inputs.advanced_options.consolidate:
        # Bug fix: GenomicsDBImport's flag is --consolidate; the original
        # appended "--seqBias", which is a Salmon option GATK rejects.
        db_import_args.append("--consolidate")
    return_code, _, _ = Cmd["gatk"]["GenomicsDBImport"][db_import_args] & TEE(
        retcode=None
    )
    if return_code:
        self.error("GATK GenomicsDBImport tool failed.")

    genotype_gvcfs_inputs = [
        "-R",
        inputs.ref_seq.output.fasta.path,
        "-V",
        "gendb://database",
        "-O",
        variants,
        "-L",
        inputs.intervals.output.bed.path,
        "-D",
        inputs.dbsnp.output.vcf.path,
        "-G",
        "StandardAnnotation",
        "-G",
        "AS_StandardAnnotation",
        "--only-output-calls-starting-in-intervals",
    ]
    return_code, _, _ = Cmd["gatk"]["GenotypeGVCFs"][genotype_gvcfs_inputs] & TEE(
        retcode=None
    )
    if return_code:
        self.error("GATK GenotypeGVCFs tool failed.")

    # Compress and index the output variants file
    (Cmd["bgzip"]["-c", variants] > variants_gz)()
    Cmd["tabix"]["-p", "vcf", variants_gz]()

    outputs.vcf = variants_gz
    outputs.tbi = variants_index
    outputs.species = species
outputs.build = build | en | 0.634613 | Run GATK GenotypeGVCFs tool. Consolidate GVCFs and run joint calling using GenotypeGVCFs tool. Input fields for GatkGenotypeGVCFs. Advanced options. Output fields for GatkGenotypeGVCFs. Run analysis. # Compress and index the output variants file | 2.251362 | 2 |
eth_enr/enr.py | vaporydev/eth-enr | 4 | 6617246 | import base64
import ipaddress
import operator
from typing import AbstractSet, Any, Iterator, Mapping, Tuple, Type, Union, ValuesView
from eth_typing import NodeID
from eth_utils import ValidationError, encode_hex
import rlp
from eth_enr.abc import (
ENRAPI,
CommonENRAPI,
IdentitySchemeAPI,
IdentitySchemeRegistryAPI,
UnsignedENRAPI,
)
from eth_enr.constants import (
ENR_REPR_PREFIX,
IDENTITY_SCHEME_ENR_KEY,
IP_V4_ADDRESS_ENR_KEY,
IP_V6_ADDRESS_ENR_KEY,
V4_SIGNATURE_KEY,
)
from eth_enr.exceptions import UnknownIdentityScheme
from eth_enr.identity_schemes import (
default_identity_scheme_registry as default_id_scheme_registry,
)
from eth_enr.identity_schemes import IdentitySchemeRegistry
from eth_enr.sedes import ENRContentSedes, ENRSedes
class ENRCommon(CommonENRAPI):
    """Behavior shared by signed and unsigned ENRs.

    Holds the sequence number and the record's key/value pairs, resolves
    the record's identity scheme from a registry at construction time,
    and exposes the key/value pairs through a read-only Mapping API.
    """

    def __init__(
        self,
        sequence_number: int,
        kv_pairs: Mapping[bytes, Any],
        identity_scheme_registry: IdentitySchemeRegistryAPI = default_id_scheme_registry,
    ) -> None:
        self._sequence_number = sequence_number
        # Copy so later mutation of the caller's mapping cannot change us.
        self._kv_pairs = dict(kv_pairs)
        self._identity_scheme = self._pick_identity_scheme(identity_scheme_registry)
        self._validate_sequence_number()
        # Let the identity scheme verify the record has the keys it needs.
        self.identity_scheme.validate_enr_structure(self)

    def _validate_sequence_number(self) -> None:
        """Reject negative sequence numbers."""
        if self.sequence_number < 0:
            raise ValidationError("Sequence number is negative")

    def _pick_identity_scheme(
        self, identity_scheme_registry: IdentitySchemeRegistryAPI
    ) -> Type[IdentitySchemeAPI]:
        """Resolve this record's identity scheme via the "id" entry."""
        try:
            identity_scheme_id = self[IDENTITY_SCHEME_ENR_KEY]
        except KeyError:
            raise ValidationError("ENR does not specify identity scheme")
        try:
            return identity_scheme_registry[identity_scheme_id]
        except KeyError:
            raise UnknownIdentityScheme(
                f"ENR uses unsupported identity scheme {identity_scheme_id}"
            )

    @property
    def identity_scheme(self) -> Type[IdentitySchemeAPI]:
        return self._identity_scheme

    @property
    def sequence_number(self) -> int:
        return self._sequence_number

    @property
    def public_key(self) -> bytes:
        try:
            return self.identity_scheme.extract_public_key(self)
        except KeyError:
            raise Exception(
                "Invariant: presence of public key in ENR has been checked in identity scheme "
                "structure check during initialization"
            )

    @property
    def node_id(self) -> NodeID:
        try:
            return self.identity_scheme.extract_node_id(self)
        except KeyError:
            raise Exception(
                "Invariant: presence of public key in ENR has been checked in identity scheme "
                "structure check during initialization"
            )

    def get_signing_message(self) -> bytes:
        """Return the RLP encoding of the record content."""
        return rlp.encode(self, ENRContentSedes)  # type: ignore

    # Read-only Mapping interface, delegating to the internal dict.

    def __getitem__(self, key: bytes) -> Any:
        return self._kv_pairs[key]

    def __iter__(self) -> Iterator[bytes]:
        return iter(self._kv_pairs)

    def __len__(self) -> int:
        return len(self._kv_pairs)

    def __contains__(self, key: Any) -> bool:
        return key in self._kv_pairs

    def keys(self) -> AbstractSet[bytes]:
        return self._kv_pairs.keys()

    def values(self) -> ValuesView[Any]:
        return self._kv_pairs.values()

    def items(self) -> AbstractSet[Tuple[bytes, Any]]:
        return self._kv_pairs.items()

    def get(self, key: bytes, default: Any = None) -> Any:
        return self._kv_pairs.get(key, default)
class UnsignedENR(ENRCommon, UnsignedENRAPI):
    """An ENR without a signature; ``to_signed_enr`` produces the signed form."""

    def to_signed_enr(self, private_key: bytes) -> "ENR":
        """Sign this record with *private_key* and return the signed ENR."""
        signature = self.identity_scheme.create_enr_signature(self, private_key)
        # Hand the new ENR a registry that knows (only) our identity scheme.
        registry = IdentitySchemeRegistry()
        registry.register(self.identity_scheme)
        return ENR(
            self.sequence_number,
            dict(self),
            signature,
            identity_scheme_registry=registry,
        )

    def __eq__(self, other: Any) -> bool:
        if other.__class__ is not self.__class__:
            return False
        return dict(other) == dict(self)

    def __hash__(self) -> int:
        ordered_pairs = tuple(sorted(self.items(), key=operator.itemgetter(0)))
        return hash((self.sequence_number, ordered_pairs))
def _get_display_str(item: Union[int, bytes]) -> str:
if isinstance(item, bytes):
try:
return item.decode("ascii")
except UnicodeDecodeError:
return encode_hex(item)
elif isinstance(item, int):
return str(item)
else:
raise Exception("Unreachable")
def pretty_print_enr_item(key: bytes, value: Union[int, bytes]) -> str:
    """Render one ENR key/value pair as a human-readable ``name=value`` string."""
    if key == IDENTITY_SCHEME_ENR_KEY:
        return f"id={_get_display_str(value)}"
    if key == V4_SIGNATURE_KEY:
        return f"secp256k1={encode_hex(value)}"  # type: ignore
    looks_like_v4 = isinstance(value, bytes) and len(value) == 4
    looks_like_v6 = isinstance(value, bytes) and len(value) == 16
    if (key == IP_V4_ADDRESS_ENR_KEY and looks_like_v4) or (
        key == IP_V6_ADDRESS_ENR_KEY and looks_like_v6
    ):
        return f"ip={ipaddress.ip_address(value)}"
    # final fallback if none of the *fancy* display options work.
    return f"{_get_display_str(key)}={_get_display_str(value)}"
class ENR(ENRCommon, ENRSedes, ENRAPI):
    """A signed Ethereum Node Record."""

    def __init__(
        self,
        sequence_number: int,
        kv_pairs: Mapping[bytes, Any],
        signature: bytes,
        identity_scheme_registry: IdentitySchemeRegistryAPI = default_id_scheme_registry,
    ) -> None:
        # Set the signature before ENRCommon.__init__ runs, because the
        # base constructor validates the record structure.
        self._signature = signature
        super().__init__(sequence_number, kv_pairs, identity_scheme_registry)

    @classmethod
    def from_repr(
        cls,
        representation: str,
        identity_scheme_registry: IdentitySchemeRegistryAPI = default_id_scheme_registry,
    ) -> "ENR":
        """Decode an ``enr:<base64>`` textual representation.

        Raises ValidationError when the ``enr:`` prefix is missing.
        """
        if not representation.startswith("enr:"):
            raise ValidationError(f"Invalid ENR representation: {representation}")
        unpadded_b64 = representation[4:]
        # Robustness fix: "-len % 4" restores exactly 0-3 "=" characters.
        # The previous "4 - len % 4" appended four "=" when the length was
        # already a multiple of 4, which only worked because the decoder
        # tolerates excess padding.
        padded_b64 = unpadded_b64 + "=" * (-len(unpadded_b64) % 4)
        rlp_encoded = base64.urlsafe_b64decode(padded_b64)
        return rlp.decode(  # type: ignore
            rlp_encoded, cls, identity_scheme_registry=identity_scheme_registry
        )

    @property
    def signature(self) -> bytes:
        return self._signature

    def validate_signature(self) -> None:
        """Check the signature via this record's identity scheme."""
        self.identity_scheme.validate_enr_signature(self)

    def __eq__(self, other: Any) -> bool:
        return (
            other.__class__ is self.__class__
            and other.sequence_number == self.sequence_number
            and dict(other) == dict(self)
            and other.signature == self.signature
        )

    def __hash__(self) -> int:
        sorted_key_value_pairs = tuple(sorted(self.items(), key=operator.itemgetter(0)))
        return hash((self.signature, self.sequence_number, sorted_key_value_pairs))

    def __str__(self) -> str:
        kv_pairs = " ".join(
            (pretty_print_enr_item(key, value) for key, value in sorted(self.items()))
        )
        return (
            f"ENR: seq={self.sequence_number} node_id={self.node_id.hex()} "
            f"sig={self.signature.hex()} KV: {kv_pairs}"
        )

    def __repr__(self) -> str:
        # Unpadded urlsafe base64 of the RLP encoding, prefixed with
        # ENR_REPR_PREFIX (presumably "enr:", so repr round-trips through
        # from_repr -- confirm against constants).
        base64_rlp = base64.urlsafe_b64encode(rlp.encode(self))
        unpadded_base64_rlp = base64_rlp.rstrip(b"=")
        return "".join((ENR_REPR_PREFIX, unpadded_base64_rlp.decode("ASCII")))
| import base64
import ipaddress
import operator
from typing import AbstractSet, Any, Iterator, Mapping, Tuple, Type, Union, ValuesView
from eth_typing import NodeID
from eth_utils import ValidationError, encode_hex
import rlp
from eth_enr.abc import (
ENRAPI,
CommonENRAPI,
IdentitySchemeAPI,
IdentitySchemeRegistryAPI,
UnsignedENRAPI,
)
from eth_enr.constants import (
ENR_REPR_PREFIX,
IDENTITY_SCHEME_ENR_KEY,
IP_V4_ADDRESS_ENR_KEY,
IP_V6_ADDRESS_ENR_KEY,
V4_SIGNATURE_KEY,
)
from eth_enr.exceptions import UnknownIdentityScheme
from eth_enr.identity_schemes import (
default_identity_scheme_registry as default_id_scheme_registry,
)
from eth_enr.identity_schemes import IdentitySchemeRegistry
from eth_enr.sedes import ENRContentSedes, ENRSedes
class ENRCommon(CommonENRAPI):
    """Behavior shared by signed and unsigned ENRs.

    Holds the sequence number and the record's key/value pairs, resolves
    the record's identity scheme from a registry at construction time,
    and exposes the key/value pairs through a read-only Mapping API.
    """

    def __init__(
        self,
        sequence_number: int,
        kv_pairs: Mapping[bytes, Any],
        identity_scheme_registry: IdentitySchemeRegistryAPI = default_id_scheme_registry,
    ) -> None:
        self._sequence_number = sequence_number
        # Copy so later mutation of the caller's mapping cannot change us.
        self._kv_pairs = dict(kv_pairs)
        self._identity_scheme = self._pick_identity_scheme(identity_scheme_registry)
        self._validate_sequence_number()
        # Let the identity scheme verify the record has the keys it needs.
        self.identity_scheme.validate_enr_structure(self)

    def _validate_sequence_number(self) -> None:
        """Reject negative sequence numbers."""
        if self.sequence_number < 0:
            raise ValidationError("Sequence number is negative")

    def _pick_identity_scheme(
        self, identity_scheme_registry: IdentitySchemeRegistryAPI
    ) -> Type[IdentitySchemeAPI]:
        """Resolve this record's identity scheme via the "id" entry."""
        try:
            identity_scheme_id = self[IDENTITY_SCHEME_ENR_KEY]
        except KeyError:
            raise ValidationError("ENR does not specify identity scheme")
        try:
            return identity_scheme_registry[identity_scheme_id]
        except KeyError:
            raise UnknownIdentityScheme(
                f"ENR uses unsupported identity scheme {identity_scheme_id}"
            )

    @property
    def identity_scheme(self) -> Type[IdentitySchemeAPI]:
        return self._identity_scheme

    @property
    def sequence_number(self) -> int:
        return self._sequence_number

    @property
    def public_key(self) -> bytes:
        try:
            return self.identity_scheme.extract_public_key(self)
        except KeyError:
            raise Exception(
                "Invariant: presence of public key in ENR has been checked in identity scheme "
                "structure check during initialization"
            )

    @property
    def node_id(self) -> NodeID:
        try:
            return self.identity_scheme.extract_node_id(self)
        except KeyError:
            raise Exception(
                "Invariant: presence of public key in ENR has been checked in identity scheme "
                "structure check during initialization"
            )

    def get_signing_message(self) -> bytes:
        """Return the RLP encoding of the record content."""
        return rlp.encode(self, ENRContentSedes)  # type: ignore

    # Read-only Mapping interface, delegating to the internal dict.

    def __getitem__(self, key: bytes) -> Any:
        return self._kv_pairs[key]

    def __iter__(self) -> Iterator[bytes]:
        return iter(self._kv_pairs)

    def __len__(self) -> int:
        return len(self._kv_pairs)

    def __contains__(self, key: Any) -> bool:
        return key in self._kv_pairs

    def keys(self) -> AbstractSet[bytes]:
        return self._kv_pairs.keys()

    def values(self) -> ValuesView[Any]:
        return self._kv_pairs.values()

    def items(self) -> AbstractSet[Tuple[bytes, Any]]:
        return self._kv_pairs.items()

    def get(self, key: bytes, default: Any = None) -> Any:
        return self._kv_pairs.get(key, default)
class UnsignedENR(ENRCommon, UnsignedENRAPI):
    """An ENR without a signature; ``to_signed_enr`` produces the signed form."""

    def to_signed_enr(self, private_key: bytes) -> "ENR":
        """Sign this record with *private_key* and return the signed ENR."""
        enr_signature = self.identity_scheme.create_enr_signature(self, private_key)
        # Give the signed ENR a registry containing (only) our scheme.
        scheme_registry = IdentitySchemeRegistry()
        scheme_registry.register(self.identity_scheme)
        return ENR(
            self.sequence_number,
            dict(self),
            enr_signature,
            identity_scheme_registry=scheme_registry,
        )

    def __eq__(self, other: Any) -> bool:
        same_type = other.__class__ is self.__class__
        return same_type and dict(other) == dict(self)

    def __hash__(self) -> int:
        ordered_pairs = tuple(sorted(self.items(), key=operator.itemgetter(0)))
        return hash((self.sequence_number, ordered_pairs))
def _get_display_str(item: Union[int, bytes]) -> str:
if isinstance(item, bytes):
try:
return item.decode("ascii")
except UnicodeDecodeError:
return encode_hex(item)
elif isinstance(item, int):
return str(item)
else:
raise Exception("Unreachable")
def pretty_print_enr_item(key: bytes, value: Union[int, bytes]) -> str:
    """Format one ENR key/value pair for display, with special cases for known keys."""
    if key == IDENTITY_SCHEME_ENR_KEY:
        return f"id={_get_display_str(value)}"
    if key == V4_SIGNATURE_KEY:
        return f"secp256k1={encode_hex(value)}"  # type: ignore
    v4_address = isinstance(value, bytes) and len(value) == 4
    v6_address = isinstance(value, bytes) and len(value) == 16
    if (key == IP_V4_ADDRESS_ENR_KEY and v4_address) or (
        key == IP_V6_ADDRESS_ENR_KEY and v6_address
    ):
        return f"ip={ipaddress.ip_address(value)}"
    # final fallback if none of the *fancy* display options work.
    return f"{_get_display_str(key)}={_get_display_str(value)}"
class ENR(ENRCommon, ENRSedes, ENRAPI):
    """A signed Ethereum Node Record."""

    def __init__(
        self,
        sequence_number: int,
        kv_pairs: Mapping[bytes, Any],
        signature: bytes,
        identity_scheme_registry: IdentitySchemeRegistryAPI = default_id_scheme_registry,
    ) -> None:
        # Set the signature before ENRCommon.__init__ runs, because the
        # base constructor validates the record structure.
        self._signature = signature
        super().__init__(sequence_number, kv_pairs, identity_scheme_registry)

    @classmethod
    def from_repr(
        cls,
        representation: str,
        identity_scheme_registry: IdentitySchemeRegistryAPI = default_id_scheme_registry,
    ) -> "ENR":
        """Decode an ``enr:<base64>`` textual representation.

        Raises ValidationError when the ``enr:`` prefix is missing.
        """
        if not representation.startswith("enr:"):
            raise ValidationError(f"Invalid ENR representation: {representation}")
        unpadded_b64 = representation[4:]
        # Robustness fix: "-len % 4" restores exactly 0-3 "=" characters.
        # The previous "4 - len % 4" appended four "=" when the length was
        # already a multiple of 4, which only worked because the decoder
        # tolerates excess padding.
        padded_b64 = unpadded_b64 + "=" * (-len(unpadded_b64) % 4)
        rlp_encoded = base64.urlsafe_b64decode(padded_b64)
        return rlp.decode(  # type: ignore
            rlp_encoded, cls, identity_scheme_registry=identity_scheme_registry
        )

    @property
    def signature(self) -> bytes:
        return self._signature

    def validate_signature(self) -> None:
        """Check the signature via this record's identity scheme."""
        self.identity_scheme.validate_enr_signature(self)

    def __eq__(self, other: Any) -> bool:
        return (
            other.__class__ is self.__class__
            and other.sequence_number == self.sequence_number
            and dict(other) == dict(self)
            and other.signature == self.signature
        )

    def __hash__(self) -> int:
        sorted_key_value_pairs = tuple(sorted(self.items(), key=operator.itemgetter(0)))
        return hash((self.signature, self.sequence_number, sorted_key_value_pairs))

    def __str__(self) -> str:
        kv_pairs = " ".join(
            (pretty_print_enr_item(key, value) for key, value in sorted(self.items()))
        )
        return (
            f"ENR: seq={self.sequence_number} node_id={self.node_id.hex()} "
            f"sig={self.signature.hex()} KV: {kv_pairs}"
        )

    def __repr__(self) -> str:
        # Unpadded urlsafe base64 of the RLP encoding, prefixed with
        # ENR_REPR_PREFIX (presumably "enr:", so repr round-trips through
        # from_repr -- confirm against constants).
        base64_rlp = base64.urlsafe_b64encode(rlp.encode(self))
        unpadded_base64_rlp = base64_rlp.rstrip(b"=")
        return "".join((ENR_REPR_PREFIX, unpadded_base64_rlp.decode("ASCII")))
| en | 0.307431 | # type: ignore # # Mapping interface # # type: ignore # final fallback if none of the *fancy* display options work. # type: ignore | 2.014282 | 2 |
22nd June Assignments/case study/questions.py | JangirSumit/data_science | 15 | 6617247 | from matplotlib.pylab import rcParams
import requests
import pandas as pd
import numpy as np
from pandas import DataFrame
from io import StringIO
import time
import json
from datetime import date
from statsmodels.tsa.stattools import adfuller, acf, pacf
from statsmodels.tsa.arima_model import ARIMA
from statsmodels.tsa.seasonal import seasonal_decompose
from sklearn.metrics import mean_squared_error
import matplotlib.pylab as plt
get_ipython().run_line_magic('matplotlib', 'inline')
rcParams['figure.figsize'] = 15, 6
data = pd.read_csv("SeaPlaneTravel.csv")
data.head()
data['Month'] = pd.to_datetime(data['Month'])
indexed_df = data.set_index('Month')
ts = indexed_df['#Passengers']
ts.head(5)
plt.plot(ts)
plt.show()
def test_stationarity(timeseries):
    """Plot rolling mean/std of *timeseries* and print a Dickey-Fuller test.

    Side effects only (a matplotlib figure and console output); returns None.
    """
    # Rolling statistics; window=12 assumes monthly data (one-year
    # window) -- TODO confirm against the input frequency.
    rolling = timeseries.rolling(window=12, center=False)
    rolmean = rolling.mean()
    rolstd = rolling.std()

    # Overlay the raw series with its rolling mean and std.
    plt.plot(timeseries, color='blue', label='Original')
    plt.plot(rolmean, color='red', label='Rolling Mean')
    plt.plot(rolstd, color='black', label='Rolling Std')
    plt.legend(loc='best')
    plt.title('Rolling Mean & Standard Deviation')
    plt.show(block=False)

    # Augmented Dickey-Fuller test.
    print('Results of Dickey-Fuller Test:')
    dftest = adfuller(timeseries, autolag='AIC')
    labels = ['Test Statistic', 'p-value', '#Lags Used',
              'Number of Observations Used']
    dfoutput = pd.Series(dftest[0:4], index=labels)
    for key, value in dftest[4].items():
        dfoutput['Critical Value (%s)' % key] = value
    print(dfoutput)
test_stationarity(ts)
ts_log = np.log(ts)
ts_log_diff = ts_log - ts_log.shift()
plt.plot(ts_log_diff)
ts_log_diff.dropna(inplace=True)
test_stationarity(ts_log_diff)
# ACF/PACF of the differenced log series, with 95% confidence bounds.
lag_acf = acf(ts_log_diff, nlags=10)
lag_pacf = pacf(ts_log_diff, nlags=10, method='ols')

# The 95% band for a white-noise autocorrelation is +/-1.96/sqrt(N);
# the original 7.96 looks like a typo of the standard 1.96 z-value.
conf_bound = 1.96 / np.sqrt(len(ts_log_diff))

plt.subplot(121)
plt.plot(lag_acf)
plt.axhline(y=0, linestyle='--', color='gray')
plt.axhline(y=-conf_bound, linestyle='--', color='gray')
plt.axhline(y=conf_bound, linestyle='--', color='gray')
plt.title('Autocorrelation Function')

plt.subplot(122)
plt.plot(lag_pacf)
plt.axhline(y=0, linestyle='--', color='gray')
plt.axhline(y=-conf_bound, linestyle='--', color='gray')
plt.axhline(y=conf_bound, linestyle='--', color='gray')
plt.title('Partial Autocorrelation Function')
plt.tight_layout()
model = ARIMA(ts_log, order=(2, 1, 1))
results_ARIMA = model.fit(disp=-1)
plt.plot(ts_log_diff)
plt.plot(results_ARIMA.fittedvalues, color='red')
plt.title('RSS: %.4f' % sum((results_ARIMA.fittedvalues-ts_log_diff)**2))
print(results_ARIMA.summary())
# plot residual errors
residuals = DataFrame(results_ARIMA.resid)
residuals.plot(kind='kde')
print(residuals.describe())
# Reconstruct the fitted series on the original scale: cumulative-sum
# the fitted first differences, add back the initial log value, then
# invert the log transform.
predictions_ARIMA_diff = pd.Series(results_ARIMA.fittedvalues, copy=True)
print(predictions_ARIMA_diff.head())
predictions_ARIMA_diff_cumsum = predictions_ARIMA_diff.cumsum()
# Bug fix: Series.ix was removed from pandas; .iloc[0] is the
# positional equivalent.
predictions_ARIMA_log = pd.Series(ts_log.iloc[0], index=ts_log.index)
predictions_ARIMA_log = predictions_ARIMA_log.add(
    predictions_ARIMA_diff_cumsum, fill_value=0)
predictions_ARIMA = np.exp(predictions_ARIMA_log)
plt.plot(ts)
plt.plot(predictions_ARIMA)
plt.title('RMSE: %.4f' % np.sqrt(sum((predictions_ARIMA-ts)**2)/len(ts)))
# Hold out the last 15 points for a rolling one-step-ahead evaluation.
# (The original defined size/train/test/history/predictions twice; the
# redundant duplicate block was removed.)
size = int(len(ts_log) - 15)
train, test = ts_log[0:size], ts_log[size:len(ts_log)]
history = [x for x in train]
predictions = list()
print('Printing Predicted vs Expected Values...')
print('\n')
# Walk-forward validation: refit ARIMA(2,1,1) on everything seen so
# far, forecast one step, then append the true value to the history.
for t in range(len(test)):
    model = ARIMA(history, order=(2, 1, 1))
    model_fit = model.fit(disp=0)
    output = model_fit.forecast()
    yhat = output[0]
    predictions.append(float(yhat))
    obs = test[t]
    history.append(obs)
    # Forecasts are in log space; exponentiate for display.
    print('predicted=%f, expected=%f' % (np.exp(yhat), np.exp(obs)))
error = mean_squared_error(test, predictions)
print('\n')
print('Printing Mean Squared Error of Predictions...')
print('Test MSE: %.6f' % error)
# Plot the last observations against the rolling one-step forecasts.
predictions_series = pd.Series(predictions, index=test.index)
fig, ax = plt.subplots()
# Label fix: the original title/labels ('Spot Exchange Rate, Euro into
# USD') were copied from an FX tutorial; this dataset is monthly
# seaplane passenger counts (SeaPlaneTravel.csv, '#Passengers').
ax.set(title='SeaPlane Passengers: rolling one-step forecast',
       xlabel='Date', ylabel='#Passengers')
ax.plot(ts[-60:], 'o', label='observed')
ax.plot(np.exp(predictions_series), 'g',
        label='rolling one-step out-of-sample forecast')
legend = ax.legend(loc='upper left')
legend.get_frame().set_facecolor('w')
| from matplotlib.pylab import rcParams
import requests
import pandas as pd
import numpy as np
from pandas import DataFrame
from io import StringIO
import time
import json
from datetime import date
from statsmodels.tsa.stattools import adfuller, acf, pacf
from statsmodels.tsa.arima_model import ARIMA
from statsmodels.tsa.seasonal import seasonal_decompose
from sklearn.metrics import mean_squared_error
import matplotlib.pylab as plt
get_ipython().run_line_magic('matplotlib', 'inline')
rcParams['figure.figsize'] = 15, 6
data = pd.read_csv("SeaPlaneTravel.csv")
data.head()
data['Month'] = pd.to_datetime(data['Month'])
indexed_df = data.set_index('Month')
ts = indexed_df['#Passengers']
ts.head(5)
plt.plot(ts)
plt.show()
def test_stationarity(timeseries):
    """Plot rolling mean/std of *timeseries* and print a Dickey-Fuller test.

    Side effects only (a matplotlib figure and console output); returns None.
    """
    # Rolling statistics; window=12 assumes monthly data (one-year
    # window) -- TODO confirm against the input frequency.
    rolling = timeseries.rolling(window=12, center=False)
    rolmean = rolling.mean()
    rolstd = rolling.std()

    # Overlay the raw series with its rolling mean and std.
    plt.plot(timeseries, color='blue', label='Original')
    plt.plot(rolmean, color='red', label='Rolling Mean')
    plt.plot(rolstd, color='black', label='Rolling Std')
    plt.legend(loc='best')
    plt.title('Rolling Mean & Standard Deviation')
    plt.show(block=False)

    # Augmented Dickey-Fuller test.
    print('Results of Dickey-Fuller Test:')
    dftest = adfuller(timeseries, autolag='AIC')
    labels = ['Test Statistic', 'p-value', '#Lags Used',
              'Number of Observations Used']
    dfoutput = pd.Series(dftest[0:4], index=labels)
    for key, value in dftest[4].items():
        dfoutput['Critical Value (%s)' % key] = value
    print(dfoutput)
test_stationarity(ts)
ts_log = np.log(ts)
ts_log_diff = ts_log - ts_log.shift()
plt.plot(ts_log_diff)
ts_log_diff.dropna(inplace=True)
test_stationarity(ts_log_diff)
# ACF/PACF of the differenced log series, with 95% confidence bounds.
lag_acf = acf(ts_log_diff, nlags=10)
lag_pacf = pacf(ts_log_diff, nlags=10, method='ols')

# The 95% band for a white-noise autocorrelation is +/-1.96/sqrt(N);
# the original 7.96 looks like a typo of the standard 1.96 z-value.
conf_bound = 1.96 / np.sqrt(len(ts_log_diff))

plt.subplot(121)
plt.plot(lag_acf)
plt.axhline(y=0, linestyle='--', color='gray')
plt.axhline(y=-conf_bound, linestyle='--', color='gray')
plt.axhline(y=conf_bound, linestyle='--', color='gray')
plt.title('Autocorrelation Function')

plt.subplot(122)
plt.plot(lag_pacf)
plt.axhline(y=0, linestyle='--', color='gray')
plt.axhline(y=-conf_bound, linestyle='--', color='gray')
plt.axhline(y=conf_bound, linestyle='--', color='gray')
plt.title('Partial Autocorrelation Function')
plt.tight_layout()
model = ARIMA(ts_log, order=(2, 1, 1))
results_ARIMA = model.fit(disp=-1)
plt.plot(ts_log_diff)
plt.plot(results_ARIMA.fittedvalues, color='red')
plt.title('RSS: %.4f' % sum((results_ARIMA.fittedvalues-ts_log_diff)**2))
print(results_ARIMA.summary())
# plot residual errors
residuals = DataFrame(results_ARIMA.resid)
residuals.plot(kind='kde')
print(residuals.describe())
# Reconstruct the fitted series on the original scale: cumulative-sum
# the fitted first differences, add back the initial log value, then
# invert the log transform.
predictions_ARIMA_diff = pd.Series(results_ARIMA.fittedvalues, copy=True)
print(predictions_ARIMA_diff.head())
predictions_ARIMA_diff_cumsum = predictions_ARIMA_diff.cumsum()
# Bug fix: Series.ix was removed from pandas; .iloc[0] is the
# positional equivalent.
predictions_ARIMA_log = pd.Series(ts_log.iloc[0], index=ts_log.index)
predictions_ARIMA_log = predictions_ARIMA_log.add(
    predictions_ARIMA_diff_cumsum, fill_value=0)
predictions_ARIMA = np.exp(predictions_ARIMA_log)
plt.plot(ts)
plt.plot(predictions_ARIMA)
plt.title('RMSE: %.4f' % np.sqrt(sum((predictions_ARIMA-ts)**2)/len(ts)))
# Hold out the last 15 points for a rolling one-step-ahead evaluation.
# (The original defined size/train/test/history/predictions twice; the
# redundant duplicate block was removed.)
size = int(len(ts_log) - 15)
train, test = ts_log[0:size], ts_log[size:len(ts_log)]
history = [x for x in train]
predictions = list()
print('Printing Predicted vs Expected Values...')
print('\n')
# Walk-forward validation: refit ARIMA(2,1,1) on everything seen so
# far, forecast one step, then append the true value to the history.
for t in range(len(test)):
    model = ARIMA(history, order=(2, 1, 1))
    model_fit = model.fit(disp=0)
    output = model_fit.forecast()
    yhat = output[0]
    predictions.append(float(yhat))
    obs = test[t]
    history.append(obs)
    # Forecasts are in log space; exponentiate for display.
    print('predicted=%f, expected=%f' % (np.exp(yhat), np.exp(obs)))
error = mean_squared_error(test, predictions)
print('\n')
print('Printing Mean Squared Error of Predictions...')
print('Test MSE: %.6f' % error)
# Plot the last observations against the rolling one-step forecasts.
predictions_series = pd.Series(predictions, index=test.index)
fig, ax = plt.subplots()
# Label fix: the original title/labels ('Spot Exchange Rate, Euro into
# USD') were copied from an FX tutorial; this dataset is monthly
# seaplane passenger counts (SeaPlaneTravel.csv, '#Passengers').
ax.set(title='SeaPlane Passengers: rolling one-step forecast',
       xlabel='Date', ylabel='#Passengers')
ax.plot(ts[-60:], 'o', label='observed')
ax.plot(np.exp(predictions_series), 'g',
        label='rolling one-step out-of-sample forecast')
legend = ax.legend(loc='upper left')
legend.get_frame().set_facecolor('w')
| en | 0.700173 | # Determing rolling statistics # Plot rolling statistics: # Perform Dickey-Fuller test: # plot residual errors | 2.633893 | 3 |
scratch/test.py | CUBigDataClass/eth_track | 0 | 6617248 | import requests
import time
apikey = "<KEY>"
startblock = 13481774
endblock = startblock
numresults = 10

# Etherscan API request parameters.
call = {
    "module": "account",
    "action": "txlistinternal",
    "startblock": f"{startblock}",
    "endblock": f"{endblock}",
    "sort": "asc",
    "apikey": f"{apikey}",
}
print(call)

# Bug fix: Etherscan takes these as URL query parameters, so use
# params= -- the original data= sent them in the body of a GET request,
# which the API ignores. Try/except implements the old TODO below.
try:
    a = requests.get("https://api.etherscan.io/api", params=call, timeout=30)
    a.raise_for_status()
    payload = a.json()
except (requests.RequestException, ValueError) as exc:
    raise SystemExit(f"Etherscan request failed: {exc}")
print(payload)
print(len(payload["result"]))

test_url = "http://localhost:5000/api/ethelementfiltered?startblock=14623760&endblock=14623960&numresults=10"
| import requests
import time
apikey = "<KEY>"
startblock = 13481774
endblock = startblock
numresults = 10

# Etherscan API request parameters.
call = {
    "module": "account",
    "action": "txlistinternal",
    "startblock": f"{startblock}",
    "endblock": f"{endblock}",
    "sort": "asc",
    "apikey": f"{apikey}",
}
print(call)

# Bug fix: Etherscan takes these as URL query parameters, so use
# params= -- the original data= sent them in the body of a GET request,
# which the API ignores. Try/except implements the old TODO below.
try:
    a = requests.get("https://api.etherscan.io/api", params=call, timeout=30)
    a.raise_for_status()
    payload = a.json()
except (requests.RequestException, ValueError) as exc:
    raise SystemExit(f"Etherscan request failed: {exc}")
print(payload)
print(len(payload["result"]))

test_url = "http://localhost:5000/api/ethelementfiltered?startblock=14623760&endblock=14623960&numresults=10"
| da | 0.078627 | #insert try logic | 2.461582 | 2 |
FizzBuzz/FizzBuzz.py | bradchesney79/illacceptanything | 1 | 6617249 | def fizz_buzz_wonder(input_func):
user_input = input_func()
if user_input.lower() == 'fizz':
return 'Fizz!'
elif user_input.lower() == 'buzz':
return 'Buzz!'
else:
return 'FizzBuzz!'
def prompt_for_input():
    """Prompt on stdin for "Fizz" or "Buzz"; return the reply without surrounding whitespace."""
    reply = input('Enter "Fizz" or "Buzz": ')
    return reply.strip()
if __name__ == '__main__':
    # Run one interactive round, then print the punchline.
    result = fizz_buzz_wonder(prompt_for_input)
    print(result)
    print("Did I get the job?")
user_input = input_func()
if user_input.lower() == 'fizz':
return 'Fizz!'
elif user_input.lower() == 'buzz':
return 'Buzz!'
else:
return 'FizzBuzz!'
def prompt_for_input():
    """Prompt on stdin for "Fizz" or "Buzz" and return the stripped reply."""
    raw_reply = input('Enter "Fizz" or "Buzz": ')
    return raw_reply.strip()
if __name__ == '__main__':
    # One interactive round, followed by the obligatory punchline.
    answer = fizz_buzz_wonder(prompt_for_input)
    print(answer)
    print("Did I get the job?")
LexAn.py | Logan-Lin/SLRAnalyzer | 6 | 6617250 | <reponame>Logan-Lin/SLRAnalyzer
import re
def split_input_string(input_string):
    """
    Split input string into identifiers, symbols and numbers.
    :param input_string: str, input string representing an assignment statement,
    like 'A=B+C'.
    :return: list with each item containing split symbols, identifiers and numbers.
    All symbols will remain the same sequence with input string.
    :raises ValueError: if the string contains characters that cannot occur in an
    assignment statement, or a token starting with a digit is not an integer.
    """
    # Bug fix: in the original class the unescaped "+-=" formed a character
    # range from '+' (0x2B) to '=' (0x3D), silently admitting ',', ';', ':'
    # and '<'. Escaping '-' keeps exactly the intended characters.
    if not bool(re.search(r"^[a-zA-Z0-9.*/+\-=()_' ]+$", input_string)):
        raise ValueError("Input series contain symbols not in assignment statement.")
    # Bug fix: the original token class [\w('|_)]+ also matched quotes,
    # parentheses and '|', gluing ')' onto identifiers (e.g. "C)").
    # [\w']+ matches identifier/number runs, allowing primes such as A'.
    return list(map(process_single, filter(len, re.findall(r"[\-+*/=()]|[\w']+", input_string))))


def process_single(word):
    """
    Process a single word, whether it's identifier, number or symbols.
    :param word: str, the word to process
    :return: str, the input
    :raises ValueError: if the word starts with a digit but is not an integer.
    """
    # Only tokens that begin with a digit must parse as integers.
    if not word[0].isnumeric():
        return word
    try:
        int(word)
    except ValueError:
        raise ValueError("Expression {} not valid".format(word))
    return word
| import re
def split_input_string(input_string):
    """
    Split input string into identifiers, symbols and numbers.
    :param input_string: str, input string representing an assignment statement,
    like 'A=B+C'.
    :return: list with each item containing split symbols, identifiers and numbers.
    All symbols will remain the same sequence with input string.
    :raises ValueError: if the string contains characters that cannot occur in an
    assignment statement, or a token starting with a digit is not an integer.
    """
    # Bug fix: in the original class the unescaped "+-=" formed a character
    # range from '+' (0x2B) to '=' (0x3D), silently admitting ',', ';', ':'
    # and '<'. Escaping '-' keeps exactly the intended characters.
    if not bool(re.search(r"^[a-zA-Z0-9.*/+\-=()_' ]+$", input_string)):
        raise ValueError("Input series contain symbols not in assignment statement.")
    # Bug fix: the original token class [\w('|_)]+ also matched quotes,
    # parentheses and '|', gluing ')' onto identifiers (e.g. "C)").
    # [\w']+ matches identifier/number runs, allowing primes such as A'.
    tokens = re.findall(r"[\-+*/=()]|[\w']+", input_string)
    return list(map(process_single, filter(len, tokens)))


def process_single(word):
    """
    Process a single word, whether it's identifier, number or symbols.
    :param word: str, the word to process
    :return: str, the input
    :raises ValueError: if the word starts with a digit but is not an integer.
    """
    # Only tokens that begin with a digit must parse as integers.
    if word[0].isnumeric():
        try:
            int(word)
        except ValueError:
            raise ValueError("Expression {} not valid".format(word))
    return word