code stringlengths 281 23.7M |
|---|
def extractAnargyatestingHomeBlog(item):
    """Build a release message for an 'Anargyatesting Home Blog' feed item.

    Returns None for previews or items without chapter/volume info,
    a release message when a tag or title rule matches, else False.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag-based rules: (tag to match, series name, translation type).
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    # Untagged posts fall back to case-insensitive title-substring rules.
    if item['tags'] == ['Uncategorized']:
        titlemap = [
            ('The Dungeons Demon Lord is The Weakest Ch.', 'The Dungeons Demon Lord is The Weakest', 'translated'),
            ('The Dungeon Demon Lord is The Weakest Ch.', 'The Dungeons Demon Lord is The Weakest', 'translated'),
            ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        for titlecomponent, name, tl_type in titlemap:
            if titlecomponent.lower() in item['title'].lower():
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                                   postfix=postfix, tl_type=tl_type)
    return False
class patch():
    """Configuration helper combining an INI file, CLI overrides and a
    message broker (Redis, ZeroMQ, or an in-process dummy).

    Lookup precedence in the get* methods is: command-line argument,
    then INI option, then (for values that are not literals) a key
    stored in the broker, then the supplied default.
    """
    def __init__(self, name=None, path=None, preservecase=False):
        # Derive the default INI file name from `name` (and `path`, if given).
        if ((not (name == None)) and (not (path == None))):
            inifile = os.path.join(path, (name + '.ini'))
        elif (not (name == None)):
            inifile = (name + '.ini')
        else:
            inifile = None
        # Generic CLI overrides for the [general] section.
        parser = argparse.ArgumentParser()
        parser.add_argument('-i', '--inifile', default=inifile, help='name of the configuration file')
        parser.add_argument('--general-broker', default=None, help='general broker')
        parser.add_argument('--general-debug', default=None, help='general debug')
        parser.add_argument('--general-delay', default=None, help='general delay')
        parser.add_argument('--general-logging', default=None, help='general logging')
        args = parser.parse_args()
        config = configparser.ConfigParser(inline_comment_prefixes=('#', ';'))
        if preservecase:
            # Keep option names case-sensitive (ConfigParser lowercases by default).
            config.optionxform = str
        if (('inifile' in args) and (not (args.inifile == None))):
            config.read(args.inifile)
        # Keep only CLI options that were actually set (truthy values).
        args = vars(args)
        args = {k: v for (k, v) in args.items() if v}
        # Broker selection: CLI flag > INI option > INI section presence > dummy.
        if ('general_broker' in args):
            broker = args['general_broker']
        elif config.has_option('general', 'broker'):
            broker = config.get('general', 'broker')
        elif config.has_section('redis'):
            broker = 'redis'
        elif config.has_section('zeromq'):
            broker = 'zeromq'
        else:
            broker = 'dummy'
        if (broker == 'redis'):
            if config.has_option('redis', 'hostname'):
                hostname = config.get('redis', 'hostname')
            else:
                hostname = 'localhost'
            if config.has_option('redis', 'port'):
                port = config.getint('redis', 'port')
            else:
                port = 6379
            try:
                r = redis.StrictRedis(host=hostname, port=port, db=0, charset='utf-8', decode_responses=True)
                # client_list() forces a round-trip so a bad host fails fast here.
                response = r.client_list()
            except redis.ConnectionError:
                raise RuntimeError('cannot connect to Redis server')
        elif (broker == 'zeromq'):
            if config.has_option('zeromq', 'hostname'):
                hostname = config.get('zeromq', 'hostname')
            else:
                hostname = 'localhost'
            if config.has_option('zeromq', 'port'):
                port = config.getint('zeromq', 'port')
            else:
                port = 5555
            if config.has_option('zeromq', 'timeout'):
                timeout = config.getint('zeromq', 'timeout')
            else:
                timeout = 5000
            r = ZmqRedis.client(host=hostname, port=port, timeout=timeout)
            # NOTE(review): this guards on `name` (the config name), not on
            # `broker`; inside the zeromq branch `name == 'redis'` looks
            # suspicious — confirm the intended condition.
            if ((not (name == 'redis')) and (not r.connect())):
                raise RuntimeError('cannot connect to ZeroMQ server')
        elif (broker == 'fake'):
            # 'fake' and 'dummy' are synonyms: both use the in-process stub.
            r = DummyRedis.client()
        elif (broker == 'dummy'):
            r = DummyRedis.client()
        else:
            raise RuntimeError('unknown broker')
        self.args = args      # effective CLI overrides (only the set ones)
        self.config = config  # parsed INI configuration
        self.redis = r        # broker client (Redis-like interface)
    def pubsub(self):
        """Return a pub/sub handle from the underlying broker client."""
        return self.redis.pubsub()
    def publish(self, channel, value):
        """Publish `value` on `channel` via the broker."""
        return self.redis.publish(channel, value)
    def get(self, section, item, default=None):
        """Return a raw string setting: CLI override, else INI, else default."""
        if (((section + '_') + item) in self.args):
            return self.args[((section + '_') + item)]
        elif self.config.has_option(section, item):
            return self.config.get(section, item)
        else:
            return default
    def getfloat(self, section, item, multiple=False, default=None):
        """Return a float setting (or list of floats when multiple=True).

        Non-numeric tokens are treated as broker keys and resolved via
        redis.get(); unresolvable tokens keep their default slot value.
        """
        if (((section + '_') + item) in self.args):
            return float(self.args[((section + '_') + item)])
        elif (self.config.has_option(section, item) and (len(self.config.get(section, item)) > 0)):
            items = self.config.get(section, item)
            if multiple:
                # Sniff the list separator; first match wins.
                # NOTE(review): '-' as a separator clashes with negative
                # numbers — confirm values here are never negative.
                if (items.find(',') > (- 1)):
                    separator = ','
                elif (items.find('-') > (- 1)):
                    separator = '-'
                elif (items.find('\t') > (- 1)):
                    separator = '\t'
                else:
                    separator = ' '
                # Collapse repeated separators before splitting.
                items = squeeze(' ', items)
                items = squeeze(separator, items)
                items = items.split(separator)
            else:
                items = [items]
            # Pre-fill the result with defaults, one slot per token.
            if (multiple and isinstance(default, list)):
                val = [float(x) for x in default]
            elif (default != None):
                val = ([float(default)] * len(items))
            else:
                val = ([default] * len(items))
            # Note: the loop variable shadows the `item` parameter.
            for (i, item) in enumerate(items):
                try:
                    val[i] = float(item)
                except ValueError:
                    # Not a literal number: try it as a broker key.
                    try:
                        val[i] = float(self.redis.get(item))
                    except TypeError:
                        pass
        elif (multiple and isinstance(default, list)):
            val = [float(x) for x in default]
        elif (multiple and (default == None)):
            val = []
        elif (multiple and (default != None)):
            val = [float(default)]
        elif ((not multiple) and (default == None)):
            val = default
        elif ((not multiple) and (default != None)):
            val = float(default)
        # Normalize shape: list when multiple, scalar otherwise.
        if (multiple and (not isinstance(val, list))):
            return [val]
        elif ((not multiple) and isinstance(val, list)):
            return val[0]
        else:
            return val
    def getint(self, section, item, multiple=False, default=None):
        """Return an int setting (or list of ints when multiple=True).

        Same resolution rules as getfloat(); broker values are rounded
        to the nearest integer.
        """
        if (((section + '_') + item) in self.args):
            return int(self.args[((section + '_') + item)])
        elif (self.config.has_option(section, item) and (len(self.config.get(section, item)) > 0)):
            items = self.config.get(section, item)
            if multiple:
                # Separator sniffing, same caveats as in getfloat().
                if (items.find(',') > (- 1)):
                    separator = ','
                elif (items.find('-') > (- 1)):
                    separator = '-'
                elif (items.find('\t') > (- 1)):
                    separator = '\t'
                else:
                    separator = ' '
                items = squeeze(' ', items)
                items = squeeze(separator, items)
                items = items.split(separator)
            else:
                items = [items]
            if (multiple and isinstance(default, list)):
                val = [int(x) for x in default]
            elif (default != None):
                val = ([int(default)] * len(items))
            else:
                val = ([default] * len(items))
            for (i, item) in enumerate(items):
                try:
                    val[i] = int(item)
                except ValueError:
                    # Not a literal int: resolve via the broker and round.
                    try:
                        val[i] = int(round(float(self.redis.get(item))))
                    except TypeError:
                        pass
        elif (multiple and isinstance(default, list)):
            val = [int(x) for x in default]
        elif (multiple and (default == None)):
            val = []
        elif (multiple and (default != None)):
            val = [int(default)]
        elif ((not multiple) and (default == None)):
            val = default
        elif ((not multiple) and (default != None)):
            val = int(default)
        if (multiple and (not isinstance(val, list))):
            return [val]
        elif ((not multiple) and isinstance(val, list)):
            return val[0]
        else:
            return val
    def getstring(self, section, item, multiple=False, default=None):
        """Return a string setting (or list of strings when multiple=True).

        If the INI value names an existing broker key, the broker value
        is substituted.
        """
        if (((section + '_') + item) in self.args):
            return self.args[((section + '_') + item)]
        else:
            try:
                val = self.config.get(section, item)
                if self.redis.exists(val):
                    val = self.redis.get(val)
            # NOTE(review): bare except also hides programming errors here;
            # missing option/section is the intended case — confirm.
            except:
                val = default
            if multiple:
                if ((val == None) or (len(val) == 0)):
                    val = []
                else:
                    # Separator sniffing, same caveats as in getfloat().
                    if (val.find(',') > (- 1)):
                        separator = ','
                    elif (val.find('-') > (- 1)):
                        separator = '-'
                    elif (val.find('\t') > (- 1)):
                        separator = '\t'
                    else:
                        separator = ' '
                    val = squeeze(separator, val)
                    val = val.split(separator)
            if (multiple and (not isinstance(val, list))):
                return [val]
            elif ((not multiple) and isinstance(val, list)):
                return val[0]
            else:
                return val
    def hasitem(self, section, item):
        """Return True when the setting exists on the CLI or in the INI file."""
        if (((section + '_') + item) in self.args):
            return True
        else:
            return self.config.has_option(section, item)
    def setvalue(self, item, val, duration=0):
        """Store and publish `val` under `item`; if duration > 0, reset the
        value to 0.0 after that many seconds via a timer."""
        self.redis.set(item, val)
        self.redis.publish(item, val)
        if (duration > 0):
            threading.Timer(duration, self.setvalue, args=[item, 0.0]).start()
# NOTE(review): the line below appears to be a task decorator whose
# '@<decorator>' prefix was lost during extraction; as written it is a
# bare parenthesized expression and invalid syntax — restore before use.
(name='api.vm.status.tasks.vm_status_event_cb', base=InternalTask, ignore_result=True)
def vm_status_event_cb(result, task_id):
    """Task callback for a VM status-change event.

    Looks up the cached status for the VM named in ``result`` and hands
    off to _vm_status_check() with the new state and its change time.
    """
    vm_uuid = result['zonename']
    state_cache = cache.get(Vm.status_key(vm_uuid))
    state = Vm.STATUS_DICT[result['state']]
    when = result['when']
    # 'when' is an epoch timestamp of unknown precision; int(log10(when))
    # is (digit count - 1), so dividing by 10**(that - 9) scales the value
    # down to a 10-digit (seconds) epoch before converting to UTC datetime.
    change_time = datetime.utcfromtimestamp((float(when) / pow(10, (int(log10(when)) - 9)))).replace(tzinfo=utc)
    _vm_status_check(task_id, result['node_uuid'], vm_uuid, state, state_cache=state_cache, change_time=change_time)
class queue_desc_stats_request(stats_request):
    """OpenFlow 1.4 (version 5) queue_desc stats request message.

    Wire layout (see pack/unpack): version, type, length, xid,
    stats_type, flags, 4 pad bytes, port_no, queue_id.
    """
    version = 5
    type = 18
    stats_type = 15

    def __init__(self, xid=None, flags=None, port_no=None, queue_id=None):
        # xid defaults to None (assigned later); numeric fields default to 0.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (port_no != None):
            self.port_no = port_no
        else:
            self.port_no = 0
        if (queue_id != None):
            self.queue_id = queue_id
        else:
            self.queue_id = 0
        return

    def pack(self):
        """Serialize the message; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad (Python 2-era code: str, not bytes)
        packed.append(util.pack_port_no(self.port_no))
        packed.append(struct.pack('!L', self.queue_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize a message from `reader` and return a new instance.

        Fix: this factory constructs the object itself and never uses an
        instance, so it must be a @staticmethod; without the decorator a
        call via the class would bind `reader` incorrectly.
        """
        obj = queue_desc_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict reading to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 15)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        obj.port_no = util.unpack_port_no(reader)
        obj.queue_id = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        """Field-by-field equality against another instance of this class."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.port_no != other.port_no):
            return False
        if (self.queue_id != other.queue_id):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable dump of all fields to pretty-printer `q`."""
        q.text('queue_desc_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('queue_id = ')
                q.text(('%#x' % self.queue_id))
                q.breakable()
        q.text('}')
class All2Allv_Wait(Function):
    """Autograd function that completes a pending all-to-allv in forward,
    and launches the gradient all-to-allv asynchronously in backward.

    Fix: ``torch.autograd.Function`` requires ``forward``/``backward`` to
    be static methods; the @staticmethod decorators were missing.
    """

    @staticmethod
    def forward(ctx, myreq, *output):
        """Wait for the outstanding collective, then split its result
        into one (lN, -1)-shaped tensor per embedding split."""
        collectiveArgs = myreq.bench.collectiveArgs
        backendFuncs = myreq.bench.backendFuncs
        # Block until the collective issued earlier has finished.
        myreq.req.wait()
        backendFuncs.complete_accel_ops(collectiveArgs)
        collectiveArgs.timers['fwd_a2a_end'] = time.monotonic()
        a2ai = myreq.a2ai
        ctx.a2ai = a2ai
        # Release the request/tensor so the shared myreq can be reused.
        myreq.req = None
        myreq.tensor = None
        outputs = tuple((out.view([a2ai.lN, (- 1)]) for out in output[0].split(a2ai.emb_split_lengths)))
        ctx.myreq = myreq
        return outputs

    @staticmethod
    def backward(ctx, *grad_outputs):
        """Start the asynchronous all-to-allv of the flattened gradients;
        the matching request object is left on myreq for a later wait."""
        myreq = ctx.myreq
        backendFuncs = myreq.bench.backendFuncs
        collectiveArgs = myreq.bench.collectiveArgs
        measured_regions = myreq.bench.measured_regions
        commDetails = myreq.bench.commDetails
        a2ai = ctx.a2ai
        # Flatten and concatenate all incoming gradients into one buffer.
        grad_outputs = [gout.contiguous().view([(- 1)]) for gout in grad_outputs]
        grad_output = torch.cat(grad_outputs)
        grad_input = grad_output.new_empty([(a2ai.N * sum(a2ai.E))])
        # Record traffic volume for this measured region.
        cur_iter_memory = (grad_input.element_size() * grad_input.nelement())
        measured_regions['bwd_a2a']['memory'].append(cur_iter_memory)
        commDetails.append({'comms': 'all_to_all', 'msg_size': cur_iter_memory, 'in_split': a2ai.emb_split_lengths, 'out_split': a2ai.mb_split_lengths, 'dtype': str(grad_output.dtype)})
        collectiveArgs.opTensor = grad_input
        collectiveArgs.ipTensor = grad_output
        collectiveArgs.opTensor_split = a2ai.mb_split_lengths
        collectiveArgs.ipTensor_split = a2ai.emb_split_lengths
        collectiveArgs.asyncOp = True
        backendFuncs.sync_barrier(collectiveArgs)
        collectiveArgs.timers['bwd_a2a_start'] = time.monotonic()
        req = backendFuncs.all_to_allv(collectiveArgs, retFlag=True)
        myreq.req = req
        myreq.tensor = grad_input
        # No gradient w.r.t. myreq; grad_output flows back for *output.
        return (None, grad_output)
def extractOneSecondSpring(item):
    """Build a release message for a 'One Second Spring' feed item.

    Returns None for previews or items without chapter/volume info,
    a release message when a known series tag is present, else False.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Each known series is tagged with its own name.
    for series in ('The Princess Who Cannot Marry',
                   'Heavy Sweetness Ash-like Frost',
                   'Our Second Master'):
        if series in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp,
                                               frag=frag, postfix=postfix)
    return False
def get_dispatch(parsed_list: js_data) -> list[dict]:
    """Find GraphQL dispatch calls in parsed JS and return their query ids.

    Searches for calls of the form ``e.<method>("<queryId>",`` where
    <method> is one of the known dispatch method names, de-duplicates
    matches by their captured groups, and returns one dict per match.

    Fix: the return annotation said ``dict`` but the function returns a
    list of ``{'queryId': ..., 'dispatch_key': ...}`` dicts.
    """
    reg_graphql = 'e.{method}\\("{queryId}",'.format(method='({0})'.format('|'.join(get_dispatch_list().keys())), queryId='([a-z_/]*?)')
    dispatch_list = search_js_reg(parsed_list, reg_graphql)
    dispatch_list_unique: list[js_search_data] = []
    for i in dispatch_list:
        # for/else: keep `i` only if no already-kept entry has the same groups.
        for ii in dispatch_list_unique:
            if (''.join(i.data[0]) == ''.join(ii.data[0])):
                break
        else:
            dispatch_list_unique.append(i)
    # data[0] holds (dispatch_key, queryId) capture groups.
    return [{'queryId': d.data[0][1], 'dispatch_key': d.data[0][0]} for d in dispatch_list_unique]
class EntityData():
    """Mutable record describing one integration entity's current state."""

    # Declared fields; see __init__ for the defaults.
    unique_id: str
    name: str
    state: int
    attributes: dict
    icon: str
    device_name: str
    status: str
    disabled: bool
    binary_sensor_device_class: Optional[BinarySensorDeviceClass]
    sensor_device_class: Optional[SensorDeviceClass]
    sensor_state_class: Optional[SensorStateClass]

    def __init__(self):
        """Start with empty/neutral values and the 'created' status."""
        self.unique_id = ''
        self.name = ''
        self.state = 0
        self.attributes = {}
        self.icon = ''
        self.device_name = ''
        self.status = ENTITY_STATUS_CREATED
        self.disabled = False
        self.binary_sensor_device_class = None
        self.sensor_device_class = None
        self.sensor_state_class = None

    def __repr__(self):
        """Render every field as a plain dict literal string."""
        snapshot = {
            ENTITY_NAME: self.name,
            ENTITY_STATE: self.state,
            ENTITY_ATTRIBUTES: self.attributes,
            ENTITY_ICON: self.icon,
            ENTITY_DEVICE_NAME: self.device_name,
            ENTITY_STATUS: self.status,
            ENTITY_UNIQUE_ID: self.unique_id,
            ENTITY_DISABLED: self.disabled,
            ENTITY_BINARY_SENSOR_DEVICE_CLASS: self.binary_sensor_device_class,
            ENTITY_SENSOR_DEVICE_CLASS: self.sensor_device_class,
            ENTITY_SENSOR_STATE_CLASS: self.sensor_state_class,
        }
        return str(snapshot)
class RecordableEnvMixin(ABC):
    """Interface for environments whose state/action trajectory can be
    recorded. Every method below must be overridden by a concrete
    environment; the base implementations all raise NotImplementedError.
    """

    def get_maze_state(self) -> MazeStateType:
        """Return the environment's current MazeState object."""
        raise NotImplementedError

    def get_maze_action(self) -> MazeActionType:
        """Return the environment's current MazeAction object."""
        raise NotImplementedError

    def get_episode_id(self) -> str:
        """Return the identifier of the current episode."""
        raise NotImplementedError

    def get_renderer(self) -> Renderer:
        """Return the renderer used to visualize recorded data."""
        raise NotImplementedError
class EpisodeCallbacks():
    """Base class for episode lifecycle hooks with custom-metric storage.

    Subclasses override the ``on_episode_*`` hooks; ``custom_metrics``
    stores metrics collected during the current episode.

    Fix: the getter was missing ``@property`` and the setter carried a
    stray ``_metrics.setter`` expression (a NameError at class creation);
    restored the property/setter pair.
    """

    def __init__(self) -> None:
        self._custom_metrics = {}

    @property
    def custom_metrics(self) -> Dict[(str, Any)]:
        """Metrics dict collected during the current episode."""
        return self._custom_metrics

    @custom_metrics.setter
    def custom_metrics(self, metrics: Dict[(str, Any)]) -> None:
        self._custom_metrics = metrics

    def reset(self) -> None:
        """Clear the collected metrics (call between episodes)."""
        self._custom_metrics.clear()

    def on_episode_start(self, index: int) -> None:
        """Hook called when an episode starts (no-op by default)."""
        pass

    def on_episode_init(self, index: int, timestep: TimeStep) -> None:
        """Hook called at episode initialization (no-op by default)."""
        pass

    def on_episode_step(self, index: int, step: int, action: Action, timestep: TimeStep) -> None:
        """Hook called after every environment step (no-op by default)."""
        pass

    def on_episode_end(self, index: int) -> None:
        """Hook called when an episode ends (no-op by default)."""
        pass
def remaining():
    """Print a small two-column table of remaining white/black pieces."""
    kinds = ('pawn', 'rook', 'knight', 'bishop', 'queen', 'king')
    # Count white pieces first, then black, matching the board's markers.
    white = [utils.typeCounter(kind, 'W') for kind in kinds]
    black = [utils.typeCounter(kind, 'b') for kind in kinds]
    print(' ', end='')
    print(' REMAINING:\n ', end='')
    print('_' * 23, end='')
    print('\n White: | Black:')
    print(" {}P' {}R' | {}p. {}r.".format(white[0], white[1], black[0], black[1]))
    print(" {}N' {}B' | {}n. {}b.".format(white[2], white[3], black[2], black[3]))
    print(" {}Q' {}K' | {}q. {}k.".format(white[4], white[5], black[4], black[5]))
    print(' ', end='')
    print(' ' * 23, end='')
    print('\n')
class ChangeEmailActivateView(View):
    """Finalize a pending email change from an emailed activation link."""

    def get(self, request, code):
        """Apply the Activation matching ``code`` to its user and redirect.

        Fix: the handler was missing ``self``; Django's dispatch binds the
        view instance first, so the old signature received the instance as
        ``request`` and the real request as ``code``.
        """
        act = get_object_or_404(Activation, code=code)
        user = act.user
        user.email = act.email
        user.save()
        # One-shot link: remove the activation record once consumed.
        act.delete()
        messages.success(request, _('You have successfully changed your email!'))
        return redirect('accounts:change_email')
# Fix: the decorator lost its '@pytest.mark' prefix (a bare '.parametrize('
# line is a syntax error); requires `import pytest` at the top of the file.
@pytest.mark.parametrize(('base_mesh_thunk', 'fs', 'degree'), [((lambda : UnitIntervalMesh(10)), 'CG', 1), ((lambda : UnitIntervalMesh(10)), 'CG', 2), ((lambda : UnitIntervalMesh(10)), 'DG', 1), ((lambda : UnitIntervalMesh(10)), 'DG', 2), ((lambda : UnitSquareMesh(10, 10)), 'CG', 1), ((lambda : UnitSquareMesh(10, 10)), 'CG', 2), ((lambda : UnitSquareMesh(10, 10)), 'DG', 1), ((lambda : UnitSquareMesh(10, 10)), 'DG', 2)])
def test_ufl_vector_element_assembly(base_mesh_thunk, fs, degree):
    """Check a VectorFunctionSpace on an extruded mesh reports the expected UFL element."""
    mesh = ExtrudedMesh(base_mesh_thunk(), 10)
    V = VectorFunctionSpace(mesh, fs, degree, dim=3)
    assert (V.ufl_element() == VectorElement(fs, mesh.ufl_cell(), degree, dim=3))
def get_device_info() -> Tuple[(str, str, str, int, str)]:
    """Detect the torch compute device available in this environment.

    Returns:
        (torch_version, device, device_version, device_count, device_other):
        torch version string (None if torch is absent), device kind
        ('cpu', 'cuda' or 'mps'), CUDA version (may be None), CUDA device
        count, and raw nvidia-smi CSV output for CUDA devices.
    """
    (torch_version, device, device_version, device_count, device_other) = (None, 'cpu', None, 0, '')
    try:
        import torch
        torch_version = torch.__version__
        if torch.cuda.is_available():
            device = 'cuda'
            device_version = torch.version.cuda
            device_count = torch.cuda.device_count()
        elif torch.backends.mps.is_available():
            device = 'mps'
    except ModuleNotFoundError:
        pass
    if (not device_version):
        # Fall back to CLI probes when torch did not report a CUDA version.
        device_version = (get_cuda_version_from_nvcc() or get_cuda_version_from_nvidia_smi())
    if (device == 'cuda'):
        try:
            output = subprocess.check_output(['nvidia-smi', '--query-gpu=name,driver_version,memory.total,memory.free,memory.used', '--format=csv'])
            device_other = output.decode('utf-8')
        except Exception:
            # Best-effort: nvidia-smi may be missing or fail. Fix: was a
            # bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
            pass
    return (torch_version, device, device_version, device_count, device_other)
class TensorFlowShim(Shim):
    """Shim wrapping a Keras model behind a thinc-style predict /
    begin_update / finish_update interface.

    Gradients from successive backprops are accumulated into
    ``self.gradients`` and consumed (then cleared) by finish_update().
    """
    # Accumulated gradients, one tf.Variable per trainable variable,
    # or None when nothing has been backpropagated yet.
    gradients: Optional[List['tf.Tensor']]
    def __init__(self, model: Any, config=None, optimizer: Any=None):
        super().__init__(model, config, optimizer)
        self.gradients = None
    def __str__(self):
        """Return the Keras model summary as a single string."""
        lines: List[str] = []
        def accumulate(line: str):
            lines.append(line)
        self._model.summary(print_fn=accumulate)
        return '\n'.join(lines)
    def __call__(self, X: ArgsKwargs, is_train: bool):
        """Dispatch to begin_update() for training, predict() otherwise."""
        if is_train:
            return self.begin_update(X)
        else:
            return self.predict(X)
    def predict(self, X: ArgsKwargs):
        """Run inference with the Keras learning phase forced to 0."""
        old_phase = tf.keras.backend.learning_phase()
        tf.keras.backend.set_learning_phase(0)
        Y = self._model(*X.args, **X.kwargs)
        # Restore whatever phase was active before.
        tf.keras.backend.set_learning_phase(old_phase)
        return Y
    def begin_update(self, X: ArgsKwargs):
        """Run a forward pass under an open GradientTape.

        Returns (output, backprop); calling backprop(d_output) closes the
        tape, computes gradients w.r.t. inputs and trainable variables,
        accumulates the variable gradients, and returns input gradients.
        """
        tf.keras.backend.set_learning_phase(1)
        # The tape is entered manually and exited inside backprop().
        tape = tf.GradientTape()
        tape.__enter__()
        tape.watch(X.args)
        output = self._model(*X.args, **X.kwargs)
        def backprop(d_output):
            tape.__exit__(None, None, None)
            # Differentiate w.r.t. the inputs first, then the model's
            # trainable variables.
            if (len(X.args) == 1):
                wrt_tensors = [X.args[0]]
            else:
                wrt_tensors = list(X.args[0])
            wrt_tensors.extend(self._model.trainable_variables)
            all_gradients = tape.gradient(output, wrt_tensors, output_gradients=d_output)
            dX = all_gradients[:len(X.args)]
            # NOTE(review): dX slices [:len(X.args)] but this slices [1:];
            # the two are only consistent when len(X.args) == 1 — confirm.
            opt_grads = all_gradients[1:]
            if (self.gradients is not None):
                # Accumulate into the running per-variable gradient sums.
                assert (len(opt_grads) == len(self.gradients)), 'gradients must match'
                variable: tf.Variable
                for (variable, new_variable) in zip(self.gradients, opt_grads):
                    variable.assign_add(new_variable)
            else:
                # First backprop: materialize the accumulators.
                self.gradients = [tf.Variable(f) for f in opt_grads]
            return ArgsKwargs(args=tuple(dX), kwargs={})
        return (output, backprop)
    def finish_update(self, optimizer: Optimizer):
        """Apply the accumulated gradients via `optimizer`, then clear them.

        Parameters and gradients are flattened into two single arrays so
        the optimizer is called once, then the updated values are written
        back variable by variable.
        """
        if (self.gradients is None):
            raise ValueError('There are no gradients for optimization. Be sure to call begin_update before calling finish_update.')
        assert (len(self.gradients) == len(self._model.trainable_variables))
        grad: tf.Tensor
        variable: tf.Variable
        params = []
        grads = []
        shapes = []
        for (grad, variable) in zip(self.gradients, self._model.trainable_variables):
            param = variable.numpy()
            grad = grad.numpy()
            # Remember each variable's size/shape so it can be unflattened.
            shapes.append((param.size, param.shape))
            params.append(param.ravel())
            grads.append(grad.ravel())
        xp = get_array_module(params[0])
        (flat_params, flat_grads) = optimizer((self.id, 'tensorflow-shim'), xp.concatenate(params), xp.concatenate(grads))
        start = 0
        for (grad, variable) in zip(self.gradients, self._model.trainable_variables):
            (size, shape) = shapes.pop(0)
            param = flat_params[start:(start + size)].reshape(shape)
            variable.assign(param)
            start += size
        # Reset the accumulators for the next round of backprops.
        self.gradients = None
    def _load_weights_from_state_dict(self, state_dict: Optional[Dict[(str, ArrayXd)]]=None):
        """Copy weights from a name->array mapping into the Keras model.

        With no argument, reloads the model's own current weights.
        """
        if (state_dict is None):
            state_dict = self._create_state_dict()
        for layer in self._model.layers:
            current_layer_weights = []
            for weight in layer.weights:
                current_layer_weights.append(state_dict[weight.name])
            layer.set_weights(current_layer_weights)
    def _create_state_dict(self):
        """Snapshot all layer weights into a name->numpy-array dict."""
        state_dict = {}
        for layer in self._model.layers:
            for weight in layer.weights:
                state_dict[weight.name] = weight.numpy()
        return state_dict
    def use_params(self, params):
        """Temporarily swap in weights found in `params`, then restore.

        Written as a generator; presumably consumed as a context manager
        (e.g. via contextlib) — confirm at the call sites.
        """
        key_prefix = f'tensorflow_{self.id}_'
        state_dict = {}
        for (k, v) in params.items():
            if (hasattr(k, 'startswith') and k.startswith(key_prefix)):
                if (cupy is None):
                    assert isinstance(v, numpy.ndarray)
                else:
                    # Move GPU arrays to host before handing them to Keras.
                    if isinstance(v, cupy.core.core.ndarray):
                        v = cupy.asnumpy(v)
                    assert isinstance(v, numpy.ndarray)
                state_dict[k.replace(key_prefix, '')] = v
        if state_dict:
            # Back up current weights, install the overrides, and restore
            # the backup once the caller resumes the generator.
            backup = self._create_state_dict()
            self._load_weights_from_state_dict(state_dict)
            (yield)
            self._load_weights_from_state_dict(backup)
        else:
            (yield)
    def _clone_model(self):
        """Rebuild the model from its JSON config (fresh session), keeping weights."""
        model_json_config = self._model.to_json()
        tf.keras.backend.clear_session()
        self._model = tf.keras.models.model_from_json(model_json_config)
        self._load_weights_from_state_dict()
    def copy(self):
        """Deep-copy the shim; the Keras model is detached during the copy
        and rebuilt from JSON afterwards."""
        model_json_config = self._model.to_json()
        self._model = None
        tf.keras.backend.clear_session()
        copied = copy.deepcopy(self)
        copied._model = tf.keras.models.model_from_json(model_json_config)
        copied._load_weights_from_state_dict()
        return copied
    def to_device(self, device_type: str, device_id: int):
        """Rebuild the model under the requested TF device scope."""
        if (device_type == 'cpu'):
            with tf.device('/CPU'):
                self._clone_model()
        elif (device_type == 'gpu'):
            with tf.device('/GPU:{}'.format(device_id)):
                self._clone_model()
    def to_bytes(self):
        """Serialize the model to h5 bytes; fall back to a (catalogue_name,
        weights) tuple for models that cannot be saved as h5."""
        filelike = BytesIO()
        try:
            with h5py.File(filelike, 'w') as f:
                self._model.save(f, save_format='h5')
            return filelike.getvalue()
        except NotImplementedError:
            if (not hasattr(self._model, 'catalogue_name')):
                raise ValueError("Couldn't serialize to h5, and model has no factory function for component serialization.")
            # Return value discarded — presumably validates the factory is
            # registered before we advertise the catalogue name; confirm.
            keras_model_fns.get(self._model.catalogue_name)
            return (self._model.catalogue_name, self._model.get_weights())
    def from_bytes(self, data):
        """Restore a model from to_bytes() output (h5 bytes or name/weights tuple)."""
        ops: Ops = get_current_ops()
        if (ops.device_type == 'cpu'):
            device = 'CPU'
        else:
            device = tf.test.gpu_device_name()
        if isinstance(data, (str, bytes)):
            # h5 payload: load the whole model from the buffer.
            tf.keras.backend.clear_session()
            filelike = BytesIO(data)
            filelike.seek(0)
            with h5py.File(filelike, 'r') as f:
                with tf.device(device):
                    self._model = tf.keras.models.load_model(f)
            return
        (catalogue_name, model_weights) = data
        if (self._model is None):
            model_fn = keras_model_fns.get(catalogue_name)
            tf.keras.backend.clear_session()
            with tf.device(device):
                # NOTE(review): self._model is None here, so this hasattr is
                # always False and self._model is never reassigned before the
                # set_weights call below — looks wrong; confirm intent.
                if hasattr(self._model, 'eg_args'):
                    ak: ArgsKwargs = self._model.eg_args
                    new_model = model_fn(*ak.args, **ak.kwargs)
                else:
                    new_model = model_fn()
                self._model_initialized = maybe_handshake_model(new_model)
        self._model.set_weights(model_weights)
class FirewallCommand():
    """Shared helper base for firewall-cmd-style CLI front ends: message
    printing, exit handling, batched add/remove/query sequences, and
    argument parsing/validation."""
    def __init__(self, quiet=False, verbose=False):
        self.quiet = quiet      # suppress normal and error output
        self.verbose = verbose  # enable extra informational output
        self.__use_exception_handler = True
        self.fw = None          # firewall client, injected via set_fw()
    def set_fw(self, fw):
        """Inject the firewall client used by the *_sequence helpers."""
        self.fw = fw
    def set_quiet(self, flag):
        """Enable/disable quiet mode (suppresses all printing)."""
        self.quiet = flag
    def get_quiet(self):
        """Return whether quiet mode is enabled."""
        return self.quiet
    def set_verbose(self, flag):
        """Enable/disable verbose mode."""
        self.verbose = flag
    def get_verbose(self):
        """Return whether verbose mode is enabled."""
        return self.verbose
    def print_msg(self, msg=None):
        """Write `msg` to stdout unless quiet or msg is None."""
        if ((msg is not None) and (not self.quiet)):
            sys.stdout.write((msg + '\n'))
    def print_error_msg(self, msg=None):
        """Write `msg` to stderr unless quiet or msg is None."""
        if ((msg is not None) and (not self.quiet)):
            sys.stderr.write((msg + '\n'))
    def print_warning(self, msg=None):
        """Write `msg` to stderr, colorized red when stderr is a terminal."""
        FAIL = '\x1b[91m'  # ANSI bright red
        END = '\x1b[00m'   # ANSI reset
        if sys.stderr.isatty():
            msg = ((FAIL + msg) + END)
        self.print_error_msg(msg)
    def print_and_exit(self, msg=None, exit_code=0):
        """Print `msg` (as a warning when exit_code > 1) and exit."""
        if (exit_code > 1):
            self.print_warning(msg)
        else:
            self.print_msg(msg)
        sys.exit(exit_code)
    def fail(self, msg=None):
        """Print `msg` as a warning and exit with status 2."""
        self.print_and_exit(msg, 2)
    def print_if_verbose(self, msg=None):
        """Write `msg` to stdout only when verbose mode is on."""
        if ((msg is not None) and self.verbose):
            sys.stdout.write((msg + '\n'))
    def __cmd_sequence(self, cmd_type, option, action_method, query_method, parse_method, message, start_args=None, end_args=None, no_exit=False):
        """Apply `action_method` to every item in `option`.

        Items are first parsed with `parse_method` (when given); parse or
        action failures print a warning when multiple items are processed,
        or exit immediately for a single item. "Already enabled/set"-style
        errors are downgraded to warnings. Unless `no_exit` is set, exits
        with the collected error code when every item failed.
        """
        if (self.fw is not None):
            self.fw.authorizeAll()
        items = []
        _errors = 0
        _error_codes = []
        # Phase 1: parse/validate each requested item.
        for item in option:
            if (parse_method is not None):
                try:
                    item = parse_method(item)
                except Exception as msg:
                    code = FirewallError.get_code(str(msg))
                    if (len(option) > 1):
                        self.print_warning(('Warning: %s' % msg))
                    else:
                        self.print_and_exit(('Error: %s' % msg), code)
                    if (code not in _error_codes):
                        _error_codes.append(code)
                    _errors += 1
                    continue
            items.append(item)
        # Phase 2: invoke the action for each parsed item.
        for item in items:
            call_item = []
            if (start_args is not None):
                call_item += start_args
            # Lists/tuples expand into multiple positional arguments.
            if ((not isinstance(item, list)) and (not isinstance(item, tuple))):
                call_item.append(item)
            else:
                call_item += item
            if (end_args is not None):
                call_item += end_args
            self.deactivate_exception_handler()
            try:
                action_method(*call_item)
            except (DBusException, Exception) as msg:
                if isinstance(msg, DBusException):
                    self.fail_if_not_authorized(msg.get_dbus_name())
                    msg = msg.get_dbus_message()
                else:
                    msg = str(msg)
                code = FirewallError.get_code(msg)
                # "Already in desired state" is not a real failure.
                if (code in [errors.ALREADY_ENABLED, errors.NOT_ENABLED, errors.ZONE_ALREADY_SET, errors.ALREADY_SET]):
                    code = 0
                if (len(option) > 1):
                    self.print_warning(('Warning: %s' % msg))
                elif (code == 0):
                    self.print_warning(('Warning: %s' % msg))
                    return
                else:
                    self.print_and_exit(('Error: %s' % msg), code)
                if (code not in _error_codes):
                    _error_codes.append(code)
                _errors += 1
            self.activate_exception_handler()
        # Exit policy: only exit non-zero when nothing succeeded.
        if (not no_exit):
            if ((len(option) > _errors) or (0 in _error_codes)):
                return
            elif (len(_error_codes) == 1):
                sys.exit(_error_codes[0])
            elif (len(_error_codes) > 1):
                sys.exit(errors.UNKNOWN_ERROR)
    def add_sequence(self, option, action_method, query_method, parse_method, message, no_exit=False):
        """Run an 'add' sequence over `option` items."""
        self.__cmd_sequence('add', option, action_method, query_method, parse_method, message, no_exit=no_exit)
    def x_add_sequence(self, x, option, action_method, query_method, parse_method, message, no_exit=False):
        """Run an 'add' sequence with `x` (e.g. a zone) as leading argument."""
        self.__cmd_sequence('add', option, action_method, query_method, parse_method, message, start_args=[x], no_exit=no_exit)
    def zone_add_timeout_sequence(self, zone, option, action_method, query_method, parse_method, message, timeout, no_exit=False):
        """Run an 'add' sequence with `zone` prepended and `timeout` appended."""
        self.__cmd_sequence('add', option, action_method, query_method, parse_method, message, start_args=[zone], end_args=[timeout], no_exit=no_exit)
    def remove_sequence(self, option, action_method, query_method, parse_method, message, no_exit=False):
        """Run a 'remove' sequence over `option` items."""
        self.__cmd_sequence('remove', option, action_method, query_method, parse_method, message, no_exit=no_exit)
    def x_remove_sequence(self, x, option, action_method, query_method, parse_method, message, no_exit=False):
        """Run a 'remove' sequence with `x` as leading argument."""
        self.__cmd_sequence('remove', option, action_method, query_method, parse_method, message, start_args=[x], no_exit=no_exit)
    def __query_sequence(self, option, query_method, parse_method, message, start_args=None, no_exit=False):
        """Run `query_method` over every item in `option` and print results.

        Multi-item runs print '<message>: yes/no' per item; a single item
        prints just the query result. Unless `no_exit` is set, exits 0
        when done.
        """
        items = []
        # Phase 1: parse/validate each requested item.
        for item in option:
            if (parse_method is not None):
                try:
                    item = parse_method(item)
                except Exception as msg:
                    if (len(option) > 1):
                        self.print_warning(('Warning: %s' % msg))
                        continue
                    else:
                        code = FirewallError.get_code(str(msg))
                        self.print_and_exit(('Error: %s' % msg), code)
            items.append(item)
        # Phase 2: query each parsed item.
        for item in items:
            call_item = []
            if (start_args is not None):
                call_item += start_args
            # Lists/tuples expand into multiple positional arguments.
            if ((not isinstance(item, list)) and (not isinstance(item, tuple))):
                call_item.append(item)
            else:
                call_item += item
            self.deactivate_exception_handler()
            try:
                res = query_method(*call_item)
            except DBusException as msg:
                self.fail_if_not_authorized(msg.get_dbus_name())
                code = FirewallError.get_code(msg.get_dbus_message())
                if (len(option) > 1):
                    self.print_warning(('Warning: %s' % msg.get_dbus_message()))
                    continue
                else:
                    self.print_and_exit(('Error: %s' % msg.get_dbus_message()), code)
            except Exception as msg:
                code = FirewallError.get_code(str(msg))
                if (len(option) > 1):
                    self.print_warning(('Warning: %s' % msg))
                else:
                    self.print_and_exit(('Error: %s' % msg), code)
            self.activate_exception_handler()
            if (len(option) > 1):
                # Boolean result indexed into ('no', 'yes').
                self.print_msg(('%s: %s' % ((message % item), ('no', 'yes')[res])))
            else:
                self.print_query_result(res)
        if (not no_exit):
            sys.exit(0)
    def query_sequence(self, option, query_method, parse_method, message, no_exit=False):
        """Run a query sequence over `option` items."""
        self.__query_sequence(option, query_method, parse_method, message, no_exit=no_exit)
    def x_query_sequence(self, x, option, query_method, parse_method, message, no_exit=False):
        """Run a query sequence with `x` (e.g. a zone) as leading argument."""
        self.__query_sequence(option, query_method, parse_method, message, start_args=[x], no_exit=no_exit)
    def parse_source(self, value):
        """Validate a source: IPv4/IPv6 with optional mask, MAC address,
        or 'ipset:<name>'. Raises FirewallError(INVALID_ADDR) otherwise."""
        if ((not checkIPnMask(value)) and (not checkIP6nMask(value)) and (not check_mac(value)) and (not (value.startswith('ipset:') and (len(value) > 6)))):
            raise FirewallError(errors.INVALID_ADDR, ("'%s' is no valid IPv4, IPv6 or MAC address, nor an ipset" % value))
        return value
    def parse_port(self, value, separator='/'):
        """Split 'portid[-portid]<separator>protocol' and validate both
        parts; returns (port, proto). Raises FirewallError on bad input."""
        try:
            (port, proto) = value.split(separator)
        except ValueError:
            raise FirewallError(errors.INVALID_PORT, ('bad port (most likely missing protocol), correct syntax is portid[-portid]%sprotocol' % separator))
        if (not check_port(port)):
            raise FirewallError(errors.INVALID_PORT, port)
        if (proto not in ['tcp', 'udp', 'sctp', 'dccp']):
            raise FirewallError(errors.INVALID_PROTOCOL, ("'%s' not in {'tcp'|'udp'|'sctp'|'dccp'}" % proto))
        return (port, proto)
    def parse_forward_port(self, value, compat=False):
        """Parse a 'port=..:proto=..:toport=..:toaddr=..' forward-port spec.

        Scans key=value pairs separated by ':' left to right; with
        `compat`, an 'if=' key is accepted and ignored. Returns
        (port, protocol, toport, toaddr) after validating the parts.
        Raises FirewallError for unknown keys or invalid values.
        """
        port = None
        protocol = None
        toport = None
        toaddr = None
        i = 0
        # Manual scan: each iteration consumes one 'key=value' token.
        while ('=' in value[i:]):
            opt = value[i:].split('=', 1)[0]
            i += (len(opt) + 1)
            # If another '=' follows, this value ends at the next ':'.
            if ('=' in value[i:]):
                val = value[i:].split(':', 1)[0]
            else:
                val = value[i:]
            i += (len(val) + 1)
            if (opt == 'port'):
                port = val
            elif (opt == 'proto'):
                protocol = val
            elif (opt == 'toport'):
                toport = val
            elif (opt == 'toaddr'):
                toaddr = val
            elif ((opt == 'if') and compat):
                # Accepted for backwards compatibility, ignored.
                pass
            else:
                raise FirewallError(errors.INVALID_FORWARD, ("invalid forward port arg '%s'" % opt))
        if (not port):
            raise FirewallError(errors.INVALID_FORWARD, 'missing port')
        if (not protocol):
            raise FirewallError(errors.INVALID_FORWARD, 'missing protocol')
        if (not (toport or toaddr)):
            raise FirewallError(errors.INVALID_FORWARD, 'missing destination')
        if (not check_port(port)):
            raise FirewallError(errors.INVALID_PORT, port)
        if (protocol not in ['tcp', 'udp', 'sctp', 'dccp']):
            raise FirewallError(errors.INVALID_PROTOCOL, ("'%s' not in {'tcp'|'udp'|'sctp'|'dccp'}" % protocol))
        if (toport and (not check_port(toport))):
            raise FirewallError(errors.INVALID_PORT, toport)
        # toaddr must be IPv4, or IPv6 unless in compat mode.
        if (toaddr and (not check_single_address('ipv4', toaddr))):
            if (compat or (not check_single_address('ipv6', toaddr))):
                raise FirewallError(errors.INVALID_ADDR, toaddr)
        return (port, protocol, toport, toaddr)
def parse_ipset_option(self, value):
args = value.split('=')
if (len(args) == 1):
return (args[0], '')
elif (len(args) == 2):
return args
else:
raise FirewallError(errors.INVALID_OPTION, ("invalid ipset option '%s'" % value))
def check_destination_ipv(self, value):
ipvs = ['ipv4', 'ipv6']
if (value not in ipvs):
raise FirewallError(errors.INVALID_IPV, ("invalid argument: %s (choose from '%s')" % (value, "', '".join(ipvs))))
return value
def parse_service_destination(self, value):
try:
(ipv, destination) = value.split(':', 1)
except ValueError:
raise FirewallError(errors.INVALID_DESTINATION, 'destination syntax is ipv:address[/mask]')
return (self.check_destination_ipv(ipv), destination)
def check_ipv(self, value):
ipvs = ['ipv4', 'ipv6', 'eb']
if (value not in ipvs):
raise FirewallError(errors.INVALID_IPV, ("invalid argument: %s (choose from '%s')" % (value, "', '".join(ipvs))))
return value
def check_helper_family(self, value):
    """Return *value* if it is a valid helper family ('' means any), else
    raise FirewallError(INVALID_IPV)."""
    allowed = ['', 'ipv4', 'ipv6']
    if value in allowed:
        return value
    raise FirewallError(errors.INVALID_IPV, "invalid argument: %s (choose from '%s')" % (value, "', '".join(allowed)))
def check_module(self, value):
    """Validate a conntrack helper module name of the form
    'nf_conntrack_<something>'; returns it or raises FirewallError."""
    if not value.startswith('nf_conntrack_'):
        raise FirewallError(errors.INVALID_MODULE, "'%s' does not start with 'nf_conntrack_'" % value)
    # .replace (not slicing) mirrors the original: every occurrence of the
    # prefix is removed before the length check.
    if len(value.replace('nf_conntrack_', '')) < 1:
        raise FirewallError(errors.INVALID_MODULE, "Module name '%s' too short" % value)
    return value
def print_zone_policy_info(self, zone, settings, default_zone=None, extra_interfaces=[], active_zones=[], active_policies=[], isPolicy=True):
    # Print a human-readable dump of one zone (isPolicy=False) or one
    # policy (isPolicy=True).  The mutable list defaults are only read,
    # never mutated, so the shared-default pitfall does not bite here.
    target = settings.getTarget()
    services = settings.getServices()
    ports = settings.getPorts()
    protocols = settings.getProtocols()
    masquerade = settings.getMasquerade()
    forward_ports = settings.getForwardPorts()
    source_ports = settings.getSourcePorts()
    icmp_blocks = settings.getIcmpBlocks()
    rules = settings.getRichRules()
    description = settings.getDescription()
    short_description = settings.getShort()
    if isPolicy:
        # Policy-only attributes.
        ingress_zones = settings.getIngressZones()
        egress_zones = settings.getEgressZones()
        priority = settings.getPriority()
    else:
        # Zone-only attributes; merge in interfaces known from elsewhere.
        icmp_block_inversion = settings.getIcmpBlockInversion()
        interfaces = sorted(set((settings.getInterfaces() + extra_interfaces)))
        sources = settings.getSources()
        forward = settings.getForward()
        ingress_priority = settings.getIngressPriority()
        egress_priority = settings.getEgressPriority()

    def rich_rule_sorted_key(rule):
        # Sort rich rules by their numeric 'priority=' field; rules without
        # an explicit priority sort as 0.
        priority = 0
        search_str = 'priority='
        try:
            i = rule.index(search_str)
        except ValueError:
            pass
        else:
            i += len(search_str)
            # Value runs up to the next space; quotes are stripped.
            priority = int(rule[i:(i + rule[i:].index(' '))].replace('"', ''))
        return priority
    # Annotate the printed name with '(default)' / '(active)' markers.
    attributes = []
    if (default_zone is not None):
        if (zone == default_zone):
            attributes.append('default')
    if ((not isPolicy) and (zone in active_zones)):
        attributes.append('active')
    if (isPolicy and (zone in active_policies)):
        attributes.append('active')
    if attributes:
        zone = (zone + (' (%s)' % ', '.join(attributes)))
    self.print_msg(zone)
    if self.verbose:
        self.print_msg((' summary: ' + short_description))
        self.print_msg((' description: ' + description))
    if isPolicy:
        self.print_msg((' priority: ' + str(priority)))
    self.print_msg((' target: ' + target))
    if (not isPolicy):
        self.print_msg((' ingress-priority: ' + str(ingress_priority)))
        self.print_msg((' egress-priority: ' + str(egress_priority)))
        self.print_msg((' icmp-block-inversion: %s' % ('yes' if icmp_block_inversion else 'no')))
    if isPolicy:
        self.print_msg((' ingress-zones: ' + ' '.join(ingress_zones)))
        self.print_msg((' egress-zones: ' + ' '.join(egress_zones)))
    else:
        self.print_msg((' interfaces: ' + ' '.join(interfaces)))
        self.print_msg((' sources: ' + ' '.join(sources)))
    self.print_msg((' services: ' + ' '.join(sorted(services))))
    self.print_msg((' ports: ' + ' '.join([('%s/%s' % (port[0], port[1])) for port in ports])))
    self.print_msg((' protocols: ' + ' '.join(sorted(protocols))))
    if (not isPolicy):
        self.print_msg((' forward: %s' % ('yes' if forward else 'no')))
    self.print_msg((' masquerade: %s' % ('yes' if masquerade else 'no')))
    # forward-ports and rich rules are one-per-line, indented with a tab.
    self.print_msg(((' forward-ports: ' + ('\n\t' if forward_ports else '')) + '\n\t'.join([('port=%s:proto=%s:toport=%s:toaddr=%s' % (port, proto, toport, toaddr)) for (port, proto, toport, toaddr) in forward_ports])))
    self.print_msg((' source-ports: ' + ' '.join([('%s/%s' % (port[0], port[1])) for port in source_ports])))
    self.print_msg((' icmp-blocks: ' + ' '.join(icmp_blocks)))
    self.print_msg(((' rich rules: ' + ('\n\t' if rules else '')) + '\n\t'.join(sorted(rules, key=rich_rule_sorted_key))))
def print_zone_info(self, zone, settings, default_zone=None, extra_interfaces=[], active_zones=[]):
    """Convenience wrapper: print *zone* via the shared zone/policy printer."""
    kwargs = dict(default_zone=default_zone, extra_interfaces=extra_interfaces, active_zones=active_zones, isPolicy=False)
    self.print_zone_policy_info(zone, settings, **kwargs)
def print_policy_info(self, policy, settings, default_zone=None, extra_interfaces=[], active_policies=[]):
    """Convenience wrapper: print *policy* via the shared zone/policy printer."""
    kwargs = dict(default_zone=default_zone, extra_interfaces=extra_interfaces, active_policies=active_policies, isPolicy=True)
    self.print_zone_policy_info(policy, settings, **kwargs)
def print_service_info(self, service, settings):
    """Print a service's ports, protocols, source ports, modules,
    destinations, includes and helpers (summary/description when verbose)."""
    def _ports(seq):
        # Render [(port, proto), ...] as 'port/proto port/proto ...'.
        return ' '.join('%s/%s' % (p[0], p[1]) for p in seq)
    ports = settings.getPorts()
    protocols = settings.getProtocols()
    source_ports = settings.getSourcePorts()
    modules = settings.getModules()
    description = settings.getDescription()
    destinations = settings.getDestinations()
    short_description = settings.getShort()
    includes = settings.getIncludes()
    helpers = settings.getHelpers()
    self.print_msg(service)
    if self.verbose:
        self.print_msg(' summary: ' + short_description)
        self.print_msg(' description: ' + description)
    self.print_msg(' ports: ' + _ports(ports))
    self.print_msg(' protocols: ' + ' '.join(protocols))
    self.print_msg(' source-ports: ' + _ports(source_ports))
    self.print_msg(' modules: ' + ' '.join(modules))
    self.print_msg(' destination: ' + ' '.join('%s:%s' % (k, v) for (k, v) in destinations.items()))
    self.print_msg(' includes: ' + ' '.join(sorted(includes)))
    self.print_msg(' helpers: ' + ' '.join(sorted(helpers)))
def print_icmptype_info(self, icmptype, settings):
    """Print an ICMP type's destination families (both families when the
    type is unrestricted), plus summary/description when verbose."""
    dests = settings.getDestinations()
    desc = settings.getDescription()
    short = settings.getShort()
    if len(dests) == 0:
        # Empty means "applies to both families".
        dests = ['ipv4', 'ipv6']
    self.print_msg(icmptype)
    if self.verbose:
        self.print_msg(' summary: ' + short)
        self.print_msg(' description: ' + desc)
    self.print_msg(' destination: ' + ' '.join(dests))
def print_ipset_info(self, ipset, settings):
    """Print an ipset's type, options (key or key=value) and entries."""
    kind = settings.getType()
    opts = settings.getOptions()
    members = settings.getEntries()
    desc = settings.getDescription()
    short = settings.getShort()
    self.print_msg(ipset)
    if self.verbose:
        self.print_msg(' summary: ' + short)
        self.print_msg(' description: ' + desc)
    # Options with an empty value print as the bare key.
    opt_text = ' '.join(('%s=%s' % (k, v)) if v else k for (k, v) in opts.items())
    self.print_msg(' type: ' + kind)
    self.print_msg(' options: ' + opt_text)
    self.print_msg(' entries: ' + ' '.join(members))
def print_helper_info(self, helper, settings):
    """Print a conntrack helper's family, kernel module and ports."""
    ports = settings.getPorts()
    module = settings.getModule()
    family = settings.getFamily()
    desc = settings.getDescription()
    short = settings.getShort()
    self.print_msg(helper)
    if self.verbose:
        self.print_msg(' summary: ' + short)
        self.print_msg(' description: ' + desc)
    self.print_msg(' family: ' + family)
    self.print_msg(' module: ' + module)
    self.print_msg(' ports: ' + ' '.join('%s/%s' % (p[0], p[1]) for p in ports))
def print_query_result(self, value):
    """Print 'yes' (exit 0) for a truthy query result, 'no' (exit 1)
    otherwise; print_and_exit terminates the process."""
    args = ('yes',) if value else ('no', 1)
    self.print_and_exit(*args)
def exception_handler(self, exception_message):
    # Central error sink for CLI operations.  When handling is disabled,
    # re-raise the active exception (bare 'raise' is only valid while an
    # exception is being handled, i.e. this runs inside an except block).
    if (not self.__use_exception_handler):
        raise
    # Exits the process immediately on polkit authorization failures.
    self.fail_if_not_authorized(exception_message)
    code = FirewallError.get_code(str(exception_message))
    # "already enabled / not enabled / already set" are benign: warn and
    # continue; everything else is fatal and exits with the error code.
    if (code in [errors.ALREADY_ENABLED, errors.NOT_ENABLED, errors.ZONE_ALREADY_SET, errors.ALREADY_SET]):
        self.print_warning(('Warning: %s' % exception_message))
    else:
        self.print_and_exit(('Error: %s' % exception_message), code)
def fail_if_not_authorized(self, exception_message):
    """Exit with NOT_AUTHORIZED if *exception_message* reports a polkit
    authorization failure; otherwise do nothing."""
    if 'NotAuthorizedException' not in exception_message:
        return
    msg = 'Authorization failed.\n Make sure polkit agent is running or run the application as superuser.'
    self.print_and_exit(msg, errors.NOT_AUTHORIZED)
def deactivate_exception_handler(self):
    # Let exceptions propagate (exception_handler re-raises) instead of
    # being printed/handled; paired with activate_exception_handler().
    self.__use_exception_handler = False
def activate_exception_handler(self):
    # Restore normal handling: exception_handler prints warnings/errors
    # instead of re-raising.
    self.__use_exception_handler = True
def get_ipset_entries_from_file(self, filename):
    """Read ipset entries from *filename*, one entry per line.

    Blank lines and lines starting with '#' or ';' (comments) are skipped;
    surrounding whitespace is stripped; duplicates are dropped while the
    first-seen order is preserved.

    Returns:
        list of unique entry strings.

    Raises:
        OSError: if the file cannot be opened.
    """
    entries = []
    seen = set()
    # 'with' guarantees the handle is closed even if iteration raises;
    # the original leaked the file object on error.  The original's
    # 'if not line: break' was dead code (iterating a text file never
    # yields an empty string before EOF) and has been dropped.
    with open(filename) as f:
        for line in f:
            line = line.strip()
            if (len(line) < 1) or (line[0] in ('#', ';')):
                continue
            if line not in seen:
                entries.append(line)
                seen.add(line)
    return entries
class TestGetGitLog(unittest.TestCase):
    # Tests for ciftify.config.get_git_log.
    # NOTE(review): the bare string expressions before each test look like
    # stripped '@patch(...)' mock decorators (each test takes a 'mock_proc'
    # argument) -- confirm against the original file; as written these
    # statements are no-ops and the tests would fail without the patch.
    path = os.path.dirname(__file__)
    ('subprocess.check_output')
    def test_doesnt_crash_when_git_command_fails(self, mock_proc):
        # A failing git subprocess should yield an empty/falsy log.
        mock_proc.side_effect = CalledProcessError(999, 'Non-zero return value')
        log = ciftify.config.get_git_log(self.path)
        assert (not log)
    ('subprocess.check_output')
    def test_adds_file_name_when_given(self, mock_proc):
        # The optional file name should appear as '--follow <name>' in the
        # git command line.
        fname = 'some-file-name'
        log = ciftify.config.get_git_log(self.path, fname)
        assert (mock_proc.call_count == 1)
        git_cmd = mock_proc.call_args_list[0][0][0]
        assert ('--follow {}'.format(fname) in git_cmd)
def cli(f, *, argv=None):
    # Derive a command-line interface from f's signature: each parameter
    # becomes a 'name=value' argument (de)serialized via stringly; doc
    # presets can pre-fill argument sets.  Returns f(**parsed_kwargs).
    import textwrap
    (progname, *args) = (argv or sys.argv)
    doc = stringly.util.DocString(f)
    serializers = {}
    booleans = set()
    mandatory = set()
    for param in inspect.signature(f).parameters.values():
        T = param.annotation
        # Fall back to the default value's type when unannotated.
        if ((T == param.empty) and (param.default != param.empty)):
            T = type(param.default)
        if (T == param.empty):
            raise Exception(f'cannot determine type for argument {param.name!r}')
        try:
            s = stringly.serializer.get(T)
        except Exception as e:
            raise Exception(f'stringly cannot deserialize argument {param.name!r} of type {T}') from e
        if (param.kind not in (param.POSITIONAL_OR_KEYWORD, param.KEYWORD_ONLY)):
            raise Exception(f'argument {param.name!r} is positional-only')
        # Mandatory = no signature default and no docstring default.
        if ((param.default is param.empty) and (param.name not in doc.defaults)):
            mandatory.add(param.name)
        if (T == bool):
            booleans.add(param.name)
        serializers[param.name] = s
    # Build the usage string: optional args in brackets, 'arg=A' hints.
    usage = [f'USAGE: {progname}']
    if doc.presets:
        usage.append(f"[{'|'.join(doc.presets)}]")
    usage.extend((('{}' if (arg in mandatory) else '[{}]').format(f'{arg}={arg[0].upper()}') for arg in serializers))
    usage = '\n'.join(textwrap.wrap(' '.join(usage), subsequent_indent=' '))
    if (('-h' in args) or ('--help' in args)):
        help = [usage]
        if doc.text:
            help.append('')
            help.append(inspect.cleandoc(doc.text))
        if doc.argdocs:
            help.append('')
            for (k, d) in doc.argdocs.items():
                if (k in serializers):
                    help.append((f'{k} (default: {doc.defaults[k]})' if (k in doc.defaults) else k))
                    help.extend(textwrap.wrap(doc.argdocs[k], initial_indent=' ', subsequent_indent=' '))
        sys.exit('\n'.join(help))
    # NOTE(review): this aliases doc.defaults -- the loop below mutates it.
    kwargs = doc.defaults
    if (args and (args[0] in doc.presets)):
        kwargs.update(doc.presets[args.pop(0)])
    for arg in args:
        (name, sep, value) = arg.partition('=')
        # Bare flag: booleans default to 'yes', everything else to None
        # (reported as "requires a value" below).
        kwargs[name] = (value if sep else ('yes' if (name in booleans) else None))
    for (name, s) in kwargs.items():
        if (name not in serializers):
            sys.exit(f'''{usage}
Error: invalid argument {name!r}''')
        if (s is None):
            sys.exit(f'''{usage}
Error: argument {name!r} requires a value''')
        try:
            value = serializers[name].loads(s)
        except Exception as e:
            sys.exit(f'''{usage}
Error: invalid value {s!r} for {name}: {e}''')
        # Replacing an existing key while iterating a dict is safe
        # (no insertion/resize happens).
        kwargs[name] = value
    for name in mandatory.difference(kwargs):
        sys.exit(f'''{usage}
Error: missing argument {name}''')
    return f(**kwargs)
def translate(text: str, auth_key: str, preserve_formatting: bool=False, target_lang: str='EN-GB') -> str:
    """Translate *text* via the DeepL API and return the translated string.

    A single input string yields a single TextResult (never a list), which
    the assert documents before unwrapping .text.
    """
    client = deepl.Translator(auth_key)
    translated = client.translate_text(text, target_lang=target_lang, preserve_formatting=preserve_formatting)
    assert not isinstance(translated, list)
    return translated.text
def main(page):
    # Flet demo page: a single Card containing one Column of ListTile
    # variants (plain, dense, selected, leading image/icon, trailing popup
    # menu, and a two-line tile with a subtitle).
    page.title = 'ListTile Examples'
    page.add(Card(content=Container(width=500, content=Column([ListTile(title=Text('One-line list tile')), ListTile(title=Text('One-line dense list tile'), dense=True), ListTile(leading=Icon(icons.SETTINGS), title=Text('One-line selected list tile'), selected=True), ListTile(leading=Image(src='/icons/icon-192.png', fit='contain'), title=Text('One-line with leading control')), ListTile(title=Text('One-line with trailing control'), trailing=PopupMenuButton(icon=icons.MORE_VERT, items=[PopupMenuItem(text='Item 1'), PopupMenuItem(text='Item 2')])), ListTile(leading=Icon(icons.ALBUM), title=Text('One-line with leading and trailing controls'), trailing=PopupMenuButton(icon=icons.MORE_VERT, items=[PopupMenuItem(text='Item 1'), PopupMenuItem(text='Item 2')])), ListTile(leading=Icon(icons.SNOOZE), title=Text('Two-line with leading and trailing controls'), subtitle=Text('Here is a second title.'), trailing=PopupMenuButton(icon=icons.MORE_VERT, items=[PopupMenuItem(text='Item 1'), PopupMenuItem(text='Item 2')]))], spacing=0), padding=padding.symmetric(vertical=10))))
def test_query_sobj_row_with_blob():
    # Query an Attachment sobject with a binary Body field: the result is
    # (metadata row, pre-blob HTTP response, blob HTTP response) and the
    # blob content must match the canned response body.
    testutil.add_response('login_response_200')
    testutil.add_response('api_version_response_200')
    testutil.add_response('query_attachments_before_blob_200')
    testutil.add_response('query_attachments_blob_200')
    client = testutil.get_client()
    query_result = client.sobjects(object_type='Attachment', id='00P0Y000000hUviUAE', binary_field='Body').query()
    # The mocked transport returns the blob body JSON-quoted.
    blob_body = ('"%s"' % testutil.mock_responses['query_attachments_blob_200']['body'])
    assert (query_result[0] == testutil.mock_responses['query_attachments_before_blob_200']['body'])
    assert (query_result[1].status == 200)
    assert (query_result[2].status == 200)
    assert (query_result[2].response.content.decode('utf-8') == blob_body)
def get_user_chroots_map(chroots, email_filter):
    """Group chroots by the copr admins responsible for them.

    Args:
        chroots: iterable of chroot objects, each with a .copr attribute.
        email_filter: optional collection of admin e-mail addresses; when
            truthy, admins whose .mail is not listed are skipped.

    Returns:
        dict mapping admin user -> list of that admin's chroots.
    """
    user_chroot_map = {}
    for chroot in chroots:
        for admin in coprs_logic.CoprPermissionsLogic.get_admins_for_copr(chroot.copr):
            if email_filter and (admin.mail not in email_filter):
                continue
            # setdefault replaces the membership-check-then-append dance.
            user_chroot_map.setdefault(admin, []).append(chroot)
    return user_chroot_map
def convert_rnn_inputs(model: Model, Xp: Padded, is_train: bool):
    # Adapt a thinc Padded batch into PyTorch call arguments for a wrapped
    # RNN layer, returning (forward args, backward converter).
    shim = cast(PyTorchShim, model.shims[0])
    size_at_t = Xp.size_at_t
    lengths = Xp.lengths
    indices = Xp.indices

    def convert_from_torch_backward(d_inputs: ArgsKwargs) -> Padded:
        # Map the torch gradient of the data tensor back onto the original
        # Padded layout (size_at_t/lengths/indices are unchanged).
        dX = torch2xp(d_inputs.args[0])
        return Padded(dX, size_at_t, lengths, indices)
    # Second positional arg None lets the RNN start from a zero hidden state.
    output = ArgsKwargs(args=(xp2torch(Xp.data, requires_grad=True, device=shim.device), None), kwargs={})
    return (output, convert_from_torch_backward)
def ui_path(*path, **kwargs):
    """Resolve a UI data path.

    Keyword args:
        relto: when given, resolve *path* relative to the directory
            containing this file; otherwise use the XDG data path.

    Raises:
        ValueError: if any keyword other than 'relto' is passed.
    """
    relto = kwargs.pop('relto', None)
    if kwargs:
        raise ValueError("Only 'relto' is allowed as a keyword argument")
    if relto is not None:
        base = os.path.dirname(relto)
        return os.path.abspath(os.path.join(base, *path))
    return xdg.get_data_path(*path)
def chain_without_block_validation(base_db, genesis_state):
    """Build a MiningChain that skips seal/PoW validation (NoProofConsensus
    applied to SpuriousDragonVM), initialized from explicit genesis params.

    NOTE(review): the original source contained ``'timestamp': }`` -- the
    value was lost (a syntax error as written).  0 is used here as a
    deterministic placeholder; restore the intended genesis timestamp
    from the upstream fixture.
    """
    klass = MiningChain.configure(__name__='TestChainWithoutBlockValidation', vm_configuration=ConsensusApplier(NoProofConsensus).amend_vm_configuration(((eth_constants.GENESIS_BLOCK_NUMBER, SpuriousDragonVM),)), chain_id=1337)
    genesis_params = {'block_number': eth_constants.GENESIS_BLOCK_NUMBER, 'difficulty': eth_constants.GENESIS_DIFFICULTY, 'gas_limit': 3141592, 'parent_hash': eth_constants.GENESIS_PARENT_HASH, 'coinbase': eth_constants.GENESIS_COINBASE, 'nonce': eth_constants.GENESIS_NONCE, 'mix_hash': eth_constants.GENESIS_MIX_HASH, 'extra_data': eth_constants.GENESIS_EXTRA_DATA, 'timestamp': 0}
    chain = klass.from_genesis(base_db, genesis_params, genesis_state)
    return chain
def subscriptgroup_handle(tokens):
    """Render a subscript group (1-3 slice arguments) as Python source.

    A single argument is returned verbatim; multiple arguments become a
    '_coconut.slice(...)' call, with empty tokens rendered as 'None'.
    """
    internal_assert((0 < len(tokens) <= 3), 'invalid slice args', tokens)
    rendered = [(tok if tok else 'None') for tok in tokens]
    if len(rendered) == 1:
        return rendered[0]
    return '_coconut.slice(' + ', '.join(rendered) + ')'
class TestFetchRunnerFunction(unittest.TestCase):
    # Tests for fetch_runner: it parses a runner spec string such as
    # 'SimpleJobRunner(nslots=8)' into a configured runner instance.
    def test_fetch_simple_job_runner(self):
        # Bare name -> default configuration (nslots defaults to 1).
        runner = fetch_runner('SimpleJobRunner')
        self.assertTrue(isinstance(runner, SimpleJobRunner))
        self.assertEqual(runner.nslots, 1)
    def test_fetch_simple_job_runner_with_nslots(self):
        runner = fetch_runner('SimpleJobRunner(nslots=8)')
        self.assertTrue(isinstance(runner, SimpleJobRunner))
        self.assertEqual(runner.nslots, 8)
    def test_fetch_simple_job_runner_with_join_logs(self):
        # Unrelated options must not disturb other defaults.
        runner = fetch_runner('SimpleJobRunner(join_logs=False)')
        self.assertTrue(isinstance(runner, SimpleJobRunner))
        self.assertEqual(runner.nslots, 1)
    def test_fetch_ge_job_runner(self):
        runner = fetch_runner('GEJobRunner')
        self.assertTrue(isinstance(runner, GEJobRunner))
    def test_fetch_ge_job_runner_with_extra_args(self):
        # Raw GE arguments are split into a list.
        runner = fetch_runner('GEJobRunner(-j y)')
        self.assertTrue(isinstance(runner, GEJobRunner))
        self.assertEqual(runner.ge_extra_args, ['-j', 'y'])
    def test_fetch_bad_runner_raises_exception(self):
        # Unknown runner names must raise.
        self.assertRaises(Exception, fetch_runner, 'SimpleRunner')
def as_geojson_stream(dicts, geometry_field='geometry', srid=4326):
    # Lazily yield text chunks of a GeoJSON FeatureCollection built from
    # *dicts*; each dict becomes one Feature, with the value under
    # *geometry_field* converted separately by GeoJSONConvertor.
    to_geojson = GeoJSONConvertor(srid)
    header = {'type': 'FeatureCollection', 'crs': {'type': 'name', 'properties': {'name': 'EPSG:{}'.format(srid)}}}
    # Drop the header's closing '}' so the features array can be appended.
    (yield json.dumps(header)[:(- 1)])
    (yield ', "features": [')
    for (n, dictionary) in enumerate(dicts):
        # Comma-separate all features after the first.
        (yield (',\n' if (n > 0) else '\n'))
        # NOTE: mutates the caller's dict -- geometry is removed from the
        # properties before serialization.
        geometry = dictionary.pop(geometry_field, None)
        feature = {'type': 'Feature', 'properties': dictionary}
        # Strip the feature's trailing newline+'}' so the geometry member
        # can be spliced in before re-closing the object.
        (yield json.dumps(feature, indent=2)[:(- 2)])
        (yield ',\n "geometry": ')
        (yield to_geojson(geometry))
        (yield '\n}')
    (yield '\n]}')
class Plot2dPane(TraitsTaskPane):
    # Chaco 2D plot pane that mirrors the currently selected IPlottable2d
    # model's data and labels into a Plot instance.
    # NOTE(review): the bare expressions ('x_data,y_data'),
    # ('x_label,y_label'), ('active_model') and _trait_change('models[]')
    # below look like stripped '@observe(...)' / '@on_trait_change(...)'
    # decorators -- confirm against the original file; as written they are
    # no-ops and the update methods would never fire.
    id = 'example.attractors.plot_2d_pane'
    name = 'Plot 2D Pane'
    active_model = Instance(IPlottable2d)
    models = List(IPlottable2d)
    # Properties delegate to the active model (with safe fallbacks below).
    plot_type = Property(Str, observe='active_model.plot_type')
    title = Property(Str, observe='active_model.name')
    x_data = Property(observe='active_model.x_data')
    y_data = Property(observe='active_model.y_data')
    x_label = Property(Str, observe='active_model.x_label')
    y_label = Property(Str, observe='active_model.y_label')
    plot = Instance(Plot)
    def _plot_default(self):
        # Lazily build the Plot from the current model's data and labels.
        plot = Plot(ArrayPlotData(x=self.x_data, y=self.y_data))
        plot.x_axis.title = self.x_label
        plot.y_axis.title = self.y_label
        plot.plot(('x', 'y'), type=self.plot_type, name=self.title, marker='pixel', color='blue')
        return plot
    ('x_data,y_data')
    def _update_plot_data(self, event):
        # Push changed x or y arrays into the plot's data source.
        if (event.name == 'x_data'):
            self.plot.data.set_data('x', event.new)
        else:
            self.plot.data.set_data('y', event.new)
        self.plot.invalidate_and_redraw()
    ('x_label,y_label')
    def _update_axis_label(self, event):
        # Keep axis titles in sync with the model's labels.
        if (event.name == 'x_label'):
            self.plot.x_axis.title = event.new
        else:
            self.plot.y_axis.title = event.new
        self.plot.invalidate_and_redraw()
    ('active_model')
    def _update_plot_new_model(self, event):
        # Swap the rendered curve when the selected model changes.
        if event.old:
            self.plot.delplot(event.old.name)
        self.plot.data.set_data('x', event.new.x_data)
        self.plot.data.set_data('y', event.new.y_data)
        self.plot.plot(('x', 'y'), type=self.plot_type, name=self.title, marker='pixel', color='blue')
        self.plot.invalidate_and_redraw()
    view = View(HGroup(Label('Model: '), Item('active_model', editor=EnumEditor(name='_enum_map')), show_labels=False), UItem('plot', editor=ComponentEditor()), resizable=True)
    # Maps model instance -> display name for the EnumEditor above.
    _enum_map = Dict(IPlottable2d, Str)
    def _get_plot_type(self):
        return (self.active_model.plot_type if self.active_model else 'line')
    def _get_title(self):
        return (self.active_model.name if self.active_model else '')
    def _get_x_data(self):
        return (self.active_model.x_data if self.active_model else [])
    def _get_y_data(self):
        return (self.active_model.y_data if self.active_model else [])
    def _get_x_label(self):
        return (self.active_model.x_label if self.active_model else '')
    def _get_y_label(self):
        return (self.active_model.y_label if self.active_model else '')
    _trait_change('models[]')
    def _update_models(self):
        # Keep the selection valid and the enum map fresh as models change.
        if (self.active_model not in self.models):
            self.active_model = (self.models[0] if self.models else None)
        self._enum_map = dict(((model, model.name) for model in self.models))
class PluginManagerTestCase(unittest.TestCase):
    # Tests for PluginManager: lookup, iteration, lifecycle (start/stop),
    # error propagation, and the deprecated include/exclude wildcards.
    def test_get_plugin(self):
        simple_plugin = SimplePlugin()
        plugin_manager = PluginManager(plugins=[simple_plugin])
        plugin = plugin_manager.get_plugin(simple_plugin.id)
        self.assertEqual(plugin, simple_plugin)
        # Unknown ids resolve to None rather than raising.
        self.assertEqual(None, plugin_manager.get_plugin('bogus'))
    def test_iteration_over_plugins(self):
        # Iterating the manager yields plugins in registration order.
        simple_plugin = SimplePlugin()
        bad_plugin = BadPlugin()
        plugin_manager = PluginManager(plugins=[simple_plugin, bad_plugin])
        plugins = []
        for plugin in plugin_manager:
            plugins.append(plugin)
        self.assertEqual([simple_plugin, bad_plugin], plugins)
    def test_start_and_stop(self):
        simple_plugin = SimplePlugin()
        plugin_manager = PluginManager(plugins=[simple_plugin])
        plugin_manager.start()
        self.assertEqual(True, simple_plugin.started)
        plugin_manager.stop()
        self.assertEqual(True, simple_plugin.stopped)
    def test_start_and_stop_errors(self):
        # BadPlugin raises ZeroDivisionError in start/stop; the manager
        # propagates it.  Unknown plugin ids raise ValueError.
        simple_plugin = SimplePlugin()
        bad_plugin = BadPlugin()
        plugin_manager = PluginManager(plugins=[simple_plugin, bad_plugin])
        with self.assertRaises(ZeroDivisionError):
            plugin_manager.start()
        with self.assertRaises(ZeroDivisionError):
            plugin_manager.stop()
        with self.assertRaises(ValueError):
            plugin_manager.start_plugin(plugin_id='bogus')
        with self.assertRaises(ValueError):
            plugin_manager.stop_plugin(plugin_id='bogus')
    def test_only_include_plugins_whose_ids_are_in_the_include_list(self):
        include = ['foo', 'bar']
        # include/exclude are deprecated, hence the warning assertion.
        with self.assertWarns(DeprecationWarning):
            plugin_manager = PluginManager(include=include, plugins=[SimplePlugin(id='foo'), SimplePlugin(id='bar'), SimplePlugin(id='baz')])
        expected = ['foo', 'bar']
        self._test_start_and_stop(plugin_manager, expected)
    def test_only_include_plugins_matching_a_wildcard_in_the_include_list(self):
        include = ['b*']
        with self.assertWarns(DeprecationWarning):
            plugin_manager = PluginManager(include=include, plugins=[SimplePlugin(id='foo'), SimplePlugin(id='bar'), SimplePlugin(id='baz')])
        expected = ['bar', 'baz']
        self._test_start_and_stop(plugin_manager, expected)
    def test_ignore_plugins_whose_ids_are_in_the_exclude_list(self):
        exclude = ['foo', 'baz']
        with self.assertWarns(DeprecationWarning):
            plugin_manager = PluginManager(exclude=exclude, plugins=[SimplePlugin(id='foo'), SimplePlugin(id='bar'), SimplePlugin(id='baz')])
        expected = ['bar']
        self._test_start_and_stop(plugin_manager, expected)
    def test_ignore_plugins_matching_a_wildcard_in_the_exclude_list(self):
        exclude = ['b*']
        with self.assertWarns(DeprecationWarning):
            plugin_manager = PluginManager(exclude=exclude, plugins=[SimplePlugin(id='foo'), SimplePlugin(id='bar'), SimplePlugin(id='baz')])
        expected = ['foo']
        self._test_start_and_stop(plugin_manager, expected)
    def _test_start_and_stop(self, plugin_manager, expected):
        # Shared helper: the manager must contain exactly *expected* ids
        # and start()/stop() must reach every one of them.
        self.assertEqual(expected, [plugin.id for plugin in plugin_manager])
        plugin_manager.start()
        for id in expected:
            plugin = plugin_manager.get_plugin(id)
            self.assertNotEqual(None, plugin)
            self.assertEqual(True, plugin.started)
        plugin_manager.stop()
        for id in expected:
            plugin = plugin_manager.get_plugin(id)
            self.assertNotEqual(None, plugin)
            self.assertEqual(True, plugin.stopped)
def validate_name_email(name_email):
    """Validate a 'name@email' pair; return True only when both parts are
    well-formed and exactly one separator is present.

    NOTE(review): the original used '#' as the separator, called
    ``lstrip('')`` (a no-op) and tested ``email.count('') == 1`` -- which
    can never hold for a non-empty email -- so it always returned False.
    These literals look like extraction damage of '@'; reconstructed
    accordingly.  Confirm against the upstream source.
    """
    if '@' not in name_email:
        return False
    if name_email.count('@') != 1:
        return False
    name, email = name_email.split('@', 1)
    if not re.match(r'^[\w.-]+$', name):
        return False
    if not re.match(r'^[-\w.]+$', email):
        return False
    return True
class AWS(CloudBase):
    """AWS helper built on boto3: validates credentials via STS and manages
    S3 buckets (create / version / delete) and object existence checks."""

    def __init__(self, aws_access_key_id: Optional[str]=None, aws_secret_access_key: Optional[str]=None, aws_session_token: Optional[str]=None, aws_region: Optional[str]=None) -> None:
        """Create STS and S3 clients, falling back to the standard AWS
        environment variables for any credential not passed explicitly."""
        aws_access_key_id = (aws_access_key_id or os.environ.get('AWS_ACCESS_KEY_ID'))
        aws_secret_access_key = (aws_secret_access_key or os.environ.get('AWS_SECRET_ACCESS_KEY'))
        aws_session_token = (aws_session_token or os.environ.get('AWS_SESSION_TOKEN'))
        self.aws_region: Optional[str] = (aws_region or os.environ.get('AWS_REGION'))
        self.log: logging.Logger = logging.getLogger(__name__)
        self.__account_id: Optional[str] = None
        try:
            self.sts: botocore.client.BaseClient = boto3.client('sts', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token)
            # NOTE(review): region_name uses the raw 'aws_region' argument,
            # not the env-fallback stored in self.aws_region -- confirm
            # whether that is intentional.
            self.s3_client: botocore.client.BaseClient = boto3.client('s3', aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=aws_region)
        except NoCredentialsError as error:
            self.log.error(f'''Error occurred in validating access and secret keys of the aws account.
Please verify if the correct access and secret key of root user are provided.
Access and secret key can be passed using:
1. Passing as variable to class object
2. Placing keys in ~/.aws/config
3. Placing keys in ~/.aws/credentials
4. As environment variables
Please refer to:
Following is the error:
{error}''')
        try:
            # A successful get_caller_identity proves the credentials work
            # and yields the account id used for ExpectedBucketOwner checks.
            self.log.info('Verifying AWS credentials.')
            response = self.sts.get_caller_identity()
            self.__account_id = response.get('Account', None)
        except NoCredentialsError as error:
            self.log.error(f"Couldn't validate the AWS credentials.{error}")

    @classmethod
    def cloud_type(cls) -> CloudPlatforms:
        # FIX: restored the missing @classmethod decorator -- the method
        # takes 'cls', uses no instance state, and decorators appear to
        # have been stripped elsewhere in this file.  Instance calls
        # (obj.cloud_type()) keep working.
        return CloudPlatforms.AWS

    def check_s3_buckets_exists(self, s3_bucket_name: str, bucket_version: bool=True) -> None:
        """Ensure the bucket exists: create it on a 404, raise
        AccessDeniedError on 403, S3BucketCreationError otherwise."""
        try:
            self.log.info(f'Checking if S3 bucket {s3_bucket_name} exists.')
            self.s3_client.head_bucket(Bucket=s3_bucket_name)
            self.log.info(f'S3 bucket {s3_bucket_name} already exists in the AWS account.')
        except ClientError as error:
            if (error.response['Error']['Code'] == '404'):
                self.log.info(f"S3 bucket {s3_bucket_name} deosn't exists in the AWS account.")
                self.create_s3_bucket(s3_bucket_name=s3_bucket_name, bucket_version=bucket_version)
            elif (error.response['Error']['Code'] == '403'):
                raise AccessDeniedError('Access denied') from error
            else:
                raise S3BucketCreationError(f"Couldn't create bucket {s3_bucket_name}") from error

    def create_s3_bucket(self, s3_bucket_name: str, bucket_version: bool=True) -> None:
        """Create the bucket in self.aws_region and optionally enable
        versioning; raises S3BucketCreationError on failure."""
        bucket_configuration = {'LocationConstraint': self.aws_region}
        try:
            self.log.info(f'Creating new S3 bucket {s3_bucket_name}')
            self.s3_client.create_bucket(Bucket=s3_bucket_name, CreateBucketConfiguration=bucket_configuration)
            self.log.info(f'Create S3 bucket {s3_bucket_name} operation was successful.')
        except ClientError as error:
            error_code = error.response.get('Error', {}).get('Code', None)
            raise S3BucketCreationError(f'Failed to create S3 bucket with error code {error_code}') from error
        if bucket_version:
            self.update_bucket_versioning(s3_bucket_name=s3_bucket_name)

    def update_bucket_versioning(self, s3_bucket_name: str, versioning_status: Optional[str]='Enabled') -> None:
        """Set the bucket's versioning status; maps 404 -> S3BucketDoesntExist,
        403 -> AccessDeniedError, other errors -> S3BucketVersioningFailedError."""
        versioning_configuration = {'Status': versioning_status}
        try:
            self.log.info('Creating bucket versioning.')
            self.s3_client.put_bucket_versioning(Bucket=s3_bucket_name, VersioningConfiguration=versioning_configuration)
            self.log.info(f'Bucket {s3_bucket_name} is enabled with versioning.')
        except ClientError as error:
            if (error.response['Error']['Code'] == '404'):
                raise S3BucketDoesntExist(f"S3 bucket {s3_bucket_name} doesn't exist") from error
            elif (error.response['Error']['Code'] == '403'):
                raise AccessDeniedError('Access denied') from error
            else:
                raise S3BucketVersioningFailedError(f'Error in versioning S3 bucket {s3_bucket_name}') from error

    def delete_s3_bucket(self, s3_bucket_name: str) -> None:
        """Delete the bucket; raises S3BucketDeleteError on any ClientError
        (including non-empty buckets)."""
        try:
            self.log.info(f'Deleting S3 bucket {s3_bucket_name}')
            self.s3_client.delete_bucket(Bucket=s3_bucket_name)
            self.log.info(f'Delete S3 bucket {s3_bucket_name} operation was successful.')
        except ClientError as error:
            raise S3BucketDeleteError(f'Error in deleting bucket {s3_bucket_name}') from error

    def check_s3_object_exists(self, s3_bucket_name: str, key_name: str, account_id: Optional[str]='') -> bool:
        """Return True if *key_name* exists in the bucket (owner-checked via
        ExpectedBucketOwner); log the failure reason and return False otherwise."""
        account_id = (account_id or self.__account_id)
        try:
            self.log.info(f'Checking for file {key_name} in bucket {s3_bucket_name}')
            self.s3_client.head_object(Bucket=s3_bucket_name, Key=key_name, ExpectedBucketOwner=account_id)
            self.log.info(f'File {key_name} exists.')
            return True
        except ClientError as error:
            if (error.response['Error']['Code'] == '404'):
                self.log.error(f"Couldn't find file {key_name} in bucket {s3_bucket_name}")
            elif (error.response['Error']['Code'] == '403'):
                self.log.error(f'Access denied: failed to access bucket {s3_bucket_name}')
            else:
                self.log.error(f'Failed to find file {key_name} in bucket {s3_bucket_name}')
            self.log.info(f"File {key_name} doesn't exist.")
            return False
def main():
    # Entry point for the Arty A7-35 gateware: 'load' flashes a built
    # bitstream, 'sim' runs a simulation, otherwise build the design.
    build_dir = 'gateware'
    platform = arty.Platform(variant='a7-35', toolchain='vivado')
    from litex.build.generic_platform import Pins, IOStandard
    # Extra user pin 'do' on B7.
    platform.add_extension([('do', 0, Pins('B7'), IOStandard('LVCMOS33'))])
    if ('load' in sys.argv[1:]):
        prog = platform.create_programmer()
        prog.load_bitstream((build_dir + '/top.bit'))
        exit()
    if ('sim' in sys.argv[1:]):
        # NOTE(review): the bare '.0' literals below look like numeric
        # constants lost in extraction -- '.0 / .0' divides 0.0 by 0.0 and
        # raises ZeroDivisionError at runtime.  Recover the original
        # frequency/period values before relying on the sim path.
        ring = RingSerialCtrl(4, .0)
        run_simulation(ring, test(), clocks={'sys': (.0 / .0)}, vcd_name='sim.vcd')
        exit()
    design = Tuto(platform)
    platform.build(design, build_dir=build_dir)
def test_receiver_contract(accounts, tester):
    # tx.receiver should be an EthAddress equal to the contract, both for a
    # normal contract call and for a raw transfer that carries call data.
    tx = tester.doNothing({'from': accounts[0]})
    assert (type(tx.receiver) is EthAddress)
    assert (tester == tx.receiver)
    # Raw transfer with encoded calldata targeting the same contract.
    data = tester.revertStrings.encode_input(5)
    tx = accounts[0].transfer(tester.address, 0, data=data)
    assert (type(tx.receiver) is EthAddress)
    assert (tester == tx.receiver)
class HopperLevelRef():
    # Read/write helpers for the feeder's hopper-level reference table.
    # NOTE(review): both methods take 'cls' but carry no decorator -- they
    # look like '@classmethod's whose decorators were stripped from this
    # file; confirm against the original source.
    async def get(cls, device_id):
        # Estimate the current hopper level (as a percent of capacity) by
        # subtracting food dispensed since the last manual reference.
        max_hopper_cups = 20.5  # physical hopper capacity in cups
        tbsp_per_cup = 16
        device_results = (await KronosDevices.get(device_hid=device_id))
        device = device_results[0]
        # Most recent manual level reading for this device.
        latest_ref_query = hopper_level_references.select().order_by(desc(hopper_level_references.c.timestamp)).where((hopper_level_references.c.device_hid == device_id))
        latest_ref = (await db.fetch_one(latest_ref_query))
        if (not latest_ref):
            raise HTTPException(404, detail=f'Hopper level not set for {device_id}')
        logger.debug('Hopper level last set to %d on %d', latest_ref.level, latest_ref.timestamp)
        # Total grams dispensed since the reference was taken.
        dispensed_grams_query = select([func.sum(feeding_event.c.grams_expected)]).select_from(feeding_event).where((feeding_event.c.start_time >= latest_ref.timestamp)).where((feeding_event.c.device_hid == device_id))
        dispensed_grams = (await db.fetch_val(dispensed_grams_query))
        if (not dispensed_grams):
            dispensed_grams = 0
        logger.debug('%d grams of food have been dispensed since %d', dispensed_grams, latest_ref.timestamp)
        # Grams -> cups requires the active recipe's density (g per tbsp).
        recipe_query = recipes.select().where((recipes.c.id == device.currentRecipe))
        recipe = (await db.fetch_one(recipe_query))
        if (not recipe):
            raise HTTPException(400, detail='No recipe set for device, cannot calculate hopper level!')
        dispensed_cups = ((dispensed_grams / recipe.g_per_tbsp) / tbsp_per_cup)
        logger.debug('Using recipeId (%d), at %d g/tbsp, that is %f cups', recipe.id, recipe.g_per_tbsp, dispensed_cups)
        ref_level_cups = ((latest_ref.level / 100) * max_hopper_cups)
        current_cups = (ref_level_cups - dispensed_cups)
        logger.debug('%f cups minus %f cups equals %f cups remaining', ref_level_cups, dispensed_cups, current_cups)
        return ((current_cups / max_hopper_cups) * 100)
    async def set(cls, *, device_id: str, level: int):
        # Record a new manual hopper-level reference (percent) for a device.
        target_device = (await KronosDevices.get(device_hid=device_id))
        query = hopper_level_references.insert().values(device_hid=target_device[0].hid, timestamp=get_current_timestamp(), level=level)
        return (await db.execute(query))
def find_notifiers(notifier_name):
    """Locate the notifier class for *notifier_name*.

    Imports google.cloud.forseti.notifier.notifiers.<notifier_name> and
    returns the first BaseNotification subclass found in it; returns None
    when the module cannot be imported or contains no such class.
    """
    module_path = 'google.cloud.forseti.notifier.notifiers.{0}'.format(notifier_name)
    try:
        module = importlib.import_module(module_path)
    except ImportError:
        LOGGER.exception("Can't import notifier %s", notifier_name)
        return None
    for attr_name in dir(module):
        candidate = getattr(module, attr_name)
        if (inspect.isclass(candidate) and issubclass(candidate, BaseNotification) and (candidate is not BaseNotification)):
            return candidate
class MainHandler(RequestHandler):
def _guess_mime_type(self, fname):
guess = mimetypes.guess_type(fname)[0]
if guess:
self.set_header('Content-Type', guess)
def get(self, full_path):
    """Dispatch an incoming GET to the asset/info/cmd sub-handlers based on
    the first path segment after the mount point."""
    logger.debug('Incoming request at %s' % full_path)
    # Drop empty segments and the leading mount segment.
    segments = [seg for seg in full_path.split('/') if seg][1:]
    if not segments:
        self.write('Root url for flexx, missing selector:assets, assetview, data, info or cmd')
        return
    selector = segments[0]
    rest = '/'.join(segments[1:])
    dispatch = {
        'assets': self._get_asset,
        'assetview': self._get_asset,
        'data': self._get_asset,
        'info': self._get_info,
        'cmd': self._get_cmd,
    }
    handler = dispatch.get(selector)
    if handler is None:
        self.write('Invalid url path "%s".' % full_path)
    else:
        handler(selector, rest)
def _get_asset(self, selector, path):
(session_id, _, filename) = path.partition('/')
session_id = ('' if (session_id == 'shared') else session_id)
asset_provider = assets
if (session_id and (selector != 'data')):
self.write(('Only supports shared assets, not ' % filename))
elif session_id:
asset_provider = manager.get_session_by_id(session_id)
if (asset_provider is None):
self.write(('Invalid session %r' % session_id))
if (not filename):
self.write(('Root dir for %s/%s' % (selector, path)))
if (selector == 'assets'):
if (('.js:' in filename) or ('.css:' in filename) or (filename[0] == ':')):
(fname, where) = filename.split(':')[:2]
return self.redirect(('/flexx/assetview/%s/%s#L%s' % ((session_id or 'shared'), fname.replace('/:', ':'), where)))
try:
res = asset_provider.get_asset(filename)
except KeyError:
self.write(('Could not load asset %r' % filename))
else:
self._guess_mime_type(filename)
self.write(res.to_string())
elif (selector == 'assetview'):
try:
res = asset_provider.get_asset(filename)
except KeyError:
self.write(('Could not load asset %r' % filename))
else:
res = res.to_string()
style = 'pre {display:block; width: 100%; padding:0; margin:0;} a {text-decoration: none; color: #000; background: #ddd;} :target {background:#ada;} '
lines = [('<html><head><style>%s</style></head><body>' % style)]
for (i, line) in enumerate(res.splitlines()):
table = {ord('&'): '&', ord('<'): '<', ord('>'): '>'}
line = line.translate(table).replace('\t', ' ')
lines.append(('<pre id="L%i"><a href="#L%i">%s</a> %s</pre>' % ((i + 1), (i + 1), str((i + 1)).rjust(4).replace(' ', ' '), line)))
lines.append('</body></html>')
self.write('\n'.join(lines))
elif (selector == 'data'):
res = asset_provider.get_data(filename)
if (res is None):
return self.send_error(404)
else:
self._guess_mime_type(filename)
self.write(res)
else:
raise RuntimeError(('Invalid asset type %r' % selector))
def _get_info(self, selector, info):
runtime = (time.time() - IMPORT_TIME)
napps = len(manager.get_app_names())
nsessions = sum([len(manager.get_connections(x)) for x in manager.get_app_names()])
info = []
info.append(('Runtime: %1.1f s' % runtime))
info.append(('Number of apps: %i' % napps))
info.append(('Number of sessions: %i' % nsessions))
info = '\n'.join([('<li>%s</li>' % i) for i in info])
self.write((('<ul>' + info) + '</ul>'))
def _get_cmd(self, selector, path):
if (not self.request.host.startswith('localhost:')):
self.write('403')
return
if (not path):
self.write('No command given')
elif (path == 'info'):
info = dict(address=self.application._flexx_serving, app_names=manager.get_app_names(), nsessions=sum([len(manager.get_connections(x)) for x in manager.get_app_names()]))
self.write(json.dumps(info))
elif (path == 'stop'):
asyncio.get_event_loop().stop()
self.write('Stopping event loop.')
else:
self.write(('unknown command %r' % path)) |
class SelectAttr(bh_plugin.BracketPluginCommand):
    """BracketHighlighter plugin command that moves the selection to the
    previous or next attribute inside the current HTML/XML tag.
    """

    def run(self, edit, name, direction='right'):
        """Select the neighboring attribute in the tag opened by ``self.left``.

        edit: Sublime edit token (unused directly here).
        name: plugin name (unused).
        direction: 'left' selects the previous attribute (wrapping to the
        last); anything else selects the next (wrapping to the first).
        """
        # A bracket of size <= 1 cannot contain a tag with attributes.
        if (self.left.size() <= 1):
            return
        tag_settings = sublime.load_settings('bh_tag.sublime-settings')
        # Resolve which tag "mode" applies to this view, then pull the
        # mode-specific tag-name and attribute regex patterns.
        tag_mode = tags.get_tag_mode(self.view, tag_settings.get('tag_mode', []))
        tag_name = tag_settings.get('tag_name')[tag_mode]
        attr_name = tag_settings.get('attributes')[tag_mode]
        # Find the tag name, then scan forward for attribute regions.
        tname = self.view.find(tag_name, self.left.begin)
        current_region = self.selection[0]
        current_pt = self.selection[0].b
        region = self.view.find(attr_name, tname.b)
        selection = self.selection
        if (direction == 'left'):
            # Walk attributes left-to-right, keeping in `selection` the last
            # candidate strictly before the current one; `last` tracks the
            # final attribute so we can wrap around when the current
            # selection is already at (or before) the first attribute.
            last = None
            if ((region is not None) and (current_pt <= region.b) and (region.b < self.left.end)):
                last = region
            while ((region is not None) and (region.b < self.left.end)):
                if ((current_pt > region.b) or ((current_pt <= region.b) and (current_region.a >= region.a) and (not ((region.a == current_region.a) and (region.b == current_region.b))))):
                    selection = [region]
                    last = None
                elif (last is not None):
                    last = region
                region = self.view.find(attr_name, region.b)
            if (last is not None):
                selection = [last]
        else:
            # Walk attributes left-to-right and select the first one past the
            # current selection; `first` remembers the first attribute so we
            # can wrap around when nothing follows the current one.
            first = None
            if ((region is not None) and (region.b < self.left.end)):
                first = region
            while ((region is not None) and (region.b < self.left.end)):
                if ((current_pt < region.b) or ((current_pt <= region.b) and (current_region.a >= region.a) and (not ((region.a == current_region.a) and (region.b == current_region.b))))):
                    selection = [region]
                    first = None
                    break
                region = self.view.find(attr_name, region.b)
            if (first is not None):
                selection = [first]
        self.selection = selection
def validate(self, method=None):
    """Pick List validation hook for Unicommerce-linked Sales Orders.

    Skips unless the Unicommerce integration is enabled and the first
    location's Sales Order carries a ``unicommerce_order_code``. Validates
    picked quantities and keeps the per-Sales-Order ``order_details`` rows
    and their ``pick_status`` in sync with the item locations.
    """
    settings = frappe.get_cached_doc(SETTINGS_DOCTYPE)
    if not settings.is_enabled():
        return
    locations = self.get('locations')
    if not locations:
        # BUG FIX: the original indexed locations[0] unconditionally, which
        # raised IndexError for a Pick List without item locations.
        return
    sales_order = locations[0].sales_order
    unicommerce_order_code = frappe.db.get_value('Sales Order', sales_order, 'unicommerce_order_code')
    if not unicommerce_order_code:
        # Not a Unicommerce order: nothing to validate here.
        return
    # Validate picked quantities per row.
    for pl in locations:
        if pl.picked_qty and float(pl.picked_qty) > 0:
            if pl.picked_qty > pl.qty:
                # NOTE(review): this assignment has no lasting effect — the
                # throw() on the next line aborts; kept to preserve behavior.
                pl.picked_qty = pl.qty
                frappe.throw(_('Row {0} Picked Qty cannot be more than Sales Order Qty').format(pl.idx))
        if pl.picked_qty == 0 and pl.docstatus == 1:
            frappe.throw(_('You have not picked {0} in row {1} . Pick the item to proceed!').format(pl.item_code, pl.idx))
    # Collect the distinct Sales Orders referenced by the item locations,
    # preserving first-seen order.
    item_so_list = [d.sales_order for d in locations]
    unique_so_list = []
    for so in item_so_list:
        if so not in unique_so_list:
            unique_so_list.append(so)
    so_list = [d.sales_order for d in self.get('order_details')]
    for so in unique_so_list:
        if so not in so_list:
            # Ensure every referenced Sales Order has an order_details row.
            pl_so_child = self.append('order_details', {})
            pl_so_child.sales_order = so
        # Tally pick progress for this Sales Order's items.
        total_item_count = 0
        fully_picked_item_count = 0
        partial_picked_item_count = 0
        for item in locations:
            if item.sales_order == so:
                total_item_count = total_item_count + 1
                if item.picked_qty == item.qty:
                    fully_picked_item_count = fully_picked_item_count + 1
                elif int(item.picked_qty) > 0:
                    partial_picked_item_count = partial_picked_item_count + 1
        # Update pick_status; note that "some fully picked, none partial"
        # intentionally falls through without touching the status.
        if fully_picked_item_count == total_item_count:
            for x in self.get('order_details'):
                if x.sales_order == so:
                    x.pick_status = 'Fully Picked'
        elif fully_picked_item_count == 0 and partial_picked_item_count == 0:
            for x in self.get('order_details'):
                if x.sales_order == so:
                    x.pick_status = ''
        elif int(partial_picked_item_count) > 0:
            for x in self.get('order_details'):
                if x.sales_order == so:
                    x.pick_status = 'Partially Picked'
class TestDeleteFailsWhenDirectoryIsNotAnAEAProject():
    """`aea delete` must fail when the target directory is not an AEA project."""

    def setup_class(cls):
        """Create a bare (non-AEA) directory and invoke `delete` on it once."""
        cls.runner = CliRunner()
        cls.agent_name = 'myagent'
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        os.chdir(cls.t)
        Path(cls.t, cls.agent_name).mkdir()
        cls.result = cls.runner.invoke(
            cli,
            [*CLI_LOG_OPTION, 'delete', cls.agent_name],
            standalone_mode=False,
        )

    def test_exit_code_equal_to_1(self):
        """The CLI exits with status 1."""
        assert self.result.exit_code == 1

    def test_log_error_message(self):
        """The raised exception carries the expected error message."""
        expected = 'The name provided is not a path to an AEA project.'
        assert self.result.exception.message == expected

    def teardown_class(cls):
        """Restore the working directory and best-effort remove the temp dir."""
        os.chdir(cls.cwd)
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
class DollyV2Generator(DefaultGenerator):
    """Generator preconfigured for the Dolly v2 prompt format."""

    def __init__(self, tokenizer: Tokenizer, causal_lm: GPTNeoXCausalLM):
        """Set up sampling defaults using Dolly's end-of-response token."""
        eos_id = tokenizer.tokenizer.token_to_id(END_KEY)
        config = SampleGeneratorConfig(
            eos_id=eos_id, max_generated_pieces=256, top_p=0.92)
        super().__init__(tokenizer, causal_lm, default_config=config)

    def preprocess_prompts(self, prompts: List[str]) -> List[InputChunks]:
        """Wrap each prompt in Dolly's intro/instruction/response template."""
        wrapped = []
        for prompt in prompts:
            chunks = InputChunks([
                TextChunk(INTRO_BLURB),
                SpecialPieceChunk(INSTRUCTION_KEY, before='\n\n', after='\n'),
                TextChunk(prompt),
                SpecialPieceChunk(RESPONSE_KEY, before='\n\n', after='\n'),
            ])
            wrapped.append(chunks)
        return wrapped
class BeatBarWidget(QWidget):
    """Four-box beat indicator; the box matching the current beat is filled."""

    def __init__(self, parent):
        super().__init__(parent)
        self.setMinimumSize(100, 12)
        # Current beat; 0 means no box is highlighted.
        self.beat = 0

    def setBeat(self, beat):
        """Update the highlighted beat, repainting only on change."""
        if beat == self.beat:
            return
        self.beat = beat
        self.update()

    def paintEvent(self, e):
        """Draw four outlined boxes and fill the one for the current beat."""
        painter = QPainter()
        painter.begin(self)
        painter.setBrush(Qt.SolidPattern)
        painter.setPen(Qt.yellow)
        gap = 6
        # Split the width (minus gaps) evenly across the four boxes.
        width = ((self.size().width() - 1) - 3 * gap) // 4
        height = self.size().height() - 1
        for idx in range(4):
            left = idx * (width + gap)
            painter.drawRect(left, 0, width, height)
            if idx == self.beat - 1:
                painter.fillRect(left, 0, width, height, Qt.yellow)
        painter.end()
def test_intrinsics():
    """Hovering over an intrinsic must return its markdown documentation."""
    hover_dir = test_dir / 'hover'
    request = write_rpc_request(1, 'initialize', {'rootPath': str(hover_dir)})
    request += hover_req(hover_dir / 'functions.f90', 39, 23)
    errcode, results = run_request(request, fortls_args=['-n', '1'])
    assert errcode == 0
    # Compare against the bundled intrinsic-procedures markdown for SIZE.
    intrinsics_path = (test_dir.parent.parent / 'fortls'
                       / 'intrinsic.procedures.markdown.json')
    with open(intrinsics_path) as f:
        intrinsics = json.load(f)
    validate_hover(results, ['\n-----\n' + intrinsics['SIZE']])
class Markup(object):
    """Thin facade over a pluggable markup backend loaded via stevedore."""

    def __init__(self, name):
        """Instantiate the named plugin from the rmtoo.output.markup namespace."""
        manager = extension.ExtensionManager(
            namespace='rmtoo.output.markup', invoke_on_load=False)
        self.__plugin_manager = manager
        self.__impl = manager[name].plugin()

    def replace(self, raw_input_str):
        """Delegate inline replacement to the backend implementation."""
        return self.__impl.replace(raw_input_str)

    def replace_par(self, raw_input_str):
        """Delegate paragraph replacement to the backend implementation."""
        return self.__impl.replace_par(raw_input_str)
def analyze_alignment_file_querysorted(bam, options):
    """Collect SV signatures from a query-sorted alignment file.

    Iterates reads grouped by query name, skipping reads whose primary
    alignment is missing, multiple, unmapped, or below the mapq cutoff.
    Returns (sv_signatures, translocation_signatures_all_bnds).
    """
    iterator = bam_iterator(bam)
    signatures = []
    translocations = []
    processed = 0
    while True:
        try:
            primary, supplementary, _secondary = next(iterator)
            # Require exactly one mapped, good-quality primary alignment.
            if (len(primary) != 1 or primary[0].is_unmapped
                    or primary[0].mapping_quality < options.min_mapq):
                continue
            processed += 1
            if processed % 10000 == 0:
                logging.info('Processed read {0}'.format(processed))
            good_suppl = [aln for aln in supplementary
                          if not aln.is_unmapped
                          and aln.mapping_quality >= options.min_mapq]
            # Indels from the primary alignment.
            sigs, trans = analyze_alignment_indel(
                primary[0], bam, primary[0].query_name, options)
            signatures.extend(sigs)
            translocations.extend(trans)
            # Indels from each good supplementary alignment.
            for aln in good_suppl:
                sigs, trans = analyze_alignment_indel(
                    aln, bam, aln.query_name, options)
                signatures.extend(sigs)
                translocations.extend(trans)
            # Signatures spanning the split-read segments.
            sigs, trans = analyze_read_segments(
                primary[0], good_suppl, bam, options)
            signatures.extend(sigs)
            translocations.extend(trans)
        except StopIteration:
            break
        except KeyboardInterrupt:
            logging.warning('Execution interrupted by user. Stop detection and continue with next step..')
            break
    return (signatures, translocations)
# NOTE(review): the decorator head was stripped in extraction (the line began
# with ".parametrize("); restored as @pytest.mark.parametrize.
@pytest.mark.parametrize(
    'calc_cls, kwargs_',
    [
        pytest.param(PySCF, {'basis': '321g'}, marks=using('pyscf')),
        pytest.param(Gaussian16, {'route': 'HF/3-21G'}, marks=using('gaussian16')),
        pytest.param(Turbomole,
                     {'control_path': './hf_abstr_control_path', 'pal': 1},
                     marks=using('turbomole')),
    ],
)
def test_hf_abstraction_dvv(calc_cls, kwargs_, this_dir):
    """Downhill damped-velocity-Verlet IRC of an HF-abstraction, checked by
    three final bond lengths (in Angstrom)."""
    geom = geom_loader('lib:hfabstraction_hf321g_displ_forward.xyz')
    calc_kwargs = {'pal': 2}
    calc_kwargs.update(kwargs_)
    if 'control_path' in calc_kwargs:
        # Turbomole needs an absolute control path.
        calc_kwargs['control_path'] = this_dir / calc_kwargs['control_path']
    print('Using', calc_cls)
    calc = calc_cls(**calc_kwargs)
    geom.set_calculator(calc)
    irc_kwargs = {'dt0': 0.5, 'v0': 0.04, 'downhill': True, 'max_cycles': 150}
    dvv = DampedVelocityVerlet(geom, **irc_kwargs)
    dvv.run()
    c3d = geom.coords3d * BOHR2ANG

    def bond(i, j):
        # Distance between atoms i and j in Angstrom.
        return np.linalg.norm(c3d[i] - c3d[j])

    assert bond(2, 7) == pytest.approx(0.93, abs=0.01)
    assert bond(4, 7) == pytest.approx(2.42, abs=0.01)
    assert bond(2, 0) == pytest.approx(2.23, abs=0.01)
# NOTE(review): the decorator head was stripped in extraction; restored as
# @pytest.mark.parametrize.
@pytest.mark.parametrize(
    'endpoint, expected_name',
    [
        pytest.param(func_homepage, 'func_homepage', id='function'),
        pytest.param(Endpoint().my_method, 'my_method', id='method'),
        pytest.param(Endpoint.my_classmethod, 'my_classmethod', id='classmethod'),
        pytest.param(Endpoint.my_staticmethod, 'my_staticmethod', id='staticmethod'),
        pytest.param(Endpoint(), 'Endpoint', id='object'),
        pytest.param((lambda request: ...), '<lambda>', id='lambda'),
    ],
)
def test_route_name(endpoint: typing.Callable[..., typing.Any], expected_name: str):
    """Route derives its default name from the endpoint callable."""
    assert Route(path='/', endpoint=endpoint).name == expected_name
# NOTE(review): the decorator head was stripped in extraction; restored as
# @pytest.mark.parametrize.
@pytest.mark.parametrize(
    argnames=('resource_dict', 'expected_resource_name'),
    argvalues=(
        ({'cpu': '2'}, _ResourceName.CPU),
        ({'mem': '1Gi'}, _ResourceName.MEMORY),
        ({'gpu': '1'}, _ResourceName.GPU),
        ({'storage': '100Mb'}, _ResourceName.STORAGE),
        ({'ephemeral_storage': '123Mb'}, _ResourceName.EPHEMERAL_STORAGE),
    ),
    ids=('CPU', 'MEMORY', 'GPU', 'STORAGE', 'EPHEMERAL_STORAGE'),
)
def test_convert_requests(resource_dict: Dict[str, str], expected_resource_name: _task_models.Resources):
    """Each Resources keyword maps to exactly one model ResourceEntry with the
    matching name and value, and no limits."""
    assert len(resource_dict) == 1
    expected_resource_value = list(resource_dict.values())[0]
    requests = Resources(**resource_dict)
    resources_model = convert_resources_to_resource_model(requests=requests)
    assert len(resources_model.requests) == 1
    request = resources_model.requests[0]
    assert isinstance(request, _task_models.Resources.ResourceEntry)
    assert request.name == expected_resource_name
    assert request.value == expected_resource_value
    assert len(resources_model.limits) == 0
class DateStartStopExpand(StartStopExpand):
    """Start/stop expansion specialized for calendar dates."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def parse_config(self):
        """Validate date bounds, convert the step to days, and pick a grouper."""
        super().parse_config()
        assert isinstance(self.start, datetime.date), (type(self.start), self.start)
        assert isinstance(self.end, datetime.date), (type(self.end), self.end)
        self.step = datetime.timedelta(days=self.step)
        # A positive integer group_by means fixed-size day buckets; otherwise
        # look up one of the named grouping strategies.
        if isinstance(self.group_by, int) and self.group_by > 0:
            self.grouper_key = GroupByDays(self.group_by)
            return
        named_groupers = {
            0: (lambda dt: 0),
            'monthly': (lambda dt: (dt.year, dt.month)),
            'daily': (lambda dt: (dt.year, dt.month, dt.day)),
            'MMDD': (lambda dt: (dt.month, dt.day)),
        }
        self.grouper_key = named_groupers[self.group_by]

    def format(self, x):
        """Render a date as an ISO-8601 string."""
        return x.isoformat()
def test_adding_a_secret_mount():
    """Secret mounts configured on one workload must not leak into the other."""
    expected_mount = {'mountPath': '/usr/share/filebeat/config/certs',
                      'name': 'elastic-certificates'}
    expected_volume = {'name': 'elastic-certificates',
                       'secret': {'secretName': 'elastic-certificates-name'}}

    # Mount declared under the daemonset only.
    config = '\ndeployment:\n enabled: true\ndaemonset:\n secretMounts:\n - name: elastic-certificates\n secretName: elastic-certificates-name\n path: /usr/share/filebeat/config/certs\n'
    r = helm_template(config)
    daemonset_spec = r['daemonset'][name]['spec']['template']['spec']
    deployment_spec = r['deployment'][name]['spec']['template']['spec']
    assert expected_mount in daemonset_spec['containers'][0]['volumeMounts']
    assert expected_volume in daemonset_spec['volumes']
    assert expected_mount not in deployment_spec['containers'][0]['volumeMounts']
    assert expected_volume not in deployment_spec['volumes']

    # Mount declared under the deployment only.
    config = '\ndeployment:\n enabled: true\n secretMounts:\n - name: elastic-certificates\n secretName: elastic-certificates-name\n path: /usr/share/filebeat/config/certs\n'
    r = helm_template(config)
    daemonset_spec = r['daemonset'][name]['spec']['template']['spec']
    deployment_spec = r['deployment'][name]['spec']['template']['spec']
    assert expected_mount in deployment_spec['containers'][0]['volumeMounts']
    assert expected_volume in deployment_spec['volumes']
    assert expected_mount not in daemonset_spec['containers'][0]['volumeMounts']
    assert expected_volume not in daemonset_spec['volumes']
class FaucetUntaggedControllerNfvTest(FaucetUntaggedTest):
    """Untagged test variant that also verifies traffic reached the switch
    interface facing the last host, as seen from the controller."""

    def test_untagged(self):
        last_host = self.hosts_name_ordered()[-1]
        switch = self.first_switch()
        # Resolve the switch-side interface of the link to the last host.
        link = switch.connectionsTo(last_host)[0]
        switch_intf = [intf for intf in link if intf in switch.intfList()][0]
        super().test_untagged()
        # Non-zero RX or TX packet counters indicate the port saw traffic.
        ifconfig_output = self.net.controllers[0].cmd('ifconfig %s' % switch_intf)
        self.assertTrue(
            re.search('(R|T)X packets[: ][1-9]', ifconfig_output),
            msg=ifconfig_output)
class TestNormalizeText():
    """Unit tests for normalize_text().

    NOTE(review): several input literals below appear to have lost their
    non-ASCII characters (and runs of spaces) during extraction — e.g. the
    dash and curly-quote inputs are now empty strings. Confirm against the
    original source before relying on these tests.
    """

    def test_should_replace_dash_with_hyphen(self):
        # NOTE(review): input was presumably a unicode dash; literal now empty.
        assert (normalize_text('') == '-')

    def test_should_replace_accent_with_quote(self):
        # NOTE(review): input was presumably a curly apostrophe; literal now empty.
        assert (normalize_text('') == "'")

    def test_should_normalize_multiple_spaces_to_one(self):
        # NOTE(review): input likely contained multiple spaces originally.
        assert (normalize_text('a b') == 'a b')

    def test_should_preserve_single_line_feed(self):
        assert (normalize_text('a\nb') == 'a\nb')

    def test_should_remove_space_around_line_feed(self):
        assert (normalize_text('a \n b') == 'a\nb')
# NOTE(review): the decorator head was stripped in extraction (the line began
# with "(MESSAGING_SECRETS, ..."); restored as @router.put — confirm the
# router object's name against the original module.
@router.put(
    MESSAGING_SECRETS,
    status_code=HTTP_200_OK,
    dependencies=[Security(verify_oauth_client, scopes=[MESSAGING_CREATE_OR_UPDATE])],
    response_model=TestMessagingStatusMessage,
)
def put_config_secrets(config_key: FidesKey, *, db: Session=Depends(deps.get_db), unvalidated_messaging_secrets: possible_messaging_secrets) -> TestMessagingStatusMessage:
    """Update the secrets of the messaging configuration with the given key.

    Raises HTTP 404 when no messaging configuration exists for ``config_key``.
    """
    logger.info("Finding messaging config with key '{}'", config_key)
    messaging_config = MessagingConfig.get_by(db=db, field='key', value=config_key)
    if not messaging_config:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f'No messaging configuration with key {config_key}.')
    return update_config_secrets(db, messaging_config, unvalidated_messaging_secrets)
class TestCLIOpenClosed(CuratorTestCase):
    """Integration test: the `open` action re-opens a closed index matched by
    a prefix pattern and leaves already-open indices open."""

    def test_open_closed(self):
        open_idx, closed_idx = 'dummy', 'my_index'
        self.create_index(open_idx)
        self.create_index(closed_idx)
        self.client.indices.close(index=closed_idx, ignore_unavailable=True)
        cli_args = self.get_runner_args()
        cli_args += [
            '--config', self.args['configfile'],
            'open',
            '--filter_list', '{"filtertype":"pattern","kind":"prefix","value":"my"}',
        ]
        assert 0 == self.run_subprocess(
            cli_args, logname='TestCLIOpenClosed.test_open_closed')
        # Neither index may remain closed afterwards.
        indices_state = self.client.cluster.state(metric=MET)[MET]['indices']
        for idx in (open_idx, closed_idx):
            assert 'close' != indices_state[idx]['state']
def test_mode_basis_io():
    """Round-trip mode bases through every supported file format."""
    grid = make_pupil_grid(128)
    bases = [
        make_zernike_basis(20, 1, grid, 1),
        make_xinetics_influence_functions(grid, 8, 1 / 8),
    ]
    extensions = ['asdf', 'fits', 'fits.gz', 'pkl', 'pickle']
    for basis in bases:
        for ext in extensions:
            fname = 'mode_basis_test.' + ext
            write_mode_basis(basis, fname)
            loaded = read_mode_basis(fname)
            # Grid, sparsity and every mode must survive the round trip.
            assert hash(loaded.grid) == hash(basis.grid)
            assert loaded.is_sparse == basis.is_sparse
            for i in range(basis.num_modes):
                assert np.allclose(basis[i], loaded[i])
            os.remove(fname)
def test_generate_gpu_will_overwrite_previous_gpu_version(create_test_data, store_local_session, create_pymel, create_maya_env):
    """Regenerating the GPU representation must replace, not duplicate, the
    previously generated version."""
    data = create_test_data
    generator = RepresentationGenerator()
    generator.version = data['building1_yapi_model_main_v003']
    generator.generate_gpu()
    repr_ = Representation(version=data['building1_yapi_model_main_v003'])
    first = repr_.find('GPU')
    assert first is not None
    # Generate again: the lookup must return the same version object.
    generator.generate_gpu()
    second = repr_.find('GPU')
    assert second is not None
    assert first == second
def is_url_whitelisted(url):
    """Return True if *url* is allowed by any whitelist rule.

    Rules, in order of cost: exact URL match, domain regexes, URL regexes.
    Reads the module-level ``whitelist`` dict ('urlExact',
    'regexDomainsInURLs', 'urlRegex').

    Rewritten from a found-flag loop to early returns: the original kept
    scanning the remaining regexes of a list even after a match; the return
    value is unchanged.
    """
    if url in whitelist['urlExact']:
        return True
    for regex in whitelist['regexDomainsInURLs']:
        if re.search(regex, url):
            return True
    for regex in whitelist['urlRegex']:
        if re.search(regex, url):
            return True
    return False
# NOTE(review): the decorator head was stripped in extraction; restored as
# @pytest.mark.usefixtures.
@pytest.mark.usefixtures('migrate_db')
class TriggerTestCase(common.BaseTestCase):
    """Integration tests for the ``*_text`` tsvector columns maintained on the
    ``disclosure.fec_fitem_*`` tables.

    Each test inserts name variants, refreshes the materialized views, then
    runs a full-text match (``column @@ to_tsquery(...)``) and checks which
    variants are found.

    NOTE(review): this class was garbled in extraction — the SQL ``@@``
    operators were stripped (restored below), the numeric ``sub_id`` base
    constants were lost (plain ``i`` is used instead, preserving uniqueness),
    and several accented string literals lost their non-ASCII characters,
    leaving duplicate set elements. The lost accents could not be recovered;
    confirm against the original source.
    """

    def setUp(self):
        super().setUp()
        self.longMessage = True
        self.maxDiff = None
        self.request_context = rest.app.test_request_context()
        self.request_context.push()
        self.connection = rest.db.engine.connect()

    def _response(self, qry):
        """GET *qry* and return the decoded JSON body, asserting HTTP 200."""
        response = self.app.get(qry)
        self.assertEqual(response.status_code, 200)
        result = json.loads(codecs.decode(response.data))
        self.assertNotEqual(result, [], 'Empty response!')
        self.assertEqual(result['api_version'], __API_VERSION__)
        return result

    def _results(self, qry):
        """Return only the ``results`` list of a JSON response."""
        response = self._response(qry)
        return response['results']

    def test_schedule_b_exclude(self):
        """Only the bare last variant of a name fails the full-text match."""
        connection = db.engine.connect()
        names = {'Test.com': ['Test.com', 'Test com', 'Test .com', 'Test'], "Steven O'Reilly": ["Steven O'Reilly", "Steven O' Reilly", 'Steven O Reilly', "O'Reilly"]}
        i = 0
        for key in names:
            for n in names[key]:
                i += 1
                data = {'recipient_nm': n, 'sub_id': i, 'filing_form': 'F3'}
                insert = (('INSERT INTO disclosure.fec_fitem_sched_b ' + '(recipient_nm, sub_id, filing_form) ') + ' VALUES (%(recipient_nm)s, %(sub_id)s, %(filing_form)s)')
                connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored. NOTE(review): with the flattened source this
        # runs once for the last-iterated key; confirm the intended scope.
        select = ((('SELECT * from disclosure.fec_fitem_sched_b ' + "WHERE recipient_name_text @@ to_tsquery('") + parse_fulltext(key)) + "');")
        results = connection.execute(select).fetchall()
        recipient_nm_list = [name[2] for name in results]
        self.assertEqual((set(names[key]) - set(recipient_nm_list)), {names[key][-1]})
        connection.close()

    def test_schedule_a_contributor_name_text(self):
        """Full-text search on contributor_name_text matches name variants."""
        connection = db.engine.connect()
        names = {'Test.com': ['Test.com', 'Test com', 'Test .com', 'Test'], "Steven O'Reilly": ["Steven O'Reilly", "Steven O' Reilly", 'Steven O Reilly', "O'Reilly"]}
        i = 0
        for key in names:
            for n in names[key]:
                i += 1
                data = {'contbr_nm': n, 'sub_id': i, 'filing_form': 'F3'}
                insert = (('INSERT INTO disclosure.fec_fitem_sched_a ' + '(contbr_nm, sub_id, filing_form) ') + ' VALUES (%(contbr_nm)s, %(sub_id)s, %(filing_form)s)')
                connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_a ' + "WHERE contributor_name_text @@ to_tsquery('") + parse_fulltext(key)) + "');")
        results = connection.execute(select).fetchall()
        contbr_nm_list = [name[3] for name in results]
        self.assertEqual((set(names[key]) - set(contbr_nm_list)), {names[key][-1]})
        connection.close()

    def test_schedule_a_contributor_employer_text(self):
        """Full-text search on contributor_employer_text matches variants."""
        connection = db.engine.connect()
        names = {'Test.com': ['Test.com', 'Test com', 'Test .com', 'Test'], "Steven O'Reilly": ["Steven O'Reilly", "Steven O' Reilly", 'Steven O Reilly', "O'Reilly"]}
        i = 0
        for key in names:
            for n in names[key]:
                i += 1
                data = {'contbr_employer': n, 'sub_id': i, 'filing_form': 'F3'}
                insert = (('INSERT INTO disclosure.fec_fitem_sched_a ' + '(contbr_employer, sub_id, filing_form) ') + ' VALUES (%(contbr_employer)s, %(sub_id)s, %(filing_form)s)')
                connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_a ' + "WHERE contributor_employer_text @@ to_tsquery('") + parse_fulltext(key)) + "');")
        results = connection.execute(select).fetchall()
        contbr_employer_list = [name[16] for name in results]
        self.assertEqual((set(names[key]) - set(contbr_employer_list)), {names[key][-1]})
        connection.close()

    def test_schedule_a_contributor_occupation_text(self):
        """Full-text search on contributor_occupation_text matches variants."""
        connection = db.engine.connect()
        names = {'Test.com': ['Test.com', 'Test com', 'Test .com', 'Test'], "Steven O'Reilly": ["Steven O'Reilly", "Steven O' Reilly", 'Steven O Reilly', "O'Reilly"]}
        i = 0
        for key in names:
            for n in names[key]:
                i += 1
                data = {'contbr_occupation': n, 'sub_id': i, 'filing_form': 'F3'}
                insert = (('INSERT INTO disclosure.fec_fitem_sched_a ' + '(contbr_occupation, sub_id, filing_form) ') + ' VALUES (%(contbr_occupation)s, %(sub_id)s, %(filing_form)s)')
                connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_a ' + "WHERE contributor_occupation_text @@ to_tsquery('") + parse_fulltext(key)) + "');")
        results = connection.execute(select).fetchall()
        contbr_occupation_list = [name[17] for name in results]
        self.assertEqual((set(names[key]) - set(contbr_occupation_list)), {names[key][-1]})
        connection.close()

    def test_schedule_c_loan_source_name_text(self):
        """loan_source_name_text matches the good variants and none of the bad."""
        connection = db.engine.connect()
        name = "O'Reilly"
        names_good = {'O Reilly', "O'Reilly", 'O.Reilly', 'O-Reilly'}
        names_bad = {'O', "O'Hare", 'Reilly'}
        i = 0
        for n in names_good.union(names_bad):
            i += 1
            data = {'loan_src_nm': n, 'sub_id': i, 'filing_form': 'F3'}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_c ' + '(loan_src_nm, sub_id, filing_form) ') + ' VALUES (%(loan_src_nm)s, %(sub_id)s, %(filing_form)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_c ' + "WHERE loan_source_name_text @@ to_tsquery('") + parse_fulltext(name)) + "');")
        results = connection.execute(select).fetchall()
        loan_src_nm_list = {na[7] for na in results}
        assert names_good.issubset(loan_src_nm_list)
        assert names_bad.isdisjoint(loan_src_nm_list)
        connection.close()

    def test_schedule_c_candidate_name_text(self):
        """candidate_name_text matches good variants and rejects bad ones."""
        connection = db.engine.connect()
        name = "O'Reilly"
        names_good = {'O Reilly', "O'Reilly", 'O.Reilly', 'O-Reilly'}
        names_bad = {'O', "O'Hare", 'Reilly'}
        i = 0
        for n in names_good.union(names_bad):
            i += 1
            data = {'cand_nm': n, 'sub_id': i, 'filing_form': 'F3'}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_c ' + '(cand_nm, sub_id, filing_form) ') + ' VALUES (%(cand_nm)s, %(sub_id)s, %(filing_form)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_c ' + "WHERE candidate_name_text @@ to_tsquery('") + parse_fulltext(name)) + "');")
        results = connection.execute(select).fetchall()
        cand_nm_list = {na[32] for na in results}
        assert names_good.issubset(cand_nm_list)
        assert names_bad.isdisjoint(cand_nm_list)
        connection.close()

    def test_schedule_d_creditor_debtor_name_text(self):
        """creditor_debtor_name_text matches good variants, rejects bad ones."""
        connection = db.engine.connect()
        name = "O'Reilly"
        names_good = {'O Reilly', "O'Reilly", 'O.Reilly', 'O-Reilly'}
        names_bad = {'O', "O'Hare", 'Reilly'}
        i = 0
        for n in names_good.union(names_bad):
            i += 1
            data = {'cred_dbtr_nm': n, 'sub_id': i, 'filing_form': 'F3'}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_d ' + '(cred_dbtr_nm, sub_id, filing_form) ') + ' VALUES (%(cred_dbtr_nm)s, %(sub_id)s, %(filing_form)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_d ' + "WHERE creditor_debtor_name_text @@ to_tsquery('") + parse_fulltext(name)) + "');")
        results = connection.execute(select).fetchall()
        cred_dbtr_nm_list = {na[3] for na in results}
        assert names_good.issubset(cred_dbtr_nm_list)
        assert names_bad.isdisjoint(cred_dbtr_nm_list)
        connection.close()

    def test_schedule_f_payee_name_text(self):
        """payee_name_text matches good variants and rejects bad ones."""
        connection = db.engine.connect()
        name = "O'Reilly"
        names_good = {'O Reilly', "O'Reilly", 'O.Reilly', 'O-Reilly'}
        names_bad = {'O', "O'Hare", 'Reilly'}
        i = 0
        for n in names_good.union(names_bad):
            i += 1
            data = {'pye_nm': n, 'sub_id': i, 'filing_form': 'F3'}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_f ' + '(pye_nm, sub_id, filing_form) ') + ' VALUES (%(pye_nm)s, %(sub_id)s, %(filing_form)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_f ' + "WHERE payee_name_text @@ to_tsquery('") + parse_fulltext(name)) + "');")
        results = connection.execute(select).fetchall()
        pye_nm_list = {na[14] for na in results}
        assert names_good.issubset(pye_nm_list)
        assert names_bad.isdisjoint(pye_nm_list)
        connection.close()

    def test_schedule_f_payee_name_text_accent(self):
        """Accent-insensitive payee_name_text search.

        NOTE(review): the two literals were originally accented/unaccented
        pairs; the accents were lost in extraction, collapsing the set.
        """
        connection = db.engine.connect()
        names = {'ACCENTED NAME', 'ACCENTED NAME'}
        i = 0
        for n in names:
            i += 1
            data = {'pye_nm': n, 'sub_id': i, 'filing_form': 'F3'}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_f ' + '(pye_nm, sub_id, filing_form) ') + ' VALUES (%(pye_nm)s, %(sub_id)s, %(filing_form)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        # BUG FIX: "@@" restored (see class docstring).
        select = ((('SELECT * from disclosure.fec_fitem_sched_f ' + "WHERE payee_name_text @@ to_tsquery('") + parse_fulltext('ACCENTED NAME')) + "');")
        results = connection.execute(select).fetchall()
        pye_nm_list = {na[14] for na in results}
        assert names.issubset(pye_nm_list)
        # Second query originally used the accented spelling of the name.
        select = ((('SELECT * from disclosure.fec_fitem_sched_f ' + "WHERE payee_name_text @@ to_tsquery('") + parse_fulltext('ACCENTED NAME')) + "');")
        results = connection.execute(select).fetchall()
        pye_nm_list = {na[14] for na in results}
        assert names.issubset(pye_nm_list)
        connection.close()

    def test_accent_insensitive_sched_a(self):
        """API search on contributor_employer is accent/case insensitive.

        NOTE(review): one set element was originally accented; lost in
        extraction (duplicate 'Test.com').
        """
        connection = db.engine.connect()
        names = {'Test.com', 'Test com', 'Test .com', 'test.com', 'TEST.COM', 'Test.com'}
        i = 0
        for n in names:
            i += 1
            data = {'contbr_employer': n, 'sub_id': i, 'filing_form': 'F3', 'two_year_transaction_period': 2020}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_a ' + '(contbr_employer, sub_id, filing_form, two_year_transaction_period) ') + ' VALUES (%(contbr_employer)s, %(sub_id)s, %(filing_form)s, %(two_year_transaction_period)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        results = self._results(api.url_for(ScheduleAView, contributor_employer='Test.com'))
        contbr_employer_list = {r['contributor_employer'] for r in results}
        assert names.issubset(contbr_employer_list)
        # Second query originally used the accented spelling.
        results = self._results(api.url_for(ScheduleAView, contributor_employer='Test.com'))
        contbr_employer_list = {r['contributor_employer'] for r in results}
        assert names.issubset(contbr_employer_list)
        connection.close()

    def test_accent_insensitive_sched_b(self):
        """API search on recipient_name is accent/case insensitive.

        NOTE(review): accented elements were lost in extraction.
        """
        connection = db.engine.connect()
        names = {'est-lou', 'Est lou', 'est lou', 'EST LOU', 'est lou', ' lou---'}
        i = 0
        for n in names:
            i += 1
            data = {'recipient_nm': n, 'sub_id': i, 'filing_form': 'F3', 'two_year_transaction_period': 2020}
            insert = (('INSERT INTO disclosure.fec_fitem_sched_b ' + '(recipient_nm, sub_id, filing_form, two_year_transaction_period) ') + ' VALUES (%(recipient_nm)s, %(sub_id)s, %(filing_form)s, %(two_year_transaction_period)s)')
            connection.execute(insert, data)
        manage.refresh_materialized(concurrent=False)
        results = self._results(api.url_for(ScheduleBView, recipient_name='est-lou'))
        contbr_employer_list = {r['recipient_name'] for r in results}
        assert names.issubset(contbr_employer_list)
        results = self._results(api.url_for(ScheduleBView, recipient_name='est lou'))
        contbr_employer_list = {r['recipient_name'] for r in results}
        assert names.issubset(contbr_employer_list)
        connection.close()

    def real_efile_sa7(self):
        """Efile schedule A search by employer/name/occupation.

        NOTE(review): the original row data was garbled in extraction
        (``{'repid': {, , }, ...}`` is not valid Python — the numeric repid
        values were stripped). Reconstructed as one dict per row with
        placeholder repids; confirm against the original source.
        """
        connection = db.engine.connect()
        rows = [
            {'repid': 1, 'tran_id': '4', 'fname': 'Oscar', 'mname': 'The', 'name': 'Grouch', 'indemp': 'The Street', 'indocc': 'Lead Grouch'},
            {'repid': 2, 'tran_id': '5', 'fname': 'The', 'mname': '', 'name': 'Count', 'indemp': 'The Street', 'indocc': 'Vampire/Educator'},
            {'repid': 3, 'tran_id': '6', 'fname': 'Mr.', 'mname': '', 'name': 'Rogers', 'indemp': 'The Neighborhood', 'indocc': 'Neighbor'},
        ]
        insert = (('INSERT INTO real_efile_sa7 ' + '(repid, tran_id, fname, mname, name, indemp, indocc) ') + 'VALUES (%(repid)s, %(tran_id)s, %(fname)s, %(mname)s, %(name)s, %(indemp)s, %(indocc)s)')
        for row in rows:
            connection.execute(insert, row)
        manage.refresh_materialized(concurrent=False)
        results = self._results(api.url_for(ScheduleAEfileView, contributor_employer='Neighborhood'))
        employer_set = {r['contributor_employer'] for r in results}
        assert {'The Neighborhood'}.issubset(employer_set)
        name_set = {r['contributor_name'] for r in results}
        assert {'Mr.'}.issubset(name_set)
        occupation_set = {r['contributor_occupation'] for r in results}
        assert {'Educator'}.issubset(occupation_set)
        connection.close()
def build_dummy_maze_environment() -> DummyEnvironment:
    """Assemble a DummyEnvironment wired with the default dummy conversions."""
    obs_conversion = ObservationConversion()
    core_env = DummyCoreEnvironment(obs_conversion.space())
    return DummyEnvironment(
        core_env=core_env,
        action_conversion=[DictDiscreteActionConversion()],
        observation_conversion=[obs_conversion],
    )
class OptionSeriesVennSonificationContexttracksMappingTremolo(Options):
    """Accessors for the tremolo mapping options of venn sonification
    context tracks."""

    def depth(self) -> 'OptionSeriesVennSonificationContexttracksMappingTremoloDepth':
        """Return the nested `depth` sub-configuration."""
        sub = self._config_sub_data(
            'depth', OptionSeriesVennSonificationContexttracksMappingTremoloDepth)
        return sub

    def speed(self) -> 'OptionSeriesVennSonificationContexttracksMappingTremoloSpeed':
        """Return the nested `speed` sub-configuration."""
        sub = self._config_sub_data(
            'speed', OptionSeriesVennSonificationContexttracksMappingTremoloSpeed)
        return sub
('bing')
_options
('-k', '--api-key', type=click.STRING, required=True, envvar='BING_API_KEY', help='Bing Maps API key')
def bing(ctx, database, table, location, delay, latitude, longitude, geojson, spatialite, raw, api_key):
    """CLI subcommand: geocode rows using the Bing Maps geocoder.

    Stores the shared geocoding options on the click context via
    ``fill_context`` and returns a configured ``geocoders.Bing`` instance
    (presumably geopy — confirm against the module imports).

    NOTE(review): the lines immediately above look like stripped click
    decorator fragments (command name, shared ``_options``, ``--api-key``).
    """
    click.echo('Using Bing geocoder')
    # Record the common options for downstream processing.
    fill_context(ctx, database, table, location, delay, latitude, longitude, geojson, spatialite, raw)
    return geocoders.Bing(api_key=api_key)
class ResourceDailyAvailabilitiesBetweenTestCase(TestCase):
    """Tests for ``Resource.daily_availabilities_within(start, end)``.

    The window under test is 2016-01-10 .. 2016-01-14 inclusive; the
    helpers create capacity changes and confirmed uses for the resource.
    """

    def setUp(self):
        self.resource = ResourceFactory()
        self.start = date(2016, 1, 10)
        self.end = date(2016, 1, 14)
        self.booker = UserFactory()

    def capacity_on(self, date, quantity):
        # Capacity change that takes effect from `date` onwards.
        return CapacityChange.objects.create(resource=self.resource, start_date=date, quantity=quantity)

    def use_on(self, arrive, depart):
        # Confirmed use of the resource under test.
        return Use.objects.create(resource=self.resource, arrive=arrive, depart=depart, status='confirmed', user=self.booker)

    def use_on_other_resource(self, arrive, depart):
        # Confirmed use of a *different* resource at the same location, to
        # verify other resources' uses don't affect availability.
        resource = ResourceFactory(location=self.resource.location)
        return Use.objects.create(resource=resource, arrive=arrive, depart=depart, status='confirmed', user=self.booker)

    def test_it_returns_zero_quantities_for_each_date_if_resource_has_no_availabilities(self):
        result = self.resource.daily_availabilities_within(self.start, self.end)
        self.assertEqual(result, [(date(2016, 1, 10), 0), (date(2016, 1, 11), 0), (date(2016, 1, 12), 0), (date(2016, 1, 13), 0), (date(2016, 1, 14), 0)])

    def test_it_returns_quantity_for_a_preceeding_capacity(self):
        # A capacity set long before the window applies throughout it.
        self.capacity_on(date(2015, 1, 1), 2)
        result = self.resource.daily_availabilities_within(self.start, self.end)
        self.assertEqual(result, [(date(2016, 1, 10), 2), (date(2016, 1, 11), 2), (date(2016, 1, 12), 2), (date(2016, 1, 13), 2), (date(2016, 1, 14), 2)])

    def test_it_returns_quantity_availabilities_during(self):
        # Mid-window changes take effect from their start date; the change
        # on the 15th falls outside the window and must be ignored.
        self.capacity_on(date(2016, 1, 12), 3)
        self.capacity_on(date(2016, 1, 14), 2)
        self.capacity_on(date(2016, 1, 15), 6)
        result = self.resource.daily_availabilities_within(self.start, self.end)
        self.assertEqual(result, [(date(2016, 1, 10), 0), (date(2016, 1, 11), 0), (date(2016, 1, 12), 3), (date(2016, 1, 13), 3), (date(2016, 1, 14), 2)])

    def test_it_returns_subtracts_confirmed_uses_from_simple_capacity(self):
        self.capacity_on(date(2016, 1, 8), 10)
        self.use_on(date(2016, 1, 8), date(2016, 1, 10))
        self.use_on(date(2016, 1, 9), date(2016, 1, 11))
        self.use_on(date(2016, 1, 12), date(2016, 1, 20))
        self.use_on(date(2016, 1, 13), date(2016, 1, 14))
        self.use_on(date(2016, 1, 15), date(2016, 1, 16))
        result = self.resource.daily_availabilities_within(self.start, self.end)
        # Expected values show the departure day itself is not occupied
        # (e.g. the 9->11 use frees the 11th).
        self.assertEqual(result, [(date(2016, 1, 10), 9), (date(2016, 1, 11), 10), (date(2016, 1, 12), 9), (date(2016, 1, 13), 8), (date(2016, 1, 14), 9)])

    def test_it_returns_subtracts_confirmed_bookings(self):
        # Capacities and overlapping uses combined; availability may go
        # negative when uses exceed capacity.
        self.capacity_on(date(2016, 1, 12), 3)
        self.capacity_on(date(2016, 1, 14), 2)
        self.capacity_on(date(2016, 1, 15), 6)
        self.use_on(date(2016, 1, 8), date(2016, 1, 10))
        self.use_on(date(2016, 1, 9), date(2016, 1, 11))
        self.use_on(date(2016, 1, 12), date(2016, 1, 20))
        self.use_on(date(2016, 1, 13), date(2016, 1, 14))
        self.use_on(date(2016, 1, 15), date(2016, 1, 16))
        result = self.resource.daily_availabilities_within(self.start, self.end)
        self.assertEqual(result, [(date(2016, 1, 10), (- 1)), (date(2016, 1, 11), 0), (date(2016, 1, 12), 2), (date(2016, 1, 13), 1), (date(2016, 1, 14), 1)])

    def test_it_doesnt_subtract_bookings_for_another_resource(self):
        self.capacity_on(date(2016, 1, 8), 10)
        self.use_on_other_resource(date(2016, 1, 11), date(2016, 1, 14))
        result = self.resource.daily_availabilities_within(self.start, self.end)
        self.assertEqual(result, [(date(2016, 1, 10), 10), (date(2016, 1, 11), 10), (date(2016, 1, 12), 10), (date(2016, 1, 13), 10), (date(2016, 1, 14), 10)])
class TraitFunction(TraitHandler):
    """Trait handler that validates values with an arbitrary function.

    The wrapped function is called as ``aFunc(object, name, value)`` and
    should return the (possibly coerced) value, raising ``TraitError``
    when the value is not acceptable.
    """

    def __init__(self, aFunc):
        # The handler is only meaningful for callables; fail fast otherwise.
        if (not isinstance(aFunc, CallableTypes)):
            raise TraitError('Argument must be callable.')
        self.aFunc = aFunc
        # Tuple consumed by the C-level fast validation path.
        self.fast_validate = (ValidateTrait.function, aFunc)

    def validate(self, object, name, value):
        """Return aFunc's result, converting its TraitError into the
        standard handler error message for this object/name/value."""
        try:
            return self.aFunc(object, name, value)
        except TraitError:
            self.error(object, name, value)

    def info(self):
        """Describe the legal values: prefer an explicit ``info`` attribute
        on the function, then its docstring, then a generic message."""
        try:
            return self.aFunc.info
        except AttributeError:
            # BUGFIX: was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit; only a missing ``info``
            # attribute should fall through to the docstring.
            if self.aFunc.__doc__:
                return self.aFunc.__doc__
            return 'a legal value'
def test_registry_key_parsing():
    """find_iocs should extract all three registry key paths (long and
    abbreviated hive prefixes) from a single string."""
    text = ('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows '
            'HKLM\\Software\\Microsoft\\Windows '
            'HKCC\\Software\\Microsoft\\Windows')
    expected = ['HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows',
                'HKLM\\Software\\Microsoft\\Windows',
                'HKCC\\Software\\Microsoft\\Windows']
    found = find_iocs(text)
    assert (sorted(expected) == sorted(found['registry_key_paths']))
def npfromfile(fname: (((str | pathlib.Path) | io.BytesIO) | io.StringIO), dtype: npt.DTypeLike=np.float32, count: int=1, offset: int=0, mmap: bool=False) -> np.ndarray:
try:
if (mmap and (not isinstance(fname, (io.BytesIO, io.StringIO)))):
vals = np.memmap(fname, dtype=dtype, shape=(count,), mode='r', offset=offset)
else:
vals = np.fromfile(fname, dtype=dtype, count=count, offset=offset)
except TypeError as err:
if ("'offset' is an invalid" not in str(err)):
raise
if (not isinstance(fname, (str, pathlib.Path))):
raise
with open(fname, 'rb') as buffer:
buffer.seek(offset)
vals = np.fromfile(buffer, dtype=dtype, count=count)
return vals |
class LakeGetter(BaseAction):
    """Action that fetches a model's data from the lake via DVC."""

    def __init__(self, model_id, config_json):
        super().__init__(model_id=model_id, config_json=config_json, credentials_json=None)
        # Destination directory for this model, resolved by the base class.
        self.model_dest = self._model_path(model_id)
        self.dvc_fetcher = DVCFetcher(self.model_dest)

    def get(self):
        """Download the model data into ``model_dest``."""
        self.dvc_fetcher.get_data()
class TestsOklab(util.ColorAssertsPyTest):
    """Parsing/serialization tests for the Oklab color space.

    Each pair in COLORS is (input string, expected serialization); an
    expected value of None means the input must fail to parse.
    """

    COLORS = [('red', 'color(--oklab 0.62796 0.22486 0.12585)'), ('orange', 'color(--oklab 0.79269 0.05661 0.16138)'), ('yellow', 'color(--oklab 0.96798 -0.07137 0.19857)'), ('green', 'color(--oklab 0.51975 -0.1403 0.10768)'), ('blue', 'color(--oklab 0.45201 -0.03246 -0.31153)'), ('indigo', 'color(--oklab 0.33898 0.09416 -0.15255)'), ('violet', 'color(--oklab 0.7619 0.15647 -0.1008)'), ('white', 'color(--oklab 1 0 0)'), ('gray', 'color(--oklab 0.59987 0 0)'), ('black', 'color(--oklab 0 0 0)'), ('oklab(1 0.1 -0.1)', 'color(--oklab 1 0.1 -0.1)'), ('oklab(1 0.1 -0.1 / 0.5)', 'color(--oklab 1 0.1 -0.1 / 0.5)'), ('oklab(50% 0.2 -0.2)', 'color(--oklab 0.5 0.2 -0.2)'), ('oklab(50% 50% -50% / 50%)', 'color(--oklab 0.5 0.2 -0.2 / 0.5)'), ('oklab(none none none / none)', 'color(--oklab none none none / none)'), ('oklab(1, 0.1, -0.1)', None), ('color(--oklab 1 0.1 -0.1)', 'color(--oklab 1 0.1 -0.1)'), ('color(--oklab 1 0.1 -0.1 / 0.5)', 'color(--oklab 1 0.1 -0.1 / 0.5)'), ('color(--oklab 50% 50% -50% / 50%)', 'color(--oklab 0.5 0.2 -0.2 / 0.5)'), ('color(--oklab none none none / none)', 'color(--oklab none none none / none)'), ('color(--oklab 0% 0% 0%)', 'color(--oklab 0 0 0)'), ('color(--oklab 100% 100% 100%)', 'color(--oklab 1 0.4 0.4)'), ('color(--oklab -100% -100% -100%)', 'color(--oklab -1 -0.4 -0.4)')]

    # NOTE(review): the leading dot suggests a stripped `@pytest.mark`
    # decorator prefix in the original source.
    .parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Parse color1 and compare to color2; None means parsing must raise."""
        if (color2 is None):
            with pytest.raises(ValueError):
                Color(color1)
        else:
            self.assertColorEqual(Color(color1).convert('oklab'), Color(color2), color=True)
class OptionSeriesDumbbellSonificationTracksMapping(Options):
    """Audio-parameter mapping options for a dumbbell series sonification
    track; each accessor returns one nested option object via
    ``Options._config_sub_data``.

    NOTE(review): the paired same-name definitions (``text``) suggest
    @property/@setter decorators were stripped from this generated code;
    as written the later definition shadows the earlier one.
    """

    def frequency(self) -> 'OptionSeriesDumbbellSonificationTracksMappingFrequency':
        """Frequency mapping options."""
        return self._config_sub_data('frequency', OptionSeriesDumbbellSonificationTracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionSeriesDumbbellSonificationTracksMappingGapbetweennotes':
        """Gap-between-notes mapping options."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesDumbbellSonificationTracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionSeriesDumbbellSonificationTracksMappingHighpass':
        """High-pass filter mapping options."""
        return self._config_sub_data('highpass', OptionSeriesDumbbellSonificationTracksMappingHighpass)

    def lowpass(self) -> 'OptionSeriesDumbbellSonificationTracksMappingLowpass':
        """Low-pass filter mapping options."""
        return self._config_sub_data('lowpass', OptionSeriesDumbbellSonificationTracksMappingLowpass)

    def noteDuration(self) -> 'OptionSeriesDumbbellSonificationTracksMappingNoteduration':
        """Note-duration mapping options."""
        return self._config_sub_data('noteDuration', OptionSeriesDumbbellSonificationTracksMappingNoteduration)

    def pan(self) -> 'OptionSeriesDumbbellSonificationTracksMappingPan':
        """Stereo pan mapping options."""
        return self._config_sub_data('pan', OptionSeriesDumbbellSonificationTracksMappingPan)

    def pitch(self) -> 'OptionSeriesDumbbellSonificationTracksMappingPitch':
        """Pitch mapping options."""
        return self._config_sub_data('pitch', OptionSeriesDumbbellSonificationTracksMappingPitch)

    def playDelay(self) -> 'OptionSeriesDumbbellSonificationTracksMappingPlaydelay':
        """Play-delay mapping options."""
        return self._config_sub_data('playDelay', OptionSeriesDumbbellSonificationTracksMappingPlaydelay)

    def rate(self) -> 'OptionSeriesDumbbellSonificationTracksMappingRate':
        """Rate mapping options."""
        return self._config_sub_data('rate', OptionSeriesDumbbellSonificationTracksMappingRate)

    def text(self):
        # Getter: mapped text value (defaults to None).
        return self._config_get(None)

    def text(self, text: str):
        # Setter for the mapped text value.
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesDumbbellSonificationTracksMappingTime':
        """Time mapping options."""
        return self._config_sub_data('time', OptionSeriesDumbbellSonificationTracksMappingTime)

    def tremolo(self) -> 'OptionSeriesDumbbellSonificationTracksMappingTremolo':
        """Tremolo mapping options."""
        return self._config_sub_data('tremolo', OptionSeriesDumbbellSonificationTracksMappingTremolo)

    def volume(self) -> 'OptionSeriesDumbbellSonificationTracksMappingVolume':
        """Volume mapping options."""
        return self._config_sub_data('volume', OptionSeriesDumbbellSonificationTracksMappingVolume)
def start_command():
    """Parse CLI flags and launch the EmbedChain Discord bot.

    The parsed namespace and the bot instance are published as module-level
    globals so other handlers in this module can reach them.
    """
    global args, discord_bot
    parser = argparse.ArgumentParser(description='EmbedChain DiscordBot command line interface')
    parser.add_argument(
        '--include-question',
        help='include question in query reply, otherwise it is hidden behind the slash command.',
        action='store_true',
    )
    args = parser.parse_args()
    discord_bot = DiscordBot()
    discord_bot.start()
def quartzPathToString(path):
    """Serialize a Quartz path into a flat, space-separated string of
    '<element-type> <x> <y> ...' segments."""
    segments = []
    for elem_type, elem_points in quartzPathElements(path):
        coords = ' '.join(f'{pt.x} {pt.y}' for pt in elem_points)
        segments.append(f'{elem_type} {coords}')
    return ' '.join(segments)
class UserFollowGroupDetail(ResourceDetail):
    """JSON:API detail endpoint for a user's group-follow record.

    Supports GET and DELETE; only the owning user may delete a follow.
    """

    def before_get_object(self, view_kwargs):
        # Resolve /users/<user_id>/... routes to an object id.
        # NOTE(review): this assigns the *user's* id as the follow record
        # id — confirm this matches the data layer's lookup expectations.
        if view_kwargs.get('user_id'):
            user = safe_query_kwargs(User, view_kwargs, 'user_id')
            view_kwargs['id'] = user.id

    def after_get_object(self, follower, view_kwargs):
        # 404 when the lookup produced no record.
        if (not follower):
            raise NotFoundError({'source': ''}, 'Group Not Found')

    def before_delete_object(self, follower, view_kwargs):
        # Only the user who created the follow may remove it.
        if (not follower):
            raise NotFoundError({'source': ''}, 'Group Follower Not Found')
        if (current_user.id != follower.user_id):
            raise ForbiddenError({'source': ''}, 'User have no permission to delete follower')

    # flask-rest-jsonapi view configuration.
    view_kwargs = True
    methods = ['GET', 'DELETE']
    decorators = (jwt_required,)
    schema = UserFollowGroupSchema
    data_layer = {'session': db.session, 'model': UserFollowGroup, 'methods': {'after_get_object': after_get_object, 'before_delete_object': before_delete_object}}
def create_test_gitlab(monkeypatch, includes=None, excludes=None, in_file=None, root_group=None):
    """Build a GitlabTree against the test URL/TOKEN whose ``groups`` API
    is replaced by an in-memory fixture tree (group -> subgroup -> project)."""
    tree = gitlab_tree.GitlabTree(URL, TOKEN, 'ssh', 'name', includes=includes, excludes=excludes, in_file=in_file, root_group=root_group)
    nodes = []
    # Build leaf-to-root: project, then the subgroup holding it, then the
    # top-level group holding the subgroup.
    project_list = Listable(append_node(nodes, PROJECT_NAME, PROJECT_URL))
    subgroup = append_node(nodes, SUBGROUP_NAME, SUBGROUP_URL, projects=project_list)
    group = append_node(nodes, GROUP_NAME, GROUP_URL, subgroups=Listable(subgroup))
    monkeypatch.setattr(tree.gitlab, 'groups', Listable(group, nodes))
    return tree
class TestUtil(TestCase):
    """Tests for ROI helper utilities (site generation and overlay
    membership)."""

    def test_get_roi_gen_sites(self):
        # Minimal one-tile tilegrid database with configurable sites.
        makedb = (lambda sites: {'ATILE': {'bits': {'CLB_IO_CLK': {'baseaddr': '0x00400F00', 'frames': 28, 'height': 2, 'offset': 0, 'words': 2}}, 'grid_x': 10, 'grid_y': 10, 'segment': 'ASEGMENT', 'segment_type': 'bram0_l', 'sites': sites, 'prohibited_sites': [], 'type': 'BRAM_INT_INTERFACE_L'}})
        with setup_database(makedb({})):
            # No sites -> nothing generated.
            self.assertListEqual(list(get_roi().gen_sites()), [])
        with setup_database(makedb({'FOO': 'BAR'})):
            # Each site yields a (tile, site name, site type) tuple.
            self.assertListEqual(list(get_roi().gen_sites()), [('ATILE', 'FOO', 'BAR')])

    def test_in_roi_overlay(self):
        """tile_in_roi is True for tiles outside every region, False for
        tiles inside one (the overlay is the complement of the regions)."""
        region_dict = {}
        # NOTE(review): region tuples are assumed to be coordinate bounds;
        # the exact axis order is inferred from the assertions below —
        # confirm against the Overlay implementation.
        region_dict['pr1'] = (10, 58, 0, 51)
        region_dict['pr2'] = (10, 58, 52, 103)
        overlay = Overlay(region_dict)
        self.assertFalse(overlay.tile_in_roi(GridLoc(18, 50)))
        self.assertFalse(overlay.tile_in_roi(GridLoc(18, 84)))
        self.assertTrue(overlay.tile_in_roi(GridLoc(8, 50)))
        self.assertTrue(overlay.tile_in_roi(GridLoc(18, 112)))
        self.assertTrue(overlay.tile_in_roi(GridLoc(80, 40)))
def extractDemogorgon1912WixsiteCom(item):
    """Map a demogorgon1912.wixsite.com feed item to a release message.

    Returns None for previews or items without a chapter/volume number,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesSunburstSonification(Options):
    """Sonification options for a sunburst series.

    NOTE(review): the paired same-name definitions (``enabled``) suggest
    @property/@setter decorators were stripped from this generated code.
    """

    def contextTracks(self) -> 'OptionSeriesSunburstSonificationContexttracks':
        """Context tracks played alongside the data tracks."""
        return self._config_sub_data('contextTracks', OptionSeriesSunburstSonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionSeriesSunburstSonificationDefaultinstrumentoptions':
        """Defaults applied to instrument tracks."""
        return self._config_sub_data('defaultInstrumentOptions', OptionSeriesSunburstSonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionSeriesSunburstSonificationDefaultspeechoptions':
        """Defaults applied to speech tracks."""
        return self._config_sub_data('defaultSpeechOptions', OptionSeriesSunburstSonificationDefaultspeechoptions)

    def enabled(self):
        # Getter: whether sonification is enabled (default True).
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter for the enabled flag.
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesSunburstSonificationPointgrouping':
        """Options for grouping nearby points."""
        return self._config_sub_data('pointGrouping', OptionSeriesSunburstSonificationPointgrouping)

    def tracks(self) -> 'OptionSeriesSunburstSonificationTracks':
        """The sonification tracks for this series."""
        return self._config_sub_data('tracks', OptionSeriesSunburstSonificationTracks)
def find_store(store):
    """Resolve ``store`` (URL or filesystem path) to a Zarr store.

    Supported forms: s3:// URLs (delegated to ``url_to_s3_store``),
    existing local paths (``.zip`` archives become a ``zarr.ZipStore``,
    anything else is returned as-is), and file:// URLs (stripped to a
    plain path).  Raises NotImplementedError for any other URL scheme.

    NOTE(review): the credential separator and s3 scheme list were garbled
    in the original (``split('')`` would raise ValueError and
    ``'' in o.netloc`` is always True); restored to the conventional
    ``user:password@host`` form and a plain ``'s3'`` scheme.
    """
    o = urlparse(store)
    if ('@' in o.netloc):
        # Split off embedded credentials.  (Parsed but currently unused
        # here; url_to_s3_store re-parses the full URL.)
        (auth, server) = o.netloc.split('@')
        (user, password) = auth.split(':')
    if (o.scheme in ['s3']):
        return url_to_s3_store(store)
    if os.path.exists(store):
        if store.endswith('.zip'):
            return zarr.ZipStore(store)
        return store
    if (o.scheme in ['file']):
        # Drop the "file://" prefix (scheme plus '://').
        return store[(len(o.scheme) + 3):]
    raise NotImplementedError(f"Unknown protocol '{o.scheme}' for Zarr in {store}")
def extractRationalistempireTumblrCom(item):
    """Map a rationalistempire.tumblr.com feed item to a release message.

    Returns None for previews/unnumbered titles, a release message for a
    recognised tag, otherwise False.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if ('preview' in title.lower()) or not (chp or vol):
        return None
    tagmap = {'PRC': ('PRC', 'translated'), 'Loiterous': ('Loiterous', 'oel')}
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
.provider(fields.Dictionary({}))
class TracingCoroutineMiddleware(CoroutineMiddleware):
    """Test middleware that records the order of its lifecycle hooks in
    module-level call-trace lists."""

    def before_run_coroutine(self):
        # Hook invoked before the coroutine is run.
        before_call_trace.append('TracingCoroutineMiddleware')

    def coroutine(self, coroutine):
        # Records creation, then wraps the original coroutine so the
        # pre/post run order is captured around its execution.
        create_call_trace.append('TracingCoroutineMiddleware')

        async def wrapper():
            run_call_trace_pre.append('TracingCoroutineMiddleware')
            try:
                return (await coroutine)
            finally:
                # Runs whether the coroutine returns or raises.
                run_call_trace_post.append('TracingCoroutineMiddleware')
        return wrapper()
class boxesPyWrapper(inkex.GenerateExtension):
    """Inkscape extension wrapper that shells out to the boxes.py
    generator and imports the produced SVG as a single group."""

    def add_arguments(self, pars):
        """Register every ``--key=value`` CLI argument Inkscape passes on,
        so the values land in ``self.options`` for ``generate``."""
        for arg in sys.argv[1:]:
            parts = arg.split('=')
            if (parts[0] == '--id'):
                continue
            if (len(parts) == 2):
                # BUGFIX: the parsed value was previously discarded and the
                # flag name itself used as the default (default=key).
                pars.add_argument(parts[0], default=parts[1])

    def generate(self):
        """Run boxes.py with the collected options and yield its SVG
        contents wrapped in a <g id="boxes.py"> group."""
        (f, box_file) = tempfile.mkstemp('.svg', 'boxes.py-inkscape')
        cmd = 'boxes'
        for arg in vars(self.options):
            # Inkscape-internal options that boxes.py must not see.
            if (arg in ('output', 'id', 'ids', 'selected_nodes', 'input_file', 'tab')):
                continue
            if ((arg == 'original') and (str(getattr(self.options, arg)) == 'false')):
                continue
            cmd += f' --{arg} {quote(str(getattr(self.options, arg)))}'
        cmd += f' --output {box_file} {box_file}'
        cmd = cmd.replace('boxes --generator', 'boxes')
        with os.popen(cmd, 'r') as boxes:
            # Drain stdout so boxes.py runs to completion.
            boxes.read()
        try:
            stream = open(box_file)
        except FileNotFoundError:
            inkex.utils.debug((('There was no ' + box_file) + ' output generated. Cannot continue. Command was:'))
            inkex.utils.debug(str(cmd))
            sys.exit(1)
        # huge_tree: generated SVGs can exceed lxml's default size limits.
        # BUGFIX: the parser object was previously built and then discarded
        # in favour of a second, inline XMLParser.
        parser = etree.XMLParser(huge_tree=True)
        doc = etree.parse(stream, parser=parser)
        stream.close()
        if os.path.exists(box_file):
            os.remove(box_file)
        group = inkex.Group(id='boxes.py')
        for element in doc.getroot():
            group.append(element)
        return group
class LoopIR_Compare():
    """Structural equality comparison over LoopIR fragments.

    Two trees match when they have the same node classes, operators and
    constants, and their names compare equal as strings (symbol identity
    is not required).
    """

    def __init__(self):
        pass

    def match_stmts(self, stmts1, stmts2):
        """True when the statement lists match pairwise.

        NOTE(review): ``zip`` truncates, so lists of different lengths
        compare by their common prefix — confirm callers guarantee equal
        lengths.
        """
        return all((self.match_s(s1, s2) for (s1, s2) in zip(stmts1, stmts2)))

    def match_s(self, s1, s2):
        """Structurally match two statements of the same LoopIR class."""
        if (type(s1) is not type(s2)):
            return False
        if isinstance(s1, (LoopIR.Assign, LoopIR.Reduce)):
            return (self.match_name(s1.name, s2.name) and self.match_t(s1.type, s2.type) and all((self.match_e(i1, i2) for (i1, i2) in zip(s1.idx, s2.idx))) and self.match_e(s1.rhs, s2.rhs))
        elif isinstance(s1, LoopIR.WriteConfig):
            return ((s1.config == s2.config) and (s1.field == s2.field) and self.match_e(s1.rhs, s2.rhs))
        elif isinstance(s1, LoopIR.Pass):
            return True
        elif isinstance(s1, LoopIR.If):
            return (self.match_e(s1.cond, s2.cond) and self.match_stmts(s1.body, s2.body) and self.match_stmts(s1.orelse, s2.orelse))
        elif isinstance(s1, LoopIR.For):
            return (self.match_name(s1.iter, s2.iter) and self.match_e(s1.lo, s2.lo) and self.match_e(s1.hi, s2.hi) and self.match_stmts(s1.body, s2.body))
        elif isinstance(s1, LoopIR.Alloc):
            return (self.match_name(s1.name, s2.name) and self.match_t(s1.type, s2.type))
        elif isinstance(s1, LoopIR.Call):
            # Called procedures must be the *same* object, arguments match
            # structurally.
            return ((s1.f == s2.f) and all((self.match_e(a1, a2) for (a1, a2) in zip(s1.args, s2.args))))
        elif isinstance(s1, LoopIR.WindowStmt):
            return (self.match_name(s1.lhs, s2.lhs) and self.match_e(s1.rhs, s2.rhs))
        else:
            assert False, f'bad case: {type(s1)}'

    def match_e(self, e1, e2):
        """Structurally match two expressions of the same LoopIR class."""
        if (type(e1) is not type(e2)):
            return False
        if isinstance(e1, LoopIR.Read):
            return (self.match_name(e1.name, e2.name) and all((self.match_e(i1, i2) for (i1, i2) in zip(e1.idx, e2.idx))))
        elif isinstance(e1, LoopIR.Const):
            return (e1.val == e2.val)
        elif isinstance(e1, LoopIR.USub):
            return self.match_e(e1.arg, e2.arg)
        elif isinstance(e1, LoopIR.BinOp):
            return ((e1.op == e2.op) and self.match_e(e1.lhs, e2.lhs) and self.match_e(e1.rhs, e2.rhs))
        elif isinstance(e1, LoopIR.BuiltIn):
            # Built-in functions compare by identity.
            return ((e1.f is e2.f) and all((self.match_e(a1, a2) for (a1, a2) in zip(e1.args, e2.args))))
        elif isinstance(e1, LoopIR.WindowExpr):
            return (self.match_name(e1.name, e2.name) and all((self.match_w_access(w1, w2) for (w1, w2) in zip(e1.idx, e2.idx))))
        elif isinstance(e1, LoopIR.StrideExpr):
            return (self.match_name(e1.name, e2.name) and (e1.dim == e2.dim))
        elif isinstance(e1, LoopIR.ReadConfig):
            return ((e1.config == e2.config) and (e1.field == e2.field))
        else:
            assert False, 'bad case'

    def match_name(self, n1, n2):
        # Names match by user-facing string, not symbol identity.
        return (n1.name() == n2.name())

    def match_w_access(self, w1, w2):
        """Match window accesses: intervals by lo/hi, points by pt."""
        if isinstance(w1, LoopIR.Interval):
            return (self.match_e(w1.lo, w2.lo) and self.match_e(w1.hi, w2.hi))
        elif isinstance(w1, LoopIR.Point):
            return self.match_e(w1.pt, w2.pt)
        else:
            assert False, 'bad case'

    def match_t(self, t1, t2):
        """Match types: tensors structurally, all other types by class."""
        if isinstance(t1, LoopIR.Tensor):
            return ((t1.is_window == t2.is_window) and self.match_t(t1.type, t2.type) and all((self.match_e(i1, i2) for (i1, i2) in zip(t1.hi, t2.hi))))
        else:
            return (type(t1) == type(t2))
.parametrize('test_name', [1, {}, [], 2.3])
def test_raise_incorrect_name_type(test_name):
    """Assigning a non-string value to ``tname`` must raise ValueError."""
    pol = Polygons()
    frame = pd.DataFrame({
        'X_UTME': [1.0, 2.0],
        'Y_UTMN': [2.0, 1.0],
        'Z_TVDSS': [3.0, 3.0],
        'POLY_ID': [3, 3],
        'T_DELTALEN': [2.0, 1.0],
    })
    pol.dataframe = frame
    pol._pname = 'POLY_ID'
    pol._tname = 'T_DELTALEN'
    with pytest.raises(ValueError, match='Wrong type of input'):
        pol.tname = test_name
class OptionSeriesPackedbubbleStatesInactive(Options):
    """Options for the 'inactive' state of packed-bubble series points.

    NOTE(review): the paired same-name definitions suggest
    @property/@setter decorators were stripped from this generated code.
    """

    def animation(self) -> 'OptionSeriesPackedbubbleStatesInactiveAnimation':
        """Animation options for entering the inactive state."""
        return self._config_sub_data('animation', OptionSeriesPackedbubbleStatesInactiveAnimation)

    def enabled(self):
        # Getter: whether the inactive state is enabled (default True).
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter for the enabled flag.
        self._config(flag, js_type=False)

    def opacity(self):
        # Getter: opacity applied to inactive points (default 0.2).
        return self._config_get(0.2)

    def opacity(self, num: float):
        # Setter for the inactive opacity.
        self._config(num, js_type=False)
class DivinePickHandler(THBEventHandler):
    """After a DropCardStage resolves, lets the player previously marked as
    the 'divine picker' take the dropped cards via DivinePickAction."""

    interested = ['action_after']

    def handle(self, evt_type, act):
        if ((evt_type == 'action_after') and isinstance(act, DropCardStage)):
            dropper = act.target
            g = self.game
            if (not dropper.has_skill(Divine)):
                return act
            # The picker is the single live player (other than the dropper)
            # stored earlier in dropper.tags['divine_picker'].
            pl = [p for p in g.players if ((not p.dead) and (p is not dropper) and (p is dropper.tags['divine_picker']))]
            assert (len(pl) <= 1), 'Multiple divine picker!'
            # One-shot: clear the marker regardless of outcome.
            dropper.tags['divine_picker'] = None
            if (not pl):
                return act
            picker = pl[0]
            dropn = getattr(act, 'dropn', 0)
            dropped = getattr(act, 'cards', [])
            # Only fire when the stage actually dropped the expected number
            # of cards.
            if (dropn and dropped and (len(dropped) == dropn)):
                g.process_action(DivinePickAction(picker, dropper, dropped))
        return act
def extractZumieditsWordpressCom(item):
    """Map a zumiedits.wordpress.com feed item to a release message.

    Note: unlike sibling extractors, this one returns False (not None)
    for previews and unnumbered items.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    has_number = chp or vol
    if (not has_number) or ('preview' in title.lower()):
        return False
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class UnsupportedNodeFixer():
    """Graph rewriter that replaces accumulated-graph nodes BMG cannot
    represent directly with semantically equivalent supported nodes
    (e.g. division -> multiply by reciprocal, chi2 -> gamma).

    Rewriters return the replacement node, or None when the node cannot
    be rewritten.
    """

    _bmg: BMGraphBuilder
    _typer: LatticeTyper

    # Node classes this fixer knows how to rewrite.
    _unsupported_nodes = [bn.Chi2Node, bn.DivisionNode, bn.Exp2Node, bn.IndexNode, bn.ItemNode, bn.Log10Node, bn.Log1pNode, bn.Log2Node, bn.LogSumExpTorchNode, bn.LogAddExpNode, bn.SquareRootNode, bn.SwitchNode, bn.TensorNode, bn.UniformNode]

    def __init__(self, bmg: BMGraphBuilder, typer: LatticeTyper) -> None:
        self._bmg = bmg
        self._typer = typer

    def _replace_division(self, node: bn.DivisionNode) -> Optional[bn.BMGNode]:
        """x / y -> x * (1/y) for constant y, else x * y**-1."""
        r = node.right
        if isinstance(r, bn.ConstantNode):
            return self._bmg.add_multiplication(node.left, self._bmg.add_constant((1.0 / r.value)))
        neg1 = self._bmg.add_constant((- 1.0))
        powr = self._bmg.add_power(r, neg1)
        return self._bmg.add_multiplication(node.left, powr)

    def _replace_exp2(self, node: bn.Exp2Node) -> bn.BMGNode:
        """exp2(x) -> 2 ** x."""
        two = self._bmg.add_constant(2.0)
        return self._bmg.add_power(two, node.operand)

    def _replace_log10(self, node: bn.Log10Node) -> bn.BMGNode:
        """log10(x) -> ln(x) * (1 / ln(10))."""
        c = self._bmg.add_constant((1.0 / math.log(10)))
        ln = self._bmg.add_log(node.operand)
        return self._bmg.add_multiplication(ln, c)

    def _replace_log1p(self, node: bn.Log1pNode) -> bn.BMGNode:
        """log1p(x) -> log(1 + x)."""
        one = self._bmg.add_constant(1.0)
        add = self._bmg.add_addition(one, node.operand)
        return self._bmg.add_log(add)

    def _replace_log2(self, node: bn.Log2Node) -> bn.BMGNode:
        """log2(x) -> ln(x) * (1 / ln(2)).

        (Parameter annotation corrected from Log10Node to Log2Node.)
        """
        c = self._bmg.add_constant((1.0 / math.log(2)))
        ln = self._bmg.add_log(node.operand)
        return self._bmg.add_multiplication(ln, c)

    def _replace_squareroot(self, node: bn.SquareRootNode) -> bn.BMGNode:
        """sqrt(x) -> x ** 0.5."""
        half = self._bmg.add_constant(0.5)
        return self._bmg.add_power(node.operand, half)

    def _replace_uniform(self, node: bn.UniformNode) -> Optional[bn.BMGNode]:
        """Uniform(0, 1) -> Flat(); other bounds are not rewritable."""
        low = node.low
        high = node.high
        if (isinstance(low, bn.ConstantNode) and (float(low.value) == 0.0) and isinstance(high, bn.ConstantNode) and (float(high.value) == 1.0)):
            return self._bmg.add_flat()
        return None

    def _replace_chi2(self, node: bn.Chi2Node) -> bn.BMGNode:
        """Chi2(df) -> Gamma(df * 0.5, 0.5)."""
        half = self._bmg.add_constant_of_type(0.5, PositiveReal)
        mult = self._bmg.add_multiplication(node.df, half)
        return self._bmg.add_gamma(mult, half)

    def _replace_index_one_column(self, node: bn.IndexNode) -> bn.BMGNode:
        """Rewrite indexing into a single-column matrix, constant-folding
        through constant / ToMatrix / ColumnIndex operands when the index
        is a constant natural."""
        left = node.left
        right = node.right
        typer = self._typer
        assert isinstance(typer, LatticeTyper)
        if (isinstance(right, bn.ConstantNode) and typer.is_natural(right)):
            r = int(right.value)
            if isinstance(left, bn.ConstantNode):
                return self._bmg.add_constant(left.value[r])
            if isinstance(left, bn.ToMatrixNode):
                # ToMatrix inputs are [rows, cols, elem0, elem1, ...].
                return left.inputs[(r + 2)]
            if isinstance(left, bn.ColumnIndexNode):
                # Indexing a constant column of a ToMatrix: fetch the
                # element directly from the flattened input list.
                collection = left.left
                if isinstance(collection, bn.ToMatrixNode):
                    column_index = left.right
                    if (isinstance(column_index, bn.ConstantNode) and typer.is_natural(column_index)):
                        c = int(column_index.value)
                        rows = int(collection.rows.value)
                        return collection.inputs[(((rows * c) + r) + 2)]
        # General case: a runtime vector-index node.
        return self._bmg.add_vector_index(left, right)

    def _replace_index_multi_column(self, node: bn.IndexNode) -> bn.BMGNode:
        """Rewrite indexing into a multi-column matrix as a column index,
        constant-folding when both operands are constants."""
        left = node.left
        right = node.right
        typer = self._typer
        assert isinstance(typer, LatticeTyper)
        if (isinstance(right, bn.ConstantNode) and typer.is_natural(right)):
            r = int(right.value)
            if isinstance(left, bn.ConstantNode):
                return self._bmg.add_constant(left.value[r])
        return self._bmg.add_column_index(left, right)

    def _replace_index(self, node: bn.IndexNode) -> Optional[bn.BMGNode]:
        """Dispatch index rewriting based on the collection's matrix type;
        non-matrix collections are not rewritable."""
        left = node.left
        node_type = self._typer[left]
        if (not isinstance(node_type, bt.BMGMatrixType)):
            return None
        if (node_type.columns == 1):
            return self._replace_index_one_column(node)
        return self._replace_index_multi_column(node)

    def _replace_item(self, node: bn.ItemNode) -> Optional[bn.BMGNode]:
        # item() is the identity on its single operand in BMG.
        return node.inputs[0]

    def _replace_lse(self, node: bn.LogSumExpTorchNode) -> Optional[bn.BMGNode]:
        """Rewrite torch.logsumexp over a one-column matrix.

        Only the default dim/keepdim form (inputs 1 and 2 both zero) is
        supported; a ToMatrix operand is unpacked into an n-ary logsumexp,
        otherwise a vector logsumexp node is produced.
        """
        if ((not bn.is_zero(node.inputs[1])) or (not bn.is_zero(node.inputs[2]))):
            return None
        operand = node.inputs[0]
        operand_type = self._typer[operand]
        if ((not isinstance(operand_type, bt.BMGMatrixType)) or (operand_type.columns != 1)):
            return None
        if isinstance(operand, bn.ToMatrixNode):
            assert (len(operand.inputs) >= 3)
            if (len(operand.inputs) == 3):
                # Single element: logsumexp of one value is that value.
                return operand.inputs[2]
            elements = operand.inputs.inputs[2:]
            assert isinstance(elements, list)
            return self._bmg.add_logsumexp(*elements)
        return self._bmg.add_logsumexp_vector(operand)

    def _replace_lae(self, node: bn.LogAddExpNode) -> Optional[bn.BMGNode]:
        # logaddexp(a, b) -> logsumexp(a, b).
        return self._bmg.add_logsumexp(*node.inputs)

    def _replace_tensor(self, node: bn.TensorNode) -> Optional[bn.BMGNode]:
        """Rewrite a 1- or 2-D tensor literal as a ToMatrix node; higher
        ranks are unsupported (return None)."""
        size = node._size
        if (len(size) > 2):
            return None
        (r, c) = bt._size_to_rc(size)
        # NOTE(review): ``rows`` is built from c and ``cols`` from r —
        # either the matrix is deliberately transposed here or the local
        # names are swapped; confirm against _size_to_rc / ToMatrix.
        rows = self._bmg.add_natural(c)
        cols = self._bmg.add_natural(r)
        tm = self._bmg.add_to_matrix(rows, cols, *node.inputs.inputs)
        return tm

    def _replace_bool_switch(self, node: bn.SwitchNode) -> bn.BMGNode:
        """Two-case switch over a bool -> if-then-else, ordering the
        consequence/alternative by which case is the 0/1 constant."""
        assert (((len(node.inputs) - 1) / 2) == 2)
        assert isinstance(node.inputs[1], bn.ConstantNode)
        assert isinstance(node.inputs[3], bn.ConstantNode)
        if bn.is_zero(node.inputs[1]):
            assert bn.is_one(node.inputs[3])
            return self._bmg.add_if_then_else(node.inputs[0], node.inputs[4], node.inputs[2])
        else:
            assert bn.is_one(node.inputs[1])
            assert bn.is_zero(node.inputs[3])
            return self._bmg.add_if_then_else(node.inputs[0], node.inputs[2], node.inputs[4])

    def _replace_natural_switch(self, node: bn.SwitchNode) -> Optional[bn.BMGNode]:
        """Switch over a natural -> choice node, but only when the case
        labels are exactly 0..n-1 with no gaps or duplicates."""
        num_cases = ((len(node.inputs) - 1) // 2)
        cases = set()
        for i in range(num_cases):
            c = node.inputs[((i * 2) + 1)]
            assert isinstance(c, bn.ConstantNode)
            cases.add(int(c.value))
        if ((min(cases) != 0) or (max(cases) != (num_cases - 1)) or (len(cases) != num_cases)):
            return None
        # Order the case values by their label so choice input i is case i.
        values = ([None] * num_cases)
        for i in range(num_cases):
            c = node.inputs[((i * 2) + 1)]
            assert isinstance(c, bn.ConstantNode)
            v = node.inputs[((i * 2) + 2)]
            values[int(c.value)] = v
        assert (None not in values)
        return self._bmg.add_choice(node.inputs[0], *values)

    def _replace_switch(self, node: bn.SwitchNode) -> Optional[bn.BMGNode]:
        """Dispatch switch rewriting: single case -> its value; bool choice
        -> if-then-else; natural choice -> choice node; otherwise None."""
        assert ((len(node.inputs) % 2) == 1)
        choice = node.inputs[0]
        num_cases = ((len(node.inputs) - 1) // 2)
        assert (num_cases > 0)
        if (num_cases == 1):
            assert isinstance(node.inputs[1], bn.ConstantNode)
            return node.inputs[2]
        assert (not isinstance(choice, bn.ConstantNode))
        tc = self._typer[choice]
        if (tc == bt.Boolean):
            return self._replace_bool_switch(node)
        if (tc == bt.Natural):
            return self._replace_natural_switch(node)
        return None

    def _replace_binomial_logit(self, node: bn.BinomialLogitNode) -> Optional[bn.BinomialNode]:
        """Binomial-with-logits -> Binomial(n, logistic(logit))."""
        logistic = self._bmg.add_logistic(node.inputs[1])
        return self._bmg.add_binomial(node.inputs[0], logistic)
class AutomationTool():
    """Base class for per-package automation tools.

    Holds the task context (package, chroot, result dir, mock config,
    logger, config) and defines the interface subclasses implement.
    """

    def __init__(self, task, resultdir, mock_config_file, log, config):
        self.task = task
        self.resultdir = resultdir
        self.mock_config_file = mock_config_file
        self.log = log
        self.config = config
        # Convenience shortcuts pulled out of the task dict.
        self.package_name = task['package_name']
        self.chroot = task['chroot']

    def enabled(self):
        """Return whether this tool should run; subclasses must implement."""
        raise NotImplementedError

    def run(self):
        """Execute the tool; subclasses must implement."""
        raise NotImplementedError
def chop(A, tol=1e-10):
    """Remove near-zero entries from PETSc matrix A.

    Chops entries below ``tol`` in place, then rebuilds a fresh matrix of
    the same type/sizes/block size from the surviving CSR data so the
    zeroed entries are dropped from the sparsity pattern.  ``A`` is
    destroyed; the compacted replacement is returned.
    """
    A.chop(tol)
    compacted = PETSc.Mat().create(comm=A.comm)
    compacted.setType(A.getType())
    compacted.setSizes(A.getSizes())
    compacted.setBlockSize(A.getBlockSize())
    compacted.setUp()
    # Skip explicit zeros so only surviving entries are stored.
    compacted.setOption(PETSc.Mat.Option.IGNORE_ZERO_ENTRIES, True)
    compacted.setPreallocationCSR(A.getValuesCSR())
    compacted.assemble()
    A.destroy()
    return compacted
_repr
class Exhibitor(db.Model, Timestamp):
    """Event exhibitor (virtual booth) model."""

    class Status():
        # Moderation states for an exhibitor application.
        PENDING = 'pending'
        ACCEPTED = 'accepted'
        STATUSES = [PENDING, ACCEPTED]

    __tablename__ = 'exhibitors'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, nullable=False)
    status = db.Column(db.String, nullable=False, default=Status.PENDING, server_default=Status.PENDING)
    # HTML description; sanitized on assignment (see __setattr__).
    description = db.Column(db.String)
    url = db.Column(db.String)
    # Ordering position within the event's exhibitor list.
    position = db.Column(db.Integer, nullable=False, default=0, server_default='0')
    logo_url = db.Column(db.String)
    banner_url = db.Column(db.String)
    thumbnail_image_url = db.Column(db.String)
    video_url = db.Column(db.String)
    slides_url = db.Column(db.String)
    contact_email = db.Column(db.String)
    contact_link = db.Column(db.String)
    enable_video_room = db.Column(db.Boolean, default=False, nullable=False, server_default='False')
    social_links = db.Column(db.JSON)
    event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE'), nullable=False)
    sessions = db.relationship('Session', secondary=exhibitors_sessions, backref=db.backref('exhibitors', lazy='dynamic'))

    def __setattr__(self, name, value):
        """Sanitize HTML when assigning ``description``; set every other
        attribute unchanged."""
        if (name == 'description'):
            super().__setattr__(name, clean_html(clean_up_string(value)))
        else:
            super().__setattr__(name, value)
_touched_chat
def cmd_wipe(bot, update, chat=None):
    """Delete all stored data for this chat after telling the user which
    subscriptions they had."""
    subscriptions = list(Subscription.select().where(Subscription.tg_chat == chat))
    if subscriptions:
        names = ', '.join(s.tw_user.screen_name for s in subscriptions)
        subs = f'For the record, you were subscribed to these users: {names}.'
    else:
        subs = 'You had no subscriptions.'
    bot.reply(update, f"Okay, I'm forgetting about this chat. {subs} Come back to me anytime you want. Goodbye!")
    # Cascade-delete the chat and everything hanging off it.
    chat.delete_instance(recursive=True)
def parseArguments():
    """Build and parse the CLI arguments for posting IOCs to a teamserver.

    Returns the parsed namespace with ``teamserver``, ``files`` and
    ``java`` attributes.
    """
    parser = ArgumentParser()
    parser.add_argument('teamserver', help='The teamserver to post IOCs to.')
    parser.add_argument('files', nargs='+', help='The files to post IOCs of.')
    parser.add_argument(
        '-j',
        metavar='java',
        dest='java',
        default='/Applications/Cobalt Strike 4/Cobalt Strike 4.1.app/Contents/Java',
        help='The path to the cobalt strike java directory. Default is /Applications/Cobalt Strike 4/Cobalt Strike 4.1.app/Contents/Java',
    )
    return parser.parse_args()
class OptionPlotoptionsScatterSonificationTracksMappingPan(Options):
    """Stereo-pan mapping options for scatter series sonification tracks.

    NOTE(review): the paired same-name definitions suggest
    @property/@setter decorators were stripped from this generated code.
    """

    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter for the mapping function.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property to map from (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter for the mapped data property.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter for the upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter for the lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter: value-range context for the mapping (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter for the value-range context.
        self._config(value, js_type=False)
def test_local_provider_get_empty():
    """Multipart get_data from an empty source should succeed and leave
    both the source and destination directories empty."""
    data_config = Config.for_sandbox().data_config
    with tempfile.TemporaryDirectory() as empty_source, tempfile.TemporaryDirectory() as dest_folder:
        provider = FileAccessProvider(local_sandbox_dir='/tmp/unittest', raw_output_prefix=empty_source, data_config=data_config)
        provider.get_data(empty_source, dest_folder, is_multipart=True)
        fs = provider.get_filesystem_for_path(dest_folder)
        assert (len(fs.find(empty_source)) == 0)
        assert (len(fs.find(dest_folder)) == 0)
('evennia.server.portal.amp.amp.BinaryBoxProtocol.transport')
class TestAMPClientRecv(_TestAMP):
    """Round-trip tests: packets encoded by the AMP server are fed back
    into the AMP client, which must dispatch into the server session
    handler.

    NOTE(review): the ``mocktransport`` parameter and the string literal
    above the class suggest a stripped ``@patch(...)`` decorator.
    """

    def test_msgportal2server(self, mocktransport):
        # Encode a MsgPortal2Server packet onto the mock wire...
        self._connect_server(mocktransport)
        self.amp_server.send_MsgPortal2Server(self.session, text={'foo': 'bar'})
        wire_data = self._catch_wire_read(mocktransport)[0]
        # ...then decode it client-side and verify dispatch.
        self._connect_client(mocktransport)
        self.amp_client.dataReceived(wire_data)
        evennia.SERVER_SESSION_HANDLER.data_in.assert_called_with(self.session, text={'foo': 'bar'})

    def test_adminportal2server(self, mocktransport):
        # A PDISCONNALL admin operation must trigger
        # portal_disconnect_all on the session handler.
        self._connect_server(mocktransport)
        self.amp_server.send_AdminPortal2Server(self.session, operation=amp.PDISCONNALL)
        wire_data = self._catch_wire_read(mocktransport)[0]
        self._connect_client(mocktransport)
        evennia.SERVER_SESSION_HANDLER.portal_disconnect_all = MagicMock()
        self.amp_client.dataReceived(wire_data)
        evennia.SERVER_SESSION_HANDLER.portal_disconnect_all.assert_called()
def test_fmpz_mat():
    """Exercise flint.fmpz_mat: construction, arithmetic, solving, normal forms.

    Review fixes:
    - the eight index-error checks used ``raises(...)`` without ``assert``,
      so their boolean result was silently discarded and they could never
      fail;
    - one of the two duplicated ``D[(0, 2)]`` getter checks was clearly
      meant to cover the symmetric out-of-range row access ``D[(2, 0)]``.
    """
    M = flint.fmpz_mat
    # --- construction, equality, shape, entry access ---
    a = M(2, 3, [1, 2, 3, 4, 5, 6])
    b = M(2, 3, [4, 5, 6, 7, 8, 9])
    assert (a == a)
    assert (a == M(a))
    assert (a != b)
    assert (a.nrows() == 2)
    assert (a.ncols() == 3)
    assert (a.entries() == [1, 2, 3, 4, 5, 6])
    assert (a.table() == [[1, 2, 3], [4, 5, 6]])
    # --- elementwise arithmetic and transpose ---
    assert ((a + b).entries() == [5, 7, 9, 11, 13, 15])
    assert ((a - b).entries() == [(- 3), (- 3), (- 3), (- 3), (- 3), (- 3)])
    assert (a.transpose() == M(3, 2, [1, 4, 2, 5, 3, 6]))
    assert raises((lambda : (a + 1)), TypeError)
    assert raises((lambda : (1 + a)), TypeError)
    assert raises((lambda : (a - 1)), TypeError)
    assert raises((lambda : (1 - a)), TypeError)
    assert raises(a.det, ValueError)  # det requires a square matrix
    assert ((+ a) == a)
    assert ((- a) == M(2, 3, [(- 1), (- 2), (- 3), (- 4), (- 5), (- 6)]))
    c = M(2, 2, [1, 2, 3, 4])
    assert (c.det() == (- 2))
    assert raises((lambda : (a + c)), ValueError)  # shape mismatch
    # --- scalar multiplication (int, long shim, fmpz) ---
    assert ((a * 3).entries() == [3, 6, 9, 12, 15, 18])
    assert ((3 * a).entries() == [3, 6, 9, 12, 15, 18])
    assert ((a * long(3)).entries() == [3, 6, 9, 12, 15, 18])
    assert ((long(3) * a).entries() == [3, 6, 9, 12, 15, 18])
    assert ((a * flint.fmpz(3)).entries() == [3, 6, 9, 12, 15, 18])
    assert ((flint.fmpz(3) * a).entries() == [3, 6, 9, 12, 15, 18])
    # --- random matrices and matrix multiplication ---
    assert (M.randrank(5, 7, 3, 10).rank() == 3)
    A = M.randbits(5, 3, 2)
    B = M.randtest(3, 7, 3)
    C = M.randtest(7, 2, 4)
    assert ((A.nrows(), A.ncols()) == (5, 3))
    assert ((B.nrows(), B.ncols()) == (3, 7))
    assert ((C.nrows(), C.ncols()) == (7, 2))
    assert ((A * (B * C)) == ((A * B) * C))  # associativity
    assert raises((lambda : (A * C)), ValueError)
    assert (bool(M(2, 2, [0, 0, 0, 0])) == False)
    assert (bool(M(2, 2, [0, 0, 0, 1])) == True)
    # --- repr/str under both pretty-print settings ---
    ctx.pretty = False
    assert (repr(M(2, 2, [1, 2, 3, 4])) == 'fmpz_mat(2, 2, [1, 2, 3, 4])')
    ctx.pretty = True
    assert (str(M(2, 2, [1, 2, 3, 4])) == '[1, 2]\n[3, 4]')
    # --- mixed fmpz/fmpq arithmetic, division, inverse ---
    assert ((M(1, 2, [3, 4]) * flint.fmpq(1, 3)) == flint.fmpq_mat(1, 2, [1, flint.fmpq(4, 3)]))
    assert ((flint.fmpq(1, 3) * M(1, 2, [3, 4])) == flint.fmpq_mat(1, 2, [1, flint.fmpq(4, 3)]))
    assert ((M(1, 2, [3, 4]) / 3) == flint.fmpq_mat(1, 2, [1, flint.fmpq(4, 3)]))
    assert (M(2, 2, [1, 2, 3, 4]).inv().det() == (flint.fmpq(1) / M(2, 2, [1, 2, 3, 4]).det()))
    assert (M(2, 2, [1, 2, 3, 4]).inv().inv() == M(2, 2, [1, 2, 3, 4]))
    assert raises((lambda : M.randrank(4, 3, 4, 1)), ValueError)
    assert raises((lambda : M.randrank(3, 4, 4, 1)), ValueError)
    # --- powers ---
    assert ((M(1, 1, [3]) ** 5) == M(1, 1, [(3 ** 5)]))
    assert raises((lambda : pow(M([[1]]), 2, 3)), NotImplementedError)
    assert raises((lambda : (M(1, 2) ** 3)), ValueError)
    assert raises((lambda : (M(1, 1) ** M(1, 1))), TypeError)
    assert raises((lambda : (1 ** M(1, 1))), TypeError)
    # --- constructor error handling and comparisons ---
    assert raises((lambda : M([1])), TypeError)
    assert raises((lambda : M([[1], [2, 3]])), ValueError)
    assert raises((lambda : M(None)), TypeError)
    assert raises((lambda : M(2, 2, [1, 2, 3])), ValueError)
    assert raises((lambda : M(2, 2, 2, 2)), TypeError)
    assert (M([[1, 2, 3], [4, 5, 6]]) == M(2, 3, [1, 2, 3, 4, 5, 6]))
    assert raises((lambda : (M([[1]]) < M([[2]]))), TypeError)
    assert ((M([[1]]) == 1) is False)
    assert ((1 == M([[1]])) is False)
    assert ((M([[1]]) != 1) is True)
    assert ((1 != M([[1]])) is True)
    # --- element get/set and index-range errors ---
    D = M([[1, 2], [3, 4]])
    assert ((D[(0, 0)], D[(0, 1)], D[(1, 0)], D[(1, 1)]) == (1, 2, 3, 4))
    D[(0, 0)] = 3
    assert (D == M([[3, 2], [3, 4]]))
    def set_bad(i, j):
        # Helper: out-of-range assignment should raise IndexError.
        D[(i, j)] = (- 1)
    assert raises((lambda : set_bad(2, 0)), IndexError)
    assert raises((lambda : set_bad(0, 2)), IndexError)
    assert raises((lambda : D[(0, 2)]), IndexError)
    assert raises((lambda : D[(2, 0)]), IndexError)
    assert raises((lambda : set_bad((- 1), 0)), IndexError)
    assert raises((lambda : set_bad(0, (- 1))), IndexError)
    assert raises((lambda : D[((- 1), 0)]), IndexError)
    assert raises((lambda : D[(0, (- 1))]), IndexError)
    # --- Hadamard matrices ---
    assert (M.hadamard(2) == M([[1, 1], [1, (- 1)]]))
    assert raises((lambda : M.hadamard(3)), ValueError)
    assert (M.hadamard(2).is_hadamard() is True)
    assert (M([[1, 2], [3, 4]]).is_hadamard() is False)
    # --- inverse and linear solving ---
    M1 = M([[1, 0], [1, 1]])
    M2 = M([[1, 0], [(- 1), 1]])
    x = M([[3], [4]])
    b = M([[3], [7]])
    assert (M1.inv() == M2)
    assert (M2.inv() == M1)
    assert (M1.inv(integer=True) == M2)
    assert (M2.inv(integer=True) == M1)
    assert ((M1 * x) == b)
    assert (M1.solve(b) == x)
    assert (M1.solve(b, integer=True) == x)
    assert raises((lambda : M1.solve([])), TypeError)
    assert raises((lambda : M1.solve(M([[1]]))), ValueError)
    assert raises((lambda : M([[1, 1], [1, 1]]).solve(b)), ZeroDivisionError)
    assert raises((lambda : M([[1, 2], [3, 4], [5, 6]]).solve(b)), ValueError)
    assert (M([[1, 0], [1, 2]]).solve(b) == flint.fmpq_mat([[3], [2]]))
    assert raises((lambda : M([[1, 0], [1, 2]]).solve(b, integer=True)), ValueError)
    assert raises((lambda : M([[1, 2, 3], [4, 5, 6]]).inv()), ValueError)
    assert raises((lambda : M([[1, 1], [1, 1]]).inv()), ZeroDivisionError)
    assert raises((lambda : M([[1, 0], [1, 2]]).inv(integer=True)), ValueError)
    half = flint.fmpq(1, 2)
    assert (M([[1, 0], [1, 2]]).inv() == flint.fmpq_mat([[1, 0], [(- half), half]]))
    # --- rref and nullspace ---
    M3 = M([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    M3_copy = M(M3)
    M3r = M([[(- 3), 0, 3], [0, (- 3), (- 6)], [0, 0, 0]])
    assert (M3.rref() == (M3r, (- 3), 2))
    assert (M3 != M3r)
    assert (M3.rref(inplace=True) == (M3r, (- 3), 2))
    assert (M3 == M3r)  # inplace=True mutated M3
    M3 = M3_copy
    M3n = M([[3, 0, 0], [(- 6), 0, 0], [3, 0, 0]])
    assert (M3.nullspace() == (M3n, 1))
    assert ((M3 * M3.nullspace()[0]) == M(3, 3, ([0] * 9)))
    # --- LLL reduction ---
    M4 = M([[1, 2, 3], [4, 5, 6], [7, 8, 10]])
    L4 = M([[0, 0, 1], [(- 1), 1, 0], [2, 1, 0]])
    T4 = M([[1, (- 2), 1], [0, 5, (- 3)], [(- 2), 1, 0]])
    assert (L4 == (T4 * M4))
    assert (M4.lll() == L4)
    assert (M4.lll(transform=True) == (L4, T4))
    rep = 'zbasis'
    for gram in ('approx', 'exact'):
        assert (M4.lll(rep=rep, gram=gram) == L4)
        assert (M4.lll(rep=rep, gram=gram, transform=True) == (L4, T4))
    assert raises((lambda : M4.lll(rep='bad')), ValueError)
    assert raises((lambda : M4.lll(gram='bad')), ValueError)
    # --- Hermite and Smith normal forms ---
    M5 = M([[1, 2, 3], [4, 5, 6]])
    H5 = M([[1, 2, 3], [0, 3, 6]])
    T5 = M([[1, 0], [4, (- 1)]])
    assert (H5 == (T5 * M5))
    assert (M5.hnf() == H5)
    assert (M5.hnf(transform=True) == (H5, T5))
    assert (M5.is_hnf() is False)
    assert (H5.is_hnf() is True)
    S5 = M([[1, 0, 0], [0, 3, 0]])
    assert (M5.snf() == S5)
    assert (M5.is_snf() is False)
    assert (S5.is_snf() is True)
    # --- characteristic/minimal polynomial and iteration ---
    M6 = M([[2, 0, 0], [0, 2, 1], [0, 0, 2]])
    assert (M6.charpoly() == flint.fmpz_poly([(- 8), 12, (- 6), 1]))
    assert (M6.minpoly() == flint.fmpz_poly([4, (- 4), 1]))
    assert (list(M6) == [2, 0, 0, 0, 2, 1, 0, 0, 2])
def create_default_groups():
    """Create and persist the default groups from the fixture.

    Returns:
        The list of saved Group instances, in fixture order.
    """
    from flaskbb.fixtures.groups import fixture

    created = []
    for group_name, settings in fixture.items():
        group = Group(name=group_name)
        # Apply every fixture attribute onto the new group.
        for attr, attr_value in settings.items():
            setattr(group, attr, attr_value)
        group.save()
        created.append(group)
    return created
def _build_wheels(
    pkg_name: str,
    pkg_version: str,
    base_url: Optional[str] = None,
    base_url_func: Optional[Callable[[str, str, str], str]] = None,
    pkg_file_func: Optional[Callable[[str, str, str, str, OSType], str]] = None,
    supported_cuda_versions: Optional[List[str]] = None,
) -> Optional[str]:
    """Build the wheel file name (or full URL) for a CUDA-enabled package.

    Args:
        pkg_name: package name, e.g. ``"torch"``.
        pkg_version: package version string.
        base_url: optional base URL prefixed to the wheel file name.
        base_url_func: optional ``f(pkg_version, cuda_version, py_version)``
            computing ``base_url`` dynamically (takes precedence over
            ``base_url``).
        pkg_file_func: optional
            ``f(pkg_name, pkg_version, cuda_version, py_version, os_type)``
            computing the wheel file name; otherwise a conventional
            ``{pkg}-{ver}+{cu}-{py}-{py}-{os}.whl`` name is built.
        supported_cuda_versions: CUDA versions with prebuilt wheels.
            Defaults to ``["11.7", "11.8"]``; the signature uses ``None`` to
            avoid a shared mutable default argument.

    Returns:
        The wheel file name (or ``base_url``-prefixed URL), or ``None`` on
        macOS or when no CUDA toolkit is detected.
    """
    if supported_cuda_versions is None:
        supported_cuda_versions = ['11.7', '11.8']
    (os_type, _) = get_cpu_avx_support()
    cuda_version = get_cuda_version()
    # "3.10.4" -> "cp310" (CPython ABI tag).
    py_version = platform.python_version()
    py_version = 'cp' + ''.join(py_version.split('.')[0:2])
    if (os_type == OSType.DARWIN) or (not cuda_version):
        # No prebuilt CUDA wheels for macOS or CPU-only hosts.
        return None
    if cuda_version not in supported_cuda_versions:
        # Fall back to the newest supported CUDA build. (Typo fix: "Warnning".)
        print(f'Warning: {pkg_name} supported cuda version: {supported_cuda_versions}, replace to {supported_cuda_versions[-1]}')
        cuda_version = supported_cuda_versions[-1]
    # "11.8" -> "cu118" (wheel local-version tag).
    cuda_version = 'cu' + cuda_version.replace('.', '')
    os_pkg_name = 'linux_x86_64' if (os_type == OSType.LINUX) else 'win_amd64'
    if base_url_func:
        base_url = base_url_func(pkg_version, cuda_version, py_version)
        if base_url and base_url.endswith('/'):
            base_url = base_url[:-1]
    if pkg_file_func:
        full_pkg_file = pkg_file_func(pkg_name, pkg_version, cuda_version, py_version, os_type)
    else:
        full_pkg_file = f'{pkg_name}-{pkg_version}+{cuda_version}-{py_version}-{py_version}-{os_pkg_name}.whl'
    if not base_url:
        return full_pkg_file
    return f'{base_url}/{full_pkg_file}'
class TestProvideAppropriateLanguage():
    """Tests for provide_appropriate_language ISO-code resolution.

    The four resolution tests shared an identical patch/call/assert body;
    that body is factored into ``_assert_resolves`` (test names and
    signatures are unchanged).
    """

    PROVIDER = 'test_provider'
    FEATURE = 'test_feature'
    SUBFEATURE = 'test_subfeature'
    # Constraint list each test patches in place of the provider's real one.
    LANGUAGES = ['en-US', 'fr', 'es']

    def _assert_resolves(self, mocker: MockerFixture, iso_code: str, expected_output):
        """Patch the constraint loader, resolve iso_code, assert the result."""
        mocker.patch('edenai_apis.utils.languages.load_language_constraints', return_value=list(self.LANGUAGES))
        output = provide_appropriate_language(iso_code, self.PROVIDER, self.FEATURE, self.SUBFEATURE)
        assert (output == expected_output), f'Expected `{expected_output}` for `{iso_code}` but got `{output}`'

    def test_valid_input(self, mocker: MockerFixture):
        # A bare language code resolves to its first regional variant.
        self._assert_resolves(mocker, 'en', 'en-US')

    def test_valid_input_with_valid_region(self, mocker: MockerFixture):
        self._assert_resolves(mocker, 'en-US', 'en-US')

    def test_valid_input_with_invalid_region(self, mocker: MockerFixture):
        # An unknown region must not fall back to another variant.
        self._assert_resolves(mocker, 'en-EN', None)

    def test_invalid_isocode(self, mocker: MockerFixture):
        self._assert_resolves(mocker, 'inv', None)

    def test_invalid_input(self):
        # Non-alphabetic input raises rather than returning None.
        with pytest.raises(SyntaxError):
            iso_code = '12345'
            provide_appropriate_language(iso_code, self.PROVIDER, self.FEATURE, self.SUBFEATURE)
class InfosInvoiceParserDataClass(BaseModel):
    """Normalized fields extracted from a single parsed invoice.

    Every field is optional or carries a neutral default, so providers may
    fill in only what they detect.
    """

    # Parties involved in the invoice.
    customer_information: CustomerInformationInvoice = CustomerInformationInvoice.default()
    merchant_information: MerchantInformationInvoice = MerchantInformationInvoice.default()
    # Identifier and amounts (floats; currency not encoded here -- see `locale`).
    invoice_number: Optional[StrictStr] = None
    invoice_total: Optional[float] = None
    invoice_subtotal: Optional[float] = None
    gratuity: Optional[float] = None
    amount_due: Optional[float] = None
    previous_unpaid_balance: Optional[float] = None
    discount: Optional[float] = None
    taxes: Sequence[TaxesInvoice] = Field(default_factory=list)
    service_charge: Optional[float] = None
    # Payment / ordering metadata.
    payment_term: Optional[StrictStr] = None
    purchase_order: Optional[StrictStr] = None
    # Dates kept as raw strings exactly as supplied by the provider.
    date: Optional[StrictStr] = None
    due_date: Optional[StrictStr] = None
    service_date: Optional[StrictStr] = None
    service_due_date: Optional[StrictStr] = None
    po_number: Optional[StrictStr] = None
    # NOTE(review): pydantic deep-copies instance defaults per model instance,
    # so these shared default objects are safe; `default_factory` would be the
    # more idiomatic spelling.
    locale: LocaleInvoice = LocaleInvoice(currency=None, language=None)
    bank_informations: BankInvoice = BankInvoice.default()
    item_lines: Sequence[ItemLinesInvoice] = Field(default_factory=list)
class InheritedModelSerializationTests(TestCase):
    def test_multitable_inherited_model_fields_as_expected(self):
        """Serializing a multi-table-inherited instance exposes both the
        parent field and the child's own fields."""
        instance = ChildModel(name1='parent name', name2='child name')
        serialized = DerivedModelSerializer(instance).data
        expected_fields = {'name1', 'name2', 'id', 'childassociatedmodel'}
        self.assertEqual(set(serialized), expected_fields)
def test_special_text_special_chunk_merge():
    """Merging chunks leaves the input untouched and folds the special
    pieces' before/after padding into the adjacent text chunk."""
    chunks = InputChunks([
        SpecialPieceChunk('<s>', after=' '),
        TextChunk('Hello world!'),
        SpecialPieceChunk('</s>', before=' '),
    ])
    snapshot = deepcopy(chunks)
    # First call only checks that merging does not mutate the input.
    chunks.merge_text_chunks()
    assert (chunks == snapshot)
    expected = [
        MergedSpecialPieceChunk('<s>'),
        TextChunk(' Hello world! '),
        MergedSpecialPieceChunk('</s>'),
    ]
    assert (chunks.merge_text_chunks() == expected)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.