code stringlengths 281 23.7M |
|---|
_page.route('/table/save_settings', methods=['POST'])
def save():
    """Persist table filter settings posted by the client.

    Validates the session UUID, merges any supplied filter condition into
    the in-memory config, then force-saves the config to disk.

    Returns:
        JSON: the error dict from check_uuid on mismatch, otherwise
        ``{"status": "success", "msg": ""}``.
    """
    # Reject requests whose session UUID does not match the server's.
    res = check_uuid(all_data['uuid'], request.json['uuid'])
    if res is not None:  # fix: identity comparison instead of `!= None`
        return jsonify(res)
    if 'condition' in request.json:
        # Merge (not replace) the new condition into the stored filter.
        all_data['filter_condition'].update(request.json['condition'])
    log_dir = all_data['root_log_dir']
    log_config_name = all_data['log_config_name']
    save_all_data(all_data, log_dir, log_config_name, force_save=True)
    return jsonify(status='success', msg='')
class TestUCSPoperties(util.ColorAsserts, unittest.TestCase):
    """Channel read/write tests for the CIE 1960 UCS color space."""

    def _ucs(self):
        # Fresh instance per check so mutations cannot leak between tests.
        return Color('color(--ucs 0.51332 0.92781 1.076)')

    def test_u(self):
        """The 'u' channel can be read and written."""
        color = self._ucs()
        self.assertEqual(color['u'], 0.51332)
        color['u'] = 0.2
        self.assertEqual(color['u'], 0.2)

    def test_v(self):
        """The 'v' channel can be read and written."""
        color = self._ucs()
        self.assertEqual(color['v'], 0.92781)
        color['v'] = 0.1
        self.assertEqual(color['v'], 0.1)

    def test_w(self):
        """The 'w' channel can be read and written."""
        color = self._ucs()
        self.assertEqual(color['w'], 1.076)
        color['w'] = 0.1
        self.assertEqual(color['w'], 0.1)

    def test_alpha(self):
        """Alpha defaults to 1 and can be written."""
        color = self._ucs()
        self.assertEqual(color['alpha'], 1)
        color['alpha'] = 0.5
        self.assertEqual(color['alpha'], 0.5)
.parametrize('degree', range(1, 4))
def test_right_inverse(mesh, degree):
V = FunctionSpace(mesh, 'DG', degree)
u = TrialFunction(V)
v = TestFunction(V)
form = (inner(u, v) * dx)
A = Tensor(form)
Result = assemble((A * A.inv))
nnode = V.node_count
assert ((Result.M.values - np.identity(nnode)) <= 1e-13).all() |
_ns.after_request
def after_request(response):
    """Log each webhook request's payload (or just its size when large).

    Payloads of 100 KiB or more are logged as a byte count only; smaller
    ones are logged base64-encoded in full. Always returns *response*
    unchanged.
    """
    req = flask.request
    prefix = 'Webhook ({}) '.format(response.status_code)
    length = req.content_length
    if length is not None and length >= 100 * 1024:
        # Too big to dump verbatim: record only the size.
        app.logger.info(prefix + 'large content: %d bytes', length)
    else:
        app.logger.info(prefix + 'data: %s', base64.b64encode(req.get_data()))
    return response
def send_email_notification():
    """Background worker: poll scan records and e-mail results once a scan
    completes, then mark the record as notified.

    Runs forever, sleeping ``email_schedule`` seconds between polls; exits
    the process on any error in the outer loop.
    """
    # Initial delay — presumably to let dependent services start; TODO confirm.
    time.sleep(20)
    while True:
        try:
            schedule = get_value('config.property', 'SMTP', 'email_schedule')
            records = email_db.db.email.find({})
            for data in records:
                notification = data['email_notification']
                scan_id = data['scanid']
                scan_result = scan_status(scan_id)
                # Only act on records not yet notified ('N') whose scan finished.
                if ((notification == 'N') and (scan_result == 'Completed')):
                    try:
                        email = data['to_email']
                        email_result = emails.main_email(scan_id, email)
                        if (email_result is False):
                            print('failed to connect to SMTP server')
                            # NOTE(review): returning here terminates the worker
                            # permanently on a single SMTP failure — confirm intended.
                            return
                        # NOTE(review): pymongo's update() is deprecated and this
                        # filter updates only the first 'N' record, which may not
                        # be the record just e-mailed — verify against callers.
                        email_db.db.email.update({'email_notification': 'N'}, {'$set': {'email_notification': 'Y'}})
                    except:
                        # NOTE(review): bare except silently swallows per-record
                        # failures (including the DB update) — consider logging.
                        pass
            time.sleep(int(schedule))
        except Exception as e:
            # Any outer-loop failure (e.g. non-numeric schedule) aborts the process.
            logs.logging.info(('Invalid email schedule argument ' + str(e)))
            sys.exit(1)
def audio_converter(audio_file: BufferedReader, export_format: str='wav', frame_rate: Union[(int, None)]=None, channels: Union[(int, None)]=None):
    """Convert an audio file object to another format via pydub.

    The input format is inferred from the file name's last extension.
    Optionally resamples to *frame_rate* and remixes to *channels* (only
    when the channel count actually differs).

    Returns:
        Tuple of (exported file object, frame_rate, frame_width, channels)
        describing the converted audio.
    """
    source_format = audio_file.name.rsplit('.', 1)[-1]
    segment: AudioSegment = AudioSegment.from_file(audio_file, format=source_format)
    if frame_rate:
        segment = segment.set_frame_rate(frame_rate)
    if channels and segment.channels != channels:
        segment = segment.set_channels(channels)
    return (
        segment.export(format=export_format),
        segment.frame_rate,
        segment.frame_width,
        segment.channels,
    )
(('src', 'passthrough', 'expected'), [param({'_target_': 'tests.instantiate.Tree', 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 21}}, {}, Tree(value=1, left=Tree(value=21)), id='default'), param({'_target_': 'tests.instantiate.Tree', '_recursive_': True, 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 21}}, {'_recursive_': True}, Tree(value=1, left=Tree(value=21)), id='cfg:true,override:true'), param({'_target_': 'tests.instantiate.Tree', '_recursive_': True, 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 21}}, {'_recursive_': False}, Tree(value=1, left={'_target_': 'tests.instantiate.Tree', 'value': 21}), id='cfg:true,override:false'), param({'_target_': 'tests.instantiate.Tree', '_recursive_': False, 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 21}}, {'_recursive_': True}, Tree(value=1, left=Tree(value=21)), id='cfg:false,override:true'), param({'_target_': 'tests.instantiate.Tree', '_recursive_': False, 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 21}}, {'_recursive_': False}, Tree(value=1, left={'_target_': 'tests.instantiate.Tree', 'value': 21}), id='cfg:false,override:false'), param({'_target_': 'tests.instantiate.Tree', 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 2, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 3}}}, {}, Tree(value=1, left=Tree(value=2, left=Tree(value=3))), id='3_levels:default'), param({'_target_': 'tests.instantiate.Tree', '_recursive_': False, 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 2, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 3}}}, {}, Tree(value=1, left={'_target_': 'tests.instantiate.Tree', 'value': 2, 'left': {'_target_': 'tests.instantiate.Tree', 'value': 3}}), id='3_levels:cfg1=false'), param({'_target_': 'tests.instantiate.Tree', 'value': 1, 'left': {'_target_': 'tests.instantiate.Tree', '_recursive_': False, 'value': 2, 'left': {'_target_': 
'tests.instantiate.Tree', 'value': 3}}}, {}, Tree(value=1, left=Tree(value=2, left={'_target_': 'tests.instantiate.Tree', 'value': 3})), id='3_levels:cfg2=false')])
def test_recursive_override(instantiate_func: Any, config: Any, passthrough: Any, expected: Any) -> None:
    """Instantiate *config* with *passthrough* overrides and compare against
    the expected tree (which may contain uninstantiated sub-configs when
    ``_recursive_`` is False).

    NOTE(review): the parametrize decorator feeding this test was mangled
    during extraction (the two lines above this function).
    """
    obj = instantiate_func(config, **passthrough)
    assert (obj == expected)
class CellStyle(object):
    """Mutable container for a table cell's CSS declarations plus an
    optional value-formatting callable.

    Attributes:
        style_elements: mapping of CSS property name -> value.
        format_function: optional callable used to render cell values; when
            None (or when it raises), values fall back to ``str()``.
    """

    def __init__(self):
        self.style_elements = {}
        self.format_function = None

    def set(self, key, value):
        """Set (or overwrite) a single CSS property."""
        self.style_elements[key] = value

    def css(self):
        """Render the style as a CSS declaration string, e.g. 'color: red;'."""
        # join() avoids the quadratic `style +=` concatenation of the original.
        return ''.join('%s: %s;' % (key, value)
                       for key, value in self.style_elements.items())

    def column_format(self, x):
        """Format a cell value, falling back to str(x) when no formatter is
        set or the formatter rejects the value."""
        if self.format_function is None:
            return str(x)
        try:
            return self.format_function(x)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; formatting stays best-effort.
            return str(x)

    def copy(self):
        """Return an independent copy: the style dict is shallow-copied,
        the formatter callable is shared."""
        c = CellStyle()
        c.style_elements = self.style_elements.copy()
        c.format_function = self.format_function
        return c
def add_style(svgfile, style, replace=False):
    """Insert (or append) a CSS stylesheet into an SVG document.

    Args:
        svgfile: path/file-like accepted by ``etree.parse``, or a raw SVG
            string/bytes (used as fallback when parsing as a file fails).
        style: CSS text, a ``.css`` file path, or ``'-'`` for stdin.
        replace: when True, overwrite any existing <style> content instead
            of appending to it.

    Returns:
        The serialized SVG as a unicode string.
    """
    if (style == '-'):
        # Conventional CLI alias for reading the stylesheet from stdin.
        style = '/dev/stdin'
    (root, ext) = os.path.splitext(style)
    if ((ext == '.css') or (root == '/dev/stdin')):
        with open(style, encoding='utf-8') as f:
            style = replace_comments(f.read())
    try:
        svg = etree.parse(svgfile).getroot()
    except IOError:
        # Not a readable file: treat svgfile as literal SVG markup.
        try:
            svg = etree.fromstring(svgfile)
        except UnicodeDecodeError:
            # lxml requires bytes when the document carries an encoding
            # declaration; retry with an explicit encode.
            svg = etree.fromstring(svgfile.encode('utf-8'))
    defs = svg.find('defs', namespaces=svg.nsmap)
    if (defs is None):
        # Create <defs> as the first child so the <style> precedes content.
        defs = etree.Element('defs', nsmap=svg.nsmap)
        svg.insert(0, defs)
    style_element = defs.find('.//style', namespaces=svg.nsmap)
    if (style_element is None):
        style_element = etree.Element('style', nsmap=svg.nsmap)
        defs.append(style_element)
    if replace:
        style_content = style
    else:
        # Append to whatever CSS is already present.
        style_content = (((style_element.text or '') + ' ') + style)
    # CDATA keeps CSS selectors (e.g. '>') safe from XML escaping.
    style_element.text = etree.CDATA(style_content)
    return etree.tostring(svg, encoding='utf-8').decode('utf-8')
def clean_nones(value: Any) -> Any:
    """Recursively remove ``None`` entries from nested lists and dicts.

    Lists lose their None elements, dicts lose keys mapped to None; any
    other value is returned unchanged.  (Fix: the original annotation
    claimed dict-only input/output, but lists and scalars are handled too.)
    """
    if isinstance(value, list):
        return [clean_nones(item) for item in value if item is not None]
    if isinstance(value, dict):
        return {key: clean_nones(val)
                for key, val in value.items()
                if val is not None}
    return value
class Plugin(plugin.PluginProto):
    """RPIEasy task plugin #523: monitor/control a Jami (GNU Ring) client.

    Exposes four task values: State (numeric call state), Status (textual
    state), Peer (caller/sender id) and Text (last received message).

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; the nesting of a few branches (e.g. plugin_senddata placement
    in the callbacks) is inferred — verify against upstream RPIEasy.
    """
    PLUGIN_ID = 523
    PLUGIN_NAME = 'Communication - Jami DringCtrl (EXPERIMENTAL)'
    PLUGIN_VALUENAME1 = 'State'
    PLUGIN_VALUENAME2 = 'Status'
    PLUGIN_VALUENAME3 = 'Peer'
    PLUGIN_VALUENAME4 = 'Text'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_DUMMY
        self.vtype = rpieGlobals.SENSOR_TYPE_QUAD
        self.readinprogress = 0
        self.valuecount = 4
        self.senddataoption = True
        self.recdataoption = False
        self.timeroption = False
        self.timeroptional = True
        self.formulaoption = False
        # Jami connection object (DBUS channel or UDP bridge), set in plugin_init.
        self.jami = None
        self.lastinit = 0

    def plugin_init(self, enableplugin=None):
        """(Re)establish the Jami connection; retries in 30s on failure."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.initialized = False
        if self.enabled:
            misc.addLog(rpieGlobals.LOG_LEVEL_INFO, 'Try to init Jami connection ')
            # Remember the previous session id so we only (re)start the
            # worker thread when the session actually changed.
            try:
                self.sessid = self.jami.sessid
            except:
                self.sessid = (- 1)
            jamict = self.taskdevicepluginconfig[0]
            if (jamict == 0):
                # Mode 0: direct DBUS session (RPIEasy runs as the Jami user).
                try:
                    self.jami = dring.request_dring_channel(self.gettaskname(), 0, True)
                    self.jami.cb_ring2 = self.cb_ring2
                    self.jami.cb_call = self.cb_call
                    self.jami.cb_text = self.cb_text
                    self.initialized = self.jami.initialized
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Jami DBUS init error: ' + str(e)))
            else:
                # Mode 1: UDP bridge to an external jamibridge.py process.
                try:
                    self.jami = dring.request_dring_bridge()
                    self.jami.cb_ring2 = self.cb_ring2
                    self.jami.cb_call = self.cb_call
                    self.jami.cb_text = self.cb_text
                    self.initialized = self.jami.initialized
                except Exception as e:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Jami bridge init error: ' + str(e)))
            if self.initialized:
                if (self.sessid != self.jami.sessid):
                    # New session: start the background worker thread.
                    try:
                        self.jami.daemon = True
                        self.jami.start()
                    except:
                        pass
                    if (jamict == 0):
                        # DBUS mode installs its own SIGINT handler for clean exit.
                        signal.signal(signal.SIGINT, self.signal_handler)
                # Reset all four task values without pushing them upstream.
                self.set_value(1, 0, False)
                self.set_value(2, 'INACTIVE', False)
                self.set_value(3, 0, False)
                self.set_value(4, '', False)
                misc.addLog(rpieGlobals.LOG_LEVEL_INFO, ('Jami connected: ' + str((self.jami.initialized and self.jami.operational))))
            else:
                # Retry initialization in 30 seconds.
                rpieTime.addsystemtimer(30, self.initcheck, [0, 0])

    def initcheck(self, timerid, pararray):
        """System-timer callback: retry plugin_init until initialized."""
        if (self.initialized == False):
            self.plugin_init()

    def plugin_exit(self):
        """Best-effort shutdown of the Jami worker thread."""
        try:
            self.jami.stopThread()
        except:
            pass

    def signal_handler(self, signal, frame):
        """SIGINT handler: clean up GPIO/webserver and exit the process."""
        import webserver
        import commands
        try:
            import gpios
        except Exception as e:
            pass
        commands.doCleanup()
        webserver.WebServer.stop()
        try:
            gpios.HWPorts.cleanup()
        except:
            pass
        time.sleep(1)
        print('\nProgram exiting gracefully')
        sys.exit(0)

    def call(self, target):
        """Place a call to *target*: either an index into the contact list
        (numeric) or a contact id already in the list."""
        ae = '-1'
        clist = self.jami.getContactList()
        if str(target).isnumeric():
            target = int(target)
            if (target < len(clist)):
                ae = clist[target]
        elif (target in clist):
            ae = target
        if (ae != '-1'):
            self.jami.makeCall(str(ae))
        else:
            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Jami addressee not in contact list ' + str(target)))

    def cb_ring2(self, stateid, statestr, caller=None):
        """Ring-state callback: mirror state/status/peer into task values."""
        if ((int(self.uservar[0]) != stateid) and (stateid != 0)):
            self.set_value(1, stateid)
            self.set_value(2, statestr)
            self.set_value(3, caller)
            if (str(self.uservar[3]) != ''):
                # Clear any stale message text on a new ring state.
                self.set_value(4, '')
            self.plugin_senddata()
        elif ((int(self.uservar[0]) in [1, 2]) and (stateid == 0)):
            # Ringing/incoming ended without connecting: back to INACTIVE.
            self.set_value(1, 0)
            self.set_value(2, 'INACTIVE')
            if (str(self.uservar[3]) != ''):
                self.set_value(4, '')
            self.plugin_senddata()
        return True

    def cb_call(self, state):
        """Call-state callback: 1 => ACTIVE, 0 => INACTIVE."""
        if (int(state) == 1):
            self.set_value(1, 3, False)
            self.set_value(2, 'ACTIVE', False)
            if (str(self.uservar[3]) != ''):
                self.set_value(4, '')
            self.plugin_senddata()
        elif ((int(self.uservar[0]) != 0) and (state == 0)):
            self.set_value(1, 0, False)
            self.set_value(2, 'INACTIVE', False)
            if (str(self.uservar[3]) != ''):
                self.set_value(4, '')
            self.plugin_senddata()
        return True

    def cb_text(self, fromacc, text):
        """Incoming-text callback: publish sender and message body."""
        self.set_value(1, 10, False)
        self.set_value(2, 'MSG', False)
        self.set_value(3, fromacc, False)
        self.set_value(4, text, False)
        self.plugin_senddata()
        return True

    def webform_load(self):
        """Render the task's settings page (connection mode + status info)."""
        choice1 = self.taskdevicepluginconfig[0]
        options = ['Direct DBUS session (no root)', 'UDP JamiBridge (any user)']
        optionvalues = [0, 1]
        webserver.addFormSelector('Connection mode', 'p523_mode', len(optionvalues), options, optionvalues, None, choice1)
        # NOTE(review): this note's URL was truncated in extraction.
        webserver.addFormNote("Download and install Jami from <a href=' Set up an account by its GUI, after that it can be monitored by RPIEasy.")
        webserver.addFormNote('Direct session can only be used if RPIEasy started with the same user as the Jami application!')
        webserver.addFormNote('JamiBridge can be used, IF the external jamibridge.py started with the same user as the Jami application! RPIEasy can be started as root in this case. The jamibridge.py can be found in the same directory as RPIEasy.py, the sample desktop entry can be found at rpieasy/lib/dringctrl/jamibridge.desktop. Please consult your distribution manual for autostarting application with your GUI.')
        if self.initialized:
            try:
                status = (self.jami.initialized and self.jami.operational)
                webserver.addHtml((('<tr><td>Connected to Jami daemon<td>' + str(status)) + '</tr>'))
                webserver.addHtml((('<tr><td>Account in use<td>' + str(self.jami.account)) + '</tr>'))
                webserver.addHtml('<tr><td>Approved contacts<td>')
                cl = self.jami.getContactList()
                for i in range(len(cl)):
                    webserver.addHtml((str(cl[i]) + '<BR>'))
                webserver.addHtml('</tr>')
            except Exception as e:
                webserver.addHtml((('<tr><td>Status<td>' + str(e)) + '</tr>'))
        return True

    def webform_save(self, params):
        """Persist the connection mode; re-init if it changed."""
        pval = self.taskdevicepluginconfig[0]
        par = webserver.arg('p523_mode', params)
        try:
            self.taskdevicepluginconfig[0] = int(par)
        except:
            self.taskdevicepluginconfig[0] = 0
        try:
            if (pval != self.taskdevicepluginconfig[0]):
                self.plugin_init()
        except:
            pass
        return True

    def plugin_write(self, cmd):
        """Handle 'jami,<subcmd>[,args]' console/rules commands.

        Subcommands: call, sendtext, accept, refuse, endcall, contactlist.
        """
        res = False
        cmdarr = cmd.split(',')
        cmdarr[0] = cmdarr[0].strip().lower()
        if (cmdarr[0] == 'jami'):
            if self.initialized:
                subcmd = str(cmdarr[1].strip()).lower()
                if (subcmd == 'call'):
                    try:
                        target = cmdarr[2].strip()
                        self.jami.makeCall(target)
                    except Exception as e:
                        print(e)
                elif (subcmd == 'sendtext'):
                    try:
                        target = cmdarr[2].strip()
                        msg = cmdarr[3].strip()
                        self.jami.sendText(target, msg)
                    except Exception as e:
                        print(e)
                elif (subcmd == 'accept'):
                    try:
                        self.jami.acceptIncoming()
                    except Exception as e:
                        print(e)
                elif (subcmd == 'refuse'):
                    try:
                        self.jami.refuseIncoming()
                    except Exception as e:
                        print(e)
                elif (subcmd == 'endcall'):
                    try:
                        self.jami.endCall()
                    except Exception as e:
                        print(e)
                elif (subcmd == 'contactlist'):
                    try:
                        clist = self.jami.getContactList()
                        misc.addLog(rpieGlobals.LOG_LEVEL_INFO, ('Jami contacts: ' + str(clist)))
                    except Exception as e:
                        print(e)
                res = True
            else:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Jami is not initialized')
        return res
class Test_auth(unittest.TestCase):
    """Unit tests for the IPv6 Authentication Header class (ipv6.auth)."""

    def setUp(self):
        self.nxt = 0
        # Header length field; total length is (size + 2) * 4 octets
        # (see test_len) — matches the RFC 4302 payload-length convention.
        self.size = 4
        self.spi = 256
        self.seq = 1
        # 12-byte Integrity Check Value (opaque test data).
        self.data = b'!\xd3\xa9\\_\xfdM\x18F"\xb9\xf8'
        self.auth = ipv6.auth(self.nxt, self.size, self.spi, self.seq, self.data)
        # nxt(B) size(B) 2-byte pad spi(I) seq(I) icv(12s), network order.
        self.form = '!BB2xII12s'
        self.buf = struct.pack(self.form, self.nxt, self.size, self.spi, self.seq, self.data)

    def test_init(self):
        """Constructor stores all fields verbatim."""
        eq_(self.nxt, self.auth.nxt)
        eq_(self.size, self.auth.size)
        eq_(self.spi, self.auth.spi)
        eq_(self.seq, self.auth.seq)
        eq_(self.data, self.auth.data)

    def test_parser(self):
        """parser() round-trips the packed buffer back into fields."""
        _res = ipv6.auth.parser(self.buf)
        # parser may return (header, rest...) or just the header.
        if (type(_res) is tuple):
            res = _res[0]
        else:
            res = _res
        eq_(self.nxt, res.nxt)
        eq_(self.size, res.size)
        eq_(self.spi, res.spi)
        eq_(self.seq, res.seq)
        eq_(self.data, res.data)

    def test_serialize(self):
        """serialize() emits the same wire format as struct.pack."""
        buf = self.auth.serialize()
        res = struct.unpack_from(self.form, six.binary_type(buf))
        eq_(self.nxt, res[0])
        eq_(self.size, res[1])
        eq_(self.spi, res[2])
        eq_(self.seq, res[3])
        eq_(self.data, res[4])

    def test_len(self):
        """len() is (size + 2) * 4 octets."""
        eq_(((4 + 2) * 4), len(self.auth))

    def test_len_re(self):
        """len() tracks a different size field (16-byte ICV case)."""
        size = 5
        auth = ipv6.auth(0, size, 256, 1, b'!\xd3\xa9\\_\xfdM\x18F"\xb9\xf8\xf8\xf8\xf8\xf8')
        eq_(((size + 2) * 4), len(auth))

    def test_default_args(self):
        """Default-constructed header serializes with documented defaults
        (nxt=6, size=2, zeroed spi/seq and 4-byte zero ICV)."""
        hdr = ipv6.auth()
        buf = hdr.serialize()
        LOG.info(repr(buf))
        res = struct.unpack_from(ipv6.auth._PACK_STR, six.binary_type(buf))
        LOG.info(res)
        eq_(res[0], 6)
        eq_(res[1], 2)
        eq_(res[2], 0)
        eq_(res[3], 0)
        eq_(buf[ipv6.auth._MIN_LEN:], b'\x00\x00\x00\x00')
class TakeUntil(Op):
    """Reactive operator: relay the source stream until *notifier* emits,
    then complete.

    Any emission from the notifier is treated as end-of-stream for this op;
    notifier errors/done are forwarded as the op's own error/done.
    """
    __slots__ = ('_notifier',)

    def __init__(self, notifier, source=None):
        Op.__init__(self, source)
        self._notifier = notifier
        notifier.connect(self._on_notifier, self.on_source_error, self.on_source_done)

    def _on_notifier(self, *args):
        # A notifier emission terminates the stream regardless of payload.
        self.on_source_done(self._source)

    def on_source_done(self, source):
        # Complete first, then detach from the notifier and drop the
        # reference so the notifier cannot keep this op alive (or re-fire).
        Op.on_source_done(self, self._source)
        self._notifier.disconnect(self._on_notifier, self.on_source_error, self.on_source_done)
        self._notifier = None
class CartpoleEnv(mujoco_env.MujocoEnv, utils.EzPickle):
    """MuJoCo cartpole swing-up environment with a PETS-style smooth reward."""

    PENDULUM_LENGTH = 0.6

    def __init__(self):
        utils.EzPickle.__init__(self)
        dir_path = os.path.dirname(os.path.realpath(__file__))
        mujoco_env.MujocoEnv.__init__(self, ('%s/assets/cartpole.xml' % dir_path), 2)

    def step(self, a):
        """Advance the sim; reward peaks when the pole tip reaches
        (0, PENDULUM_LENGTH), minus a small quadratic action penalty."""
        self.do_simulation(a, self.frame_skip)
        ob = self._get_obs()
        cost_lscale = CartpoleEnv.PENDULUM_LENGTH
        reward = np.exp(((- np.sum(np.square((self._get_ee_pos(ob) - np.array([0.0, CartpoleEnv.PENDULUM_LENGTH]))))) / (cost_lscale ** 2)))
        reward -= (0.01 * np.sum(np.square(a)))
        done = False  # episode termination is handled by the wrapper
        return (ob, reward, done, {})

    def reset_model(self):
        """Randomize state around the initial pose and return the obs."""
        qpos = (self.init_qpos + np.random.normal(0, 0.1, np.shape(self.init_qpos)))
        qvel = (self.init_qvel + np.random.normal(0, 0.1, np.shape(self.init_qvel)))
        self.set_state(qpos, qvel)
        return self._get_obs()

    def _get_obs(self):
        return np.concatenate([self.sim.data.qpos, self.sim.data.qvel]).ravel()

    @staticmethod
    def _get_ee_pos(x):
        """Pole-tip (end effector) position from observation [x0, theta, ...].

        BUG FIX: the original method was declared without @staticmethod and
        without `self`, yet called as ``self._get_ee_pos(ob)`` in step(),
        which would raise TypeError; restoring @staticmethod fixes the call.
        """
        (x0, theta) = (x[0], x[1])
        return np.array([(x0 - (CartpoleEnv.PENDULUM_LENGTH * np.sin(theta))), ((- CartpoleEnv.PENDULUM_LENGTH) * np.cos(theta))])

    def viewer_setup(self):
        v = self.viewer
        v.cam.trackbodyid = 0
        v.cam.distance = v.model.stat.extent
def fever(n: int):
    """Return the n-th FEVER example as a MultipleChoiceSample.

    Downloads the dataset to ~/.cache/lmql/datasets/fever.json on first use.
    """
    import os
    import json
    path = os.path.join(os.path.expanduser('~'), '.cache', 'lmql', 'datasets', 'fever.json')
    if (not os.path.exists(path)):
        os.makedirs(os.path.join(os.path.expanduser('~'), '.cache', 'lmql', 'datasets'), exist_ok=True)
        # NOTE(review): the download URL literal below was truncated during
        # extraction; restore it from the original source before use.
        url = '
        # NOTE(review): `subprocess` is not imported in this function —
        # presumably imported at module level; confirm.
        subprocess.run(['curl', url], stdout=open(path, 'w'), check=True)
        assert os.path.exists(path)
    with open(path, 'r') as f:
        data = json.load(f)
    s = data['examples'][n]
    # target_scores maps answer text -> score; the max-scoring one is gold.
    choices = list(s['target_scores'].items())
    answer_choices = [x[0].rstrip('.') for x in choices]
    answer = [x[0] for x in choices if (x[1] == max([x[1] for x in choices]))][0]
    choices_line = ('Answer Choices: ' + ', '.join(answer_choices))
    return MultipleChoiceSample(s['input'], answer_choices, answer, choices_line)
class RedisTransportClientCore(RedisTransportCore):
    """Client-side Redis Gateway transport core.

    Defaults to protocol version 3; the converter normalizes raw values
    into ProtocolVersion members.
    """
    protocol_version = attr.ib(default=ProtocolVersion.VERSION_3, converter=_convert_protocol_version)

    def is_server(self):
        """Client cores are never servers."""
        return False

    def _get_metric_name(self, name):
        """Namespace metric *name* under the client transport prefix."""
        return f'client.transport.redis_gateway.{name}'
def extractDwrfTL(item):
    """Build a release message for 'Death Game' translation posts.

    Returns None for previews or posts without chapter/volume/fragment
    info, a release message when a known tag matches, and False otherwise
    (same contract as before).
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    series = "The World Has Become a Death Game and It's Fun"
    tagmap = [
        ('the world is fun as it became a death game', series, 'translated'),
        ('DG:FW', series, 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class PromptTemplateRegistry():
    """Registry of prompt templates keyed by scene, then model, then language."""

    def __init__(self) -> None:
        # scene_name -> {model_name -> {language -> prompt_template}}
        self.registry = defaultdict(dict)

    def register(self, prompt_template, language: str='en', is_default=False, model_names: List[str]=None) -> None:
        """Register *prompt_template* under its scene for the given models
        and language; when *is_default*, also as the scene-wide fallback.

        Raises:
            ValueError: if the template has no scene name.
        """
        scene_name = prompt_template.template_scene
        if (not scene_name):
            raise ValueError('Prompt template scene name cannot be empty')
        if (not model_names):
            # No explicit models: register under the default-model key.
            model_names: List[str] = [_DEFAULT_MODEL_KEY]
        scene_registry = self.registry[scene_name]
        _register_scene_prompt_template(scene_registry, prompt_template, language, model_names)
        if is_default:
            # Make the template reachable via the default model key for both
            # the default language and the requested language.
            _register_scene_prompt_template(scene_registry, prompt_template, _DEFUALT_LANGUAGE_KEY, [_DEFAULT_MODEL_KEY])
            _register_scene_prompt_template(scene_registry, prompt_template, language, [_DEFAULT_MODEL_KEY])

    def get_prompt_template(self, scene_name: str, language: str, model_name: str, proxyllm_backend: str=None):
        """Resolve a template: proxy backend first, then model name, then
        the default model; language falls back to the default language.

        Raises:
            ValueError: if no template exists for the scene at all.
        """
        scene_registry = self.registry[scene_name]
        print(f'Get prompt template of scene_name: {scene_name} with model_name: {model_name}, proxyllm_backend: {proxyllm_backend}, language: {language}')
        registry = None
        if proxyllm_backend:
            registry = scene_registry.get(proxyllm_backend)
        if (not registry):
            registry = scene_registry.get(model_name)
        if (not registry):
            registry = scene_registry.get(_DEFAULT_MODEL_KEY)
        if (not registry):
            raise ValueError(f'There is no template with scene name {scene_name}, model name {model_name}, language {language}')
        else:
            print(f'scene: {scene_name} has custom prompt template of model: {model_name}, language: {language}')
        prompt_template = registry.get(language)
        if (not prompt_template):
            prompt_template = registry.get(_DEFUALT_LANGUAGE_KEY)
        return prompt_template
class lift(_coconut_base_callable):
    """Coconut's ``lift`` combinator: ``lift(f)(g, h)(x) == f(g(x), h(x))``.

    Constructing with extra arguments applies them immediately, so
    ``lift(f, g, h)`` is shorthand for ``lift(f)(g, h)``.
    """
    __slots__ = ('func',)

    def __new__(cls, func, *func_args, **func_kwargs):
        self = _coconut.super(_coconut_lift, cls).__new__(cls)
        self.func = func
        if (func_args or func_kwargs):
            # Immediately apply extra arguments (returns a _coconut_lifted).
            self = self(*func_args, **func_kwargs)
        return self

    def __reduce__(self):
        # Pickle support: rebuild from the base function alone.
        return (self.__class__, (self.func,))

    def __call__(self, *func_args, **func_kwargs):
        return _coconut_lifted(self.func, *func_args, **func_kwargs)

    def __repr__(self):
        return ('lift(%r)' % (self.func,))
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
(((detect_target().name() == 'cuda') and (int(detect_target()._arch) < 80)), 'Not supported by CUDA < SM80.')
class ConvBiasActFewChannelsTestCase(unittest.TestCase):
    """AITemplate conv2d + bias + activation tests for few-channel inputs.

    Compiles each fused op and compares its output against a PyTorch
    reference (conv2d + bias + activation), with NCHW<->NHWC permutes at
    the boundaries.

    NOTE(review): the two lines above the class and the
    ``(['float16', 'float32'])`` lines below are mangled decorators
    (``@unittest.skipIf`` / ``@parameterized.expand``) whose '@' prefixes
    were lost in extraction.
    """

    def _test_conv_bias_relu_few_channels(self, HH=224, WW=224, CI=4, CO=64, batch=1, copy_op=False, test_name='conv2d_bias_relu_few_channels', dtype='float16'):
        """Compile conv2d_bias_relu_few_channels and check against PyTorch."""
        # 7x7 kernel, stride 2, pad 3 — a ResNet-style stem convolution.
        KK = 7
        stride = 2
        pad = 3
        target = detect_target()
        # AITemplate tensors are NHWC; the PyTorch reference below is NCHW.
        X = Tensor(shape=[IntImm(batch), HH, WW, CI], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[CO, KK, KK, CI], dtype=dtype, name='input_1', is_input=True)
        B = Tensor(shape=[CO], dtype=dtype, name='input_2', is_input=True)
        OP = ops.conv2d_bias_relu_few_channels(stride=stride, pad=pad, dilate=1)
        if copy_op:
            # Exercise op re-construction from its own attribute dict.
            OP = ops.conv2d_bias_relu_few_channels(**OP._get_op_attributes())
        Y = OP(X, W, B)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        X_pt = get_random_torch_tensor([batch, CI, HH, WW], dtype=dtype)
        W_pt = get_random_torch_tensor([CO, CI, KK, KK], dtype=dtype)
        B_pt = get_random_torch_tensor([1, CO, 1, 1], dtype=dtype)
        Y_pt = torch.nn.functional.conv2d(X_pt, W_pt, padding=pad, stride=stride)
        Y_pt = (Y_pt + B_pt)
        Y_pt = torch.nn.functional.relu(Y_pt)
        # Convert reference inputs to NHWC for the compiled module.
        x = X_pt.permute((0, 2, 3, 1)).contiguous()
        w = W_pt.permute((0, 2, 3, 1)).contiguous()
        inputs = {'input_0': x, 'input_1': w, 'input_2': B_pt.squeeze()}
        y = torch.empty_like(Y_pt).permute((0, 2, 3, 1)).contiguous()
        module.run_with_tensors(inputs, [y])
        y_transpose = y.permute((0, 3, 1, 2))
        # float32 accumulates differently; allow a looser tolerance.
        if (dtype == 'float32'):
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.05, rtol=0.01))
        else:
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.01, rtol=0.01))

    (['float16', 'float32'])
    def test_relu(self, dtype):
        """Relu variant, with and without op-copy construction."""
        self._test_conv_bias_relu_few_channels(test_name=f'conv_bias_relu_few_channels_{dtype}', dtype=dtype)
        self._test_conv_bias_relu_few_channels(copy_op=True, test_name=f'conv_bias_relu_few_channels_{dtype}_copy_op', dtype=dtype)

    def _test_conv_bias_hardswish_few_channels(self, HH=224, WW=224, CI=4, CO=64, batch=1, copy_op=False, test_name='conv2d_bias_hardswish_few_channels', dtype='float16'):
        """Compile conv2d_bias_hardswish_few_channels and check against PyTorch."""
        KK = 7
        stride = 2
        pad = 3
        target = detect_target()
        X = Tensor(shape=[IntImm(batch), HH, WW, CI], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[CO, KK, KK, CI], dtype=dtype, name='input_1', is_input=True)
        B = Tensor(shape=[CO], dtype=dtype, name='input_2', is_input=True)
        OP = ops.conv2d_bias_hardswish_few_channels(stride=stride, pad=pad, dilate=1)
        if copy_op:
            OP = ops.conv2d_bias_hardswish_few_channels(**OP._get_op_attributes())
        Y = OP(X, W, B)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        X_pt = get_random_torch_tensor([batch, CI, HH, WW], dtype=dtype)
        W_pt = get_random_torch_tensor([CO, CI, KK, KK], dtype=dtype)
        B_pt = get_random_torch_tensor([1, CO, 1, 1], dtype=dtype)
        Y_pt = torch.nn.functional.conv2d(X_pt, W_pt, padding=pad, stride=stride)
        Y_pt = (Y_pt + B_pt)
        Y_pt = hard_swish(Y_pt)
        x = X_pt.permute((0, 2, 3, 1)).contiguous()
        w = W_pt.permute((0, 2, 3, 1)).contiguous()
        inputs = {'input_0': x, 'input_1': w, 'input_2': B_pt.squeeze()}
        y = torch.empty_like(Y_pt).permute((0, 2, 3, 1)).contiguous()
        module.run_with_tensors(inputs, [y])
        y_transpose = y.permute((0, 3, 1, 2))
        if (dtype == 'float32'):
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.05, rtol=0.01))
        else:
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.01, rtol=0.01))

    (['float16', 'float32'])
    def test_hardswish(self, dtype):
        """Hardswish variant, with and without op-copy construction."""
        self._test_conv_bias_hardswish_few_channels(test_name=f'conv_bias_hardswish_few_channels_{dtype}', dtype=dtype)
        self._test_conv_bias_hardswish_few_channels(copy_op=True, test_name=f'conv_bias_hardswish_few_channels_{dtype}_copy_op', dtype=dtype)
class Migration(migrations.Migration):
    """Replace the CommunitySubscription/RoomSubscription models with a
    unified Subscription model (plus SubscriptionNote) and repoint
    SubscriptionBill at it."""
    dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('core', '0010_auto__2140')]
    # Order matters: create the new models, detach old FKs, then delete the
    # old models before wiring subscriptionbill.subscription.
    operations = [migrations.CreateModel(name='Subscription', fields=[('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(auto_now_add=True)), ('updated', models.DateTimeField(auto_now=True)), ('price', models.DecimalField(max_digits=9, decimal_places=2)), ('description', models.CharField(max_length=256, null=True, blank=True)), ('start_date', models.DateField()), ('end_date', models.DateField(null=True, blank=True)), ('created_by', models.ForeignKey(related_name='+', default=1, to=settings.AUTH_USER_MODEL)), ('location', models.ForeignKey(to='core.Location')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL))]), migrations.CreateModel(name='SubscriptionNote', fields=[('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('created', models.DateTimeField(auto_now_add=True)), ('note', models.TextField(null=True, blank=True)), ('created_by', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True)), ('subscription', models.ForeignKey(related_name='communitysubscription_notes', to='core.Subscription'))]), migrations.RemoveField(model_name='communitysubscription', name='bills'), migrations.RemoveField(model_name='communitysubscription', name='location'), migrations.RemoveField(model_name='communitysubscription', name='user'), migrations.RemoveField(model_name='roomsubscription', name='bills'), migrations.RemoveField(model_name='roomsubscription', name='location'), migrations.RemoveField(model_name='roomsubscription', name='user'), migrations.AlterField(model_name='room', name='description', field=models.TextField(null=True, blank=True)), migrations.DeleteModel(name='CommunitySubscription'), migrations.DeleteModel(name='RoomSubscription'), migrations.AddField(model_name='subscriptionbill', name='subscription', field=models.ForeignKey(related_name='bills', to='core.Subscription', null=True))]
class group_add(group_mod):
    """OpenFlow 1.4 (wire version 5) OFPT_GROUP_MOD message with command
    OFPGC_ADD: install a new group with the given buckets.

    NOTE(review): loxigen-style generated code that appears to target
    Python 2 (str-based packing with '\\x00' padding and ''.join) —
    confirm before reusing on Python 3. ``unpack`` is conventionally a
    @staticmethod in loxi output; the decorator may have been lost in
    extraction.
    """
    version = 5   # OpenFlow 1.4
    type = 15     # OFPT_GROUP_MOD
    command = 0   # OFPGC_ADD

    def __init__(self, xid=None, group_type=None, group_id=None, buckets=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (group_type != None):
            self.group_type = group_type
        else:
            self.group_type = 0
        if (group_id != None):
            self.group_id = group_id
        else:
            self.group_id = 0
        if (buckets != None):
            self.buckets = buckets
        else:
            self.buckets = []
        return

    def pack(self):
        """Serialize to wire format; the length field (bytes 2-3) is
        patched in after all parts are packed."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.command))
        packed.append(struct.pack('!B', self.group_type))
        packed.append(('\x00' * 1))  # pad byte
        packed.append(struct.pack('!L', self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a group_add from a loxi buffer *reader*; asserts the
        fixed header fields match this message type."""
        obj = group_add()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 15)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's bytes (length counted
        # from offset 4, per the loxi slice convention).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _command = reader.read('!H')[0]
        assert (_command == 0)
        obj.group_type = reader.read('!B')[0]
        reader.skip(1)  # pad byte
        obj.group_id = reader.read('!L')[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, ofp.common.bucket.unpack)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.group_type != other.group_type):
            return False
        if (self.group_id != other.group_id):
            return False
        if (self.buckets != other.buckets):
            return False
        return True

    def pretty_print(self, q):
        """Render via a pretty-printer *q* (loxi's pp protocol)."""
        q.text('group_add {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('group_type = ')
                # Show the symbolic OFPGT_* name when the value is known.
                value_name_map = {0: 'OFPGT_ALL', 1: 'OFPGT_SELECT', 2: 'OFPGT_INDIRECT', 3: 'OFPGT_FF'}
                if (self.group_type in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.group_type], self.group_type)))
                else:
                    q.text(('%#x' % self.group_type))
                q.text(',')
                q.breakable()
                q.text('group_id = ')
                q.text(('%#x' % self.group_id))
                q.text(',')
                q.breakable()
                q.text('buckets = ')
                q.pp(self.buckets)
            q.breakable()
            q.text('}')
class Cpe(Base):
    """SQLAlchemy ORM model for a CPE (Common Platform Enumeration) entry.

    Linked to CVEs through the Association table (many-to-many).
    """
    __tablename__ = 'cpes'
    # The full CPE identifier string serves as the primary key.
    cpe_id = Column(String(), primary_key=True)
    vendor = Column(String())
    product = Column(String())
    version = Column(String())
    # CPE 'update' component (e.g. service-pack level), not dict.update.
    update = Column(String())
    cves = relationship(Association, back_populates='cpe')

    def __repr__(self) -> str:
        return f'Cpe({self.cpe_id})'
def rescaleHalflife(prior, scale=1.0):
    """Return a new Ebisu-style (alpha, beta, t) model whose halflife equals
    the old model's halflife times *scale*.

    The result is a balanced model (alpha == beta) anchored at the old
    halflife, so recall probability there is 0.5 by construction.
    """
    (alpha, beta, t) = prior
    oldHalflife = modelToPercentileDecay(prior)
    dt = (oldHalflife / t)
    logDenominator = betaln(alpha, beta)
    # Second moment of recall probability at the halflife, in log space
    # for numerical stability.
    logm2 = (betaln((alpha + (2 * dt)), beta) - logDenominator)
    m2 = np.exp(logm2)
    # Moment-match a symmetric Beta(a, a) to that second moment.
    # NOTE(review): formula taken on trust from the Ebisu derivation —
    # verify against the upstream ebisu documentation before changing.
    newAlphaBeta = ((1 / ((8 * m2) - 2)) - 0.5)
    assert (newAlphaBeta > 0)
    return (newAlphaBeta, newAlphaBeta, (oldHalflife * scale))
class ClickSettings(Settings):
    """Boxes.py settings for click-together (snap-fit) edge connectors.

    Follows the Settings convention: angle is in absolute degrees; depth
    and bottom_radius are relative to material thickness.
    """
    absolute_params = {'angle': 5.0}
    relative_params = {'depth': 3.0, 'bottom_radius': 0.1}

    def edgeObjects(self, boxes, chars: str='cC', add: bool=True):
        """Create the click edge pair (connector + mating edge) and, when
        *add* is True, register them on *boxes* under *chars*."""
        edges = [ClickConnector(boxes, self), ClickEdge(boxes, self)]
        return self._edgeObjects(edges, boxes, chars, add)
.skip('These tests take a very long time to compute')
.parametrize('sz', [32, 30, 31, 29, 28])
def test_grad_odd_size_j2(sz):
    """Gradcheck ScatLayerj2 across odd and even spatial sizes in double
    precision.

    NOTE(review): the two lines above are mangled pytest.mark decorators
    ('@pytest.mark.' prefixes lost in extraction).
    """
    # Double precision is required for gradcheck's finite differences.
    x = torch.randn(1, 3, sz, sz, requires_grad=True, dtype=torch.double, device=dev)
    scat = ScatLayerj2(biort='near_sym_a', qshift='qshift_a').to(dev)
    scat = scat.to(torch.double)
    gradcheck(scat, (x,))
class URLFetcherTest(SimpleTestCase):
    """Tests for django_url_fetcher's handling of media/static file URLs.

    NOTE(review): bare ``(...)'`` and ``_settings(...)`` lines later in
    this class are ``@mock.patch`` / ``@override_settings`` decorators
    whose '@' prefixes were lost in extraction.
    """

    def setUp(self):
        # Reset the memoized manifest lookup so each test starts clean.
        get_reversed_hashed_files.cache_clear()
    def test_default(self):
        """URLs not resolvable as media/static fall through to WeasyPrint's
        default fetcher."""
        # NOTE(review): this URL literal was truncated during extraction.
        url = '
        with mock.patch('weasyprint.default_url_fetcher') as url_fetcher:
            django_url_fetcher(url)
            url_fetcher.assert_called_once_with(url)
        url = 'file:///media/image.jpg'
        with mock.patch('weasyprint.default_url_fetcher') as url_fetcher:
            django_url_fetcher(url)
            url_fetcher.assert_called_once_with(url)
        url = 'file:///static/styles.css'
        with mock.patch('weasyprint.default_url_fetcher') as url_fetcher:
            django_url_fetcher(url)
            url_fetcher.assert_called_once_with(url)

    def assert_data(self, data, file_path, mime_type):
        """Shared assertions on the dict returned by django_url_fetcher."""
        self.assertEqual(sorted(data.keys()), ['encoding', 'file_obj', 'filename', 'mime_type', 'redirected_url'])
        self.assertEqual(data['filename'], Path(file_path).name)
        self.assertEqual(data['mime_type'], mime_type)
        self.assertEqual(data['encoding'], None)
        self.assertEqual(data['redirected_url'], ('file://' + file_path))
    _settings(MEDIA_URL='/media/', MEDIA_ROOT='/www/media/')
    ('django_weasyprint.utils.default_storage.open')
    ('weasyprint.default_url_fetcher')
    def test_media_with_trailing_slash(self, mock_fetcher, mock_open):
        """Media URLs resolve through default_storage, not the default
        fetcher, when MEDIA_ROOT has a trailing slash."""
        url = 'file:///media/image.jpg'
        data = django_url_fetcher(url)
        mock_fetcher.assert_not_called()
        mock_open.assert_called_once_with('/www/media/image.jpg', 'rb')
        self.assert_data(data, '/www/media/image.jpg', 'image/jpeg')

    _settings(MEDIA_URL='/media/', MEDIA_ROOT=Path('/www/media'))
    ('django_weasyprint.utils.default_storage.open')
    ('weasyprint.default_url_fetcher')
    def test_media_root_pathlib_no_slash(self, mock_fetcher, mock_open):
        """Same resolution works with a pathlib MEDIA_ROOT and no slash."""
        url = 'file:///media/image.jpg'
        data = django_url_fetcher(url)
        mock_fetcher.assert_not_called()
        mock_open.assert_called_once_with('/www/media/image.jpg', 'rb')
        self.assert_data(data, '/www/media/image.jpg', 'image/jpeg')

    _settings(STATIC_URL='/static/', STATIC_ROOT='/www/static')
    ('django_weasyprint.utils.open')
    ('django_weasyprint.utils.find', return_value='/www/static/css/styles.css')
    ('weasyprint.default_url_fetcher')
    def test_static(self, mock_fetcher, mock_find, mock_open):
        """Static URLs are located via staticfiles' find() and opened
        directly."""
        url = 'file:///static/css/styles.css'
        data = django_url_fetcher(url)
        mock_fetcher.assert_not_called()
        mock_find.assert_called_once_with('css/styles.css')
        mock_open.assert_called_once_with('/www/static/css/styles.css', 'rb')
        self.assert_data(data, '/www/static/css/styles.css', 'text/css')

    _settings(STATIC_URL='/static/', STATIC_ROOT='/www/static', STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage')
    ('django_weasyprint.utils.staticfiles_storage.hashed_files', new_callable=mock.PropertyMock(return_value={'css/styles.css': 'css/styles.60b250d16a6a.css'}))
    ('django_weasyprint.utils.open')
    ('django_weasyprint.utils.find', return_value='/www/static/css/styles.css')
    ('weasyprint.default_url_fetcher')
    def test_manifest_static(self, mock_fetcher, mock_find, mock_open, hashed_files):
        """Hashed manifest filenames are reverse-mapped to the original
        static path before lookup."""
        url = 'file:///static/css/styles.60b250d16a6a.css'
        data = django_url_fetcher(url)
        mock_fetcher.assert_not_called()
        mock_find.assert_called_once_with('css/styles.css')
        mock_open.assert_called_once_with('/www/static/css/styles.css', 'rb')
        self.assert_data(data, '/www/static/css/styles.css', 'text/css')
_settings(STATIC_URL='/static/', STATIC_ROOT='/www/static')
('django_weasyprint.utils.open')
('django_weasyprint.utils.find', return_value=None)
('weasyprint.default_url_fetcher')
def test_static_file_not_found(self, mock_fetcher, mock_find, mock_open):
url = 'file:///static/css/missing.css'
django_url_fetcher(url)
mock_find.assert_called_once_with('css/missing.css')
mock_open.assert_not_called()
mock_fetcher.assert_called_once_with('file:///static/css/missing.css') |
def setup():
    """Interactively pick a serial port, baud rate and debug flag, then
    connect to the robot.

    Side effects: assigns the module-level globals ``port`` and ``mc``.
    """
    print('')
    global port, mc
    plist = list(serial.tools.list_ports.comports())
    # Present every detected serial port as a 1-based menu entry.
    for idx, candidate in enumerate(plist, start=1):
        print('{} : {}'.format(idx, candidate))
    _in = input('\nPlease input 1 - {} to choose:'.format(len(plist)))
    # The device name is the text before ' - ' in the port description.
    port = str(plist[(int(_in) - 1)]).split(' - ')[0].strip()
    print(port)
    print('')
    baud = 115200
    _baud = input('Please input baud(default:115200):')
    try:
        baud = int(_baud)
    except Exception:
        # Deliberate best-effort: keep the default baud on bad/empty input.
        pass
    print(baud)
    print('')
    DEBUG = False
    f = input('Whether DEBUG mode[Y/n]:')
    if (f in ['y', 'Y', 'yes', 'Yes']):
        DEBUG = True
    mc = MyCobot(port, baud, debug=DEBUG)
.external
.parametrize('generate_type, generate_target', [('systems', 'aws'), ('systems', 'okta'), ('datasets', 'db'), ('datasets', 'bigquery'), ('datasets', 'dynamodb')])
def test_generate_failure(test_config: FidesConfig, generate_type: str, generate_target: str, test_client: TestClient) -> None:
data = {'organization_key': 'default_organization', 'generate': {'config': EXTERNAL_FAILURE_CONFIG_BODY[generate_target], 'target': generate_target, 'type': generate_type}}
response = test_client.post(((test_config.cli.server_url + API_PREFIX) + '/generate/'), headers=test_config.user.auth_header, data=dumps(data))
actual_failure_message = loads(response.text)['detail']
print(actual_failure_message)
assert (EXPECTED_FAILURE_MESSAGES[generate_target] in actual_failure_message) |
_flyte_cli.command('list-launch-plan-versions', cls=_FlyteSubCommand)
_project_option
_domain_option
_optional_name_option
_host_option
_insecure_option
_token_option
_limit_option
_show_all_option
_filter_option
_sort_by_option
_optional_urns_only_option
def list_launch_plan_versions(project, domain, name, host, insecure, token, limit, show_all, filter, sort_by, urns_only):
    """CLI command: list versions of a named launch plan.

    Pages through the admin API; with --urns-only prints bare URNs,
    otherwise a formatted table with schedule info. Without --show-all
    only the first page is printed (plus the continuation token).

    NOTE(review): the bare option names above look like stripped click
    decorators — confirm against upstream.
    """
    if (not urns_only):
        _welcome_message()
        _click.echo('Launch Plan Versions Found for {}:{}:{}\n'.format(_tt(project), _tt(domain), _tt(name)))
        _click.echo('{:50} {:80} {:30} {:15}'.format('Version', 'Urn', 'Schedule', 'Schedule State'))
    client = _get_client(host, insecure)
    while True:
        # Fetch one page of launch plans matching the optional filters/sort.
        (lp_list, next_token) = client.list_launch_plans_paginated(_common_models.NamedEntityIdentifier(project, domain, name), limit=limit, token=token, filters=[_filters.Filter.from_python_std(f) for f in filter], sort_by=(_admin_common.Sort.from_python_std(sort_by) if sort_by else None))
        for l in lp_list:
            if urns_only:
                _click.echo(_tt(cli_identifiers.Identifier.promote_from_model(l.id)))
            else:
                _click.echo('{:50} {:80} '.format(_tt(l.id.version), _tt(cli_identifiers.Identifier.promote_from_model(l.id))), nl=False)
                # Only render the schedule columns when a schedule exists.
                if ((l.spec.entity_metadata.schedule is not None) and (l.spec.entity_metadata.schedule.cron_expression or l.spec.entity_metadata.schedule.rate)):
                    _click.echo('{:30} '.format(_render_schedule_expr(l)), nl=False)
                    _click.secho(_launch_plan.LaunchPlanState.enum_to_string(l.closure.state), fg=('green' if (l.closure.state == _launch_plan.LaunchPlanState.ACTIVE) else None))
                else:
                    _click.echo()
        # Without --show-all: stop after one page, reporting the token so
        # the user can resume from it.
        if (show_all is not True):
            if (next_token and (not urns_only)):
                _click.echo('Received next token: {}\n'.format(next_token))
            break
        if (not next_token):
            break
        token = next_token
    if (not urns_only):
        _click.echo('')
class IBCCoreConnectionRestClientTestCase(TestCase):
    """Tests that IBCCoreConnectionRestClient hits the expected REST
    endpoints and parses JSON responses into the right protobuf messages."""

    REST_CLIENT = IBCCoreConnectionRestClient

    def make_clients(self, response_content: Dict) -> Tuple[(MockRestClient, IBCCoreConnectionRestClient)]:
        """Build a mock transport preloaded with *response_content* and the
        REST client under test wired to it."""
        mock_client = MockRestClient(json_encode(response_content).encode('utf-8'))
        rest_client = self.REST_CLIENT(mock_client)
        return (mock_client, rest_client)

    def test_Connection(self):
        """Connection() parses the response and queries the per-id path."""
        content = {'connection': {'client_id': 'string', 'versions': [{'identifier': 'string', 'features': ['string']}], 'state': 'STATE_UNINITIALIZED_UNSPECIFIED', 'counterparty': {'client_id': '1', 'connection_id': '1', 'prefix': {'key_prefix': 'string'}}, 'delay_period': '1'}, 'proof': 'string', 'proof_height': {'revision_number': '1', 'revision_height': '1'}}
        (mock_client, rest_client) = self.make_clients(content)
        expected_response = ParseDict(content, QueryConnectionResponse())
        assert (rest_client.Connection(QueryConnectionRequest(connection_id='connection_id')) == expected_response)
        assert (mock_client.last_base_url == '/ibc/core/connection/v1beta1/connections/connection_id')

    def test_Connections(self):
        """Connections() queries the collection endpoint."""
        content = {'connections': [{'id': '1', 'client_id': '1', 'versions': [{'identifier': 'string', 'features': ['string']}], 'state': 'STATE_UNINITIALIZED_UNSPECIFIED', 'counterparty': {'client_id': '1', 'connection_id': '1', 'prefix': {'key_prefix': 'string'}}, 'delay_period': '1'}], 'pagination': {'next_key': 'string', 'total': '1'}, 'height': {'revision_number': '1', 'revision_height': '1'}}
        (mock_client, rest_client) = self.make_clients(content)
        expected_response = ParseDict(content, QueryConnectionsResponse())
        assert (rest_client.Connections(QueryConnectionsRequest()) == expected_response)
        assert (mock_client.last_base_url == '/ibc/core/connection/v1beta1/connections')

    def test_ClientConnections(self):
        """ClientConnections() queries the client_connections endpoint."""
        content = {'connection_paths': ['string'], 'proof': 'string', 'proof_height': {'revision_number': '1', 'revision_height': '1'}}
        (mock_client, rest_client) = self.make_clients(content)
        expected_response = ParseDict(content, QueryClientConnectionsResponse())
        assert (rest_client.ClientConnections(QueryClientConnectionsRequest(client_id='111')) == expected_response)
        # Compare URLs directly (was wrapped in one-element lists).
        assert (mock_client.last_base_url == '/ibc/core/connection/v1beta1/client_connections/111')

    def test_ConnectionClientState(self):
        """ConnectionClientState() queries the client_state sub-resource."""
        content = {'identified_client_state': {'client_id': 'string', 'client_state': TYPE}, 'proof': 'string', 'proof_height': {'revision_number': '1', 'revision_height': '1'}}
        (mock_client, rest_client) = self.make_clients(content)
        expected_response = ParseDict(content, QueryConnectionClientStateResponse())
        assert (rest_client.ConnectionClientState(QueryConnectionClientStateRequest(connection_id='connection_id')) == expected_response)
        assert (mock_client.last_base_url == '/ibc/core/connection/v1beta1/connections/connection_id/client_state')

    def test_ConnectionConsensusState(self):
        """ConnectionConsensusState() encodes revision/height in the path."""
        content = {'consensus_state': TYPE, 'client_id': 'string', 'proof': 'string', 'proof_height': {'revision_number': '1', 'revision_height': '1'}}
        (mock_client, rest_client) = self.make_clients(content)
        expected_response = ParseDict(content, QueryConnectionConsensusStateResponse())
        assert (rest_client.ConnectionConsensusState(QueryConnectionConsensusStateRequest(revision_height=1, revision_number=1, connection_id='connection_id')) == expected_response)
        assert (mock_client.last_base_url == '/ibc/core/connection/v1beta1/connections/connection_id/consensus_state/revision/1/height/1')
class OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Generated Highcharts option wrapper for the volume mapping of the
    arcdiagram default instrument sonification options.

    NOTE(review): each option appears as a getter/setter pair with the
    same name; the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped during extraction (as written, the second ``def``
    shadows the first). Confirm against the generator's output.
    """
    def mapFunction(self):
        # Getter: current mapFunction value (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the raw value; not emitted as JS code.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class AdminIntegrationTest(SearchTestBase):
    """Watson admin integration: the changelist renders a search bar and
    ``?q=`` filters the listed instances.

    NOTE(review): the bare tuple before testAdminIntegration looks like a
    stripped ``@skipUnless(...)`` decorator — confirm upstream.
    """
    def setUp(self):
        super(AdminIntegrationTest, self).setUp()
        # A staff superuser is required to access the admin changelist.
        self.user = User(username='foo', is_staff=True, is_superuser=True)
        self.user.set_password('bar')
        self.user.save()
    (('django.contrib.admin' in settings.INSTALLED_APPS), 'Django admin site not installed')
    def testAdminIntegration(self):
        """Unfiltered changelist shows all instances; ?q= narrows them."""
        self.client.login(username='foo', password='bar')
        response = self.client.get('/admin/test_watson/watsontestmodel1/')
        self.assertContains(response, 'instance11')
        self.assertContains(response, 'instance12')
        self.assertContains(response, 'searchbar')
        # Query matching shared fields still returns both instances.
        response = self.client.get('/admin/test_watson/watsontestmodel1/?q=title content description')
        self.assertContains(response, 'instance11')
        self.assertContains(response, 'instance12')
        # Query matching a single instance excludes the other.
        response = self.client.get('/admin/test_watson/watsontestmodel1/?q=instance11')
        self.assertContains(response, 'instance11')
        self.assertNotContains(response, 'instance12')
    def tearDown(self):
        super(AdminIntegrationTest, self).tearDown()
        # Remove the temporary admin user created in setUp.
        self.user.delete()
        del self.user
class SingleTestRun(EnsembleExperiment):
    """Ensemble experiment specialisation that runs a single realization
    locally as a smoke test."""
    def __init__(self, simulation_arguments: SingleTestRunArguments, config: ErtConfig, storage: StorageAccessor, id_: UUID):
        # Always run against a local copy of the queue configuration.
        local_queue_config = config.queue_config.create_local_copy()
        super().__init__(simulation_arguments, config, storage, local_queue_config, id_)
    # NOTE(review): no self/cls parameter — presumably a @staticmethod
    # whose decorator was stripped; confirm before relying on call form.
    def checkHaveSufficientRealizations(num_successful_realizations: int, _: int) -> None:
        # Exactly one realization must have succeeded.
        if (num_successful_realizations != 1):
            raise ErtRunError('Experiment failed!')
    def run_experiment(self, evaluator_server_config: EvaluatorServerConfig) -> RunContext:
        """Run the single-realization test via the EnsembleExperiment machinery."""
        return self.runSimulations__('Running single realisation test ...', evaluator_server_config)
    # NOTE(review): takes `cls` — presumably a @classmethod with the
    # decorator stripped; confirm.
    def name(cls) -> str:
        return 'Single realization test-run'
class OptionPlotoptionsAreasplineSonificationContexttracksMappingNoteduration(Options):
    """Generated Highcharts option wrapper for the note-duration mapping of
    areaspline sonification context tracks.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped during extraction — confirm.
    """
    def mapFunction(self):
        # Getter: current value, None when unset.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store raw value, not emitted as JS code.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_save_as_move_external_files_to_project_folder(create_test_data, create_maya_env, trash_bin):
    """save_as() must relocate externally referenced textures into the
    version's ``external_files`` directory."""
    data = create_test_data
    maya_env = create_maya_env
    # Create a throwaway texture file in the system temp directory.
    local_file_full_path = os.path.join(tempfile.gettempdir(), 'temp.png')
    with open(local_file_full_path, 'w'):
        pass
    trash_bin.append(local_file_full_path)
    # Point a Maya file node at the external texture.
    texture_node = pm.nt.File()
    texture_node.fileTextureName.set(local_file_full_path)
    version1 = Version(task=data['task1'])
    DBSession.add(version1)
    DBSession.commit()
    maya_env.save_as(version1)
    # After save, the node must reference the project-local copy.
    expected_path = Repository.to_os_independent_path(
        os.path.join(version1.absolute_path, 'external_files/Textures/temp.png')
    )
    assert expected_path == texture_node.fileTextureName.get()
class TlsSubscriptionResponseAttributesAllOf(ModelNormal):
    """Generated OpenAPI model holding the ``state`` attribute of a TLS
    subscription response.

    NOTE(review): the bare ``_property`` / ``_js_args_to_python_args``
    lines look like stripped ``@cached_property`` /
    ``@convert_js_args_to_python_args`` decorators from the generator —
    confirm against the generator template.
    """
    # Enumerates the allowed wire values for `state`.
    allowed_values = {('state',): {'PENDING': 'pending', 'PROCESSING': 'processing', 'ISSUED': 'issued', 'RENEWING': 'renewing', 'FAILED': 'failed'}}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in the schema.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared attribute name -> accepted type tuple.
        return {'state': (str,)}
    _property
    def discriminator():
        return None
    # Python attribute name -> JSON key name.
    attribute_map = {'state': 'state'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Constructor for client-side instantiation; rejects positional
        args and read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only come from server data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionPlotoptionsAreaSonificationContexttracksMappingPlaydelay(Options):
    """Generated Highcharts option wrapper for the play-delay mapping of
    area sonification context tracks.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped during extraction — confirm.
    """
    def mapFunction(self):
        # Getter: current value, None when unset.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store raw value, not emitted as JS code.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Article_CommentSerializer(serializers.ModelSerializer):
    """Serializer for Article_Comment exposing all model fields, with the
    author, a formatted creation time, and nested reply set."""
    # Nested read of the commenting user.
    user = UserSerializer()
    # Creation time rendered as 'YYYY-MM-DD HH:MM:SS'; server-assigned.
    add_time = serializers.DateTimeField(format='%Y-%m-%d %H:%M:%S', required=False, read_only=True)
    # Replies to this comment, read-only.
    articlecommentreply_set = ArticleCommentReplySerializer1(many=True, read_only=True)
    class Meta():
        model = Article_Comment
        fields = '__all__'
.plugin('snakes.nets')
def extend(module):
    """SNAKES plugin hook: return a PetriNet subclass with a configurable
    greeting method."""
    class PetriNet(module.PetriNet):
        def __init__(self, name, **args):
            # Optional 'hello' kwarg customises the greeting template;
            # '%s' is substituted with the net's name.
            self._hello = args.pop('hello', 'Hello from %s')
            module.PetriNet.__init__(self, name, **args)
        def hello(self):
            # Print the formatted greeting for this net.
            print((self._hello % self.name))
    return PetriNet
(max_examples=10)
.filterwarnings('ignore::UserWarning')
.filterwarnings('ignore::RuntimeWarning')
.filterwarnings('ignore::ert.config.ConfigWarning')
(config_generators(use_eclbase=st.just(True)))
def test_that_enkf_obs_keys_are_ordered(tmp_path_factory, config_generator):
    """Observations parsed into enkf_obs must all be present and stored in
    sorted name order."""
    with config_generator(tmp_path_factory) as config_values:
        observations = ErtConfig.from_dict(config_values.to_config_dict('test.ert', os.getcwd())).enkf_obs
        # Every generated observation is present ...
        for o in config_values.observations:
            assert (o.name in observations)
        # ... and the dataset keys are exactly the sorted unique names.
        assert (sorted(set((o.name for o in config_values.observations))) == list(observations.datasets.keys()))
.django_db
def test_match_from_code_filter_only(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """A TAS 'require' filter using only the agency path must match the
    expected subaward."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_treasury_account_components_subaward(client, {'require': [_agency_path(BASIC_TAS)]}, None)
    assert (resp.json()['results'] == [_subaward1()])
class CustomFormOptionList(ResourceList):
    """List endpoint for custom-form options, optionally scoped to one
    custom form via the ``custom_form_id`` view kwarg."""

    def query(self, view_kwargs):
        """Build the base query, narrowing by custom_form_id when given."""
        query_ = self.session.query(CustomFormOptions)
        if view_kwargs.get('custom_form_id'):
            # Filter the existing query instead of issuing a second
            # session.query(); direct attribute access replaces getattr.
            query_ = query_.filter(CustomFormOptions.custom_form_id == view_kwargs['custom_form_id'])
        return query_

    schema = CustomFormOptionSchema
    data_layer = {'session': db.session, 'model': CustomFormOptions, 'methods': {'query': query}}
def extractImperator(item):
    """Map a feed *item* to a WATTT release message; None for items
    without chapter/volume info or previews, False when the tag is absent."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items carrying no chapter/volume information.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
class UserTokenAPIView(RetrieveDestroyAPIView):
    """Retrieve or revoke API tokens owned by the requesting user.

    The special key ``'current'`` refers to the token authenticating the
    current request.
    """
    lookup_field = 'key'
    serializer_class = TokenSerializer
    queryset = Token.objects.all()

    def filter_queryset(self, queryset):
        # Users may only ever see their own tokens.
        return queryset.filter(user=self.request.user)

    def retrieve(self, request, key, *args, **kwargs):
        """Return the token identified by *key* ('current' = request token)."""
        if key != 'current':
            return super(UserTokenAPIView, self).retrieve(request, key, *args, **kwargs)
        token = Token.objects.get(key=request.auth.key)
        return Response(self.get_serializer(token).data)

    def destroy(self, request, key, *args, **kwargs):
        """Delete the token identified by *key* ('current' = request token)."""
        if key != 'current':
            return super(UserTokenAPIView, self).destroy(request, key, *args, **kwargs)
        Token.objects.get(key=request.auth.key).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
def run_near_to_far(mesh, DG0, W):
    """Advect a piecewise-constant inflow profile through *mesh* with an
    upwind DG0 scheme and assert the solution reproduces the inflow data
    to 1e-6."""
    # Constant advecting velocity in the y direction.
    velocity = as_vector((0.0, 1.0, 0.0))
    u0 = project(velocity, W)
    xs = SpatialCoordinate(mesh)
    # Inflow profile: 1.0 on the middle band (0.33 < x[2] < 0.67), else 0.5.
    inflowexpr = conditional(And((real(xs[2]) > 0.33), (real(xs[2]) < 0.67)), 1.0, 0.5)
    inflow = Function(DG0)
    inflow.interpolate(inflowexpr)
    n = FacetNormal(mesh)
    # Upwind flux: positive part of u.n on each facet.
    un = (0.5 * (dot(u0, n) + abs(dot(u0, n))))
    D = TrialFunction(DG0)
    phi = TestFunction(DG0)
    # Cell term, interior-facet upwind term, and outflow boundary term (ds(4)).
    a1 = (((- D) * inner(u0, grad(phi))) * dx)
    a2 = (inner(((un('+') * D('+')) - (un('-') * D('-'))), jump(phi)) * dS)
    a3 = (inner((un * D), phi) * ds(4))
    a = ((a1 + a2) + a3)
    # Weak inflow condition on boundary marker 3.
    L = (((- inflow) * inner(dot(u0, n), phi)) * ds(3))
    out = Function(DG0)
    solve((a == L), out)
    # Transport should carry the inflow values through unchanged.
    assert (max(abs((out.dat.data - inflow.dat.data))) < 1e-06)
def extractLaoshutranslationsWordpressCom(item):
    """Resolve a feed *item* to a release message based on its tags;
    None for previews/untagged chapters, False when no tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # First matching tag wins.
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractGrasstranslatesBlogspotCom(item):
    """Translate a feed *item*'s tags into a release message; None for
    previews/untagged chapters, False when no known tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Known series tags; the first match wins.
    known_tags = (
        ('PGS', 'Peerless Genius System', 'translated'),
        ('dkfod', 'Devil King from the Otherworldly Dimension', 'translated'),
        ('fls', 'Full-time Lottery System', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def format_bulk_insert_list_column_sql(cursor, load_objects, type):
    """Build the quoted column list and VALUES clause for a bulk INSERT.

    Column order is taken from the first load object's *type* mapping;
    each row value is rendered with ``format_value_for_sql``. Returns
    ``(col_string, val_string)`` ready to splice into an INSERT statement.
    """
    # NOTE(review): assumes every load_object shares the first object's
    # key set — confirm with callers.
    keys = load_objects[0][type].keys()
    # Reuse `keys` instead of recomputing the key view a second time.
    columns = ['"{}"'.format(key) for key in keys]
    values = [[format_value_for_sql(load_object[type][key], cursor) for key in keys] for load_object in load_objects]
    col_string = '({})'.format(','.join(columns))
    val_string = ','.join(['({})'.format(','.join(map(str, value))) for value in values])
    return (col_string, val_string)
class NibeClimate(NibeEntity, ClimateEntity):
    """Home Assistant climate entity for a single Nibe climate system."""
    def __init__(self, system: NibeSystem, climate: ClimateSystem, parameters: set[(ParameterId | None)]):
        """Initialise entity attributes and subscribe to parameters.

        Always adds the heating-medium pump speed parameter to the set of
        tracked parameters.
        """
        parameters |= {PARAM_PUMP_SPEED_HEATING_MEDIUM}
        super().__init__(system, parameters)
        self._climate = climate
        # Last result of a put_parameter call; surfaced via attributes.
        self._status = 'DONE'
        self._attr_hvac_action = HVACAction.IDLE
        self._attr_hvac_mode = HVACMode.HEAT
        self._attr_hvac_modes = [HVACMode.HEAT_COOL, HVACMode.HEAT, HVACMode.COOL]
        self._attr_name = climate.name
        self._attr_supported_features = (ClimateEntityFeature.TARGET_TEMPERATURE_RANGE | ClimateEntityFeature.TARGET_TEMPERATURE)
        self._attr_unique_id = f'{self._system_id}_{self._climate.name}'
        self.parse_data()
    def extra_state_attributes(self):
        """Expose last write status and current pump speed as attributes."""
        data = OrderedDict()
        data['status'] = self._status
        data['pump_speed_heating_medium'] = self.get_float(PARAM_PUMP_SPEED_HEATING_MEDIUM)
        return data
    async def async_set_hvac_mode(self, hvac_mode: str) -> None:
        """Accept a new HVAC mode if it is one of the supported modes."""
        if (hvac_mode in self._attr_hvac_modes):
            self._attr_hvac_mode = hvac_mode
            self.async_write_ha_state()
    async def async_set_temperature_internal(self, parameter, data):
        """Write *data* to *parameter* on the uplink, recording the status.

        On failure the status is set to 'ERROR' and the exception re-raised.
        """
        _LOGGER.debug(f'Set temperature on parameter {parameter} to {data}')
        try:
            self._status = (await self._uplink.put_parameter(self._system_id, parameter, data))
        except BaseException:
            self._status = 'ERROR'
            raise
        finally:
            _LOGGER.debug(f'Put parameter response {self._status}')
    def parse_data(self):
        """Derive the HVAC action from the system status strings."""
        super().parse_data()
        # Explicit passive/active cooling statuses take precedence over
        # the generic 'Heating'/'Cooling' strings.
        if (('Cooling (Passive)' in self._system.statuses) or ('Cooling (Active)' in self._system.statuses)):
            self._attr_hvac_action = HVACAction.COOLING
        elif ('Heating' in self._system.statuses):
            self._attr_hvac_action = HVACAction.HEATING
        elif ('Cooling' in self._system.statuses):
            self._attr_hvac_action = HVACAction.COOLING
        else:
            self._attr_hvac_action = HVACAction.IDLE
class OptionSeriesArearangeSonificationContexttracksMappingHighpass(Options):
    """Generated Highcharts option wrapper for the highpass filter mapping
    of arearange sonification context tracks.

    NOTE(review): these accessors look like stripped @property getters —
    confirm against the generator's output.
    """
    def frequency(self) -> 'OptionSeriesArearangeSonificationContexttracksMappingHighpassFrequency':
        # Lazily materialise the nested frequency option object.
        return self._config_sub_data('frequency', OptionSeriesArearangeSonificationContexttracksMappingHighpassFrequency)
    def resonance(self) -> 'OptionSeriesArearangeSonificationContexttracksMappingHighpassResonance':
        # Lazily materialise the nested resonance option object.
        return self._config_sub_data('resonance', OptionSeriesArearangeSonificationContexttracksMappingHighpassResonance)
class ReducedOutputsMeanSquaredError(tf.keras.losses.Loss):
    """Mean squared error averaged over the output axis (axis=1),
    yielding one value per sample."""

    def __init__(self, reduction=tf.keras.losses.Reduction.AUTO, name='reduced_outputs_mean_squared_error'):
        super(ReducedOutputsMeanSquaredError, self).__init__(reduction=reduction, name=name)

    def call(self, y_true, y_pred):
        """Per-sample mean of squared errors across outputs."""
        predictions = tf.convert_to_tensor(y_pred)
        targets = tf.cast(y_true, predictions.dtype)
        return tf.math.reduce_mean(tf.math.squared_difference(targets, predictions), axis=1)

    def residuals(self, y_true, y_pred):
        """Root of the per-sample MSE, stabilised by epsilon."""
        predictions = tf.convert_to_tensor(y_pred)
        targets = tf.cast(y_true, predictions.dtype)
        per_sample_mse = tf.math.reduce_mean(tf.math.squared_difference(targets, predictions), axis=1)
        return tf.math.sqrt(tf.keras.backend.epsilon() + per_sample_mse)
class OptionSeriesErrorbarSonificationTracksMappingLowpassFrequency(Options):
    """Generated Highcharts option wrapper for the lowpass-frequency
    mapping of errorbar sonification tracks.

    NOTE(review): getter/setter pairs share a name; the @property /
    @<name>.setter decorators appear stripped during extraction — confirm.
    """
    def mapFunction(self):
        # Getter: current value, None when unset.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store raw value, not emitted as JS code.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def activate_clade(tree_id):
    """Mark the clade rooted at *tree_id* active, dropping any active
    descendants, then rebuild the active-parent bookkeeping."""
    tid, subtree = get_tid(tree_id)
    tree_data = app.trees[int(tid)]
    node = tree_data.tree[subtree]
    clade_results = tree_data.active.clades.results
    clade_results.add(node)
    # A newly activated clade subsumes any of its active descendants.
    for descendant in node.descendants():
        clade_results.discard(descendant)
    parents = get_parents(clade_results, count_leaves=True)
    active_parents = get_active_clades(clade_results, parents)
    # Rebuild both sets from the reduced clade selection.
    clade_results.clear()
    tree_data.active.clades.parents.clear()
    clade_results.update(active_parents)
    tree_data.active.clades.parents.update(get_parents(active_parents, count_leaves=True))
_set_msg_type(ofproto.OFPT_PORT_MOD)
class OFPPortMod(MsgBase):
    """OpenFlow port-modification (OFPT_PORT_MOD) message."""
    # hw_addr is (de)serialised as ASCII text.
    _TYPE = {'ascii': ['hw_addr']}
    def __init__(self, datapath, port_no=0, hw_addr='00:00:00:00:00:00', config=0, mask=0, properties=None):
        super(OFPPortMod, self).__init__(datapath)
        self.port_no = port_no
        self.hw_addr = hw_addr
        self.config = config
        # Bitmask selecting which config bits to change.
        self.mask = mask
        self.properties = (properties or [])
    def _serialize_body(self):
        """Pack the fixed header fields, then append serialized properties."""
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        # Fixed fields are packed in place after the common OFP header;
        # properties are appended afterwards.
        msg_pack_into(ofproto.OFP_PORT_MOD_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.port_no, addrconv.mac.text_to_bin(self.hw_addr), self.config, self.mask)
        self.buf += bin_props
class RateLimitV1Test(AmbassadorTest):
    """End-to-end test of a v3 RateLimitService: allowed, denied and
    header-appending responses."""
    target: ServiceType
    def init(self):
        # Backend service and the gRPC rate-limit service under test.
        self.target = HTTP()
        self.rls = RLSGRPC()
    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Mapping with a request_label_group forwarding the RLS headers.
        (yield (self.target, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: ratelimit_target_mapping\nhostname: "*"\nprefix: /target/\nservice: {self.target.path.fqdn}\nlabels:\n  ambassador:\n    - request_label_group:\n      - request_headers:\n          key: kat-req-rls-allow\n          header_name: "kat-req-rls-allow"\n          omit_if_not_present: true\n      - request_headers:\n          key: kat-req-rls-headers-append\n          header_name: "kat-req-rls-headers-append"\n          omit_if_not_present: true\n')))
        # RateLimitService resource pointing at the gRPC backend, protocol v3.
        (yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: RateLimitService\nname: {self.rls.path.k8s}\nservice: "{self.rls.path.fqdn}"\ntimeout_ms: 500\nprotocol_version: "v3"\n')))
    def queries(self):
        # No labels, explicitly allowed (200), and explicitly denied (429).
        (yield Query(self.url('target/')))
        (yield Query(self.url('target/'), expected=200, headers={'kat-req-rls-allow': 'true', 'kat-req-rls-headers-append': 'no header'}))
        (yield Query(self.url('target/'), expected=429, headers={'kat-req-rls-allow': 'over my dead body', 'kat-req-rls-headers-append': 'Hello=Foo; Hi=Baz'}))
    def check(self):
        # The denied response must carry the RLS-appended headers.
        assert (self.results[2].headers['Hello'] == ['Foo'])
        assert (self.results[2].headers['Hi'] == ['Baz'])
        assert (self.results[2].headers['Content-Type'] == ['application/json'])
        assert (self.results[2].headers['Kat-Resp-Rls-Protocol-Version'] == ['v3'])
def gen_function(func_attrs, exec_cond_template, dim_info_dict, gemm_flag, extra_code='', ndims=2, extra_shape_template=EXTRA_SHAPE_TEMPLATE, problem_args_template=PROBLEM_ARGS_TEMPLATE, extra_header_template=EXTRA_HEADER_TEMPLATE, input_addr_calculator='', output_addr_calculator=''):
    """Render the C++ source for a GEMM-family op from its exec paths.

    Emits one kernel instance per unique algorithm in ``exec_path`` and
    one guarded exec branch per exec key, then renders everything into
    SRC_TEMPLATE.
    """
    func_name = func_attrs['name']
    exec_path = func_attrs['exec_path']
    op_instance = func_attrs['op_instance']
    # Algorithms already emitted — each instance definition appears once.
    inst_def_flag = set()
    instances = {}
    instance_decl = ''
    has_d0_flag = has_d0(func_attrs)
    has_d1_flag = has_d1(func_attrs)
    for (key, value) in exec_path.items():
        # Instance names are derived from a hash of the exec key.
        fname = ('f' + sha1(key.encode()).hexdigest())
        algo = value.algo
        if (algo not in inst_def_flag):
            config = emit_instance(op_instance[algo])
            inst_def_flag.add(algo)
        else:
            config = ''
        inst = INSTANCE_TEMPLATE.render(config=config, name=fname, config_name=extract_config_name(config))
        instances[key] = inst
        instance_decl += inst
    extra_shape_func = extra_shape_template.render(indent='    ')
    shape_eval_func = gemm_common.gen_shape_eval_code(indent=1, dtype='ck::index_t', dim_info_dict=dim_info_dict, is_ptr=True)
    exec_paths = ''
    for (key, _) in instances.items():
        fname = ('f' + sha1(key.encode()).hexdigest())
        problem_args = problem_args_template.render(indent='        ', gemm_flag=gemm_flag, has_d0=has_d0_flag, has_d1=has_d1_flag)
        program = EXEC_TEMPLATE.render(indent='      ', instance=fname, problem_args=problem_args, is_profiler=False)
        has_dynamic_shape = False
        for inp in func_attrs['inputs']:
            for dim in inp.shape():
                if isinstance(dim, IntVar):
                    has_dynamic_shape = True
        # NOTE(review): with any dynamic input dim the exec condition is
        # replaced by 'true' (unconditional dispatch) — confirm this is
        # intended when multiple exec keys exist.
        if has_dynamic_shape:
            key = 'true'
        exec_inst = exec_cond_template.render(indent='  ', cond=key, program=program)
        exec_paths += exec_inst
    extra_header = extra_header_template.render(gemm_flag=gemm_flag, has_d0=has_d0(func_attrs))
    # Number of "p" dims comes from the optional 'shape' attribute.
    pdims = (len(func_attrs['shape']) if (func_attrs.get('shape') is not None) else 0)
    return SRC_TEMPLATE.render(instances=instance_decl, function_name=func_name, shape_func=shape_eval_func, extra_shape=extra_shape_func, input_addr_calculator=input_addr_calculator, output_addr_calculator=output_addr_calculator, exec_paths=exec_paths, extra_code=extra_code, extra_header=extra_header, gemm_flag=gemm_flag, ndims=ndims, pdims=pdims, has_d0=has_d0_flag, has_d1=has_d1_flag)
def bump_version(cfg: Config, package: Package, hydra_root: str) -> None:
    """Bump *package*'s version in its version-bearing file.

    SETUP packages keep their version in setup.py; FILE packages in the
    file named by ``package.version_file``.

    Raises:
        ValueError: if the package's version_type is not handled here.
    """
    package_dir = Path(hydra_root) / package.path
    if package.version_type == VersionType.SETUP:
        bump_version_in_file(cfg, package.name, package_dir / 'setup.py')
    elif package.version_type == VersionType.FILE:
        bump_version_in_file(cfg, package.name, package_dir / package.version_file)
    else:
        # Fail loudly with context instead of a bare ValueError().
        raise ValueError(f'Unsupported version_type: {package.version_type}')
.parametrize(('degree', 'family', 'expected_convergence'), [(1, 'CG', 1.8), (2, 'CG', 2.6), (3, 'CG', 3.8), (0, 'DG', 0.8), (1, 'DG', 1.8), (2, 'DG', 2.8)])
def test_convergence(degree, family, expected_convergence):
l2_diff = np.array([run_test(x, degree, family) for x in range(2, 5)])
conv = np.log2((l2_diff[:(- 1)] / l2_diff[1:]))
assert (conv > expected_convergence).all() |
class TestFuzzTHBattle2v2(object):
    """Fuzz a full THBattle2v2 game: four bot clients join one room and
    play with random input until the game ends or a side crashes."""
    def testFuzzTHBattle2v2(self):
        env = Environ()
        t = EventTap()
        # The test greenlet; crash/end callbacks kill it to stop the run.
        me = gevent.getcurrent()
        def fail_crash(g):
            # Propagate a game crash (server or client) into this greenlet.
            e = Exception('GAME CRASH')
            e.__cause__ = g.runner.exception
            gevent.kill(me, e)
            return g
        s = env.server_core()
        cl = BatchList([env.client_core() for _ in range(4)])
        t.tap(s, *cl)
        # c creates the room; the remaining three (c1r) join it.
        c1r = BatchList(cl[1:])
        c = cl[0]
        names = ('Reimu', 'Marisa', 'Youmu', 'Sakuya', 'Satori', 'Koishi', 'Remilia', 'Flandre')
        for (i, name) in zip(cl, names):
            i.auth.login(name)
        wait()
        assert all(cl.auth.pid)
        c.room.create('Test1', 'THBattle2v2', {})
        wait()
        gid = c.game.gid_of(t[c.events.game_joined])
        c1r.room.join(gid)
        wait()
        # All four clients must have joined the same game.
        assert (([gid] * 4) == [i.game.gid_of(t[i.events.game_joined]) for i in cl])
        s.events.game_crashed += fail_crash
        for i in cl:
            g = t[i.events.game_joined]
            i.events.game_crashed += fail_crash
            # Bot handler answers all user-input prompts randomly.
            g.event_observer = BotUserInputHandler(g)
        cl.room.get_ready()
        wait()
        assert (([gid] * 4) == [i.game.gid_of(t[i.events.game_started]) for i in cl])
        wait()
        def game_ended(g):
            # Normal termination: signal the test greenlet via GameEnded.
            import logging
            logging.critical('Game ended called ')
            gevent.kill(me, GameEnded())
            return g
        s.events.game_ended += game_ended
        [i.game.start_game(t[i.events.game_started]) for i in cl]
        try:
            # Run until game_ended/fail_crash interrupts us.
            let_it_go(*cl)
        except GameEnded:
            pass
class BigQueryAgent(AgentBase):
    """Flyte agent that executes BigQuery query jobs via the agent service API."""

    def __init__(self):
        super().__init__(task_type='bigquery_query_job_task', asynchronous=False)

    def create(self, context: grpc.ServicerContext, output_prefix: str, task_template: TaskTemplate, inputs: Optional[LiteralMap]=None) -> CreateTaskResponse:
        """Submit the task's SQL statement as a BigQuery query job.

        Flyte literal inputs become scalar query parameters; the resulting
        job id/location/project are serialized into the resource metadata.
        """
        job_config = None
        if inputs:
            ctx = FlyteContextManager.current_context()
            python_interface_inputs = {name: TypeEngine.guess_python_type(lt.type) for (name, lt) in task_template.interface.inputs.items()}
            native_inputs = TypeEngine.literal_map_to_kwargs(ctx, inputs, python_interface_inputs)
            logger.info(f'Create BigQuery job config with inputs: {native_inputs}')
            job_config = bigquery.QueryJobConfig(
                query_parameters=[
                    bigquery.ScalarQueryParameter(name, pythonTypeToBigQueryType[python_interface_inputs[name]], val)
                    for (name, val) in native_inputs.items()
                ]
            )
        custom = task_template.custom
        project = custom['ProjectID']
        location = custom['Location']
        client = bigquery.Client(project=project, location=location)
        query_job = client.query(task_template.sql.statement, job_config=job_config)
        metadata = Metadata(job_id=str(query_job.job_id), location=location, project=project)
        return CreateTaskResponse(resource_meta=json.dumps(asdict(metadata)).encode('utf-8'))

    def get(self, context: grpc.ServicerContext, resource_meta: bytes) -> GetTaskResponse:
        """Poll the BigQuery job and translate its state into a Flyte task state."""
        client = bigquery.Client()
        metadata = Metadata(**json.loads(resource_meta.decode('utf-8')))
        # Fix: the console URI f-string was truncated/malformed in the original
        # (`uri=f' name='BigQuery Console'` is not valid Python); rebuild the
        # standard BigQuery console link for the job.
        log_links = [
            TaskLog(
                uri=f"https://console.cloud.google.com/bigquery?project={metadata.project}&j=bq:{metadata.location}:{metadata.job_id}&page=queryresults",
                name='BigQuery Console',
            ).to_flyte_idl()
        ]
        job = client.get_job(metadata.job_id, metadata.project, metadata.location)
        if job.errors:
            # Surface BigQuery-side errors through the gRPC context and report
            # a permanent failure to Flyte.
            logger.error(job.errors.__str__())
            context.set_code(grpc.StatusCode.INTERNAL)
            context.set_details(job.errors.__str__())
            return GetTaskResponse(resource=Resource(state=PERMANENT_FAILURE), log_links=log_links)
        cur_state = convert_to_flyte_state(str(job.state))
        res = None
        if (cur_state == SUCCEEDED):
            ctx = FlyteContextManager.current_context()
            if job.destination:
                # Expose the job's destination table as a structured dataset output.
                output_location = f'bq://{job.destination.project}:{job.destination.dataset_id}.{job.destination.table_id}'
                res = literals.LiteralMap({'results': TypeEngine.to_literal(ctx, StructuredDataset(uri=output_location), StructuredDataset, LiteralType(structured_dataset_type=StructuredDatasetType(format='')))}).to_flyte_idl()
        return GetTaskResponse(resource=Resource(state=cur_state, outputs=res), log_links=log_links)

    def delete(self, context: grpc.ServicerContext, resource_meta: bytes) -> DeleteTaskResponse:
        """Cancel the BigQuery job identified by the serialized metadata."""
        client = bigquery.Client()
        metadata = Metadata(**json.loads(resource_meta.decode('utf-8')))
        client.cancel_job(metadata.job_id, metadata.project, metadata.location)
        return DeleteTaskResponse()
def amazon_receipt_parser_formatter(pages: List[dict]) -> ReceiptParserDataClass:
    """Normalize Amazon Textract expense-analysis pages into a ReceiptParserDataClass.

    Every ExpenseDocument produces one InfosReceiptParserDataClass entry:
    summary fields feed customer/merchant/totals, line-item groups become
    ItemLines, and the invoice currency is the most frequently seen code.
    """
    parsed_receipts = []
    for page in pages:
        for expense_doc in page.get('ExpenseDocuments') or []:
            summary = {}
            currency_counts = {}
            for summary_field in expense_doc.get('SummaryFields') or []:
                summary[summary_field['Type']['Text']] = summary_field['ValueDetection']['Text']
                currency_code = summary_field.get('Currency', {}).get('Code')
                if currency_code is not None:
                    currency_counts[currency_code] = currency_counts.get(currency_code, 0) + 1
            item_lines = []
            for group in expense_doc.get('LineItemGroups') or []:
                for line_item in group.get('LineItems') or []:
                    fields_by_type = {field['Type']['Text']: field['ValueDetection']['Text'] for field in line_item['LineItemExpenseFields']}
                    item_lines.append(
                        ItemLines(
                            description=fields_by_type.get('ITEM'),
                            quantity=convert_string_to_number(fields_by_type.get('QUANTITY'), float),
                            amount=convert_string_to_number(fields_by_type.get('PRICE'), float),
                            unit_price=convert_string_to_number(fields_by_type.get('UNIT_PRICE'), float),
                        )
                    )
            customer = CustomerInformation(customer_name=summary.get('RECEIVER_NAME', summary.get('NAME')))
            merchant = MerchantInformation(
                merchant_name=summary.get('VENDOR_NAME'),
                merchant_address=summary.get('VENDOR_ADDRESS'),
                merchant_phone=summary.get('VENDOR_PHONE'),
                merchant_url=summary.get('VENDOR_URL'),
                merchant_siret=None,
                merchant_siren=None,
            )
            # A single detected currency wins outright; several resolve to the
            # most frequent one; none leaves the currency unset.
            if len(currency_counts) == 1:
                invoice_currency = next(iter(currency_counts))
            elif len(currency_counts) > 1:
                invoice_currency = max(currency_counts, key=currency_counts.get)
            else:
                invoice_currency = None
            locale = Locale(currency=invoice_currency, language=None, country=None)
            taxes = [Taxes(taxes=convert_string_to_number(summary.get('TAX'), float), rate=None)]
            parsed_receipts.append(
                InfosReceiptParserDataClass(
                    customer_information=customer,
                    merchant_information=merchant,
                    invoice_number=summary.get('INVOICE_RECEIPT_ID'),
                    invoice_total=convert_string_to_number(summary.get('TOTAL'), float),
                    invoice_subtotal=convert_string_to_number(summary.get('SUBTOTAL'), float),
                    taxes=taxes,
                    date=summary.get('ORDER_DATE', summary.get('INVOICE_RECEIPT_DATE')),
                    due_date=summary.get('DUE_DATE'),
                    locale=locale,
                    item_lines=item_lines,
                    category=None,
                    time=None,
                )
            )
    return ReceiptParserDataClass(extracted_data=parsed_receipts)
class ItemsBoxRec():
    """Record adapter building de-duplicated {'title', 'text'} item entries."""

    @staticmethod
    def from_records(records: List[dict], column: Any, title: str = None, color: str = None, icons=None) -> list:
        """Build a sorted, de-duplicated list of {'title', 'text'} dicts.

        Fix: the original definition had neither ``self`` nor
        ``@staticmethod``, so calling it on an instance would pass the
        instance as *records*; class-level calls are unaffected.

        :param records: input records, one dict per row.
        :param column: key whose value becomes each item's 'text' (also the
                       de-duplication and sort key).
        :param title: key for the 'title' value, or a callable receiving the
                      record; defaults to *column*.
        :param color: unused; kept for interface compatibility.
        :param icons: unused; kept for interface compatibility.
        """
        if title is None:
            title = column
        result = {}
        # Later records with the same column value overwrite earlier ones.
        if callable(title):
            for rec in records:
                result[rec[column]] = {'title': title(rec), 'text': rec[column]}
        else:
            for rec in records:
                result[rec[column]] = {'title': rec[title], 'text': rec[column]}
        return [result[k] for k in sorted(result.keys())]
class ChartMultiBar(ChartBar):
    """NVD3 multi-bar chart component (extends ChartBar)."""

    def dom(self) -> JsNvd3.JsNvd3MultiBar:
        """Lazily create and return the NVD3 multiBar JS/DOM wrapper.

        NOTE(review): `colors` below uses `self.dom.barColor(...)` as an
        attribute access, so this was probably declared with `@property`
        originally -- confirm against the original source.
        """
        if (self._dom is None):
            self._dom = JsNvd3.JsNvd3MultiBar(page=self.page, js_code=self.js_code, component=self)
        return self._dom

    def colors(self, hex_values: list):
        """Set the series colors from hex codes, named colors or (line, bg) tuples.

        Named colors are resolved through Colors.defined; plain hex values get
        an rgba() background derived from the chart opacity, while tuples use
        their first entry for both line and background.
        """
        (line_colors, bg_colors) = ([], [])
        for h in hex_values:
            if (h.upper() in Colors.defined):
                # Resolve a named color (e.g. 'RED') to its hex code.
                h = Colors.defined[h.upper()]['hex']
            if (not isinstance(h, tuple)):
                line_colors.append(h)
                # Fix: the rgba() CSS function was missing its closing parenthesis.
                bg_colors.append(('rgba(%s, %s, %s, %s)' % (Colors.getHexToRgb(h)[0], Colors.getHexToRgb(h)[1], Colors.getHexToRgb(h)[2], self.options.opacity)))
            else:
                line_colors.append(h[0])
                bg_colors.append(h[0])
        self.options.colors = line_colors
        self.options.background_colors = bg_colors
        self.dom.barColor(line_colors)
        # Push the resolved colors onto the already-registered datasets.
        for (i, rec) in enumerate(self._datasets):
            rec['color'] = self.options.colors[i]

    def add_dataset(self, data, label, colors=None, opacity=None, kind=None):
        """Add a series named *label* whose values align with the chart labels.

        NOTE(review): colors/opacity/kind are accepted but unused here --
        presumably kept for interface parity with sibling chart classes; confirm.
        """
        return self.add_trace([{'label': l, 'y': data[i]} for (i, l) in enumerate(self._labels)], name=label)
class OptionPlotoptionsFunnel3dZones(Options):
    """Option accessors for funnel3d `zones` configuration entries.

    Each option is written as a getter/setter pair sharing one name.
    NOTE(review): without `@property` / `@<name>.setter` decorators the later
    definition shadows the earlier one; the decorators were likely stripped
    from this copy -- confirm against the original source.
    """

    def className(self):
        # Getter: custom CSS class name applied to the zone (default None).
        return self._config_get(None)

    def className(self, text: str):
        # Setter for className.
        self._config(text, js_type=False)

    def color(self):
        # Getter: zone fill color (default None).
        return self._config_get(None)

    def color(self, text: str):
        # Setter for color.
        self._config(text, js_type=False)

    def dashStyle(self):
        # Getter: dash style of the zone's graph line (default None).
        return self._config_get(None)

    def dashStyle(self, text: str):
        # Setter for dashStyle.
        self._config(text, js_type=False)

    def fillColor(self):
        # Getter: zone area fill color (default None).
        return self._config_get(None)

    def fillColor(self, text: str):
        # Setter for fillColor.
        self._config(text, js_type=False)
def test_adding_tolerations():
    """Tolerations set on one controller must not leak into the other."""
    def pod_spec(rendered, kind):
        return rendered[kind][name]['spec']['template']['spec']
    # Tolerations declared under the daemonset key: deployment stays empty.
    rendered = helm_template('\ndeployment:\n enabled: true\ndaemonset:\n tolerations:\n - key: "key1"\n operator: "Equal"\n value: "value1"\n effect: "NoExecute"\n tolerationSeconds: 3600\n')
    assert pod_spec(rendered, 'daemonset')['tolerations'][0]['key'] == 'key1'
    assert pod_spec(rendered, 'deployment')['tolerations'] == []
    # Tolerations declared under the deployment key: daemonset stays empty.
    rendered = helm_template('\ndeployment:\n enabled: true\n tolerations:\n - key: "key1"\n operator: "Equal"\n value: "value1"\n effect: "NoExecute"\n tolerationSeconds: 3600\n')
    assert pod_spec(rendered, 'deployment')['tolerations'][0]['key'] == 'key1'
    assert pod_spec(rendered, 'daemonset')['tolerations'] == []
def find(search_list, value):
    """Binary-search *search_list* (assumed sorted ascending) for *value*.

    Returns an index where *value* occurs (not necessarily the first one when
    duplicates exist) or raises ValueError if it is absent.
    """
    lo, hi = 0, len(search_list) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        probe = search_list[mid]
        if probe == value:
            return mid
        if probe > value:
            hi = mid - 1
        else:
            lo = mid + 1
    raise ValueError('value not in array')
class KeyValueCache(DataclassAsTuple):
    """Attention key/value cache stored as a pair of equally-shaped tensors."""

    key: Tensor
    value: Tensor

    def filter_batch_items(self, mask: Tensor) -> 'KeyValueCache':
        """Return a new cache keeping only the batch rows where *mask* is True.

        *mask* must be a 1D bool tensor whose length equals the cache's
        batch size (dimension 0 of the key tensor).
        """
        if mask.ndim != 1:
            raise ValueError(f'Cache mask must be a 1D tensor, has {mask.ndim} dimensions.')
        if mask.size(0) != self.key.size(0):
            raise ValueError(f'Cache mask size ({mask.size(0)}) must match cache batch size ({self.key.size(0)}).')
        if mask.dtype != torch.bool:
            raise ValueError(f'Cache mask dtype must be bool, was: {mask.dtype}.')
        return KeyValueCache(self.key[mask], self.value[mask])

    def jit_rewrap(cls: Type['KeyValueCache'], key_value_cache: Optional[Union[('KeyValueCache', Tuple[(Tensor, Tensor)])]]) -> Optional['KeyValueCache']:
        """Coerce a (key, value) tensor pair into a KeyValueCache.

        None and existing KeyValueCache instances pass through unchanged;
        anything else must be a 2-tuple of equally-shaped tensors.
        """
        if key_value_cache is None or isinstance(key_value_cache, KeyValueCache):
            return key_value_cache
        is_tensor_pair = (
            isinstance(key_value_cache, tuple)
            and len(key_value_cache) == 2
            and all(isinstance(item, Tensor) for item in key_value_cache)
        )
        if not is_tensor_pair:
            raise ValueError(f'Key-value cache is not of the `KeyValueCache` type, nor `Tuple[Tensor, Tensor]`: `{type(key_value_cache).__name__}`')
        key_cache, value_cache = key_value_cache
        if key_cache.shape != value_cache.shape:
            raise ValueError(f'Key cache ({key_cache.shape}) and value cache ({value_cache.shape}) must have same shapes.')
        return cls(key_cache, value_cache)
def pytorch_to_torchscript_wrapper(model: Model):
    """Convert a PyTorch-wrapped model into a TorchScript wrapper.

    Scripts the shim's underlying torch module and carries over the
    conversion callbacks, mixed-precision setting, grad scaler and device.

    Raises ValueError if *model* is not backed by a PyTorchShim wrapping a
    torch.nn.Module.
    """
    shim = model.shims[0]
    if not isinstance(shim, PyTorchShim):
        raise ValueError('Expected PyTorchShim when converting a PyTorch wrapper')
    convert_inputs = model.attrs['convert_inputs']
    convert_outputs = model.attrs['convert_outputs']
    wrapped_module = shim._model
    if not isinstance(wrapped_module, torch.nn.Module):
        raise ValueError('PyTorchShim does not wrap a PyTorch module')
    scripted = torch.jit.script(wrapped_module)
    return TorchScriptWrapper_v1(
        scripted,
        convert_inputs=convert_inputs,
        convert_outputs=convert_outputs,
        mixed_precision=shim._mixed_precision,
        grad_scaler=shim._grad_scaler,
        device=shim.device,
    )
def get_websocket_user(websocket: WebSocket, ticket_model: t.Optional[TicketInner]=Depends(load_websocket_ticket)):
    """Resolve the authenticated user for a websocket connection from its ticket.

    Returns None when no valid ticket was presented; otherwise looks up the
    ticket's user id in the (context-filtered) user queryset.
    """
    if ticket_model is None:
        return None
    callback_context = CallbackContext(websocket.path_params)
    queryset = get_user_queryset(User.objects.all(), callback_context)
    return queryset.get(id=ticket_model.user)
class Compose(Bijector):
    """Bijector that chains several component bijectors into a single transform.

    `forward` applies the components in order, `inverse` in reverse order.
    When flow-graph recording is active, per-component log|det J| values are
    pulled from the BijectiveTensor cache, summed over the rightmost
    dimensions to match this bijector's overall event_dim, and accumulated.
    """

    def __init__(self, bijectors: Sequence[flowtorch.Lazy], *, shape: torch.Size, context_shape: Optional[torch.Size]=None):
        assert (len(bijectors) > 0)
        super().__init__(None, shape=shape, context_shape=context_shape)
        self.bijectors = torch.nn.ModuleList()
        for bijector in bijectors:
            # Each entry is a lazy spec; instantiate it with the running shape,
            # then advance the shape through the new component.
            assert issubclass(bijector.cls, Bijector)
            self.bijectors.append(bijector(shape=shape))
            shape = self.bijectors[(- 1)].forward_shape(shape)
        # Domain comes from the first component, codomain from the last.
        self.domain = copy.copy(self.bijectors[0].domain)
        self.codomain = copy.copy(self.bijectors[(- 1)].codomain)
        # Promote the codomain so its event_dim covers the largest component's.
        max_event_dim = max([b.codomain.event_dim for b in self.bijectors])
        if (max_event_dim > self.codomain.event_dim):
            self.codomain = constraints.independent(self.codomain, (max_event_dim - self.codomain.event_dim))
        self._context_shape = context_shape

    def forward(self, x: torch.Tensor, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        """Apply all component bijectors to *x* in order."""
        log_detJ: Optional[torch.Tensor] = None
        x_temp = x
        for bijector in self.bijectors:
            y = bijector.forward(x_temp, context)
            if (is_record_flow_graph_enabled() and requires_log_detJ()):
                # Pull this step's log-det from whichever BijectiveTensor caches it.
                if (isinstance(y, BijectiveTensor) and y.from_forward()):
                    _log_detJ = y._log_detJ
                elif (isinstance(x_temp, BijectiveTensor) and x_temp.from_inverse()):
                    _log_detJ = x_temp._log_detJ
                else:
                    raise RuntimeError('neither of x nor y contains the log-abs-det-jacobian')
                _log_detJ = _sum_rightmost(_log_detJ, (self.codomain.event_dim - bijector.codomain.event_dim))
                log_detJ = ((log_detJ + _log_detJ) if (log_detJ is not None) else _log_detJ)
            x_temp = y
        # Wrap the final result so later calls can reuse the cached graph/log-det,
        # unless it is already wrapped or already recorded as a child of x.
        if (is_record_flow_graph_enabled() and (not isinstance(y, BijectiveTensor)) and (not (isinstance(x, BijectiveTensor) and (y in set(x.parents()))))):
            y = to_bijective_tensor(x, x_temp, context, self, log_detJ, mode='forward')
        return y

    def inverse(self, y: torch.Tensor, x: Optional[torch.Tensor]=None, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        """Apply the inverse of every component bijector to *y* in reverse order."""
        log_detJ: Optional[torch.Tensor] = None
        y_temp = y
        for bijector in reversed(self.bijectors._modules.values()):
            x = bijector.inverse(y_temp, context)
            if (is_record_flow_graph_enabled() and requires_log_detJ()):
                # Mirror of forward(): fetch the step log-det from the cache.
                if (isinstance(y_temp, BijectiveTensor) and y_temp.from_forward()):
                    _log_detJ = y_temp._log_detJ
                elif (isinstance(x, BijectiveTensor) and x.from_inverse()):
                    _log_detJ = x._log_detJ
                else:
                    raise RuntimeError('neither of x nor y contains the log-abs-det-jacobian')
                event_dim: int = bijector.codomain.event_dim
                _log_detJ = _sum_rightmost(_log_detJ, (self.codomain.event_dim - event_dim))
                log_detJ = ((log_detJ + _log_detJ) if (log_detJ is not None) else _log_detJ)
            y_temp = x
        if (is_record_flow_graph_enabled() and (not isinstance(x, BijectiveTensor)) and (not (isinstance(y, BijectiveTensor) and (x in set(y.parents()))))):
            x = to_bijective_tensor(y_temp, y, context, self, log_detJ, mode='inverse')
        return x

    def log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        """Accumulate log|det J| over all components, walking from y back to x.

        If *x* was produced from *y* through cached BijectiveTensors, the
        cached chain of intermediates is replayed instead of recomputing the
        inverses.
        """
        ldj = _sum_rightmost(torch.zeros_like(y), self.codomain.event_dim)
        if (isinstance(x, BijectiveTensor) and x.has_ancestor(y)):
            _use_cached_inverse = True
            # Collect the cached chain of intermediates from x up to (not
            # including) y; it is consumed in reverse by pop() below.
            parents = []
            while (isinstance(x, BijectiveTensor) and (x is not y)):
                parents.append(x)
                x = x.parent
        else:
            _use_cached_inverse = False
        if (is_record_flow_graph_enabled() and (not _use_cached_inverse) and (not isinstance(y, BijectiveTensor))):
            warnings.warn('Computing _log_abs_det_jacobian from values and not from cache.')
        for bijector in reversed(self.bijectors._modules.values()):
            if (not _use_cached_inverse):
                y_inv = bijector.inverse(y, context)
            else:
                y_inv = parents.pop()
            _log_detJ = bijector.log_abs_det_jacobian(y_inv, y, context)
            event_dim: int = bijector.codomain.event_dim
            _log_detJ = _sum_rightmost(_log_detJ, (self.codomain.event_dim - event_dim))
            ldj += _log_detJ
            y = y_inv
        return ldj

    def param_shapes(self, shape: torch.Size) -> Sequence[torch.Size]:
        # Composition itself has no parameters of its own.
        return []
def ArgsGeneralWrapper(f):
    """Decorator adapting a chat/plugin generator *f* to the Gradio call signature.

    Repackages the raw UI arguments (cookies, model settings, chat state)
    into the llm_kwargs/plugin_kwargs/chatbot structures expected by *f*,
    and honors a cookie-pinned plugin override ('lock_plugin') if present.
    """
    def decorated(request: gradio.Request, cookies, max_length, llm_model, txt, txt2, top_p, temperature, chatbot, history, system_prompt, plugin_advanced_arg, *args):
        txt_passon = txt
        if ((txt == '') and (txt2 != '')):
            # Fall back to the secondary input box when the primary is empty.
            txt_passon = txt2
        cookies.update({'top_p': top_p, 'api_key': cookies['api_key'], 'llm_model': llm_model, 'temperature': temperature})
        llm_kwargs = {'api_key': cookies['api_key'], 'llm_model': llm_model, 'top_p': top_p, 'max_length': max_length, 'temperature': temperature, 'client_ip': request.client.host}
        plugin_kwargs = {'advanced_arg': plugin_advanced_arg}
        chatbot_with_cookie = ChatBotWithCookies(cookies)
        chatbot_with_cookie.write_list(chatbot)
        if (cookies.get('lock_plugin', None) is None):
            # No pinned plugin: dispatch to f directly; extra positional args
            # (plugin invocations) replace the request argument when present.
            if (len(args) == 0):
                (yield from f(txt_passon, llm_kwargs, plugin_kwargs, chatbot_with_cookie, history, system_prompt, request))
            else:
                (yield from f(txt_passon, llm_kwargs, plugin_kwargs, chatbot_with_cookie, history, system_prompt, *args))
        else:
            # A plugin is pinned via cookies ('module->function'); hot-reload
            # it and route the call there instead of f.
            (module, fn_name) = cookies['lock_plugin'].split('->')
            f_hot_reload = getattr(importlib.import_module(module, fn_name), fn_name)
            (yield from f_hot_reload(txt_passon, llm_kwargs, plugin_kwargs, chatbot_with_cookie, history, system_prompt, request))
        final_cookies = chatbot_with_cookie.get_cookies()
        # If the call produced files to promote, append a placeholder chat
        # entry and refresh the UI once more.
        if ((len(args) != 0) and ('files_to_promote' in final_cookies) and (len(final_cookies['files_to_promote']) > 0)):
            chatbot_with_cookie.append(['****,', '****'])
            (yield from update_ui(chatbot_with_cookie, final_cookies['history'], msg=''))
    return decorated
def extractSouzoukaiHomeBlog(item):
    """Map a Souzoukai Home Blog feed item to a release message.

    Returns None for previews / items without chapter info, False for items
    whose tags match no known series, and a release message otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('nidome murabito', 'Nidome no Jinsei wa Zettai, Shiawase ni! ~Murabito ni Tensei shitan dakedo, Kondo wa Hitonami no Shiawase ga Hoshii! Demo, Dekiru no?~', 'translated'),
        ('ruri to yuri to hime to majo', 'Ruri to Yuri to Hime to Majo', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('double check', 'Double Check', 'oel'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    # First matching tag wins, mirroring the tagmap declaration order.
    match = next(((series, kind) for tag, series, kind in known_tags if tag in item['tags']), None)
    if match is None:
        return False
    series, kind = match
    return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=kind)
class OptionPlotoptionsVariwide(Options):
    """Generated option accessors for Highcharts `plotOptions.variwide`.

    Scalar options are written as getter/setter pairs sharing one name, with
    the getter's `_config_get(...)` argument carrying the Highcharts default;
    nested option groups return typed sub-option objects via
    `_config_sub_data`.

    NOTE(review): without `@property` / `@<name>.setter` decorators the
    setter definition shadows the getter of the same name; the decorators
    were likely stripped from this copy -- confirm against the original
    generated source.
    """

    def accessibility(self) -> 'OptionPlotoptionsVariwideAccessibility':
        # Nested option group: series accessibility settings.
        return self._config_sub_data('accessibility', OptionPlotoptionsVariwideAccessibility)

    def allowPointSelect(self):
        return self._config_get(False)

    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self):
        return self._config_get(True)

    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    def animationLimit(self):
        return self._config_get(None)

    def animationLimit(self, num: float):
        self._config(num, js_type=False)

    def borderColor(self):
        return self._config_get('#ffffff')

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def borderRadius(self):
        return self._config_get(3)

    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    def borderWidth(self):
        return self._config_get('undefined')

    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    def centerInCategory(self):
        return self._config_get(False)

    def centerInCategory(self, flag: bool):
        self._config(flag, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def clip(self):
        return self._config_get(True)

    def clip(self, flag: bool):
        self._config(flag, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorAxis(self):
        return self._config_get(0)

    def colorAxis(self, num: float):
        self._config(num, js_type=False)

    def colorByPoint(self):
        return self._config_get(False)

    def colorByPoint(self, flag: bool):
        self._config(flag, js_type=False)

    def colorIndex(self):
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def colorKey(self):
        return self._config_get('y')

    def colorKey(self, text: str):
        self._config(text, js_type=False)

    def colors(self):
        return self._config_get(None)

    def colors(self, value: Any):
        self._config(value, js_type=False)

    def cropThreshold(self):
        return self._config_get(50)

    def cropThreshold(self, num: float):
        self._config(num, js_type=False)

    def cursor(self):
        return self._config_get(None)

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def custom(self):
        return self._config_get(None)

    def custom(self, value: Any):
        self._config(value, js_type=False)

    def dashStyle(self):
        return self._config_get('Solid')

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def dataLabels(self) -> 'OptionPlotoptionsVariwideDatalabels':
        # Nested option group: per-point data label settings.
        return self._config_sub_data('dataLabels', OptionPlotoptionsVariwideDatalabels)

    def dataSorting(self) -> 'OptionPlotoptionsVariwideDatasorting':
        # Nested option group: data sorting settings.
        return self._config_sub_data('dataSorting', OptionPlotoptionsVariwideDatasorting)

    def description(self):
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def dragDrop(self) -> 'OptionPlotoptionsVariwideDragdrop':
        # Nested option group: drag & drop settings.
        return self._config_sub_data('dragDrop', OptionPlotoptionsVariwideDragdrop)

    def enableMouseTracking(self):
        return self._config_get(True)

    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)

    def events(self) -> 'OptionPlotoptionsVariwideEvents':
        # Nested option group: series event handlers.
        return self._config_sub_data('events', OptionPlotoptionsVariwideEvents)

    def findNearestPointBy(self):
        return self._config_get('x')

    def findNearestPointBy(self, text: str):
        self._config(text, js_type=False)

    def getExtremesFromAll(self):
        return self._config_get(False)

    def getExtremesFromAll(self, flag: bool):
        self._config(flag, js_type=False)

    def grouping(self):
        return self._config_get(True)

    def grouping(self, flag: bool):
        self._config(flag, js_type=False)

    def groupPadding(self):
        return self._config_get(0)

    def groupPadding(self, num: float):
        self._config(num, js_type=False)

    def inactiveOtherPoints(self):
        return self._config_get(False)

    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)

    def includeInDataExport(self):
        return self._config_get(None)

    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)

    def keys(self):
        return self._config_get(None)

    def keys(self, value: Any):
        self._config(value, js_type=False)

    def label(self) -> 'OptionPlotoptionsVariwideLabel':
        # Nested option group: series label settings.
        return self._config_sub_data('label', OptionPlotoptionsVariwideLabel)

    def legendSymbol(self):
        return self._config_get('rectangle')

    def legendSymbol(self, text: str):
        self._config(text, js_type=False)

    def linkedTo(self):
        return self._config_get(None)

    def linkedTo(self, text: str):
        self._config(text, js_type=False)

    def maxPointWidth(self):
        return self._config_get(None)

    def maxPointWidth(self, num: float):
        self._config(num, js_type=False)

    def minPointLength(self):
        return self._config_get(0)

    def minPointLength(self, num: float):
        self._config(num, js_type=False)

    def negativeColor(self):
        return self._config_get(None)

    def negativeColor(self, text: str):
        self._config(text, js_type=False)

    def onPoint(self) -> 'OptionPlotoptionsVariwideOnpoint':
        # Nested option group: on-point series settings.
        return self._config_sub_data('onPoint', OptionPlotoptionsVariwideOnpoint)

    def opacity(self):
        return self._config_get(1)

    def opacity(self, num: float):
        self._config(num, js_type=False)

    def point(self) -> 'OptionPlotoptionsVariwidePoint':
        # Nested option group: per-point settings.
        return self._config_sub_data('point', OptionPlotoptionsVariwidePoint)

    def pointDescriptionFormat(self):
        return self._config_get(None)

    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)

    def pointDescriptionFormatter(self):
        return self._config_get(None)

    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def pointInterval(self):
        return self._config_get(1)

    def pointInterval(self, num: float):
        self._config(num, js_type=False)

    def pointIntervalUnit(self):
        return self._config_get(None)

    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)

    def pointPadding(self):
        return self._config_get(0)

    def pointPadding(self, num: float):
        self._config(num, js_type=False)

    def pointPlacement(self):
        return self._config_get(None)

    def pointPlacement(self, text: str):
        self._config(text, js_type=False)

    def pointRange(self):
        return self._config_get(None)

    def pointRange(self, num: float):
        self._config(num, js_type=False)

    def pointStart(self):
        return self._config_get(0)

    def pointStart(self, num: float):
        self._config(num, js_type=False)

    def pointWidth(self):
        return self._config_get(None)

    def pointWidth(self, num: float):
        self._config(num, js_type=False)

    def relativeXValue(self):
        return self._config_get(False)

    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)

    def selected(self):
        return self._config_get(False)

    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    def shadow(self):
        return self._config_get(False)

    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    def showCheckbox(self):
        return self._config_get(False)

    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)

    def showInLegend(self):
        return self._config_get(None)

    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)

    def skipKeyboardNavigation(self):
        return self._config_get(None)

    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)

    def softThreshold(self):
        return self._config_get(True)

    def softThreshold(self, flag: bool):
        self._config(flag, js_type=False)

    def sonification(self) -> 'OptionPlotoptionsVariwideSonification':
        # Nested option group: sonification settings.
        return self._config_sub_data('sonification', OptionPlotoptionsVariwideSonification)

    def stacking(self):
        return self._config_get(None)

    def stacking(self, text: str):
        self._config(text, js_type=False)

    def states(self) -> 'OptionPlotoptionsVariwideStates':
        # Nested option group: hover/select state settings.
        return self._config_sub_data('states', OptionPlotoptionsVariwideStates)

    def stickyTracking(self):
        return self._config_get(False)

    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)

    def threshold(self):
        return self._config_get(0)

    def threshold(self, num: float):
        self._config(num, js_type=False)

    def tooltip(self) -> 'OptionPlotoptionsVariwideTooltip':
        # Nested option group: tooltip settings.
        return self._config_sub_data('tooltip', OptionPlotoptionsVariwideTooltip)

    def turboThreshold(self):
        return self._config_get(1000)

    def turboThreshold(self, num: float):
        self._config(num, js_type=False)

    def visible(self):
        return self._config_get(True)

    def visible(self, flag: bool):
        self._config(flag, js_type=False)

    def zoneAxis(self):
        return self._config_get('y')

    def zoneAxis(self, text: str):
        self._config(text, js_type=False)

    def zones(self) -> 'OptionPlotoptionsVariwideZones':
        # Nested option group: zone definitions along zoneAxis.
        return self._config_sub_data('zones', OptionPlotoptionsVariwideZones)
class RemoveQueuedItemAfterPlayed(widgets.CheckPreference, widgets.CheckConditional):
    """Preference checkbox: remove a queue item once it has been played.

    NOTE(review): `condition_preference_name` suggests this widget is gated
    on the 'queue/remove_item_when_played' preference via CheckConditional --
    confirm against the widgets module.
    """
    # Checkbox is off by default.
    default = False
    # Persisted preference key for this widget.
    name = 'queue/remove_item_after_played'
    # Preference consulted by the CheckConditional mixin.
    condition_preference_name = 'queue/remove_item_when_played'

    def __init__(self, preferences, widget):
        # Initialize both mixin bases explicitly (no cooperative super chain).
        widgets.CheckPreference.__init__(self, preferences, widget)
        widgets.CheckConditional.__init__(self)
class BasicTransformerBlock(nn.Module):
    """Transformer block: self-attention -> cross-attention -> feed-forward.

    Each sub-layer is pre-normalized (LayerNorm applied before the
    attention/FF call) and the residual is passed explicitly through the
    `residual=` keyword argument.
    """

    def __init__(self, dim, n_heads, d_head, dropout=0.0, context_dim=None, gated_ff=True, checkpoint=True):
        super().__init__()
        # Self-attention (no context) followed by cross-attention over `context`.
        self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout)
        self.ff = FeedForward(dim, dropout=dropout, glu=gated_ff)
        self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim, heads=n_heads, dim_head=d_head, dropout=dropout)
        self.norm1 = nn.LayerNorm(dim)
        self.norm2 = nn.LayerNorm(dim)
        self.norm3 = nn.LayerNorm(dim)
        # NOTE(review): `checkpoint` is stored but this forward() does not
        # apply gradient checkpointing -- confirm whether a wrapper elsewhere
        # consumes it.
        self.checkpoint = checkpoint
        # Constructor arguments retained, presumably for re-instantiation or
        # serialization elsewhere -- confirm against callers.
        self.param = (dim, n_heads, d_head, context_dim, gated_ff, checkpoint)

    def forward(self, x, context=None):
        x = self.attn1(self.norm1(x), residual=x)
        x = self.attn2(self.norm2(x), context=context, residual=x)
        x = self.ff(self.norm3(x), residual=x)
        return x
def test_wf1_compile_time_constant_vars():
    """Workflow test: one task argument is a compile-time constant string.

    NOTE(review): t1/t2/my_wf look like they originally carried flytekit
    @task/@workflow decorators that were stripped from this copy -- confirm
    against the original source.
    """
    def t1(a: int) -> typing.NamedTuple('OutputsBC', t1_int_output=int, c=str):
        return ((a + 2), 'world')

    def t2(a: str, b: str) -> str:
        return (b + a)

    def my_wf(a: int, b: str) -> (int, str):
        (x, y) = t1(a=a)
        # t2's first argument is fixed at definition time.
        d = t2(a='This is my way', b=b)
        return (x, d)
    x = my_wf(a=5, b='hello ')
    assert (x == (7, 'hello This is my way'))
    # The Flyte context stack must be balanced after execution.
    assert (context_manager.FlyteContextManager.size() == 1)
class BKZReduction(object):
    """Plain BKZ lattice reduction built from fpylll primitives.

    Holds an integer basis `A`, its GSO object `M` and an LLL reduction
    object `lll_obj`; `__call__` runs BKZ tours until one of the configured
    termination conditions fires.
    """

    def __init__(self, A):
        """Accept an IntegerMatrix, a GSO.Mat or an LLL.Reduction and derive the rest.

        :param A: the lattice basis in one of the three accepted forms.
        :raises TypeError: for any other input type.
        """
        if isinstance(A, GSO.Mat):
            L = None
            M = A
            A = M.B
        elif isinstance(A, LLL.Reduction):
            L = A
            M = L.M
            A = M.B
        elif isinstance(A, IntegerMatrix):
            L = None
            M = None
            A = A
        else:
            raise TypeError(("Matrix must be IntegerMatrix but got type '%s'" % type(A)))
        # A bare matrix is LLL-reduced in place before building GSO/LLL objects.
        if ((M is None) and (L is None)):
            LLL.reduction(A)
        self.A = A
        if (M is None):
            self.M = GSO.Mat(A, flags=GSO.ROW_EXPO)
        else:
            self.M = M
        if (L is None):
            self.lll_obj = LLL.Reduction(self.M, flags=LLL.DEFAULT)
        else:
            self.lll_obj = L

    def __call__(self, params, min_row=0, max_row=(- 1), tracer=False):
        """Run BKZ tours with *params* until clean, auto-abort, loop or time limit.

        :param params: BKZ parameter object (block size, flags, limits).
        :param min_row: first row to reduce.
        :param max_row: last row to reduce (-1 means all rows).
        :param tracer: tracer instance or factory used for statistics.
        :returns: True if the last tour made no changes ("clean").
        """
        tracer = normalize_tracer(tracer)
        try:
            label = params['name']
        except KeyError:
            label = 'bkz'
        if (not isinstance(tracer, Tracer)):
            tracer = tracer(self, root_label=label, verbosity=(params.flags & BKZ.VERBOSE), start_clocks=True, max_depth=2)
        if (params.flags & BKZ.AUTO_ABORT):
            auto_abort = BKZ.AutoAbort(self.M, self.A.nrows)
        cputime_start = process_time()
        with tracer.context('lll'):
            self.lll_obj()
        i = 0
        while True:
            with tracer.context('tour', i, dump_gso=(params.flags & BKZ.DUMP_GSO)):
                clean = self.tour(params, min_row, max_row, tracer)
            i += 1
            # Termination: clean tour / trivial block size, quality-based
            # auto-abort, loop-count limit, or wall-time limit.
            if (clean or (params.block_size >= self.A.nrows)):
                break
            if ((params.flags & BKZ.AUTO_ABORT) and auto_abort.test_abort()):
                break
            if ((params.flags & BKZ.MAX_LOOPS) and (i >= params.max_loops)):
                break
            if ((params.flags & BKZ.MAX_TIME) and ((process_time() - cputime_start) >= params.max_time)):
                break
        tracer.exit()
        try:
            self.trace = tracer.trace
        except AttributeError:
            self.trace = None
        return clean

    def tour(self, params, min_row=0, max_row=(- 1), tracer=dummy_tracer):
        """One BKZ tour: SVP-reduce every block [kappa, kappa+block_size).

        :returns: True if no block changed during the tour.
        """
        if (max_row == (- 1)):
            max_row = self.A.nrows
        clean = True
        for kappa in range(min_row, (max_row - 1)):
            # Blocks near the end shrink to fit the remaining rows.
            block_size = min(params.block_size, (max_row - kappa))
            clean &= self.svp_reduction(kappa, block_size, params, tracer)
        self.lll_obj.size_reduction(max(0, (max_row - 1)), max_row, max(0, (max_row - 2)))
        return clean

    def svp_preprocessing(self, kappa, block_size, params, tracer):
        """LLL-reduce up to the end of the current block before enumeration.

        :returns: False if LLL performed any swaps (basis changed).
        """
        clean = True
        # BOUNDED_LLL restricts the LLL window to start at kappa.
        lll_start = (kappa if (params.flags & BKZ.BOUNDED_LLL) else 0)
        with tracer.context('lll'):
            self.lll_obj(lll_start, lll_start, (kappa + block_size))
            if (self.lll_obj.nswaps > 0):
                clean = False
        return clean

    def svp_call(self, kappa, block_size, params, tracer=dummy_tracer):
        """Enumerate the block for a shortest vector.

        :returns: the solution coefficient vector, or None if the current
            basis vector is already (delta-)shortest / the GH-bounded search
            found nothing.
        """
        (max_dist, expo) = self.M.get_r_exp(kappa, kappa)
        delta_max_dist = (self.lll_obj.delta * max_dist)
        if (params.flags & BKZ.GH_BND):
            # Cap the enumeration radius by the Gaussian heuristic bound.
            root_det = self.M.get_root_det(kappa, (kappa + block_size))
            (max_dist, expo) = adjust_radius_to_gh_bound(max_dist, expo, block_size, root_det, params.gh_factor)
        try:
            enum_obj = Enumeration(self.M)
            with tracer.context('enumeration', enum_obj=enum_obj, probability=1.0):
                (max_dist, solution) = enum_obj.enumerate(kappa, (kappa + block_size), max_dist, expo)[0]
        except EnumerationError as msg:
            # With a GH-capped radius an empty enumeration is expected; without
            # the cap it is a real failure.
            if (params.flags & BKZ.GH_BND):
                return None
            else:
                raise EnumerationError(msg)
        if (max_dist >= (delta_max_dist * (1 << expo))):
            return None
        else:
            return solution

    def svp_postprocessing(self, kappa, block_size, solution, tracer=dummy_tracer):
        """Insert the enumerated short vector into the basis at row kappa.

        Uses row moves when the solution is (effectively) a single basis
        vector, and otherwise combines rows via repeated additions
        (a Euclidean-style elimination on the coefficients) so the new
        vector replaces a basis row without changing the lattice.

        :returns: True if the basis was left unchanged (no solution).
        """
        if (solution is None):
            return True
        j_nz = None
        # Find the last coefficient with absolute value 1, if any.
        for i in range(block_size)[::(- 1)]:
            if (abs(solution[i]) == 1):
                j_nz = i
                break
        if (len([x for x in solution if x]) == 1):
            # Solution is +- a single basis vector: just move it to the front.
            self.M.move_row((kappa + j_nz), kappa)
        elif (j_nz is not None):
            # A unit coefficient exists: fold the other rows into that one.
            with self.M.row_ops((kappa + j_nz), ((kappa + j_nz) + 1)):
                for i in range(block_size):
                    if (solution[i] and (i != j_nz)):
                        self.M.row_addmul((kappa + j_nz), (kappa + i), (solution[j_nz] * solution[i]))
            self.M.move_row((kappa + j_nz), kappa)
        else:
            # No unit coefficient: make all coefficients non-negative, then
            # reduce pairs gcd-style until one row carries the full solution.
            solution = list(solution)
            for i in range(block_size):
                if (solution[i] < 0):
                    solution[i] = (- solution[i])
                    self.M.negate_row((kappa + i))
            with self.M.row_ops(kappa, (kappa + block_size)):
                offset = 1
                while (offset < block_size):
                    k = (block_size - 1)
                    while ((k - offset) >= 0):
                        if (solution[k] or solution[(k - offset)]):
                            if (solution[k] < solution[(k - offset)]):
                                (solution[k], solution[(k - offset)]) = (solution[(k - offset)], solution[k])
                                self.M.swap_rows(((kappa + k) - offset), (kappa + k))
                            while solution[(k - offset)]:
                                while (solution[(k - offset)] <= solution[k]):
                                    solution[k] = (solution[k] - solution[(k - offset)])
                                    self.M.row_addmul(((kappa + k) - offset), (kappa + k), 1)
                                (solution[k], solution[(k - offset)]) = (solution[(k - offset)], solution[k])
                                self.M.swap_rows(((kappa + k) - offset), (kappa + k))
                        k -= (2 * offset)
                    offset *= 2
            self.M.move_row(((kappa + block_size) - 1), kappa)
        return False

    def svp_reduction(self, kappa, block_size, params, tracer=dummy_tracer):
        """Full SVP step for one block: preprocess, enumerate, postprocess.

        :returns: True if neither step changed the basis.
        """
        clean = True
        with tracer.context('preprocessing'):
            clean_pre = self.svp_preprocessing(kappa, block_size, params, tracer)
        clean &= clean_pre
        solution = self.svp_call(kappa, block_size, params, tracer)
        with tracer.context('postprocessing'):
            clean_post = self.svp_postprocessing(kappa, block_size, solution, tracer)
        clean &= clean_post
        self.lll_obj.size_reduction(0, (kappa + 1))
        return clean
def extractWtitranslationBlogspotCom(item):
    """Map a wtitranslation.blogspot.com feed item to a release message.

    Returns None for previews / items without chapter info, a release
    message for untagged or 'Chapters'-tagged chapter posts, else False.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Both accepted tag sets (no tags, or just 'Chapters') map to the same series.
    if item['title'].startswith('Chapter ') and item['tags'] in ([], ['Chapters']):
        return buildReleaseMessageWithType(item, 'Womanizing True Immortal', vol, chp, frag=frag, postfix=postfix, tl_type='translated')
    return False
def test_autodiff():
    """Check autodiff stretch/bend gradients against the analytic reference."""
    geom = geom_from_library('h2o.xyz', coord_type='redund')
    print(geom)
    B_ref = geom.internal.B
    print(B_ref)
    # Dispatch by number of atom indices: 2 -> stretch, 3 -> bend.
    auto_funcs = {2: autodiff.stretch_grad, 3: autodiff.bend_grad}
    internal = geom.internal
    ref_funcs = {2: internal.calc_stretch, 3: internal.calc_bend}
    coords = geom.coords3d
    for idx, prim in enumerate(internal._prim_internals):
        indices = prim.inds
        print(idx, indices)
        arity = len(indices)
        auto_grad = auto_funcs[arity](coords, indices)
        _, ref_grad = ref_funcs[arity](coords, indices, grad=True)
        np.testing.assert_allclose(auto_grad.flatten(), ref_grad)
class NetworkSimulator():
    """Discrete-time simulation of a lossy peer-to-peer message network.

    Agents are expected to expose an ``id`` attribute plus ``tick()`` and
    ``on_receive(obj)`` methods. Messages are delivered after a randomly
    sampled latency and dropped with probability ``1 - reliability``.
    """

    def __init__(self, latency=50):
        # Participating agent objects; fill this before generate_peers().
        self.agents = []
        # Latency sampler: normal(latency, 2*latency//5), clamped to >= 0 ticks.
        self.latency_distribution_sample = transform(normal_distribution(latency, ((latency * 2) // 5)), (lambda x: max(x, 0)))
        # Current simulation tick.
        self.time = 0
        # Pending deliveries: tick -> list of (recipient, obj) pairs.
        self.objqueue = {}
        # Topology: agent id -> list of peer agent objects (may hold duplicates).
        self.peers = {}
        # Per-delivery success probability.
        self.reliability = 0.9

    def generate_peers(self, num_peers=5):
        """Rebuild a random, roughly symmetric peer topology from scratch."""
        self.peers = {}
        for a in self.agents:
            p = []
            while (len(p) <= (num_peers // 2)):
                p.append(random.choice(self.agents))
                # Undo the pick if we just selected ourselves (no self-loops).
                if (p[(- 1)] == a):
                    p.pop()
            self.peers[a.id] = (self.peers.get(a.id, []) + p)
            # Register the reverse direction so links are bidirectional.
            for peer in p:
                self.peers[peer.id] = (self.peers.get(peer.id, []) + [a])

    def tick(self):
        """Advance one step: deliver due messages, then tick every agent."""
        if (self.time in self.objqueue):
            for (recipient, obj) in self.objqueue[self.time]:
                # Simulate packet loss: deliver only with `reliability` odds.
                if (random.random() < self.reliability):
                    recipient.on_receive(obj)
            del self.objqueue[self.time]
        for a in self.agents:
            a.tick()
        self.time += 1

    def run(self, steps):
        """Run the simulation for *steps* ticks."""
        for i in range(steps):
            self.tick()

    def broadcast(self, sender, obj):
        """Schedule *obj* for latency-delayed delivery to each of *sender*'s peers."""
        for p in self.peers[sender.id]:
            recv_time = (self.time + self.latency_distribution_sample())
            if (recv_time not in self.objqueue):
                self.objqueue[recv_time] = []
            self.objqueue[recv_time].append((p, obj))

    def direct_send(self, to_id, obj):
        """Schedule *obj* for delivery to the agent whose id equals *to_id*."""
        for a in self.agents:
            if (a.id == to_id):
                recv_time = (self.time + self.latency_distribution_sample())
                if (recv_time not in self.objqueue):
                    self.objqueue[recv_time] = []
                self.objqueue[recv_time].append((a, obj))

    def knock_offline_random(self, n):
        """Disconnect *n* distinct random agents from the topology entirely."""
        ko = {}
        while (len(ko) < n):
            c = random.choice(self.agents)
            ko[c.id] = c
        # Knocked-out agents lose all peers...
        for c in ko.values():
            self.peers[c.id] = []
        # ...and everyone else drops links pointing at them.
        for a in self.agents:
            self.peers[a.id] = [x for x in self.peers[a.id] if (x.id not in ko)]

    def partition(self):
        """Split the network into two halves with no cross-partition links."""
        a = {}
        while (len(a) < (len(self.agents) / 2)):
            c = random.choice(self.agents)
            a[c.id] = c
        for c in self.agents:
            if (c.id in a):
                self.peers[c.id] = [x for x in self.peers[c.id] if (x.id in a)]
            else:
                self.peers[c.id] = [x for x in self.peers[c.id] if (x.id not in a)]
class OptionSeriesPolygonStatesHover(Options):
    """Wrapper for Highcharts ``series.polygon.states.hover`` options.

    NOTE(review): every getter/setter pair below shares one name; the
    original source presumably used ``@property`` / setter decorators that
    were stripped here -- as written, the later ``def`` shadows the earlier
    one. Verify against the generating tool before relying on this file.
    """

    def animation(self) -> 'OptionSeriesPolygonStatesHoverAnimation':
        # Sub-options object for the hover-state animation.
        return self._config_sub_data('animation', OptionSeriesPolygonStatesHoverAnimation)

    def enabled(self):
        # Getter: whether the hover state is enabled (default True).
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter counterpart for `enabled`.
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionSeriesPolygonStatesHoverHalo':
        # Sub-options object for the hover halo.
        return self._config_sub_data('halo', OptionSeriesPolygonStatesHoverHalo)

    def lineWidth(self):
        # Getter: absolute line width when hovered (default None).
        return self._config_get(None)

    def lineWidth(self, num: float):
        # Setter counterpart for `lineWidth`.
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        # Getter: line-width increment when hovered (default 1).
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        # Setter counterpart for `lineWidthPlus`.
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesPolygonStatesHoverMarker':
        # Sub-options object for the hover-state marker.
        return self._config_sub_data('marker', OptionSeriesPolygonStatesHoverMarker)
def test_format_dict_error():
    """Both the top-level and the namespaced ``apply_formatters_to_dict``
    must raise a ValueError that names the offending field.

    Previously the first ``pytest.raises`` capture was silently overwritten
    by the second before being inspected, so the first call's error message
    was never checked.
    """
    with pytest.raises(ValueError) as exc_info:
        apply_formatters_to_dict({'myfield': int}, {'myfield': 'a'})
    assert ('myfield' in str(exc_info.value))
    with pytest.raises(ValueError) as exc_info:
        eth_utils.apply_formatters_to_dict({'myfield': int}, {'myfield': 'a'})
    assert ('myfield' in str(exc_info.value))
_parameters()
(name='true_positive', expected_result='fail', drop_failure_percent_threshold=5, metric_value=25)
(name='false_positive', expected_result='fail', drop_failure_percent_threshold=None, metric_value=29)
(name='true_negative', expected_result='pass', drop_failure_percent_threshold=5, metric_value=29)
# NOTE(review): the four lines above look like a stripped parametrization
# decorator (e.g. @Parametrization.parameters(...) cases) for the test
# below -- restore the decorator syntax when merging.
def test_volume_anomaly_static_data_drop(test_id: str, dbt_project: DbtProject, expected_result: str, drop_failure_percent_threshold: int, metric_value: int):
    """Volume-anomaly test with a static drop-percentage threshold.

    Seeds 30 rows for every day strictly before yesterday, then
    *metric_value* rows for yesterday (the potential "drop"), and asserts
    the elementary test status matches *expected_result* given
    *drop_failure_percent_threshold*.
    """
    now = datetime.utcnow()
    # Baseline: 30 rows per historical day (all days before yesterday).
    data = ([{TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)} for cur_date in generate_dates(base_date=now, step=timedelta(days=1)) if (cur_date < (now - timedelta(days=1)))] * 30)
    # Yesterday gets metric_value rows -- below/above the drop threshold.
    data += ([{TIMESTAMP_COLUMN: (now - timedelta(days=1)).strftime(DATE_FORMAT)}] * metric_value)
    test_args = {**DBT_TEST_ARGS, 'time_bucket': {'period': 'day', 'count': 1}, 'ignore_small_changes': {'drop_failure_percent_threshold': drop_failure_percent_threshold}}
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, test_args, data=data)
    assert (test_result['status'] == expected_result)
class crypto_base(object):
    """Base class holding an MD5-derived session key and a TCP/UDP mode flag."""
    # Private storage for the derived key and the transport-mode flag.
    __key = None
    __is_tcp = None

    def __init__(self, is_tcp=False):
        """Record the transport mode; the key stays unset until set_key()."""
        self.__is_tcp = is_tcp
        self.__key = None

    def key(self):
        """Return the current (MD5-hashed) key, or None when unset."""
        return self.__key

    def is_tcp(self):
        """Return True when operating over TCP, False for UDP."""
        return self.__is_tcp

    def set_key(self, key: str):
        """Derive and store the working key as the MD5 digest of *key*."""
        self.__key = calc_str_md5(key)
_ns.route('/<username>/<coprname>/update_chroot/<chrootname>/', methods=['POST'])
_ns.route('/g/<group_name>/<coprname>/update_chroot/<chrootname>/', methods=['POST'])
_required
_with_copr
# NOTE(review): the four lines above look like stripped decorators
# (route registrations plus login/copr guards) -- restore the `@` decorator
# syntax when merging.
def chroot_update(copr, chrootname):
    """POST handler: update or delete settings of one chroot in *copr*.

    Requires edit permission on the project; validates ChrootForm and either
    updates the chroot's buildroot settings or removes its comps.xml.
    Redirects back to the project edit page.
    """
    chroot_name = chrootname
    form = forms.ChrootForm()
    chroot = ComplexLogic.get_copr_chroot(copr, chroot_name)
    if (not flask.g.user.can_edit(copr)):
        raise AccessRestricted('You are not allowed to modify chroots in project {0}.'.format(copr.name))
    if (not form.validate_on_submit()):
        # Validation failed: re-render the edit page with the form errors.
        flask.flash(form.errors, 'error')
        return render_chroot_edit(form, copr, chroot)
    if ('submit' in flask.request.form):
        action = flask.request.form['submit']
        if (action == 'update'):
            comps_name = comps_xml = None
            if form.comps.data:
                # An uploaded comps.xml: read its content and keep the filename.
                comps_xml = form.comps.data.stream.read()
                comps_name = form.comps.data.filename
            coprs_logic.CoprChrootsLogic.update_chroot(flask.g.user, chroot, form.buildroot_pkgs.data, form.repos.data, comps=comps_xml, comps_name=comps_name, with_opts=form.with_opts.data, without_opts=form.without_opts.data, module_toggle=form.module_toggle.data, bootstrap=form.bootstrap.data, bootstrap_image=form.bootstrap_image.data, isolation=form.isolation.data)
        elif (action == 'delete_comps'):
            CoprChrootsLogic.remove_comps(flask.g.user, chroot)
        flask.flash('Buildroot {0} in project {1} has been updated successfully.'.format(chroot_name, copr.name), 'success')
        db.session.commit()
    return flask.redirect(url_for_copr_edit(copr))
.parametrize('lang', set((lang for lang in Mnemonic.list_languages() if (lang not in ('japanese', 'korean', 'chinese_simplified', 'chinese_traditional')))))
# NOTE(review): the line above is presumably a stripped
# @pytest.mark.parametrize decorator for the test below -- restore it when
# merging.
def test_expand(lang):
    """Every sufficiently long unambiguous prefix of a generated mnemonic
    word must expand back to the full word."""
    m = Mnemonic(lang)
    words = m.generate()
    for word in words.split(' '):
        norm_word = normalize_string(word)
        # Check prefixes of length 5 up to the full (normalized) word.
        for size in range(4, len(norm_word)):
            assert (m.expand(norm_word[:(size + 1)]) == word)
class DistanceDecimeters(DistanceValue):
    """A distance value expressed in decimeters."""

    def __init__(self, decimeters):
        # Stored magnitude, in decimeters.
        self.decimeters = decimeters

    def __str__(self):
        return '{}dm'.format(self.decimeters)

    def __mul__(self, other):
        """Scale by an int or float, yielding a new DistanceDecimeters."""
        assert isinstance(other, (float, int)), '{} can only be multiplied by an int or float'.format(self)
        return DistanceDecimeters(self.decimeters * other)

    def mm(self):
        """Convert to millimeters via the module-level DECIMETER_MM factor."""
        return self.decimeters * DECIMETER_MM
class _CRG(Module):
    """Clock/reset generator for a Xilinx 7-series (Arty-style) board.

    Derives the sys, 4x-sys, 90-degree-shifted 4x-sys (DQS), IDELAY and
    Ethernet clock domains from the board's clk100 input via an S7 PLL.
    """

    def __init__(self, platform, sys_clk_freq):
        self.rst = Signal()
        self.clock_domains.cd_sys = ClockDomain()
        self.clock_domains.cd_sys4x = ClockDomain(reset_less=True)
        self.clock_domains.cd_sys4x_dqs = ClockDomain(reset_less=True)
        self.clock_domains.cd_idelay = ClockDomain()
        self.clock_domains.cd_eth = ClockDomain()
        self.submodules.pll = pll = S7PLL(speedgrade=(- 1))
        # PLL resets on the (active-low) cpu_reset button or the soft rst signal.
        self.comb += pll.reset.eq(((~ platform.request('cpu_reset')) | self.rst))
        # NOTE(review): the `.0` frequency literals below look garbled -- for
        # a clk100 input one would expect 100e6 (and concrete output
        # frequencies for idelay/eth); verify against the original board file.
        pll.register_clkin(platform.request('clk100'), .0)
        pll.create_clkout(self.cd_sys, sys_clk_freq)
        pll.create_clkout(self.cd_sys4x, (4 * sys_clk_freq))
        # DQS clock: same rate as sys4x but phase-shifted 90 degrees.
        pll.create_clkout(self.cd_sys4x_dqs, (4 * sys_clk_freq), phase=90)
        pll.create_clkout(self.cd_idelay, .0)
        pll.create_clkout(self.cd_eth, .0)
        platform.add_false_path_constraints(self.cd_sys.clk, pll.clkin)
        self.submodules.idelayctrl = S7IDELAYCTRL(self.cd_idelay)
        # Drive the PHY's reference-clock pin from the eth domain clock.
        self.comb += platform.request('eth_ref_clk').eq(self.cd_eth.clk)
class AggregationDialogues(Dialogues, ABC):
    """Collection class keeping track of all aggregation dialogues."""
    # Terminal end-states for an aggregation dialogue.
    END_STATES = frozenset({AggregationDialogue.EndState.SUCCESSFUL, AggregationDialogue.EndState.FAILED})
    # Dialogues that reach a terminal state are discarded, not retained.
    _keep_terminal_state_dialogues = False

    def __init__(self, self_address: Address, role_from_first_message: Callable[([Message, Address], Dialogue.Role)], dialogue_class: Type[AggregationDialogue]=AggregationDialogue) -> None:
        """Initialize the dialogues collection.

        :param self_address: address of the entity whose dialogues are maintained.
        :param role_from_first_message: callable deriving a dialogue role from the first message.
        :param dialogue_class: dialogue class to instantiate (defaults to AggregationDialogue).
        """
        Dialogues.__init__(self, self_address=self_address, end_states=cast(FrozenSet[Dialogue.EndState], self.END_STATES), message_class=AggregationMessage, dialogue_class=dialogue_class, role_from_first_message=role_from_first_message)
def backprop_reduce_mean(d_means, lengths, *, threads_per_block=128, num_blocks=128):
    """Backward pass of a per-sequence mean reduction, run on the GPU.

    d_means: float array of per-sequence gradients, indexed as (B, O) --
        presumably a CuPy device array; verify against callers.
    lengths: integer array of per-sequence lengths; B = len(lengths) and
        T = lengths.sum().
    Returns a (T, O) array of per-timestep gradients filled by the kernel.
    """
    _is_float_array(d_means)
    B = len(lengths)
    T = int(lengths.sum())
    O = d_means.shape[1]
    _check_lengths(lengths, T)
    # The kernel writes every element of `out`, so skip zero-initialization.
    out = _alloc((T, O), dtype=d_means.dtype, zeros=False)
    # Dispatch to the float32 or double kernel variant based on dtype.
    if (d_means.dtype == 'float32'):
        backprop_reduce_mean_kernel_float((num_blocks,), (threads_per_block,), (out, d_means, lengths, B, T, O))
    else:
        backprop_reduce_mean_kernel_double((num_blocks,), (threads_per_block,), (out, d_means, lengths, B, T, O))
    return out
class OptionSeriesHistogramSonificationContexttracksMappingPan(Options):
    """Wrapper for Highcharts
    ``series.histogram.sonification.contextTracks.mapping.pan`` options.

    NOTE(review): every getter/setter pair below shares one name; the
    original source presumably used ``@property`` / setter decorators that
    were stripped here -- as written, the later ``def`` shadows the earlier
    one. Verify against the generating tool before relying on this file.
    """

    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter counterpart for `mapFunction`.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property to map to (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter counterpart for `mapTo`.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter counterpart for `max`.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter counterpart for `min`.
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit to map within (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter counterpart for `within`.
        self._config(value, js_type=False)
def test_task_get_overrides_with_command_environment_and_secrets(task_definition):
    """Overrides must merge command, environment and secrets for one container."""
    task_definition.set_commands(webserver='/usr/bin/python script.py')
    task_definition.set_environment((('webserver', 'foo', 'baz'),))
    task_definition.set_secrets((('webserver', 'bar', 'qux'),))
    overrides = task_definition.get_overrides()
    assert len(overrides) == 1
    container = overrides[0]
    assert container['name'] == 'webserver'
    # Command strings are split into argument lists.
    assert container['command'] == ['/usr/bin/python', 'script.py']
    assert {'name': 'foo', 'value': 'baz'} in container['environment']
    assert {'name': 'bar', 'valueFrom': 'qux'} in container['secrets']
def test_database_url_escape():
    """Percent-encoded special characters in the password must round-trip."""
    password = '[password'
    quoted = quote(password)
    u = DatabaseURL(f'postgresql://username:{quoted}/mydatabase')
    assert u.username == 'username'
    assert u.password == password
    assert u.userinfo == f'username:{quoted}'.encode('utf-8')
    # Rebuilding from a DatabaseURL instance or its string form preserves it.
    u2 = DatabaseURL(u)
    assert u2.password == password
    u3 = DatabaseURL(str(u))
    assert u3.password == password
class OptionSeriesPolygonLabelStyle(Options):
    """Wrapper for Highcharts ``series.polygon.label.style`` options.

    NOTE(review): each getter/setter pair below shares one name; the original
    source presumably used ``@property`` / setter decorators that were
    stripped here -- as written, the later ``def`` shadows the earlier one.
    """

    def fontSize(self):
        # Getter: label font size (default '0.8em').
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        # Setter counterpart for `fontSize`.
        self._config(num, js_type=False)

    def fontWeight(self):
        # Getter: label font weight (default 'bold').
        return self._config_get('bold')

    def fontWeight(self, text: str):
        # Setter counterpart for `fontWeight`.
        self._config(text, js_type=False)
class TestSetup(TestBase):
    """Test fixture that points the compiler at an isolated temp cache dir."""

    def setUp(self):
        self.tmpdir = tempfile.TemporaryDirectory(prefix='fract4d_')
        import sys
        # macOS needs the platform-specific config subclass.
        if sys.platform.startswith('darwin'):
            self.userConfig = fractconfig.DarwinConfig('')
        else:
            self.userConfig = fractconfig.T('')
        cache_path = os.path.join(self.tmpdir.name, 'gnofract4d-cache')
        self.userConfig.set('general', 'cache_dir', cache_path)
        self.userConfig['formula_path'] = {'formulas': None}
        self.g_comp = fc.Compiler(self.userConfig)

    def tearDown(self):
        # Drop the compiler first, then remove the temporary directory.
        del self.g_comp
        self.tmpdir.cleanup()
def run_and_assert(command, communicate=True):
    """Run *command* as a subprocess, assert it exited 0, and return its stdout.

    Args:
        command: sequence of program arguments (passed to subprocess.Popen).
        communicate: when True, wait for the process, print its output and
            check the exit status; when False, fire-and-forget and return None.

    Returns:
        The decoded stdout when *communicate* is True, otherwise None.

    Raises:
        AssertionError: if the process exits with a non-zero status.
    """
    print(f'Running command {command}')
    # Pipe stderr as well -- previously only stdout was piped, so the
    # 'STDERR' diagnostic below always printed None.
    output = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if communicate:
        (stdout, stderr) = output.communicate()
        print('STDOUT', (stdout.decode('utf-8') if (stdout is not None) else None))
        print('STDERR', (stderr.decode('utf-8') if (stderr is not None) else None))
        assert (output.returncode == 0), ('non-zero exit status: %d' % output.returncode)
        return (stdout.decode('utf-8') if (stdout is not None) else None)
    return None
class RestStatsApi(app_manager.RyuApp):
    """Ryu application exposing OpenFlow switch statistics over REST.

    Registers a large route table under ``/stats`` that maps HTTP endpoints
    to ``StatsController`` actions, and collects asynchronous OpenFlow
    replies from switches via the shared ``waiters`` table.
    """
    # OpenFlow versions this REST API can talk to.
    OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION, ofproto_v1_2.OFP_VERSION, ofproto_v1_3.OFP_VERSION, ofproto_v1_4.OFP_VERSION, ofproto_v1_5.OFP_VERSION]
    # Required Ryu app contexts: datapath set and the WSGI server.
    _CONTEXTS = {'dpset': dpset.DPSet, 'wsgi': WSGIApplication}

    def __init__(self, *args, **kwargs):
        """Wire up the WSGI mapper with every /stats endpoint."""
        super(RestStatsApi, self).__init__(*args, **kwargs)
        self.dpset = kwargs['dpset']
        wsgi = kwargs['wsgi']
        # waiters: dpid -> {xid: (lock, collected_messages)}; shared with the
        # controller so blocked HTTP requests can be woken by reply handlers.
        self.waiters = {}
        self.data = {}
        self.data['dpset'] = self.dpset
        self.data['waiters'] = self.waiters
        mapper = wsgi.mapper
        wsgi.registory['StatsController'] = self.data
        path = '/stats'
        # --- read-only statistics endpoints (GET; some accept POST filters) ---
        uri = (path + '/switches')
        mapper.connect('stats', uri, controller=StatsController, action='get_dpids', conditions=dict(method=['GET']))
        uri = (path + '/desc/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_desc_stats', conditions=dict(method=['GET']))
        uri = (path + '/flowdesc/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_flow_stats', conditions=dict(method=['GET', 'POST']))
        uri = (path + '/flow/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_flow_stats', conditions=dict(method=['GET', 'POST']))
        uri = (path + '/aggregateflow/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_aggregate_flow_stats', conditions=dict(method=['GET', 'POST']))
        uri = (path + '/table/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_table_stats', conditions=dict(method=['GET']))
        uri = (path + '/tablefeatures/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_table_features', conditions=dict(method=['GET']))
        uri = (path + '/port/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_port_stats', conditions=dict(method=['GET']))
        uri = (path + '/port/{dpid}/{port}')
        mapper.connect('stats', uri, controller=StatsController, action='get_port_stats', conditions=dict(method=['GET']))
        uri = (path + '/queue/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_stats', conditions=dict(method=['GET']))
        uri = (path + '/queue/{dpid}/{port}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_stats', conditions=dict(method=['GET']))
        uri = (path + '/queue/{dpid}/{port}/{queue_id}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_stats', conditions=dict(method=['GET']))
        uri = (path + '/queueconfig/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_config', conditions=dict(method=['GET']))
        uri = (path + '/queueconfig/{dpid}/{port}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_config', conditions=dict(method=['GET']))
        uri = (path + '/queuedesc/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_desc', conditions=dict(method=['GET']))
        uri = (path + '/queuedesc/{dpid}/{port}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_desc', conditions=dict(method=['GET']))
        uri = (path + '/queuedesc/{dpid}/{port}/{queue}')
        mapper.connect('stats', uri, controller=StatsController, action='get_queue_desc', conditions=dict(method=['GET']))
        uri = (path + '/meterfeatures/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_features', conditions=dict(method=['GET']))
        uri = (path + '/meterconfig/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_config', conditions=dict(method=['GET']))
        uri = (path + '/meterconfig/{dpid}/{meter_id}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_config', conditions=dict(method=['GET']))
        uri = (path + '/meterdesc/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_desc', conditions=dict(method=['GET']))
        uri = (path + '/meterdesc/{dpid}/{meter_id}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_desc', conditions=dict(method=['GET']))
        uri = (path + '/meter/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_stats', conditions=dict(method=['GET']))
        uri = (path + '/meter/{dpid}/{meter_id}')
        mapper.connect('stats', uri, controller=StatsController, action='get_meter_stats', conditions=dict(method=['GET']))
        uri = (path + '/groupfeatures/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_group_features', conditions=dict(method=['GET']))
        uri = (path + '/groupdesc/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_group_desc', conditions=dict(method=['GET']))
        uri = (path + '/groupdesc/{dpid}/{group_id}')
        mapper.connect('stats', uri, controller=StatsController, action='get_group_desc', conditions=dict(method=['GET']))
        uri = (path + '/group/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_group_stats', conditions=dict(method=['GET']))
        uri = (path + '/group/{dpid}/{group_id}')
        mapper.connect('stats', uri, controller=StatsController, action='get_group_stats', conditions=dict(method=['GET']))
        uri = (path + '/portdesc/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_port_desc', conditions=dict(method=['GET']))
        uri = (path + '/portdesc/{dpid}/{port_no}')
        mapper.connect('stats', uri, controller=StatsController, action='get_port_desc', conditions=dict(method=['GET']))
        uri = (path + '/role/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='get_role', conditions=dict(method=['GET']))
        # --- modification endpoints (POST/DELETE) ---
        uri = (path + '/flowentry/{cmd}')
        mapper.connect('stats', uri, controller=StatsController, action='mod_flow_entry', conditions=dict(method=['POST']))
        uri = (path + '/flowentry/clear/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='delete_flow_entry', conditions=dict(method=['DELETE']))
        uri = (path + '/meterentry/{cmd}')
        mapper.connect('stats', uri, controller=StatsController, action='mod_meter_entry', conditions=dict(method=['POST']))
        uri = (path + '/groupentry/{cmd}')
        mapper.connect('stats', uri, controller=StatsController, action='mod_group_entry', conditions=dict(method=['POST']))
        uri = (path + '/portdesc/{cmd}')
        mapper.connect('stats', uri, controller=StatsController, action='mod_port_behavior', conditions=dict(method=['POST']))
        uri = (path + '/experimenter/{dpid}')
        mapper.connect('stats', uri, controller=StatsController, action='send_experimenter', conditions=dict(method=['POST']))
        uri = (path + '/role')
        mapper.connect('stats', uri, controller=StatsController, action='set_role', conditions=dict(method=['POST']))

    # NOTE(review): the line below is presumably a stripped @set_ev_cls
    # decorator registering stats_reply_handler for the listed reply events.
    _ev_cls([ofp_event.EventOFPStatsReply, ofp_event.EventOFPDescStatsReply, ofp_event.EventOFPFlowStatsReply, ofp_event.EventOFPAggregateStatsReply, ofp_event.EventOFPTableStatsReply, ofp_event.EventOFPTableFeaturesStatsReply, ofp_event.EventOFPPortStatsReply, ofp_event.EventOFPQueueStatsReply, ofp_event.EventOFPQueueDescStatsReply, ofp_event.EventOFPMeterStatsReply, ofp_event.EventOFPMeterFeaturesStatsReply, ofp_event.EventOFPMeterConfigStatsReply, ofp_event.EventOFPGroupStatsReply, ofp_event.EventOFPGroupFeaturesStatsReply, ofp_event.EventOFPGroupDescStatsReply, ofp_event.EventOFPPortDescStatsReply], MAIN_DISPATCHER)
    def stats_reply_handler(self, ev):
        """Collect multipart stats replies; wake the waiter on the last part."""
        msg = ev.msg
        dp = msg.datapath
        # Ignore replies no HTTP request is waiting for.
        if (dp.id not in self.waiters):
            return
        if (msg.xid not in self.waiters[dp.id]):
            return
        (lock, msgs) = self.waiters[dp.id][msg.xid]
        msgs.append(msg)
        # The "more replies follow" flag has a different name per OF version.
        flags = 0
        if (dp.ofproto.OFP_VERSION == ofproto_v1_0.OFP_VERSION):
            flags = dp.ofproto.OFPSF_REPLY_MORE
        elif (dp.ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION):
            flags = dp.ofproto.OFPSF_REPLY_MORE
        elif (dp.ofproto.OFP_VERSION >= ofproto_v1_3.OFP_VERSION):
            flags = dp.ofproto.OFPMPF_REPLY_MORE
        if (msg.flags & flags):
            # More parts are coming; keep waiting.
            return
        del self.waiters[dp.id][msg.xid]
        lock.set()

    # NOTE(review): presumably a stripped @set_ev_cls decorator for the
    # single-message (non-multipart) replies handled below.
    _ev_cls([ofp_event.EventOFPSwitchFeatures, ofp_event.EventOFPQueueGetConfigReply, ofp_event.EventOFPRoleReply], MAIN_DISPATCHER)
    def features_reply_handler(self, ev):
        """Deliver single-message replies and wake the waiting request."""
        msg = ev.msg
        dp = msg.datapath
        if (dp.id not in self.waiters):
            return
        if (msg.xid not in self.waiters[dp.id]):
            return
        (lock, msgs) = self.waiters[dp.id][msg.xid]
        msgs.append(msg)
        del self.waiters[dp.id][msg.xid]
        lock.set()
(post_delete, sender=Post)
def decrease_posts_count_after_post_deletion(sender, instance, **kwargs):
if (not instance.approved):
return
try:
assert (instance.poster_id is not None)
poster = User.objects.get(pk=instance.poster_id)
except AssertionError:
return
except ObjectDoesNotExist:
return
(profile, dummy) = ForumProfile.objects.get_or_create(user=poster)
if profile.posts_count:
profile.posts_count = (F('posts_count') - 1)
profile.save() |
class MetaMapping():
    """Translates between WebDAV property names and internal metadata keys.

    ``_mappings`` maps a DAV property name to a tuple of
    (internal_key, get_transform, set_transform); subclasses presumably
    extend it -- verify against subclasses elsewhere in the project.
    """
    _mappings = {'D:displayname': ('name', None, None)}

    # NOTE(review): takes `cls` but carries no visible @classmethod decorator
    # -- likely stripped; confirm before calling on an instance.
    def _reverse_mapping(cls, mappings):
        # Add the inverse direction (internal_key -> (dav_key, ...)) in place.
        mappings.update({i[1][0]: (i[0], i[1][1], i[1][2]) for i in mappings.items()})

    def _mapping_get(self, key):
        # Unknown keys map to themselves with no transforms.
        return self.__class__._mappings.get(key, (key, None, None))

    def map_get(self, info, key):
        """Look up *key* in *info*, applying the configured get-transform.

        Returns the (mapped_key, value) pair.
        """
        (key, get_transform, set_transform) = self._mapping_get(key)
        value = info.get(key, None)
        if (get_transform is not None):
            value = get_transform(value)
        # Special case: answer from the instance attribute, not from *info*.
        if (key == 'C:supported-calendar-component-set'):
            return (key, getattr(self, 'supported_calendar_component', 'none'))
        return (key, value)

    def map_set(self, key, value):
        """Map *key* and apply the configured set-transform to *value*."""
        (key, get_transform, set_transform) = self._mapping_get(key)
        if (set_transform is not None):
            value = set_transform(value)
        return (key, value)
# NOTE: the three comment lines below are non-code web-page residue that was
# accidentally appended to this module; kept as comments so the file parses.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.