code stringlengths 281 23.7M |
|---|
class Identifier(_core_identifier.Identifier):
    """User-facing identifier with string (de)serialization support.

    Wraps the core model identifier and adds parsing from / formatting to the
    ``entity_type:project:domain:name:version`` string form.
    """

    # Short entity-type prefixes accepted in the string form.
    _STRING_TO_TYPE_MAP = {'lp': _core_identifier.ResourceType.LAUNCH_PLAN, 'wf': _core_identifier.ResourceType.WORKFLOW, 'tsk': _core_identifier.ResourceType.TASK}
    _TYPE_TO_STRING_MAP = {v: k for (k, v) in _STRING_TO_TYPE_MAP.items()}

    @classmethod
    def promote_from_model(cls, base_model):
        """Build this subclass from a base model Identifier.

        :param base_model: core-model identifier to copy fields from.
        :rtype: Identifier
        """
        return cls(base_model.resource_type, base_model.project, base_model.domain, base_model.name, base_model.version)

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Deserialize from the protobuf representation.

        :param pb2_object: IDL identifier message.
        :rtype: Identifier
        """
        base_model = super().from_flyte_idl(pb2_object)
        return cls.promote_from_model(base_model)

    @classmethod
    def from_python_std(cls, string):
        """Parse an identifier from ``type:project:domain:name:version``.

        :param str string: string to parse.
        :raises _user_exceptions.FlyteValueException: on a malformed string or
            an unknown entity-type prefix.
        :rtype: Identifier
        """
        segments = string.split(':')
        if (len(segments) != 5):
            raise _user_exceptions.FlyteValueException('The provided string was not in a parseable format. The string for an identifier must be in the format entity_type:project:domain:name:version. Received: {}'.format(string))
        (resource_type, project, domain, name, version) = segments
        if (resource_type not in cls._STRING_TO_TYPE_MAP):
            raise _user_exceptions.FlyteValueException(resource_type, 'The provided string could not be parsed. The first element of an identifier must be one of: {}. Received: {}'.format(list(cls._STRING_TO_TYPE_MAP.keys()), resource_type))
        resource_type = cls._STRING_TO_TYPE_MAP[resource_type]
        return cls(resource_type, project, domain, name, version)

    def __str__(self):
        """Inverse of :meth:`from_python_std` (unknown types render as '<unknown>')."""
        return '{}:{}:{}:{}:{}'.format(type(self)._TYPE_TO_STRING_MAP.get(self.resource_type, '<unknown>'), self.project, self.domain, self.name, self.version)
class ExtSourceUtilTestCase(unittest.TestCase):
    """Tests for the external vulnerability-source parsing helpers.

    Uses fixture archives/CSV snippets (under ./tests/mock_files and
    module-level mock constants) for Red Hat advisories (RHSA/RHBA),
    Exploit-DB, and Bugtraq (BID) feeds.
    """

    def test_get_rhsa_from_file(self):
        """RHSA archive parsing: counts and first-record contents."""
        content = None
        # Fixture is a raw OVAL tarball; read it as bytes.
        with open('./tests/mock_files/com.redhat.rhsa-2010.tar.bz2', 'rb') as content_file:
            content = content_file.read()
        (rhsa_list, rhba_list, rhsa_info_list, rhba_info_list) = get_rhsa_and_rhba_lists_from_file(content)
        # An RHSA-only archive must yield no RHBA entries.
        self.assertEqual(len(rhsa_list), 384)
        self.assertEqual(len(rhba_list), 0)
        self.assertEqual(len(rhsa_info_list), 251)
        self.assertEqual(len(rhba_info_list), 0)
        self.assertEqual(rhsa_list[0], {'product': 'enterprise_linux', 'vendor': 'redhat', 'rhsa_id': 'RHSA-2010:0002', 'version': '4'})
        self.assertEqual(rhsa_info_list[0], {'severity': 'Moderate', 'rhsa_id': 'RHSA-2010:0002', 'title': 'RHSA-2010:0002: PyXML security update (Moderate)', 'cve': ['CVE-2009-3720'], 'description': "PyXML provides XML libraries for Python. The distribution contains a\nvalidating XML parser, an implementation of the SAX and DOM programming\ninterfaces, and an interface to the Expat parser.\n\nA buffer over-read flaw was found in the way PyXML's Expat parser handled\nmalformed UTF-8 sequences when processing XML files. A specially-crafted\nXML file could cause Python applications using PyXML's Expat parser to\ncrash while parsing the file. (CVE-2009-3720)\n\nThis update makes PyXML use the system Expat library rather than its own\ninternal copy; therefore, users must install the RHSA-2009:1625 expat\nupdate together with this PyXML update to resolve the CVE-2009-3720 issue.\n\nAll PyXML users should upgrade to this updated package, which changes PyXML\nto use the system Expat library. After installing this update along with\nRHSA-2009:1625, applications using the PyXML library must be restarted for\nthe update to take effect."})

    def test_get_rhba_from_file(self):
        """RHBA archive parsing: the converse case — no RHSA entries expected."""
        content = None
        with open('./tests/mock_files/com.redhat.rhba-.tar.bz2', 'rb') as content_file:
            content = content_file.read()
        (rhsa_list, rhba_list, rhsa_info_list, rhba_info_list) = get_rhsa_and_rhba_lists_from_file(content)
        self.assertEqual(len(rhsa_list), 0)
        self.assertEqual(len(rhba_list), 1)
        self.assertEqual(len(rhsa_info_list), 0)
        self.assertEqual(len(rhba_info_list), 1)
        self.assertEqual(rhba_list[0], {'version': '7', 'product': 'enterprise_linux', 'rhba_id': 'RHBA-2017:1767', 'vendor': 'redhat'})
        self.assertEqual(rhba_info_list[0], {'description': 'The Berkeley Internet Name Domain (BIND) is an implementation of the Domain Name System (DNS) protocols. BIND includes a DNS server (named); a resolver library (routines for applications to use when interfacing with DNS); and tools for verifying that the DNS server is operating correctly.\n\nFor detailed information on changes in this release, see the Red Hat Enterprise Linux 7.4 Release Notes linked from the References section.\n\nUsers of bind are advised to upgrade to these updated packages.', 'rhba_id': 'RHBA-2017:1767', 'cve': ['CVE-2016-2775'], 'severity': 'None', 'title': 'RHBA-2017:1767: bind bug fix update (None)'})

    def test_get_exploit_db_list_from_csv(self):
        """Exploit-DB CSV parsing yields 'id#product#version' keys."""
        (exploit_db_list, exploit_db_info_list) = get_exploit_db_list_from_csv(mock_exploit_db_csv_content)
        self.assertEqual(len(exploit_db_list), 3)
        self.assertEqual(len(exploit_db_info_list), 3)
        self.assertTrue(('11#apache#2.0.44' in exploit_db_list))
        self.assertTrue(('468#pigeon server#3.02.0143' in exploit_db_list))
        self.assertTrue(('37060#microsoft internet explorer#11' in exploit_db_list))

    def test_get_bug_traqs_lists_from_file(self):
        """Bugtraq parsing from a gzipped file (base64-encoded mock payload)."""
        output = io.BytesIO(base64.b64decode(mock_bid_gz_file))
        (bid_lists, bid_info_list) = get_bug_traqs_lists_from_file(output)
        self.assertEqual(len(bid_lists), 1)
        self.assertEqual(len(bid_lists[0]), 7)
        self.assertEqual(len(bid_info_list), 4)
        self.assertTrue(('1#eric allman sendmail#5.58' in bid_lists[0]))
        self.assertTrue(('3#sun sunos#4.0.1' in bid_lists[0]))
        self.assertTrue(('4#bsd bsd#4.3' in bid_lists[0]))
        self.assertEqual(bid_info_list[1], {'bugtraq_id': 2, 'title': 'BSD fingerd buffer overflow Vulnerability', 'class': 'Boundary Condition Error', 'cve': [], 'local': 'no', 'remote': 'yes'})

    def test_get_bug_traqs_lists_from_online_mode(self):
        """Bugtraq parsing from the online-mode payload; same expectations as the file path."""
        (bid_lists, bid_info_list) = get_bug_traqs_lists_from_online_mode(mock_bid_online_mode)
        self.assertEqual(len(bid_lists), 1)
        self.assertEqual(len(bid_lists[0]), 7)
        self.assertEqual(len(bid_info_list), 4)
        self.assertTrue(('1#eric allman sendmail#5.58' in bid_lists[0]))
        self.assertTrue(('3#sun sunos#4.0.1' in bid_lists[0]))
        self.assertTrue(('4#bsd bsd#4.3' in bid_lists[0]))
        self.assertEqual(bid_info_list[0], {'bugtraq_id': 1, 'title': 'Berkeley Sendmail DEBUG Vulnerability', 'class': 'Configuration Error', 'cve': [], 'local': 'yes', 'remote': 'yes'})
class OptionSeriesSolidgaugeSonificationTracksMappingLowpassResonance(Options):
    """Lowpass-filter resonance mapping options for solidgauge sonification tracks.

    Each option is exposed as a property pair: the getter reads the stored
    configuration (defaulting to None) and the setter writes it back. The
    original duplicate ``def`` pairs were stripped ``@property``/``@x.setter``
    decorators; without them the setter silently shadowed the getter.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property to map to the audio parameter."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value for the audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value for the audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Context of the min/max mapping range."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def create_fal_dbt(args: argparse.Namespace, generated_models: Optional[Dict[str, Path]] = None) -> FalDbt:
    """Build a FalDbt instance from parsed CLI arguments.

    :param args: parsed argparse namespace carrying dbt project settings.
    :param generated_models: optional mapping of model name to generated file
        path; defaults to an empty mapping. (A ``None`` sentinel is used
        instead of ``{}`` to avoid the shared-mutable-default pitfall.)
    :return: configured FalDbt object.
    """
    if generated_models is None:
        generated_models = {}
    # ``--state`` is optional; only forward it when actually provided.
    real_state = None
    if hasattr(args, 'state') and args.state is not None:
        real_state = args.state
    return FalDbt(args.project_dir, args.profiles_dir, args.select, args.exclude, args.selector, args.threads, real_state, args.target, args.vars, generated_models)
def test_extruded_periodic_interval_area():
    """The unit area/volume must be preserved when extruding a periodic interval mesh."""

    def assert_unit_measure(mesh):
        # Integrating the constant-1 function over the mesh gives its measure.
        space = FunctionSpace(mesh, 'CG', 1)
        one = Function(space)
        one.assign(1)
        assert abs(assemble(one * dx) - 1.0) < 1e-12

    base = PeriodicUnitIntervalMesh(10)
    assert_unit_measure(base)
    # 4 layers of height 0.25 extrude the unit interval to a unit square.
    assert_unit_measure(ExtrudedMesh(base, layers=4, layer_height=0.25))
class IterableWithElements(Matcher):
    """Matcher equal to any iterable yielding the same elements in the same order."""

    def __init__(self, elements: Iterable[AnyType]) -> None:
        # Keep a human-readable description of the original argument for repr().
        self.elements_repr = repr(elements) if elements is not None else ''
        self.elements = list(elements)

    def __eq__(self, other: Iterable[AnyType]) -> bool:
        # Materialize the other side so generators/tuples compare element-wise.
        return list(other) == self.elements

    def __repr__(self) -> str:
        details = f' elements={self.elements_repr}'
        return '<{} 0x{:02X}{}>'.format(type(self).__name__, id(self), details)
def _migrate_summary(ensemble: EnsembleAccessor, data_file: DataFile, time_map: npt.NDArray[np.datetime64]) -> None:
    """Migrate legacy SUMMARY blocks from *data_file* into *ensemble*.

    Groups the per-key vectors by realization and stores one xarray Dataset
    per realization under the 'summary' response, indexed by (name, time).
    """
    if (len(time_map) == 0):
        return
    # Valid report steps: datetime64(-1, 's') entries mark missing times.
    time_mask = (time_map != np.datetime64((- 1), 's'))
    # The first report step is always excluded from the migrated data.
    time_mask[0] = False
    # realization index -> (list of value vectors, list of their key names)
    data: Dict[(int, Tuple[(List[npt.NDArray[np.float64]], List[str])])] = defaultdict((lambda : ([], [])))
    for block in data_file.blocks(Kind.SUMMARY):
        if (block.name == 'TIME'):
            # Time is carried by the coordinate axis, not as a value vector.
            continue
        (array, keys) = data[block.realization_index]
        vector = data_file.load(block, 0)[time_mask]
        # Legacy sentinel value for missing data points.
        NAN_STAND_IN = (- 9999.0)
        # NOTE(review): this masks values equal to +9999.0 (the negation of the
        # -9999.0 stand-in) — confirm the intended sentinel sign.
        vector[(vector == (- NAN_STAND_IN))] = np.nan
        array.append(vector)
        keys.append(block.name)
    for (realization_index, (array, keys)) in data.items():
        ds = xr.Dataset({'values': (['name', 'time'], array)}, coords={'time': time_map[time_mask], 'name': keys})
        ensemble.save_response('summary', ds, realization_index)
class SafeConfigParserWithIncludes(SafeConfigParser):
    """Config parser with fail2ban extensions.

    Adds: an [INCLUDES] section with 'before'/'after' file lists, cross-section
    interpolation via ``%(section/option)s``, conditional sections named
    ``name?condition``, and an optional shared cache (``share_config``) so the
    same included file is parsed only once across parser instances.
    """

    # Name of the special section listing files to include before/after this one.
    SECTION_NAME = 'INCLUDES'
    # Matches an option name of the form "section/option".
    SECTION_OPTNAME_CRE = re.compile('^([\\w\\-]+)/([^\\s>]+)$')
    # Finds "%(section/option)s" substitutions inside option values.
    SECTION_OPTSUBST_CRE = re.compile('%\\(([\\w\\-]+/([^\\)]+))\\)s')
    # Splits a conditional section name "name?condition".
    CONDITIONAL_RE = re.compile('^(\\w+)(\\?.+)$')

    def __init__(self, share_config=None, *args, **kwargs):
        """Create the parser; *share_config* is an optional dict used as a
        cross-parser cache of already parsed files (keyed by file name)."""
        kwargs = kwargs.copy()
        # Interpolation variant that also exposes the current section name.
        kwargs['interpolation'] = BasicInterpolationWithName()
        kwargs['inline_comment_prefixes'] = ';'
        super(SafeConfigParserWithIncludes, self).__init__(*args, **kwargs)
        self._cfg_share = share_config

    def get_ex(self, section, option, raw=False, vars={}):
        """Extended get() resolving "section/option" references.

        'known/...' searches the merged KNOWN/<section> data first, then the
        section itself; 'default/...' maps to DEFAULT; any other prefix names a
        section directly. Falls back to a plain (section, option) lookup.
        NOTE(review): ``vars={}`` is a shared mutable default — safe only while
        never mutated.
        """
        sopt = None
        if ('/' in option):
            sopt = SafeConfigParserWithIncludes.SECTION_OPTNAME_CRE.search(option)
        if sopt:
            sec = sopt.group(1)
            opt = sopt.group(2)
            seclwr = sec.lower()
            if (seclwr == 'known'):
                # Try the merged KNOWN/<section> data first, then the section.
                sopt = (('KNOWN/' + section), section)
            else:
                sopt = ((sec,) if (seclwr != 'default') else ('DEFAULT',))
            for sec in sopt:
                try:
                    v = self.get(sec, opt, raw=raw)
                    return v
                except (NoSectionError, NoOptionError) as e:
                    # Keep trying the remaining candidate sections.
                    pass
        v = self.get(section, option, raw=raw, vars=vars)
        return v

    def _map_section_options(self, section, option, rest, defaults):
        """Resolve "%(section/option)s" references found in *rest* by copying
        the referenced values into *defaults*; returns 1 if anything was mapped."""
        if (('/' not in rest) or ('%(' not in rest)):
            return 0
        rplcmnt = 0
        soptrep = SafeConfigParserWithIncludes.SECTION_OPTSUBST_CRE.findall(rest)
        if (not soptrep):
            return 0
        for (sopt, opt) in soptrep:
            if (sopt not in defaults):
                # Strip "/opt" from "sec/opt": ~len(opt) == -len(opt)-1.
                sec = sopt[:(~ len(opt))]
                seclwr = sec.lower()
                if (seclwr != 'default'):
                    usedef = 0
                    if (seclwr == 'known'):
                        # "known/..." resolves against the merged KNOWN/<section>.
                        try:
                            v = self._sections[('KNOWN/' + section)][opt]
                        except KeyError:
                            usedef = 1
                    else:
                        try:
                            try:
                                sec = self._sections[sec]
                            except KeyError:
                                continue
                            v = sec[opt]
                        except KeyError:
                            usedef = 1
                else:
                    usedef = 1
                if usedef:
                    # Fall back to the DEFAULT section.
                    try:
                        v = self._defaults[opt]
                    except KeyError:
                        continue
                rplcmnt = 1
                # Store the resolved value wherever the defaults mapping allows.
                try:
                    defaults[sopt] = v
                except:
                    try:
                        defaults._maps[0][sopt] = v
                    except:
                        self._defaults[sopt] = v
        return rplcmnt

    def share_config(self):
        """Return the shared parse cache (or None)."""
        return self._cfg_share

    def _getSharedSCPWI(self, filename):
        """Return a (parser, read-result) pair for *filename*, served from the
        shared cache when available."""
        SCPWI = SafeConfigParserWithIncludes
        if self._cfg_share:
            # Cache key: 'inc:' + file name(s), list entries joined by \x01.
            hashv = ('inc:' + (filename if (not isinstance(filename, list)) else '\x01'.join(filename)))
            (cfg, i) = self._cfg_share.get(hashv, (None, None))
            if (cfg is None):
                cfg = SCPWI(share_config=self._cfg_share)
                i = cfg.read(filename, get_includes=False)
                self._cfg_share[hashv] = (cfg, i)
            elif (logSys.getEffectiveLevel() <= logLevel):
                logSys.log(logLevel, '    Shared file: %s', filename)
        else:
            # No sharing enabled: parse a private instance every time.
            cfg = SCPWI()
            i = cfg.read(filename, get_includes=False)
        return (cfg, i)

    def _getIncludes(self, filenames, seen=[]):
        """Expand *filenames* (with .local variants) to the full ordered list of
        files to read, resolving [INCLUDES] recursively; cached when shared.
        NOTE(review): ``seen=[]`` mutable default is only read here, never
        mutated in place."""
        if (not isinstance(filenames, list)):
            filenames = [filenames]
        filenames = _expandConfFilesWithLocal(filenames)
        if self._cfg_share:
            hashv = ('inc-path:' + '\x01'.join(filenames))
            fileNamesFull = self._cfg_share.get(hashv)
            if (fileNamesFull is None):
                fileNamesFull = []
                for filename in filenames:
                    fileNamesFull += self.__getIncludesUncached(filename, seen)
                self._cfg_share[hashv] = fileNamesFull
            return fileNamesFull
        fileNamesFull = []
        for filename in filenames:
            fileNamesFull += self.__getIncludesUncached(filename, seen)
        return fileNamesFull

    def __getIncludesUncached(self, resource, seen=[]):
        """Resolve the [INCLUDES] 'before'/'after' lists of *resource* into the
        ordered file list: before-includes + resource + after-includes."""
        SCPWI = SafeConfigParserWithIncludes
        try:
            (parser, i) = self._getSharedSCPWI(resource)
            if (not i):
                return []
        except UnicodeDecodeError as e:
            logSys.error(("Error decoding config file '%s': %s" % (resource, e)))
            return []
        resourceDir = os.path.dirname(resource)
        newFiles = [('before', []), ('after', [])]
        if (SCPWI.SECTION_NAME in parser.sections()):
            for (option_name, option_list) in newFiles:
                if (option_name in parser.options(SCPWI.SECTION_NAME)):
                    newResources = parser.get(SCPWI.SECTION_NAME, option_name)
                    for newResource in newResources.split('\n'):
                        # Relative include paths are resolved against the file's dir.
                        if os.path.isabs(newResource):
                            r = newResource
                        else:
                            r = os.path.join(resourceDir, newResource)
                        if (r in seen):
                            # Avoid include cycles.
                            continue
                        s = (seen + [resource])
                        option_list += self._getIncludes(r, s)
        return ((newFiles[0][1] + [resource]) + newFiles[1][1])

    def get_defaults(self):
        """Return the DEFAULT option mapping (live reference)."""
        return self._defaults

    def get_sections(self):
        """Return the internal section mapping (live reference)."""
        return self._sections

    def options(self, section, withDefault=True):
        """List option names of *section*; with defaults merged in unless
        *withDefault* is False."""
        try:
            opts = self._sections[section]
        except KeyError:
            raise NoSectionError(section)
        if withDefault:
            return (set(opts.keys()) | set(self._defaults))
        return list(opts.keys())

    def read(self, filenames, get_includes=True):
        """Read *filenames* (expanding includes unless *get_includes* is False)
        and merge their sections/defaults into this parser; returns the list of
        successfully read files."""
        if (not isinstance(filenames, list)):
            filenames = [filenames]
        fileNamesFull = []
        if get_includes:
            fileNamesFull += self._getIncludes(filenames)
        else:
            fileNamesFull = filenames
        if (not fileNamesFull):
            return []
        logSys.info('  Loading files: %s', fileNamesFull)
        if (get_includes or (len(fileNamesFull) > 1)):
            # Multi-file path: parse each file separately and merge by hand so
            # later files override earlier ones while KNOWN/* keeps originals.
            ret = []
            alld = self.get_defaults()
            alls = self.get_sections()
            for filename in fileNamesFull:
                (cfg, i) = self._getSharedSCPWI(filename)
                if i:
                    ret += i
                    alld.update(cfg.get_defaults())
                    for (n, s) in cfg.get_sections().items():
                        # "name?condition" sections: suffix every option key
                        # with the condition instead of keeping a new section.
                        cond = SafeConfigParserWithIncludes.CONDITIONAL_RE.match(n)
                        if cond:
                            (n, cond) = cond.groups()
                            s = s.copy()
                            try:
                                del s['__name__']
                            except KeyError:
                                pass
                            for k in list(s.keys()):
                                v = s.pop(k)
                                s[(k + cond)] = v
                        s2 = alls.get(n)
                        if isinstance(s2, dict):
                            # Preserve the to-be-overridden values under KNOWN/<n>.
                            self.merge_section(('KNOWN/' + n), dict([i for i in iter(s2.items()) if (i[0] in s)]), '')
                            s2.update(s)
                        else:
                            alls[n] = s.copy()
            return ret
        if (logSys.getEffectiveLevel() <= logLevel):
            logSys.log(logLevel, '  Reading file: %s', fileNamesFull[0])
        # Single-file fast path: delegate to the stock parser.
        return SafeConfigParser.read(self, fileNamesFull, encoding='utf-8')

    def merge_section(self, section, options, pref=None):
        """Merge *options* into *section*, optionally prefixing keys with *pref*
        (keys already carrying the prefix and '__name__' are skipped)."""
        alls = self.get_sections()
        try:
            sec = alls[section]
        except KeyError:
            alls[section] = sec = dict()
        if (not pref):
            sec.update(options)
            return
        sk = {}
        for (k, v) in options.items():
            if ((not k.startswith(pref)) and (k != '__name__')):
                sk[(pref + k)] = v
        sec.update(sk)
class OptionsProgBar(Options):
    """Configuration options for the progress-bar component.

    The original duplicate ``def`` pairs and the stray ``_color.setter`` /
    ``_percentage.setter`` statements were stripped ``@property``/``@x.setter``
    decorators (the bare statements would raise NameError at class creation);
    they are restored here.
    """

    component_properties = ('show_percentage', 'digits')

    @property
    def classes(self):
        """Extra CSS classes applied to the component."""
        return self._config_get(None)

    @classes.setter
    def classes(self, value: str):
        self._config(value)

    @property
    def digits(self):
        """Number of digits displayed for the value (default 2)."""
        return self._config_get(2)

    @digits.setter
    def digits(self, value: int):
        self._config(value)

    @property
    def background(self):
        """Background CSS color of the bar."""
        return self._config_group_get('css', None)

    @background.setter
    def background(self, value: str):
        self._config_group('css', value)

    @property
    def border_color(self):
        """Border CSS color of the bar."""
        return self._config_group_get('css', None, name='borderColor')

    @border_color.setter
    def border_color(self, value: str):
        self._config_group('css', value, name='borderColor')

    def css(self, attrs: dict):
        """Merge *attrs* into the component's CSS attribute mapping."""
        css_attrs = self._config_get({}, 'css')
        css_attrs.update(attrs)
        self._config(css_attrs)

    @property
    def disabled(self):
        """Whether the component is disabled."""
        return self._config_get(None)

    @disabled.setter
    def disabled(self, flag: bool):
        self._config(flag)

    @property
    def max(self):
        """Maximum value of the bar (default 100)."""
        return self._config_get(100)

    @max.setter
    def max(self, num: int):
        self._config(num)

    @property
    def min(self):
        """Minimum value of the bar (default 0)."""
        return self._config_get(0)

    @min.setter
    def min(self, num: int):
        self._config(num)

    @property
    def value(self):
        """Current value of the bar."""
        return self._config_get(None)

    @value.setter
    def value(self, val):
        self._config(val)

    @property
    def rounded(self):
        """Border radius of the bar (pixels)."""
        return self.component.style.css.border_radius

    @rounded.setter
    def rounded(self, val: int):
        self.component.style.css.border_radius = val
        self.css({'border-radius': self.component.style.css.border_radius})

    @property
    def show_percentage(self):
        """Whether a percentage label is displayed (default False)."""
        return self._config_get(False)

    @show_percentage.setter
    def show_percentage(self, flag: bool):
        self._config(flag)
def glp_topology(n, m, m0, p, beta, seed=None):
    """Generate a GLP (Generalized Linear Preference) random topology.

    Starting from an m0-node path graph, grow the graph to *n* nodes: with
    probability *p* add *m* links between existing nodes, otherwise add a new
    node attached by *m* links, both chosen with linear preference shifted by
    *beta*.

    :param n: target number of nodes (positive integer).
    :param m: number of links added per growth step (positive, < m0).
    :param m0: initial number of nodes (positive integer).
    :param p: probability of adding links instead of a node, in [0, 1].
    :param beta: preference shift, must be < 1.
    :param seed: optional seed for the random number generator.
    :return: the generated Topology, with graph attribute ``type='glp'``.
    :raises ValueError: on invalid parameters.
    """

    def calc_pi(G, beta):
        """Preferential-attachment distribution: pi(v) = (deg(v) - beta) / sum."""
        if (beta >= 1):
            raise ValueError('beta must be < 1')
        degree = dict(G.degree())
        den = float(sum(degree.values()) - (G.number_of_nodes() * beta))
        return {node: ((degree[node] - beta) / den) for node in G.nodes()}

    def add_m_links(G, pi):
        """Add m new links between preferentially chosen existing nodes.

        If the graph cannot take m more links, fall back to adding a node.
        """
        n_nodes = G.number_of_nodes()
        n_edges = G.number_of_edges()
        max_n_edges = ((n_nodes * (n_nodes - 1)) / 2)
        if ((n_edges + m) > max_n_edges):
            # Graph (almost) complete: cannot place m new links; grow instead.
            add_node(G, pi)
            return
        new_links = 0
        while (new_links < m):
            u = random_from_pdf(pi)
            v = random_from_pdf(pi)
            # Reject self-loops and duplicate edges.
            if ((u != v) and (not G.has_edge(u, v))):
                G.add_edge(u, v)
                new_links += 1

    def add_node(G, pi):
        """Add one new node and attach it with m preferentially chosen links."""
        new_node = G.number_of_nodes()
        G.add_node(new_node)
        new_links = 0
        while (new_links < m):
            existing_node = random_from_pdf(pi)
            if (not G.has_edge(new_node, existing_node)):
                G.add_edge(new_node, existing_node)
                new_links += 1

    if ((n < 1) or (m < 1) or (m0 < 1)):
        raise ValueError('n, m and m0 must be a positive integers')
    if (beta >= 1):
        raise ValueError('beta must be < 1')
    if (m >= m0):
        # Message fixed for consistency with the check above (m == m0 is rejected).
        raise ValueError('m must be < m0')
    if ((p > 1) or (p < 0)):
        raise ValueError('p must be included between 0 and 1')
    if (seed is not None):
        random.seed(seed)
    # Seed topology: a path graph over the initial m0 nodes.
    G = Topology(nx.path_graph(m0))
    G.graph['type'] = 'glp'
    G.name = ('glp_topology(%d, %d, %d, %f, %f)' % (n, m, m0, p, beta))
    while (G.number_of_nodes() < n):
        pi = calc_pi(G, beta)
        if (random.random() < p):
            add_m_links(G, pi)
        else:
            add_node(G, pi)
    return G
def mouse_click(func):
    """Decorator for simulated mouse-click handlers.

    The wrapped function is only invoked when *control* exists and is enabled;
    otherwise a warning is issued and nothing happens. A *delay* (milliseconds)
    is waited before the click is performed.
    """
    from functools import wraps  # keep the wrapped function's name/docstring

    @wraps(func)
    def mouse_click_handler(*, control, delay, **kwargs):
        if (not control) or (not control.IsEnabled()):
            # Fixed typo: "non-existant" -> "non-existent".
            warnings.warn('Attempted to click on a non-existent or non-enabled control. Nothing was performed.')
            return
        wx.MilliSleep(delay)
        func(control=control, delay=delay, **kwargs)
    return mouse_click_handler
def analyze(logfilenames: typing.List[str], clipterminals: bool, bytmp: bool, bybitfield: bool, columns: int) -> None:
    """Scrape chia plotter log files and print a per-slice statistics table.

    Each log is sliced by tmp dir (*bytmp*) and/or bitfield usage
    (*bybitfield*); per slice the phase times, total time and uniform-sort
    percentage are aggregated, then printed as a Texttable limited to
    *columns* characters. With *clipterminals*, the first and last plot of a
    sequence are excluded from the totals.
    """
    # slice label -> measure name -> list of observed values
    data: typing.Dict[(str, typing.Dict[(str, typing.List[float])])] = {}
    for logfilename in logfilenames:
        with open(logfilename, 'r') as f:
            # Per-plot parsing state; 'x' is the base slice label.
            sl = 'x'
            phase_time: typing.Dict[(str, float)] = {}
            n_sorts = 0
            n_uniform = 0
            is_first_last = False
            for line in f:
                # A new plot starts: reset state and flag terminal plots.
                m = re.search('Starting plot (\\d*)/(\\d*)', line)
                if m:
                    sl = 'x'
                    phase_time = {}
                    n_sorts = 0
                    n_uniform = 0
                    seq_num = int(m.group(1))
                    seq_total = int(m.group(2))
                    is_first_last = ((seq_num == 1) or (seq_num == seq_total))
                # Slice by tmp directory if requested.
                m = re.search('^Starting plotting.*dirs: (.*) and (.*)', line)
                if m:
                    if bytmp:
                        tmpdir = m.group(1)
                        sl += ('-' + tmpdir)
                # Slice by bitfield usage if requested.
                m = re.search('^Starting phase 2/4: Backpropagation', line)
                if (bybitfield and m):
                    if ('without bitfield' in line):
                        sl += '-nobitfield'
                    else:
                        sl += '-bitfield'
                # Phase durations (two log formats supported).
                for phase in ['1', '2', '3', '4']:
                    m = re.search((('^Time for phase ' + phase) + ' = (\\d+.\\d+) seconds..*'), line)
                    if m:
                        phase_time[phase] = float(m.group(1))
                for phase in ['1', '2', '3', '4']:
                    m = re.search((('^Phase ' + phase) + ' took (\\d+.\\d+) sec.*'), line)
                    if m:
                        phase_time[phase] = float(m.group(1))
                # Count bucket sorts by algorithm (uniform vs quicksort).
                m = re.search('Bucket \\d+ ([^\\.]+)\\..*', line)
                if (m and (not ('force_qs' in line))):
                    sorter = m.group(1)
                    n_sorts += 1
                    if (sorter == 'uniform sort'):
                        n_uniform += 1
                    elif (sorter == 'QS'):
                        pass
                    else:
                        print(('Warning: unrecognized sort ' + sorter))
                # End of a plot (format 1): record measurements for the slice.
                m = re.search('^Total time = (\\d+.\\d+) seconds.*', line)
                if m:
                    if (clipterminals and is_first_last):
                        # Skip first/last plots of a sequence when clipping.
                        pass
                    else:
                        data.setdefault(sl, {}).setdefault('total time', []).append(float(m.group(1)))
                        for phase in ['1', '2', '3', '4']:
                            data.setdefault(sl, {}).setdefault(('phase ' + phase), []).append(phase_time[phase])
                        # NOTE(review): divides by n_sorts — would raise
                        # ZeroDivisionError if no bucket-sort lines were seen.
                        data.setdefault(sl, {}).setdefault('%usort', []).append(((100 * n_uniform) // n_sorts))
                # End of a plot (format 2, madmax-style: no sort statistics).
                m = re.search('^Total plot creation time was (\\d+.\\d+) sec.*', line)
                if m:
                    data.setdefault(sl, {}).setdefault('total time', []).append(float(m.group(1)))
                    for phase in ['1', '2', '3', '4']:
                        data.setdefault(sl, {}).setdefault(('phase ' + phase), []).append(phase_time[phase])
                    data.setdefault(sl, {}).setdefault('%usort', []).append(0)
    # Render the aggregated table.
    tab = tt.Texttable()
    all_measures = ['%usort', 'phase 1', 'phase 2', 'phase 3', 'phase 4', 'total time']
    headings = (['Slice', 'n'] + all_measures)
    tab.header(headings)
    for sl in data.keys():
        row = [sl]
        # Show the sample count, or a range when measures disagree.
        sample_sizes = []
        for measure in all_measures:
            values = data.get(sl, {}).get(measure, [])
            sample_sizes.append(len(values))
        sample_size_lower_bound = min(sample_sizes)
        sample_size_upper_bound = max(sample_sizes)
        if (sample_size_lower_bound == sample_size_upper_bound):
            row.append(('%d' % sample_size_lower_bound))
        else:
            row.append(('%d-%d' % (sample_size_lower_bound, sample_size_upper_bound)))
        for measure in all_measures:
            values = data.get(sl, {}).get(measure, [])
            if (len(values) > 1):
                # Mean and standard deviation of the measure.
                # NOTE(review): the '=%s =%s' labels look like mangled
                # 'mu=.. sigma=..' symbols — confirm against upstream.
                row.append(('=%s =%s' % (plot_util.human_format(statistics.mean(values), 1), plot_util.human_format(statistics.stdev(values), 0))))
            elif (len(values) == 1):
                row.append(plot_util.human_format(values[0], 1))
            else:
                row.append('N/A')
        tab.add_row(row)
    tab.set_max_width(int(columns))
    s = tab.draw()
    print(s)
def test_parquet_to_datasets():
    """Round-trip a pandas frame through StructuredDataset into a datasets.Dataset."""
    frame = pd.DataFrame({'name': ['Alice'], 'age': [10]})

    def build_sd() -> StructuredDataset:
        # The return annotation drives the structured-dataset transformer.
        return StructuredDataset(dataframe=frame)

    loaded = build_sd().open(datasets.Dataset).all()
    expected = datasets.Dataset.from_pandas(frame)
    assert loaded.data == expected.data
class Action():
    """Wrapper that invokes a function as an action on a component object.

    Holds a weak reference to the owning object; calls are executed directly
    when the event loop allows mutation, otherwise queued as an action
    invokation on the loop.
    """

    def __init__(self, ob, func, name, doc):
        assert callable(func)
        # Weak ref so the action does not keep its owner alive.
        self._ob1 = weakref.ref(ob)
        self._func = func
        # _func_once may be swapped temporarily via _use_once().
        self._func_once = func
        self._name = name
        self.__doc__ = doc
        # Setter-generated actions are named 'flx_setter' by the framework.
        self.is_autogenerated = (func.__name__ == 'flx_setter')

    def __repr__(self):
        cname = self.__class__.__name__
        return ('<%s %r at 0x%x>' % (cname, self._name, id(self)))

    def _use_once(self, func):
        """Substitute *func* for exactly the next invocation."""
        self._func_once = func

    def __call__(self, *args):
        """Invoke the action; returns the owning object (for chaining)."""
        ob = self._ob1()
        if loop.can_mutate(ob):
            # Direct invocation; restore the regular function after a
            # one-shot substitution.
            func = self._func_once
            self._func_once = self._func
            if (ob is not None):
                res = func(ob, *args)
                if (res is not None):
                    # Actions are fire-and-forget; a return value is a mistake.
                    logger.warning(('Action (%s) should not return a value' % self._name))
        else:
            # Defer: let the event loop invoke the action later.
            loop.add_action_invokation(self, args)
        return ob
@pytest.mark.parametrize('elasticapm_client', [{'transaction_max_spans': 3}], indirect=True)
def test_transaction_max_span_nested(elasticapm_client):
    """With transaction_max_spans=3, nested spans beyond the limit are dropped.

    (The bare ``.parametrize`` line was a stripped ``@pytest.mark.parametrize``
    decorator — restored here.)
    """
    elasticapm_client.begin_transaction('test_type')
    # Nine spans total: three should be kept, six dropped.
    with elasticapm.capture_span('1'):
        with elasticapm.capture_span('2'):
            with elasticapm.capture_span('3'):
                with elasticapm.capture_span('4'):
                    with elasticapm.capture_span('5'):
                        pass
                with elasticapm.capture_span('6'):
                    pass
            with elasticapm.capture_span('7'):
                pass
        with elasticapm.capture_span('8'):
            pass
    with elasticapm.capture_span('9'):
        pass
    transaction_obj = elasticapm_client.end_transaction('test')
    transaction = elasticapm_client.events[constants.TRANSACTION][0]
    spans = elasticapm_client.events[constants.SPAN]
    assert (transaction_obj.dropped_spans == 6)
    assert (len(spans) == 3)
    # Only the outermost spans survive the limit.
    for span in spans:
        assert (span['name'] in ('1', '2', '3'))
    assert (transaction['span_count'] == {'dropped': 6, 'started': 3})
class OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Highpass-filter resonance mapping options for xrange sonification
    default instrument options.

    Duplicate ``def`` pairs were stripped ``@property``/``@x.setter``
    decorators; restored so the getters are not shadowed.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property to map to the audio parameter."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value for the audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value for the audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Context of the min/max mapping range."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
@torch.no_grad()
def dump_flops_info(model, inputs, output_dir, use_eval_mode=True):
    """Dump parameter/FLOPS estimates of *model* to files in *output_dir*.

    Tries mobile_cv's FlopsEstimation first, then falls back to fvcore's
    FlopCountAnalysis; writes the textual reports alongside logging a short
    table. Only runs on the main process. The bare ``_grad()`` line was a
    stripped ``@torch.no_grad()`` decorator — restored here.

    :param model: model to analyze (deep-copied, never mutated).
    :param inputs: example inputs, unpacked as ``model(*inputs)``.
    :param output_dir: directory for the generated report files.
    :param use_eval_mode: switch the copied model to eval() before tracing.
    :return: fvcore flops result, NaN on failure, or None when skipped.
    """
    if not comm.is_main_process():
        return
    logger.info("Evaluating model's number of parameters and FLOPS")
    try:
        # Work on a copy so hooks/mode changes never touch the live model.
        model = copy.deepcopy(model)
    except Exception:
        logger.info('Failed to deepcopy the model and skip FlopsEstimation.')
        return
    # Forward-pre hooks can interfere with tracing; strip them from the copy.
    for hook_key in list(model._forward_pre_hooks.keys()):
        logger.warning(f'Forward hook with key {hook_key} was removed in flop counter.')
        model._forward_pre_hooks.pop(hook_key)
    if use_eval_mode:
        model.eval()
    inputs = copy.deepcopy(inputs)
    # First attempt: mobile_cv's estimator (annotates the model string).
    try:
        fest = flops_utils.FlopsEstimation(model)
        with fest.enable():
            model(*inputs)
            fest.add_flops_info()
            model_str = str(model)
        output_file = os.path.join(output_dir, 'flops_str_mobilecv.txt')
        with PathManager.open(output_file, 'w') as f:
            f.write(model_str)
            logger.info(f'Flops info written to {output_file}')
    except Exception:
        logger.exception("Failed to estimate flops using mobile_cv's FlopsEstimation")
    # Second attempt: fvcore's FlopCountAnalysis (string + table reports).
    output_file = os.path.join(output_dir, 'flops_str_fvcore.txt')
    try:
        flops = FlopCountAnalysis(model, inputs)
        model_str = flop_count_str(flops)
        with PathManager.open(output_file, 'w') as f:
            f.write(model_str)
            logger.info(f'Flops info written to {output_file}')
        flops_table = flop_count_table(flops, max_depth=10)
        output_file = os.path.join(output_dir, 'flops_table_fvcore.txt')
        with PathManager.open(output_file, 'w') as f:
            f.write(flops_table)
            logger.info(f'Flops table (full version) written to {output_file}')
        flops_table = flop_count_table(flops, max_depth=3)
        logger.info(('Flops table:\n' + flops_table))
    except Exception:
        # Persist the traceback for debugging instead of crashing the run.
        with PathManager.open(output_file, 'w') as f:
            traceback.print_exc(file=f)
        logger.warning(f"Failed to estimate flops using detectron2's FlopCountAnalysis. Error written to {output_file}.")
        flops = float('nan')
    return flops
def check_user(user_api, user_node_api, username=None):
    """Re-apply traffic limits for one user (or all users).

    For each user: restore traffic on their nodes, then limit it again if
    the account has expired or its quota (GiB) is exceeded.

    :param user_api: API exposing get_user/get_all_user/get_user_use.
    :param user_node_api: API exposing node lookup and traffic limit/restore.
    :param username: optional single username; all users when omitted.
    """
    if username:
        user_list = [user_api.get_user(username)]
    else:
        user_list = user_api.get_all_user()
    # One timestamp for the whole pass keeps the expiry check consistent.
    now = datetime.now()
    for user in user_list:
        username = user['username']
        quota = user.get('quota')
        expiry_date = user.get('expiry_date')
        nodes = user_node_api.get_node_for_user_name(username)
        user_node_api.restore_user_traffic(username, nodes)
        # Expired account -> limit. (The original duplicated this comparison
        # in a redundant nested if.)
        if expiry_date and expiry_date < now:
            user_node_api.limit_user_traffic(username, nodes)
        # Quota is optional; guard against None before comparing (the
        # original raised TypeError for users without a quota).
        if quota and quota > 0:
            total = user_api.get_user_use(username, nodes)
            if quota * 1024 * 1024 * 1024 < total:
                user_node_api.limit_user_traffic(username, nodes)
def align_island(uv_vert0, uv_vert1, faces):
    """Rotate the UV island *faces* so the edge uv_vert0->uv_vert1 snaps to
    the nearest 90-degree axis, pivoting around the edge midpoint."""
    bm = bmesh.from_edit_mesh(bpy.context.active_object.data)
    uv_layers = bm.loops.layers.uv.verify()
    # Select only this island's UV loops so the transform affects just them.
    bpy.ops.uv.select_all(action='DESELECT')
    for face in faces:
        for loop in face.loops:
            loop[uv_layers].select = True
    diff = (uv_vert1 - uv_vert0)
    # NOTE: atan2(x, y) (not y, x) — the angle is measured from the V axis.
    current_angle = math.atan2(diff.x, diff.y)
    # Delta to the closest multiple of 90 degrees.
    angle_to_rotate = ((round((current_angle / (math.pi / 2))) * (math.pi / 2)) - current_angle)
    # Blender 2.83/2.91 rotate UVs in the opposite direction.
    if ((settings.bversion == 2.83) or (settings.bversion == 2.91)):
        angle_to_rotate = (- angle_to_rotate)
    # Pivot on the 2D cursor placed at the edge midpoint, then rotate.
    bpy.context.tool_settings.transform_pivot_point = 'CURSOR'
    bpy.ops.uv.cursor_set(location=(uv_vert0 + (diff / 2)))
    bpy.ops.transform.rotate(value=angle_to_rotate, orient_axis='Z', constraint_axis=(False, False, False), orient_type='GLOBAL', mirror=False, use_proportional_edit=False)
def extractShouahang58WordpressCom(item):
    """Parser for 'shouahang58.wordpress.com' feed items.

    Returns a release message for recognized series tags, False otherwise
    (including previews and items without a chapter/volume number).
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return False
    # (feed tag, canonical series name, translation type)
    series_map = [
        ("Hey, Don't Act Unruly!", "Hey, Don't Act Unruly!", 'translated'),
        ("Don't Act Unruly!", "Hey, Don't Act Unruly!", 'translated'),
        ('Happy Days Ties With a Knot', 'Happy Days Ties With a Knot', 'translated'),
    ]
    for tagname, name, tl_type in series_map:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def clean_tarfiles(func: Callable) -> Callable:
    """Decorator that removes temporary tarfiles after *func* runs.

    Cleanup happens on both success and failure; exceptions propagate with
    their original traceback (the original ``raise e`` reset it).
    """
    from functools import wraps  # preserve the wrapped function's metadata

    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        # try/finally replaces the duplicated _rm_tarfiles() calls in the
        # original except/else branches.
        try:
            return func(*args, **kwargs)
        finally:
            _rm_tarfiles()
    return wrapper
class SchemaTestCase(testcases.TestCase):
    """Base test case that builds a GraphQL schema on a SchemaClient.

    Subclasses set ``Mutations`` (and a ``query`` attribute) and call
    :meth:`execute` to run the query against the constructed schema.
    """

    # Mutation type installed into the schema; overridden by subclasses.
    Mutations = None
    client_class = SchemaClient

    def setUp(self):
        """Install the declared mutations on the test client's schema."""
        self.client.schema(mutation=self.Mutations)

    def execute(self, variables=None):
        """Execute ``self.query`` with optional *variables* and return the result."""
        assert self.query, '`query` property not specified'
        return self.client.execute(self.query, variables)
class TestAWeight(object):
    """Frequency-response test for the A-weighting filter."""

    def test_freq_resp(self):
        """The measured A-weighting response must stay inside the standard's
        upper/lower tolerance limits at the reference frequencies."""
        N = 40000
        fs = 300000
        # Filter an impulse: its spectrum is the filter's frequency response.
        impulse = signal.unit_impulse(N)
        out = A_weight(impulse, fs)
        freq = np.fft.rfftfreq(N, (1 / fs))
        levels = (20 * np.log10(abs(np.fft.rfft(out))))
        # Optional visual check when matplotlib is available (module flag).
        if mpl:
            plt.figure('A')
            plt.semilogx(freq, levels, alpha=0.7, label='fft')
            plt.legend()
            plt.ylim((- 80), (+ 5))
        # Interpolate onto the standard's reference frequencies and compare
        # against the tabulated response plus tolerance bands (module globals).
        func = interp1d(freq, levels)
        levels = func(frequencies)
        assert all(np.less_equal(levels, (responses['A'] + upper_limits)))
        assert all(np.greater_equal(levels, (responses['A'] + lower_limits)))
class ChebyshevSemiIteration(LinearSolver):
    """Chebyshev semi-iterative smoother for A x = b.

    Wraps a PETSc matrix; *alpha* and *beta* bound the eigenvalue interval of
    the (diagonally preconditioned) operator. With *save_iterations*, each
    iterate's array is recorded in ``self.iteration_results``.
    """

    def __init__(self, A, alpha, beta, save_iterations=False):
        self.A_petsc = A
        num_rows = A.getSizes()[1][0]
        num_cols = A.getSizes()[1][1]
        # Mirror the PETSc matrix as CSR for the superlu-based base class.
        (self.rowptr, self.colind, self.nzval) = A.getValuesCSR()
        A_superlu = SparseMat(num_rows, num_cols, self.nzval.shape[0], self.nzval, self.colind, self.rowptr)
        LinearSolver.__init__(self, A_superlu)
        self.r_petsc = p4pyPETSc.Vec().createWithArray(numpy.zeros(num_cols))
        self.r_petsc.setType('mpi')
        self.r_petsc_array = self.r_petsc.getArray()
        self.alpha = alpha
        self.beta = beta
        # Midpoint of the eigenvalue interval: Jacobi relaxation parameter.
        self.relax_parameter = old_div((self.alpha + self.beta), 2.0)
        # Interval half-width ratio used by the Chebyshev recurrence.
        self.rho = old_div((self.beta - self.alpha), (self.alpha + self.beta))
        # Scaled diagonal acts as the (damped Jacobi) preconditioner.
        self.diag = self.A_petsc.getDiagonal().copy()
        self.diag.scale(self.relax_parameter)
        # Work vector; initialized from the diagonal only for its layout.
        self.z = self.A_petsc.getDiagonal().copy()
        self.save_iterations = save_iterations
        if self.save_iterations:
            self.iteration_results = []

    def chebyshev_superlu_constructor(cls):
        """Alternate constructor placeholder (not implemented)."""
        raise RuntimeError('This function is not implmented yet.')

    def apply(self, b, x, k=5):
        """Run *k* Chebyshev iterations for A x = b, updating *x* in place.

        NOTE(review): w uses a fixed rho**2/4 factor each sweep rather than
        the classical per-iteration recurrence — confirm this simplification
        is intended.
        """
        self.x_k = x
        self.x_k_array = self.x_k.getArray()
        # x_{k-1} starts at zero.
        self.x_km1 = x.copy()
        self.x_km1.zeroEntries()
        if self.save_iterations:
            self.iteration_results.append(self.x_k_array.copy())
        b_copy = b.copy()
        for i in range(k):
            w = old_div(1.0, (1 - old_div((self.rho ** 2), 4.0)))
            # r = -(A x_k - b); computeResidual fills r_petsc_array in place.
            self.r_petsc_array.fill(0.0)
            self.computeResidual(self.x_k_array, self.r_petsc_array, b_copy.getArray())
            self.r_petsc.scale((- 1))
            # z = w * (D^-1 r + x_k - x_{k-1}) + x_{k-1}
            self.z.pointwiseDivide(self.r_petsc, self.diag)
            self.z.axpy(1.0, self.x_k)
            self.z.axpy((- 1.0), self.x_km1)
            self.z.scale(w)
            self.z.axpy(1.0, self.x_km1)
            # Shift iterates for the next sweep.
            self.x_km1 = self.x_k.copy()
            self.x_k = self.z.copy()
            self.x_k_array = self.x_k.getArray()
            if self.save_iterations:
                self.iteration_results.append(self.x_k.getArray().copy())
        # Write the final iterate back into the caller's vector.
        x.setArray(self.x_k_array)
def validate_key_values(config_handle, section, key, default=None):
    """Look up ``config_handle[section][key]``, falling back to *default*.

    A missing section is logged at info level; a missing key (or section) is
    logged at warning level and yields *default*.
    """
    if section not in config_handle:
        LOG.info('Section missing from configurations: [%s]', section)
    try:
        return config_handle[section][key]
    except KeyError:
        LOG.warning('[%s] missing key "%s", using %r.', section, key, default)
        return default
class ChoicesLayout(object):
    """Qt layout presenting a message plus a radio-button choice group."""

    def __init__(self, msg: str, choices, on_clicked=None, checked_index=0):
        vbox = QVBoxLayout()
        # Long messages become a separate word-wrapped label instead of the
        # group-box title.
        if (len(msg) > 50):
            vbox.addWidget(WWLabel(msg))
            msg = ''
        gb2 = QGroupBox(msg)
        vbox.addWidget(gb2)
        vbox2 = QVBoxLayout()
        gb2.setLayout(vbox2)
        # Button group keeps the radio buttons mutually exclusive and gives
        # each an index id for selected_index().
        self.group = group = QButtonGroup()
        for (i, c) in enumerate(choices):
            button = QRadioButton(gb2)
            button.setText(c)
            vbox2.addWidget(button)
            group.addButton(button)
            group.setId(button, i)
            if (i == checked_index):
                button.setChecked(True)
        if on_clicked:
            # Callback receives this layout instance as its argument.
            group.buttonClicked.connect(partial(on_clicked, self))
        self.vbox = vbox

    def layout(self):
        """Return the top-level layout to embed in a dialog."""
        return self.vbox

    def selected_index(self) -> int:
        """Return the index of the checked choice (-1 if none)."""
        return self.group.checkedId()
class TestForceForkTristate(unittest.TestCase):
    """CLI parsing of the mutually exclusive --always-fork/--never-fork flags."""

    def test_neither_fork_arg(self):
        """No flag leaves the tristate unset (None)."""
        parsed = main.parse_cli_args([TEST_MANIFEST])
        self.assertIsNone(parsed.fork)

    def test_always_fork_arg(self):
        """--always-fork forces forking on."""
        parsed = main.parse_cli_args(['--always-fork', TEST_MANIFEST])
        self.assertTrue(parsed.fork)

    def test_never_fork_arg(self):
        """--never-fork forces forking off."""
        parsed = main.parse_cli_args(['--never-fork', TEST_MANIFEST])
        self.assertFalse(parsed.fork)

    def test_both_fork_args(self):
        """Passing both flags is rejected by argparse (SystemExit)."""
        with self.assertRaises(SystemExit):
            main.parse_cli_args(['--always-fork', '--never-fork', TEST_MANIFEST])
class OptionSeriesVariwideSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for variwide sonification default
    instrument options.

    Duplicate ``def`` pairs were stripped ``@property``/``@x.setter``
    decorators; restored so the getters are not shadowed.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property to map to the audio parameter."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value for the audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value for the audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Context of the min/max mapping range."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesPolygonSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for polygon-series sonification.

    NOTE(review): the original block defined each name twice (getter then
    setter) without decorators, so the second ``def`` silently shadowed the
    first and only the setters survived. Restored as ``@property`` /
    ``@x.setter`` pairs, matching the getter/setter shape of the bodies.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property to map this audio parameter to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value of the mapped parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value of the mapped parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """What data set to map within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the decorator lost its `@pytest.mark` prefix during extraction
# (a bare `.parametrize(...)` line is a syntax error); restored here.
@pytest.mark.parametrize(
    ['operation', 'result'],
    [
        (BinaryOperation(OperationType.logical_or, [var, con_false]), [var]),
        (BinaryOperation(OperationType.logical_or, [var, con_true]), [con_true]),
        (BinaryOperation(OperationType.logical_and, [var, con_false]), [con_false]),
        (BinaryOperation(OperationType.logical_and, [var, con_true]), [var]),
    ],
)
def test_simplify_trivial_logic_arithmetic(operation: Operation, result: list[Expression]):
    """x|False == x, x|True == True, x&False == False, x&True == x."""
    assert SimplifyTrivialLogicArithmetic().apply(operation) == [(operation, e) for e in result]
class TestReconTargets():
    """Integration tests: TargetList creates its sqlite database and results file."""

    def setup_method(self):
        """Copy the fixture target file into a temp dir and build a TargetList."""
        self.tmp_path = Path(tempfile.mkdtemp())
        shutil.copy(tfp, self.tmp_path)
        # fixed: the original left the appended file handle open (leak);
        # a context manager closes it deterministically
        with (self.tmp_path / 'bitdiscovery').open(mode='a') as target_fh:
            target_fh.writelines(['127.0.0.1'])
        self.scan = TargetList(target_file=str(self.tmp_path / 'bitdiscovery'), results_dir=str(self.tmp_path), db_location=str(self.tmp_path / 'testing.sqlite'))

    def teardown_method(self):
        shutil.rmtree(self.tmp_path)

    def test_scan_creates_database(self):
        assert self.scan.db_mgr.location.exists()
        assert (self.tmp_path / 'testing.sqlite') == self.scan.db_mgr.location

    def test_scan_creates_results(self):
        assert self.scan.output().exists()
class MockDecompilerTask(DecompilerTask):
    """DecompilerTask stand-in that wires a given CFG to fixed restructuring options."""

    class MockFunction():
        """Minimal function object exposing just a name and a void()-style type."""

        class FunctionType():
            def __init__(self):
                # simplest possible signature: void return, no parameters
                self.return_value = 'void'
                self.parameters = []

        def __init__(self):
            self.name = 'test'
            self.function_type = self.FunctionType()

    def __init__(self, cfg):
        # name 'test', no backend/frontend handle (None) — see DecompilerTask.__init__
        super().__init__('test', None)
        self._cfg = cfg
        self.set_options()
        self.function = self.MockFunction()

    def set_options(self):
        """Enable the switch-reconstruction options the restructuring tests rely on."""
        self.options = Options()
        self.options.set('pattern-independent-restructuring.switch_reconstruction', True)
        self.options.set('pattern-independent-restructuring.nested_switch_nodes', True)
        self.options.set('pattern-independent-restructuring.min_switch_case_number', 2)

    def reset(self):
        # the mock keeps no cached state, so reset is a no-op
        pass
class StatusPayload(NamedTuple):
    """Decoded LES Status handshake announcement.

    Optional fields are ``None`` unless the peer advertised the corresponding
    key; flag-style keys ('serveHeaders', 'txRelay') are presence-only.
    """
    version: int
    network_id: int
    head_td: int
    head_hash: Hash32
    head_number: BlockNumber
    genesis_hash: Hash32
    serve_headers: bool
    serve_chain_since: Optional[BlockNumber]
    serve_state_since: Optional[BlockNumber]
    serve_recent_state: Optional[bool]
    serve_recent_chain: Optional[bool]
    tx_relay: bool
    flow_control_bl: Optional[int]
    flow_control_mcr: Optional[Tuple[(Tuple[(int, int, int)], ...)]]
    flow_control_mrr: Optional[int]
    announce_type: Optional[int]

    # NOTE(review): restored @classmethod — the method takes `cls` but the
    # decorator was lost during extraction.
    @classmethod
    def from_pairs(cls, *pairs: Tuple[(str, Any)]) -> 'StatusPayload':
        """Build a payload from the raw (key, value) pairs of a Status message."""
        pairs_dict = cast(_StatusPayloadDict, dict(pairs))
        return cls(version=pairs_dict['protocolVersion'], network_id=pairs_dict['networkId'], head_td=pairs_dict['headTd'], head_hash=pairs_dict['headHash'], head_number=pairs_dict['headNum'], genesis_hash=pairs_dict['genesisHash'], serve_headers=('serveHeaders' in pairs_dict), serve_chain_since=pairs_dict.get('serveChainSince'), serve_state_since=pairs_dict.get('serveStateSince'), serve_recent_chain=pairs_dict.get('serveRecentChain'), serve_recent_state=pairs_dict.get('serveRecentState'), tx_relay=('txRelay' in pairs_dict), flow_control_bl=pairs_dict.get('flowControl/BL'), flow_control_mcr=pairs_dict.get('flowControl/MRC'), flow_control_mrr=pairs_dict.get('flowControl/MRR'), announce_type=pairs_dict.get('announceType'))

    # NOTE(review): the bare `_tuple` expression left behind here (a NameError
    # at class-creation time) matches eth-utils' @to_tuple decorator with its
    # prefix stripped; restored — confirm against the file's imports.
    @to_tuple
    def to_pairs(self) -> Iterable[Tuple[(str, Any)]]:
        """Yield the (key, value) pairs to send, skipping unset optionals."""
        yield ('protocolVersion', self.version)
        yield ('networkId', self.network_id)
        yield ('headTd', self.head_td)
        yield ('headHash', self.head_hash)
        yield ('headNum', self.head_number)
        yield ('genesisHash', self.genesis_hash)
        if self.serve_headers is True:
            yield ('serveHeaders', None)
        if self.serve_chain_since is not None:
            yield ('serveChainSince', self.serve_chain_since)
        if self.serve_state_since is not None:
            yield ('serveStateSince', self.serve_state_since)
        if self.serve_recent_chain is not None:
            yield ('serveRecentChain', self.serve_recent_chain)
        if self.serve_recent_state is not None:
            yield ('serveRecentState', self.serve_recent_state)
        if self.tx_relay is True:
            yield ('txRelay', None)
        if self.flow_control_bl is not None:
            yield ('flowControl/BL', self.flow_control_bl)
        if self.flow_control_mcr is not None:
            yield ('flowControl/MRC', self.flow_control_mcr)
        if self.flow_control_mrr is not None:
            yield ('flowControl/MRR', self.flow_control_mrr)
        if self.announce_type is not None:
            yield ('announceType', self.announce_type)
def load(path: (str | None)=None):
    """Load the FACT core TOML configuration into the module-level config objects.

    :param path: path to the TOML file; defaults to the bundled
        ``config/fact-core-config.toml`` next to this module.
    :raises ValueError: if the mandatory ``common`` section is missing.
    """
    Common.model_rebuild()
    Backend.model_rebuild()
    Frontend.model_rebuild()
    if path is None:
        path = Path(__file__).parent / 'config/fact-core-config.toml'
    with open(path, encoding='utf8') as f:
        cfg = toml.load(f)
    _replace_hyphens_with_underscores(cfg)
    # fixed: validate *before* indexing — the original read cfg['common'] and
    # cfg['backend']/cfg['frontend'] unconditionally, so a missing section
    # raised KeyError before the intended ValueError / presence guards ran
    if 'common' not in cfg:
        raise ValueError('The common section MUST be specified')
    common_dict = cfg['common']
    # analysis presets arrive as a list of tables; re-key them by preset name
    preset_dict = {}
    for preset in common_dict.pop('analysis_preset', []):
        p = Common.AnalysisPreset(**preset)
        preset_dict[p.name] = p.model_dump()
    common_dict['analysis_preset'] = preset_dict
    global _common
    _common = Common(**common_dict)
    global _backend
    if 'backend' in cfg:
        backend_dict = cfg['backend']
        # plugins likewise arrive as a list of tables; re-key by plugin name
        plugin_dict = {}
        for plugin in backend_dict.pop('plugin', []):
            p = Backend.Plugin(**plugin)
            plugin_dict[p.name] = p.model_dump()
        backend_dict['plugin'] = plugin_dict
        _backend = Backend(**backend_dict, **common_dict)
    global _frontend
    if 'frontend' in cfg:
        _frontend = Frontend(**cfg['frontend'], **common_dict)
def LookupAccountSid(sid, machine=None):
    """Resolve *sid* to ``(account_name, referenced_domain, sid_use)`` via advapi32.

    :param sid: the PSID to look up.
    :param machine: target system name, or None for the local machine.
    :raises OSError: via ``errcheckBOOL`` when the Win32 call fails.
    """
    # WINFUNCTYPE(restype, *argtypes) for LookupAccountSidW
    prototype = ctypes.WINFUNCTYPE(wintypes.BOOL, wintypes.LPCWSTR, PSID, wintypes.LPCWSTR, wintypes.LPDWORD, wintypes.LPCWSTR, wintypes.LPDWORD, wintypes.LPDWORD)
    # _Out_ params get preallocated buffers (UNLEN-sized) and are returned by
    # ctypes; the cch* in-params carry the buffer sizes by reference
    paramflags = ((_In_, 'lpSystemName'), (_In_, 'lpSid'), (_Out_, 'lpName', ctypes.create_unicode_buffer(UNLEN)), (_In_, 'cchName', ctypes.byref(wintypes.DWORD(UNLEN))), (_Out_, 'lpReferencedDomainName', ctypes.create_unicode_buffer(UNLEN)), (_In_, 'cchReferencedDomainName', ctypes.byref(wintypes.DWORD(UNLEN))), (_Out_, 'peUse'))
    _LookupAccountSid = prototype(('LookupAccountSidW', advapi32), paramflags)
    _LookupAccountSid.errcheck = errcheckBOOL
    # ctypes returns only the _Out_ parameters, in declaration order
    (lpname, lprefdn, peuse) = _LookupAccountSid(machine, sid)
    return (lpname.value, lprefdn.value, peuse)
# NOTE(review): stripped decorators left bare keyword-argument tuples (a
# syntax error); restored as hypothesis @given/@settings — confirm the exact
# names against the file's imports.
@given(
    private_key_bytes=private_key_st,
    message_hash=message_hash_st,
    direction=st.one_of(st.just('coincurve-to-native'), st.just('native-to-coincurve')),
)
@settings(max_examples=MAX_EXAMPLES)
def test_native_to_coincurve_recover(private_key_bytes, message_hash, direction, native_key_api, coincurve_key_api):
    """Sign with one secp256k1 backend and recover/verify with the other."""
    if direction == 'coincurve-to-native':
        backend_a = coincurve_key_api
        backend_b = native_key_api
    elif direction == 'native-to-coincurve':
        backend_b = coincurve_key_api
        backend_a = native_key_api
    else:
        assert False, 'invariant'
    private_key_a = backend_a.PrivateKey(private_key_bytes)
    public_key_a = private_key_a.public_key
    signature_a = backend_a.ecdsa_sign(message_hash, private_key_a)
    # cross-backend recovery must reproduce the original public key
    public_key_b = backend_b.ecdsa_recover(message_hash, signature_a)
    assert public_key_b == public_key_a
    assert backend_b.ecdsa_verify(message_hash, signature_a, public_key_b)
class CodeGen(asdl.VisitorBase):
    """Generates Python class source for the node types of an ASDL tree.

    Visiting fills ``hierarchy`` (class -> parent names), ``fields`` and
    ``attributes``; ``python()`` renders everything as importable source text.
    Python-2-only constructs (``has_key``, ``iteritems``, list-returning
    ``map``, bare ``reduce``, ``list.sort(cmp)``) were replaced with their
    Python 3 equivalents.
    """

    def __init__(self, node):
        asdl.VisitorBase.__init__(self)
        self.starting_node = None  # first type name encountered in the module
        self.current_node = None   # type currently being visited
        self.hierarchy = defaultdict(list)  # class name -> parent class names
        self.hierarchy['_AST'] = []         # implicit common base class
        self.fields = defaultdict(list)
        self.attributes = defaultdict(list)
        self.code = defaultdict(list)
        self.visit(node)
        ret = has_cycle(self.hierarchy)
        if ret:
            raise CyclicDependencies(ret)
        self._gen_code(node)

    def visitModule(self, node):
        for (name, child) in node.types.items():
            if not self.starting_node:
                self.starting_node = str(name)
            self.current_node = str(name)
            self.hierarchy[name]  # touch so parentless types still get an entry
            self.visit(child)

    def visitSum(self, node):
        self.fields[self.current_node] = node.fields if hasattr(node, 'fields') else []
        self.attributes[self.current_node] = node.attributes if hasattr(node, 'attributes') else []
        for child in node.types:
            self.visit(child)

    def visitConstructor(self, node):
        if str(node.name) in self.fields:
            return
        self.fields[str(node.name)].extend(node.fields)
        self.hierarchy[str(node.name)].append(self.current_node)

    def visitProduct(self, node):
        self.fields[self.current_node].extend(node.fields)

    def _get_fields(self, name):
        """Return *name*'s own field names followed by all inherited ones."""
        # was: self.fields.has_key(name) / fields = map(...) — Python 2 only;
        # the map result must be a real list for .extend to work on Python 3
        if name in self.fields:
            fields = [f.name for f in self.fields[name]]
            for parent in self.hierarchy[name]:
                fields.extend(self._get_fields(parent))
            return fields
        return []

    def _get_attributes(self, name):
        """Return *name*'s own attribute names followed by all inherited ones."""
        if name == '_AST':
            return []
        attributes = [a.name for a in self.attributes[name]]
        for parent in self.hierarchy[name]:
            attributes.extend(self._get_attributes(parent))
        return attributes

    def _gen_code(self, node):
        """Build the nested-list source representation for every class."""
        is_methods = []
        for name in sorted(self.hierarchy):
            if name != '_AST':
                is_methods.extend(['', 'def is{!s}(self):'.format(name), ['return False']])
        cls = ['class _AST (ast.AST):', (['_fields = ()', '_attributes = ()', '', 'def __init__ (self, **ARGS):', ['ast.AST.__init__(self)', 'for k, v in ARGS.items():', ['setattr(self, k, v)']]] + is_methods)]
        self.code['_AST'] = cls
        # iterate over a snapshot: the defaultdict helpers below may insert
        # keys while we loop (was: iteritems(), Python 2 only)
        for (name, parents) in list(self.hierarchy.items()):
            if name == '_AST':
                continue
            if not parents:
                parents = ['_AST']
            fields = self.fields[name]
            assign = []
            _fields = remove_duplicates(self._get_fields(name))
            _attributes = remove_duplicates(self._get_attributes(name))
            body = []
            cls = ['class {!s} ({!s}):'.format(name, ', '.join(parents)), body]
            non_default_args = []
            default_args = []
            for f in fields:
                if f.name.value == 'ctx':
                    f.opt = True  # 'ctx' is always optional in generated nodes
                if f.opt:
                    default_args.append('{!s}=None'.format(f.name))
                    assign.append('self.{0!s} = {0!s}'.format(f.name))
                elif f.seq:
                    default_args.append('{!s}=[]'.format(f.name))
                    assign.append('self.{0!s} = list({0!s})'.format(f.name))
                else:
                    non_default_args.append('{!s}'.format(f.name))
                    assign.append('self.{0!s} = {0!s}'.format(f.name))
            args = non_default_args + default_args
            body.append('_fields = {!r}'.format(tuple(map(repr, _fields))))
            body.append('_attributes = {!r}'.format(tuple(map(repr, _attributes))))
            body.append('')
            args_str = ', '.join(args)
            if args_str != '':
                args_str += ', '
            body.append('def __init__ (self, {!s} **ARGS):'.format(args_str))
            ctor_body = []
            body.append(ctor_body)
            ctor_body.extend('{!s}.__init__(self, **ARGS)'.format(base) for base in parents)
            ctor_body.extend(assign)
            body.extend(['', 'def is{}(self):'.format(name), ['return True']])
            self.code[name] = cls

    def _cost(self, name):
        """Depth of *name* in the hierarchy; bases sort before subclasses."""
        if name == '_AST':
            return 0
        # was: reduce(lambda acc, x: acc + self._cost(x), parents, 1) —
        # `reduce` is no longer a builtin on Python 3
        return 1 + sum(self._cost(parent) for parent in self.hierarchy[name])

    def python(self):
        """Render all generated classes as a single Python source string."""
        # was: classes = self.hierarchy.keys(); classes.sort(cmp) — dict views
        # have no sort() and cmp-style sorting is gone on Python 3
        classes = sorted(self.hierarchy, key=self._cost)
        code = ['from snakes.lang import ast', 'from ast import *', '']
        for cls in classes:
            code.extend(self.code[cls])
            code.append('')

        def render(lines, indent):
            # nested lists indent one extra level (4 spaces each)
            for line in lines:
                if isinstance(line, str):
                    yield (4 * indent * ' ') + line
                else:
                    for sub in render(line, indent + 1):
                        yield sub
        return '\n'.join(render(code, 0))
def test_fix_reference_namespace_is_working_properly_with_refs_with_new_versions(create_test_data, create_pymel, create_maya_env):
    """fix_reference_namespaces() should rename reference namespaces to the
    latest published version's nice name, upgrade the nested model reference
    from v002 to v003, and carry reference edits over to the new reference.
    """
    data = create_test_data
    pm = create_pymel
    maya_env = create_maya_env
    print('version2: {}'.format(data['asset2_model_main_v002']))
    print('version3: {}'.format(data['asset2_model_main_v003']))
    print('version2.full_path: {}'.format(data['asset2_model_main_v002'].full_path))
    print('version3.full_path: {}'.format(data['asset2_model_main_v003'].full_path))
    # publish the versions involved so latest_published_version resolves
    data['asset2_model_main_v002'].is_published = True
    data['asset2_model_main_v003'].is_published = True
    data['asset2_lookdev_take1_v001'].is_published = True
    data['version11'].is_published = True
    DBSession.commit()
    # build the model scene: a locator parented under a transform, saved as
    # v002 and then again as v003 (so v003 is the newer published model)
    maya_env.open(data['asset2_model_main_v002'])
    loc = pm.spaceLocator(name='locator1')
    loc.t.set(0, 0, 0)
    tra_group = pm.nt.Transform(name='asset1')
    pm.parent(loc, tra_group)
    pm.saveFile()
    maya_env.save_as(data['asset2_model_main_v003'])
    DBSession.commit()
    # lookdev scene references the *old* model version with a filename-based
    # (non-nice-name) namespace
    maya_env.open(data['asset2_lookdev_take1_v001'])
    maya_env.reference(data['asset2_model_main_v002'])
    refs = pm.listReferences()
    ref = refs[0]
    # NOTE(review): bare isinstance() has no effect — probably intended as an
    # assert; left unchanged here.
    isinstance(ref, pm.system.FileReference)
    ref.namespace = data['asset2_model_main_v002'].filename.replace('.', '_')
    pm.saveFile()
    pm.newFile(force=True)
    # the shot scene (version11) references the lookdev scene and edits the
    # locator inside the nested model reference
    maya_env.open(data['version11'])
    maya_env.reference(data['asset2_lookdev_take1_v001'])
    refs = pm.listReferences()
    refs[0].namespace = data['asset2_lookdev_take1_v001'].filename.replace('.', '_')
    loc = pm.ls('locator1', type=pm.nt.Transform, r=1)
    loc[0].t.set(1, 0, 0)
    version2_ref_node = pm.listReferences(refs[0])[0]
    edits = pm.referenceQuery(version2_ref_node, es=1)
    assert (len(edits) > 0)
    pm.saveFile()
    DBSession.commit()
    # sanity check: namespaces are still the filename-based ones before the fix
    all_refs = pm.listReferences(recursive=1)
    assert (all_refs[0].namespace == data['asset2_lookdev_take1_v001'].filename.replace('.', '_'))
    assert (all_refs[1].namespace == data['asset2_model_main_v002'].filename.replace('.', '_'))
    maya_env.fix_reference_namespaces()
    pm.saveFile()
    # after the fix: nice-name namespaces, and the model ref upgraded to v003
    all_refs = pm.listReferences(recursive=1)
    assert (all_refs[0].namespace == data['asset2_lookdev_take1_v001'].latest_published_version.nice_name)
    assert (all_refs[1].namespace == data['asset2_model_main_v002'].latest_published_version.nice_name)
    assert (maya_env.get_version_from_full_path(all_refs[1].path).parent == data['asset2_model_main_v002'])
    assert (maya_env.get_version_from_full_path(all_refs[1].path) == data['asset2_model_main_v003'])
    # no failed edits anywhere; the two successful edits live on the model ref
    assert (len(pm.referenceQuery(all_refs[0], es=1, fld=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[1], es=1, fld=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[0], es=1, scs=1)) == 0)
    assert (len(pm.referenceQuery(all_refs[1], es=1, scs=1)) == 2)
    # the translate edit survived the namespace fix
    locs = pm.ls('locator1', type=pm.nt.Transform, r=1)
    assert (1.0 == locs[0].tx.get())
    pm.saveFile()
class ImportedNamespaces(object):
    """Collects ``import x`` / ``from y import x`` entries and renders them.

    Internally maps imported name -> origin module (``None`` for a plain
    ``import``); insertion order is preserved.
    """

    def __init__(self):
        # plain dicts keep insertion order from Python 3.7 on
        if sys.version_info >= (3, 7):
            self.data = {}
        else:
            from collections import OrderedDict
            self.data = OrderedDict()

    def add(self, name1, name2=None, symbols=None):
        """Record an import.

        ``add('os')`` records a plain import; ``add('mod', 'name')`` records
        ``from mod import name``; with only *name1* and a *symbols* mapping the
        origin module is looked up as ``symbols[name1]``.
        """
        if name2 is not None:
            origin, name = name1, name2
        else:
            name = name1
            if not symbols:
                self.data[name] = None
                return
            origin = symbols[name]
        self.data[name] = origin

    def __contains__(self, item):
        """True for a recorded plain name, or a matching (origin, name) pair."""
        origin, name = item if isinstance(item, tuple) else (None, item)
        return name in self.data and self.data[name] == origin

    def _make_line(self, from_, imports, for_stub):
        """Render one ``from X import ...`` line, wrapping past 80 columns."""
        if for_stub:
            joined = ', '.join('%s as %s' % (n, n) for n in imports)
        else:
            joined = ', '.join(imports)
        single = 'from %s import %s' % (from_, joined)
        if len(single) <= 80:
            return single
        if for_stub:
            body = '\n'.join(' %s as %s,' % (n, n) for n in imports)
        else:
            wrapper = textwrap.TextWrapper(subsequent_indent=' ', initial_indent=' ', break_long_words=False)
            body = '\n'.join(wrapper.wrap(joined))
        return 'from %s import (\n%s\n)' % (from_, body)

    def getvalue(self, for_stub=False):
        """Render all recorded imports as newline-separated statements."""
        grouped = {}
        lines = []
        for name, origin in self.data.items():
            if origin is None:
                grouped[name] = None
            elif name == '*':
                lines.append('from %s import *' % origin)
            else:
                grouped.setdefault(origin, set()).add(name)
        for origin, names in grouped.items():
            if names is None:
                lines.append('import %s' % origin)
            else:
                ordered = sorted(names, key=lambda s: s.lower())
                lines.append(self._make_line(origin, ordered, for_stub=for_stub))
        return '\n'.join(lines)
class WebPlaylist(object):
    """Base class for a self-populating, web-backed "similar tracks" playlist.

    Subclasses implement ``search_website()`` to fetch candidate tracks; this
    class tracks searched artists, throttles fetches and tops up the queue.
    """

    MAX_TRACKS_TO_ADD = 3          # tracks appended to the queue per top-up
    MIN_TRACKS_TO_FETCH = 5        # queue length that triggers a new fetch
    TOTAL_TRACKS_REMEMBERED = 25   # cap on cached, not-yet-played candidates
    MAX_TRACKS_PER_ARTIST = 3      # per-artist limit to keep variety

    def __init__(self, shell, source, playlist_name):
        self.shell = shell
        self.candidate_artist = {}  # artist -> list of candidate track dicts
        self.shell.props.shell_player.connect('playing-song-changed', self.playing_song_changed)
        self.source = source
        self.search_entry = None
        self.playlist_started = False
        self.played_artist = {}     # artists already searched (used as a set)
        self.tracks_not_played = 0
        self.info_cache = rb.URLCache(name=playlist_name, path=os.path.join('coverart_browser', playlist_name), refresh=30, discard=180)
        self.info_cache.clean()

    def playing_song_changed(self, player, entry):
        """Reset when playback leaves our source; top up when running low."""
        if not entry:
            return
        if player.get_playing_source() != self.source:
            self.playlist_started = False
            self.played_artist.clear()
            self.tracks_not_played = 0
        if self.playlist_started and (len(self.source.props.query_model) < self.MIN_TRACKS_TO_FETCH):
            self.start(entry)

    def start(self, seed_entry, reinitialise=False):
        """Seed (or, with *reinitialise*, reseed) the playlist from *seed_entry*."""
        artist = seed_entry.get_string(RB.RhythmDBPropType.ARTIST)
        if reinitialise:
            self.played_artist.clear()
            self.tracks_not_played = 0
            self.playlist_started = False
            player = self.shell.props.shell_player
            (_, is_playing) = player.get_playing()
            if is_playing:
                player.stop()
            for row in self.source.props.query_model:
                self.source.props.query_model.remove_entry(row[0])
        if self.tracks_not_played > self.TOTAL_TRACKS_REMEMBERED:
            # fixed: the original printed a (format, value) tuple instead of
            # interpolating the count into the message
            print('we have plenty of tracks to play yet - no need to fetch more %d' % self.tracks_not_played)
            self.add_tracks_to_source()
            return
        search_artist = urllib.parse.quote(artist.encode('utf8'))
        if search_artist in self.played_artist:
            print('we have already searched for that artist')
            return
        self.search_entry = seed_entry
        self.played_artist[search_artist] = True
        self.playlist_started = True
        self._running = False
        self._start_process()

    def _start_process(self):
        """Kick off a website search unless one is already in flight."""
        if not self._running:
            self._running = True
            self.search_website()

    def search_website(self):
        """Subclass hook: query the backing web service for similar tracks."""
        pass

    def _clear_next(self):
        # NOTE(review): `search_artists` (plural) is written but never read in
        # this class — possibly vestigial; confirm before removing.
        self.search_artists = ''
        self._running = False

    # NOTE(review): a stripped decorator left the bare expression `_iterator`
    # here, which raised NameError at class-creation time. _load_albums
    # returns the (chunk, process, after, error, finish) tuple consumed by the
    # plugin's idle-iterator helper; restore the original decorator (likely
    # ``@idle_iterator``) once confirmed against the original source.
    def _load_albums(self):
        def process(row, data):
            # keep an entry as a candidate when its folded artist/title pair
            # matched the web results collected in self.artist (set by subclass)
            entry = data['model'][row.path][0]
            lookup = entry.get_string(RB.RhythmDBPropType.ARTIST_FOLDED)
            lookup_title = entry.get_string(RB.RhythmDBPropType.TITLE_FOLDED)
            if (lookup in self.artist) and (lookup_title in self.artist[lookup]):
                if lookup not in self.candidate_artist:
                    self.candidate_artist[lookup] = []
                d = dict((i['track-title'], i['add-to-source']) for i in self.candidate_artist[lookup])
                if (len(d) < self.MAX_TRACKS_PER_ARTIST) and (lookup_title not in d):
                    self.candidate_artist[lookup].append({'track': entry, 'add-to-source': False, 'track-title': lookup_title})
                    self.tracks_not_played = self.tracks_not_played + 1

        def after(data):
            pass

        def error(exception):
            print('Error processing entries: ' + str(exception))

        def finish(data):
            self.add_tracks_to_source()
            self._clear_next()
        return (LOAD_CHUNK, process, after, error, finish)

    def display_error_message(self):
        """Show a modal info dialog when the web search yields nothing."""
        dialog = Gtk.MessageDialog(None, Gtk.DialogFlags.MODAL, Gtk.MessageType.INFO, Gtk.ButtonsType.OK, _('No matching tracks have been found'))
        dialog.run()
        dialog.destroy()

    def add_tracks_to_source(self):
        """Move up to MAX_TRACKS_TO_ADD random unplayed candidates to the queue."""
        entries = []
        for artist in self.candidate_artist:
            d = dict((i['track'], (self.candidate_artist[artist].index(i), i['add-to-source'], artist)) for i in self.candidate_artist[artist])
            for (entry, elements) in d.items():
                (element_pos, add_to_source, artist) = elements
                if not add_to_source:
                    entries.append({entry: elements})
        random.shuffle(entries)
        count = 0
        for row in entries:
            print(row)
            (entry, elements) = list(row.items())[0]
            (element_pos, add_to_source, artist) = elements
            self.source.add_entry(entry, (- 1))
            self.candidate_artist[artist][element_pos]['add-to-source'] = True
            count = count + 1
            self.tracks_not_played = self.tracks_not_played - 1
            if count == self.MAX_TRACKS_TO_ADD:
                break
        # start playback if the queue has tracks and nothing is playing
        player = self.shell.props.shell_player
        (_, is_playing) = player.get_playing()
        if (len(self.source.props.query_model) > 0) and (not is_playing):
            player.play_entry(self.source.props.query_model[0][0], self.source)
def run_demo():
    """Fetch and print online feature values from the local feature store."""
    store = FeatureStore(repo_path='.')
    print('\n--- Online features ---')
    feature_vector = store.get_online_features(
        features=['driver_hourly_stats:conv_rate'],
        entity_rows=[{'driver_id': 1001}, {'driver_id': 1002}],
    ).to_dict()
    for name in sorted(feature_vector):
        print(name, ' : ', feature_vector[name])
# NOTE(review): the bare string below looks like the residue of a stripped
# registry decorator (e.g. @registry.reg('cuda.perm102_bmm_rcr_bias.gen_profiler'));
# confirm against the original source — without it the op is not registered.
('cuda.perm102_bmm_rcr_bias.gen_profiler')
def gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict):
    """Generate profiler sources for the perm102 bmm_rcr_bias CUDA op.

    :param func_attrs: op attribute dict; ``alpha`` defaults to 1 if unset.
    :param workdir: directory to emit profiler sources into.
    :param profiler_filename: base filename for the emitted profiler.
    :param dim_info_dict: dim-name -> DimInfo mapping for shape resolution.
    """
    # A: (M, B, K), B: (B, N, K), C: (M, B, N) — the perm102 layout
    args_parser = bmm_common.ARGS_PARSER_TEMPLATE.render(a_dims=['M', 'B', 'K'], b_dims=['B', 'N', 'K'], c_dims=['M', 'B', 'N'])
    mm_info = _get_default_problem_info(alpha_value=func_attrs.get('alpha', 1))
    problem_args = bmm_common.PROBLEM_ARGS_TEMPLATE.render(mm_info=mm_info)
    # CUTLASS 3.x needs element types attached to the problem description
    problem_args_cutlass_3x = bmm_common.PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(mm_info=bmm_common.add_elem_types_to_mm_info(mm_info=mm_info, func_attrs=func_attrs))
    return bmm_common.gen_profiler(func_attrs=func_attrs, workdir=workdir, profiler_filename=profiler_filename, dim_info_dict=dim_info_dict, src_template=common_bias.SRC_TEMPLATE, problem_args=problem_args, problem_args_cutlass_3x=problem_args_cutlass_3x, args_parser=args_parser, bias_ptr_arg='memory_pool->RequestTensorByIdx(3)')
def to_dot(bmg_raw: BMGraphBuilder, node_types: bool=False, node_sizes: bool=False, edge_requirements: bool=False, after_transform: bool=False, label_edges: bool=True, skip_optimizations: Set[str]=default_skip_optimizations) -> str:
    """Render a graph-builder's graph as a Graphviz DOT string.

    :param bmg_raw: the graph to render.
    :param node_types: append each node's lattice type to its label.
    :param node_sizes: append each node's size to its label.
    :param edge_requirements: append each edge's requirement to its label.
    :param after_transform: run fix_problems() first and render only the
        ancestor subgraph (raises if the fix-up reports errors).
    :param label_edges: label edges with their input-slot names.
    :param skip_optimizations: optimization passes fix_problems() should skip.
    """
    lt = LatticeTyper()
    sizer = Sizer()
    reqs = EdgeRequirements(lt)
    db = DotBuilder()
    if after_transform:
        (bmg, error_report) = fix_problems(bmg_raw, skip_optimizations)
        error_report.raise_errors()
        node_list = bmg.all_ancestor_nodes()
    else:
        node_list = bmg_raw.all_nodes()
    # assign each node a stable integer id in enumeration order
    nodes = {}
    for (index, node) in enumerate(node_list):
        nodes[node] = index
    # zero-pad ids to a common width so 'N02' sorts before 'N10'
    max_length = len(str((len(nodes) - 1)))

    def to_id(index) -> str:
        return ('N' + str(index).zfill(max_length))
    for (node, index) in nodes.items():
        n = to_id(index)
        node_label = get_node_label(node)
        if node_types:
            node_label += (':' + lt[node].short_name)
        if node_sizes:
            node_label += (':' + size_to_str(sizer[node]))
        db.with_node(n, node_label)
        # one edge per input, zipped with its slot label and requirement
        for (i, edge_name, req) in zip(node.inputs, get_edge_labels(node), reqs.requirements(node)):
            if label_edges:
                edge_label = edge_name
                if edge_requirements:
                    edge_label += (':' + req.short_name)
            elif edge_requirements:
                edge_label = req.short_name
            else:
                edge_label = ''
            start_node = to_id(nodes[i])
            end_node = n
            db.with_edge(start_node, end_node, edge_label)
    return str(db)
class UIFileTestCase(unittest.TestCase):
    """Tests for ui_compiler.UIFile path, filename and md5 bookkeeping."""

    def setUp(self):
        # fixed: tempfile.mktemp() is deprecated and race-prone; mkstemp()
        # creates the file atomically and hands us the open descriptor
        handle, self.test_uicFile_path = tempfile.mkstemp(suffix='.uic')
        with os.fdopen(handle, 'w') as f:
            f.write('test')
        self.test_uicFile = ui_compiler.UIFile(self.test_uicFile_path)

    def tearDown(self):
        os.remove(self.test_uicFile_path)

    def test_full_path_argument_is_skipped(self):
        self.assertRaises(TypeError, ui_compiler.UIFile)

    def test_filename_attribute_initialized_correctly(self):
        expected_value = (os.path.basename(self.test_uicFile_path[:(- 4)]) + '.uic')
        self.assertEqual(expected_value, self.test_uicFile.filename)

    def test_path_attribute_initialized_correctly(self):
        expected_value = '/'.join(self.test_uicFile_path.split('/')[:(- 1)])
        self.assertEqual(expected_value, self.test_uicFile.path)

    def test_md5_filename_attribute_initialized_correctly(self):
        expected_value = (os.path.basename(self.test_uicFile_path)[:(- 4)] + '.md5')
        self.assertEqual(expected_value, self.test_uicFile.md5_filename)

    def test_md5_file_full_path_attribute_initialized_correctly(self):
        expected_value = os.path.join(self.test_uicFile.path, (os.path.basename(self.test_uicFile_path)[:(- 4)] + '.md5'))
        self.assertEqual(expected_value, self.test_uicFile.md5_file_full_path)

    def test_pyqt4_filename_attribute_initialized_correctly(self):
        expected_value = ('%s_UI_pyqt4.py' % os.path.basename(self.test_uicFile_path)[:(- 4)])
        self.assertEqual(expected_value, self.test_uicFile.pyqt4_filename)

    def test_pyqt4_full_path_attribute_initialized_correctly(self):
        expected_value = os.path.normpath(os.path.join(self.test_uicFile.path, '../ui_compiled', (os.path.basename(self.test_uicFile_path)[:(- 4)] + '_UI_pyqt4.py')))
        self.assertEqual(expected_value, self.test_uicFile.pyqt4_full_path)

    def test_pyside_filename_attribute_initialized_correctly(self):
        expected_value = ('%s_UI_pyside.py' % os.path.basename(self.test_uicFile_path)[:(- 4)])
        self.assertEqual(expected_value, self.test_uicFile.pyside_filename)

    def test_pyside_full_path_attribute_initialized_correctly(self):
        expected_value = os.path.normpath(os.path.join(self.test_uicFile.path, '../ui_compiled', (os.path.basename(self.test_uicFile_path)[:(- 4)] + '_UI_pyside.py')))
        self.assertEqual(expected_value, self.test_uicFile.pyside_full_path)

    def test_md5_attribute_is_calculated_correctly(self):
        expected_value = utils.md5_checksum(self.test_uicFile_path)
        self.assertEqual(expected_value, self.test_uicFile.md5)

    def test_update_md5_correctly_saves_the_md5_checksum_to_file(self):
        self.test_uicFile.update_md5_file()
        with open(self.test_uicFile.md5_file_full_path) as f:
            md5 = f.read()
        self.assertEqual(md5, self.test_uicFile.md5)
        os.remove(self.test_uicFile.md5_file_full_path)

    def test_isNew_method_is_working_correctly(self):
        # new until the md5 sidecar is written, then not new
        self.assertTrue(self.test_uicFile.is_new())
        self.test_uicFile.update_md5_file()
        self.assertFalse(self.test_uicFile.is_new())
class AgentConfigManager:
    """Loads, validates, queries and persists an AEA agent configuration.

    NOTE(review): @property/@classmethod/@staticmethod decorators were lost
    during extraction and are restored here — proven in-file by usage:
    ``dump_config`` reads ``self.json`` and ``self.agent_config_file_path``
    as attributes, and several methods take ``cls`` or no ``self``.
    """

    component_configurations = 'component_configurations'
    _loader = ConfigLoader.from_configuration_type(PackageType.AGENT)

    def __init__(self, agent_config: AgentConfig, aea_project_directory: Union[(str, Path)], env_vars_friendly: bool=False) -> None:
        """Store the parsed config and the project directory it came from."""
        self.agent_config = agent_config
        self.aea_project_directory = aea_project_directory
        self.env_vars_friendly = env_vars_friendly

    def load_component_configuration(self, component_id: ComponentId, skip_consistency_check: bool=True) -> ComponentConfiguration:
        """Load one component's configuration from the project directory."""
        path = find_component_directory_from_component_id(aea_project_directory=Path(self.aea_project_directory), component_id=component_id)
        return load_component_configuration(component_type=component_id.component_type, directory=path, skip_consistency_check=skip_consistency_check)

    @property
    def agent_config_file_path(self) -> Path:
        """Path to the project's agent configuration file."""
        return self._get_agent_config_file_path(self.aea_project_directory)

    @classmethod
    def _get_agent_config_file_path(cls, aea_project_path: Union[(str, Path)]) -> Path:
        """Return the aea-config file path inside *aea_project_path*."""
        return (Path(aea_project_path) / DEFAULT_AEA_CONFIG_FILE)

    @classmethod
    def load(cls, aea_project_path: Union[(Path, str)], substitude_env_vars: bool=False) -> 'AgentConfigManager':
        """Construct a manager from an on-disk project and validate it."""
        data = cls._load_config_data(Path(aea_project_path))
        if substitude_env_vars:
            data = cast(List[Dict], apply_env_variables(data, os.environ))
        agent_config = cls._loader.load_agent_config_from_json(data, validate=False)
        instance = cls(agent_config, aea_project_path, env_vars_friendly=(not substitude_env_vars))
        instance.validate_current_config()
        return instance

    @classmethod
    def _load_config_data(cls, aea_project_path: Path) -> List[Dict]:
        """Read all YAML documents from the project's config file."""
        with open_file(cls._get_agent_config_file_path(aea_project_path)) as fp:
            data = yaml_load_all(fp)
        return data

    def set_variable(self, path: VariablePath, value: JSON_TYPES) -> None:
        """Set one config variable, routing component paths into overrides."""
        (component_id, json_path) = self._parse_path(path)
        data = self._make_dict_for_path_and_value(json_path, value)
        overrides = {}
        if component_id:
            overrides[self.component_configurations] = {component_id: data}
        else:
            overrides.update(data)
        self.update_config(overrides)

    @staticmethod
    def _make_dict_for_path_and_value(json_path: JsonPath, value: JSON_TYPES) -> Dict:
        """Build the nested dict {'a': {'b': value}} for json_path ['a', 'b']."""
        data: Dict = {}
        nested = data
        for key in json_path[:(- 1)]:
            nested[key] = {}
            nested = nested[key]
        nested[json_path[(- 1)]] = value
        return data

    def get_variable(self, path: VariablePath) -> JSON_TYPES:
        """Return a config variable's value.

        Component variables are looked up first in the agent-level overrides,
        then in the component's own configuration.

        :raises VariableDoesNotExist: when no source defines the attribute.
        """
        (component_id, json_path) = self._parse_path(path)
        if component_id:
            configrations_data = [(_try_get_configuration_object_from_aea_config(self.agent_config, component_id) or {}), self.load_component_configuration(component_id).json]
        else:
            configrations_data = [self.agent_config.json]
        for data in configrations_data:
            value = self._get_value_for_json_path(data, json_path)
            if (value is not NotExists):
                return cast(JSON_TYPES, value)
        raise VariableDoesNotExist(f"Attribute `{'.'.join(json_path)}` for {('{}({}) config'.format(component_id.component_type, component_id.public_id) if component_id else 'AgentConfig')} does not exist")

    @staticmethod
    def _get_value_for_json_path(data: Dict, json_path: JsonPath) -> Union[(NotExistsType, JSON_TYPES)]:
        """Walk *json_path* through *data*; return NotExists when absent."""
        # deep-copy via JSON round-trip so callers can't mutate the config
        value = json.loads(json.dumps(data))
        prev_key = ''
        for key in json_path:
            if (not isinstance(value, dict)):
                raise ValueError(f"Attribute '{prev_key}' is not a dictionary.")
            if (key not in value):
                return NotExists
            value = value[key]
            prev_key = key
        return value

    def _parse_path(self, path: VariablePath) -> Tuple[(Optional[ComponentId], JsonPath)]:
        """Split a dotted string or (ComponentId, ...) path into its parts."""
        if isinstance(path, str):
            (json_path, *_, component_id) = handle_dotted_path(path, self.agent_config.author, aea_project_path=self.aea_project_directory)
        elif isinstance(path[0], ComponentId):
            json_path = path[1:]
            component_id = path[0]
        else:
            component_id = None
            json_path = path
        if component_id:
            # normalize to the full id known by the agent config
            component_id = _try_get_component_id_from_prefix(set(self.agent_config.all_components_id), component_id.component_prefix)
        return (component_id, json_path)

    def update_config(self, overrides: Dict) -> None:
        """Validate and apply *overrides* to the agent configuration."""
        if (not overrides):
            return
        overrides = self._filter_overrides(overrides)
        if (overrides is SAME_MARK):
            return
        for (component_id, obj) in overrides.get('component_configurations', {}).items():
            component_configuration = self.load_component_configuration(component_id)
            component_configuration.check_overrides_valid(obj, env_vars_friendly=self.env_vars_friendly)
        self.agent_config.update(overrides, env_vars_friendly=self.env_vars_friendly)

    def _filter_overrides(self, overrides: Dict) -> Dict:
        """Drop override entries that are not actually overridable."""
        (agent_overridable, components_overridables) = self.get_overridables()
        agent_overridable['component_configurations'] = components_overridables
        filtered_overrides = filter_data(agent_overridable, overrides)
        return filtered_overrides

    def validate_current_config(self) -> None:
        """Validate the agent config and every component override in it."""
        for (component_id, obj) in self.agent_config.component_configurations.items():
            component_configuration = self.load_component_configuration(component_id)
            component_configuration.check_overrides_valid(obj, env_vars_friendly=self.env_vars_friendly)
        self.agent_config.validate_config_data(self.agent_config.json, env_vars_friendly=self.env_vars_friendly)

    @property
    def json(self) -> Dict:
        """The agent configuration as a JSON-compatible dict."""
        return self.agent_config.json

    def dump_config(self) -> None:
        """Validate and write the agent configuration back to disk."""
        config_data = self.json
        self.agent_config.validate_config_data(config_data, env_vars_friendly=self.env_vars_friendly)
        with open_file(self.agent_config_file_path, 'w') as file_pointer:
            ConfigLoader.from_configuration_type(PackageType.AGENT).dump(self.agent_config, file_pointer)

    @classmethod
    def verify_private_keys(cls, aea_project_path: Union[(Path, str)], private_key_helper: Callable[([AgentConfig, Path, Optional[str]], None)], substitude_env_vars: bool=False, password: Optional[str]=None) -> 'AgentConfigManager':
        """Load the project and run *private_key_helper* against its config."""
        aea_project_path = Path(aea_project_path)
        agent_config_manager = cls.load(aea_project_path, substitude_env_vars=substitude_env_vars)
        aea_conf = agent_config_manager.agent_config
        private_key_helper(aea_conf, Path(aea_project_path), password)
        return agent_config_manager

    def get_overridables(self) -> Tuple[(Dict, Dict[(ComponentId, Dict)])]:
        """Return (agent-level overridables, per-component overridables)."""
        agent_overridable = self.agent_config.get_overridable()
        components_overridables: Dict[(ComponentId, Dict)] = {}
        for component_id in self.agent_config.all_components_id:
            obj = {}
            component_config = self.load_component_configuration(component_id, skip_consistency_check=True)
            obj.update(component_config.get_overridable())
            # current agent-level overrides win over component defaults
            obj.update(deepcopy(self.agent_config.component_configurations.get(component_id, {})))
            if obj:
                components_overridables[component_id] = obj
        return (agent_overridable, components_overridables)
def scss_colors(out_file_path: str='COLORS.SCSS', theme: themes.Theme.Theme=None):
    """Write an auto-generated SCSS file defining color variables for *theme*.

    Emits ``$color-<group>-<weight>`` variables (palette reversed, weights
    50, 100, 200, ...) for every color group, then ``$chart-<category>-<weight>``
    variables for the chart palettes.

    :param out_file_path: destination SCSS file path.
    :param theme: theme instance; defaults to ``themes.Theme.ThemeDefault()``.
    """
    with open(out_file_path, 'w') as out:
        out.write('/* Auto generated SCSS files for colors definition */ \n')
        if theme is None:
            theme = themes.Theme.ThemeDefault()
        # Some groups are stored under differently-named theme attributes.
        attr_aliases = {'theme': 'colors', 'grey': 'greys'}
        for group in theme.groups:
            out.write('\n/* Colors codes for %s */ \n' % group)
            palette = getattr(theme, attr_aliases.get(group, group))
            for rank, color in enumerate(palette[::-1]):
                weight = 50 if rank == 0 else rank * 100
                out.write('$color-%s-%s: %s;\n' % (group, weight, color))
        out.write('\n\n/* Colors codes for charts */ \n')
        categories = theme.chart_categories
        if not categories:
            out.write('/*$charts: (green, blue, purple, yellow, orange, red, brown) ; */ \n')
            out.write('$charts: default ; \n\n')
            for rank, color in enumerate(theme.charts):
                weight = 50 if rank == 0 else rank * 100
                out.write('$chart-default-%s: %s;\n' % (weight, color))
        else:
            if len(categories) > 1:
                out.write('/*$charts: (%s) ; */ \n' % ', '.join(categories))
            else:
                out.write('/*$charts: (green, blue, purple, yellow, orange, red, brown) ; */ \n')
            # NOTE(review): only the first category is emitted on the $charts
            # line even when several exist — mirrors the original behaviour.
            out.write('$charts: %s ; \n\n' % categories[0])
            for category in categories:
                for rank, color in enumerate(theme.charts):
                    weight = 50 if rank == 0 else rank * 100
                    out.write('$chart-%s-%s: %s;\n' % (category, weight, color))
def _format_protfuncs():
    """Return an indented, alphabetized help listing of all registered protfuncs."""
    entries = []
    for name, func in sorted(protlib.FUNC_PARSER.callables.items(), key=lambda pair: pair[0]):
        # Left-justify the docstring with a hanging indent for readable help output.
        doc = utils.justify(func.__doc__.strip(), align='l', indent=10).strip()
        entries.append('- |c${name}|n - |W{docs}'.format(name=name, docs=doc))
    return '\n '.join(entries)
def test_levels():
    """EnvoyStatsMgr exposes 'all: error' after the first update and per-logger
    level groupings after the second."""
    mocker = EnvoyStatsMocker()
    esm = EnvoyStatsMgr(logger, fetch_log_levels=mocker.fetch_log_levels, fetch_envoy_stats=mocker.fetch_envoy_stats)
    esm.update()
    assert (esm.loginfo == {'all': 'error'})
    esm.update()
    # BUG FIX: the original list contained "' ' 'hystrix'" — an accidental
    # adjacent-string concatenation producing ' hystrix' with a leading space.
    assert (esm.loginfo == {'error': ['admin', 'aws', 'assert', 'backtrace', 'cache_filter', 'client', 'config', 'connection', 'conn_handler', 'decompression', 'envoy_bug', 'ext_authz', 'rocketmq', 'file', 'filter', 'forward_proxy', 'grpc', 'hc', 'health_checker', 'hystrix', 'init', 'io', 'jwt', 'kafka', 'main', 'misc', 'mongo', 'quic', 'quic_stream', 'pool', 'rbac', 'redis', 'router', 'runtime', 'stats', 'secret', 'tap', 'testing', 'thrift', 'tracing', 'upstream', 'udp', 'wasm'], 'info': ['dubbo'], 'warning': ['lua']})
def process_purchase_orders(center, error_logs, date=None):
    """Fetch and process every purchase order for *center*, optionally limited to *date*."""
    response = get_list_of_purchase_orders_for_center(center.name, date)
    if not response:
        return
    orders = response.get('orders')
    if len(orders):
        for order in orders:
            process_purchase_order(order, center, error_logs)
class OptionSonificationGlobaltracksMappingNoteduration(Options):
    """Option proxy for the sonification global-tracks noteDuration mapping.

    NOTE(review): each option appears as a same-named getter/setter def pair;
    the @property / @<name>.setter decorators appear to have been stripped
    from this file (as written, each second def shadows the first). Code left
    untouched — confirm against the generated original.
    """

    def mapFunction(self):
        # Getter: current mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored without JS-type conversion.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the mapping is bound to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def value(self):
        # Getter: fixed value overriding the mapping.
        return self._config_get(None)

    def value(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: data range the mapping operates within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def display_len(target):
    """Return the on-screen width of *target*.

    Strings are stripped of MXP and ANSI markup first; wide East-Asian glyphs
    (unicodedata width classes 'F' and 'W') count as two columns. Non-string
    inputs fall back to plain ``len``.
    """
    from evennia.utils.ansi import ANSI_PARSER
    if not inherits_from(target, str):
        return len(target)
    visible = ANSI_PARSER.strip_mxp(target)
    visible = ANSI_PARSER.parse_ansi(visible, strip_ansi=True)
    wide_classes = ('F', 'W')
    return sum(2 if east_asian_width(glyph) in wide_classes else 1 for glyph in visible)
class OptionSeriesBoxplotSonificationTracksMappingHighpassResonance(Options):
    """Option proxy for the boxplot sonification highpass-resonance mapping.

    NOTE(review): same-named getter/setter def pairs — @property / @setter
    decorators appear stripped from this file; code left untouched.
    """

    def mapFunction(self):
        # Getter: current mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored without JS-type conversion.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the mapping is bound to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: data range the mapping operates within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class DetoolsDataFormatTest(unittest.TestCase):
    """Tests for detools' Blocks container and ELF code/data range extraction."""

    def test_blocks(self):
        """A Blocks container reprs and serializes appended blocks correctly."""
        blocks = Blocks()
        self.assertEqual(repr(blocks), 'Blocks(number_of_blocks=0, blocks=[])')
        blocks.append(0, 1, [2, 3, 4])
        self.assertEqual(repr(blocks), 'Blocks(number_of_blocks=1, blocks=[Block(from_offset=0, to_address=1, number_of_values=3)])')
        self.assertEqual(blocks.to_bytes(), (b'\x01\x00\x01\x03', b'\x02\x03\x04'))

    def test_from_elf_file(self):
        """elf.from_file() yields non-empty, ordered code and data ranges."""
        filename = 'tests/files/micropython/esp8266--v1.9.4.elf'
        with open(filename, 'rb') as fin:
            elffile = ELFFile(fin)
            (code_range, data_range) = elf.from_file(elffile)
        # FIXME: the exact golden offsets were lost from this file — the
        # original assertEqual calls had empty second arguments (a syntax
        # error). Until the expected values are restored from the upstream
        # test-suite, assert structural sanity only.
        self.assertIsNotNone(code_range.begin)
        self.assertIsNotNone(code_range.end)
        self.assertLess(code_range.begin, code_range.end)
        self.assertIsNotNone(data_range.begin)
        self.assertIsNotNone(data_range.end)
        self.assertLess(data_range.begin, data_range.end)
class ValveTestOrderedBiDirectionalDPTunnelACL(ValveTestBases.ValveTestTunnel):
    """Test a bi-directional tunnel configured via a DP-level ACL on a two-switch stack."""

    # VID used to identify this tunnel's encapsulated traffic.
    TUNNEL_ID = 2

    # Two stacked DPs; s1 applies a DP ACL that tunnels ICMP to s2 port 1,
    # bi-directionally and keeping the encapsulation on egress.
    CONFIG = "\nacls:\n tunnel_acl:\n - rule:\n in_port: 1\n dl_type: 0x0800\n ip_proto: 1\n actions:\n output:\n - tunnel: {\n dp: s2,\n port: 1,\n bi_directional: True,\n maintain_encapsulation: True\n }\nvlans:\n vlan100:\n vid: 1\ndps:\n s1:\n dp_id: 0x1\n hardware: 'GenericTFM'\n stack:\n priority: 1\n dp_acls: [tunnel_acl]\n interfaces:\n 1:\n native_vlan: vlan100\n 3:\n stack: {dp: s2, port: 3}\n s2:\n dp_id: 0x2\n hardware: 'GenericTFM'\n interfaces:\n 1:\n native_vlan: vlan100\n 3:\n stack: {dp: s1, port: 3}\n"

    def test_tunnel_bi_directional_dp_acl(self):
        """Reverse-direction tunnel packets are accepted and output to the original source."""
        valve = self.valves_manager.valves[1]
        self.apply_ofmsgs(valve.stack_manager.add_tunnel_acls())
        # Reverse direction is flagged via the PCP bits on the tunnel VLAN.
        self.validate_tunnel(int(2), int(2), 1, self.TUNNEL_ID, 3, self.TUNNEL_ID, True, 'Did not accept reverse tunnel packet', pcp=valve_of.PCP_TUNNEL_REVERSE_DIRECTION_FLAG)
        self.validate_tunnel(self.DP_ID, self.DP_ID, 3, [self.TUNNEL_ID, 1], 1, 0, True, 'Did not output to original source, the reverse tunnelled packet', pcp=valve_of.PCP_TUNNEL_REVERSE_DIRECTION_FLAG)
def serialise_uncanny_progress(save_data: list[int], uncanny: dict[(str, Any)]) -> list[int]:
    """Serialise uncanny-legends progress (clear progress, clear amounts,
    unlock flags) into *save_data* and return the updated buffer.

    The 3-D clear-amount table (chapter x star x stage) is flattened into a
    single list in chapter-major / stage / star order before writing.
    """
    lengths = uncanny['Lengths']
    total = lengths['total']
    stars = lengths['stars']
    stages = lengths['stages']
    for chapter in uncanny['Value']['clear_progress']:
        save_data = write_length_data(save_data, chapter, 4, 4, False)
    flat = [0] * (total * stars * stages)
    table = uncanny['Value']['clear_amount']
    for chapter_idx in range(total):
        for stage_idx in range(stages):
            for star_idx in range(stars):
                # Source table is indexed [chapter][star][stage].
                flat[chapter_idx * stages * stars + stage_idx * stars + star_idx] = table[chapter_idx][star_idx][stage_idx]
    save_data = write_length_data(save_data, flat, 4, 4, False)
    for chapter in uncanny['Value']['unlock_next']:
        save_data = write_length_data(save_data, chapter, 4, 4, False)
    return save_data
@pytest.mark.parametrize('warm_up_period, epoch_length', [(15, 5), (20, 10), (100, 50), (220, 20)])
def test_cannot_initialize_during_warm_up(tester, casper, epoch_length, warm_up_period, assert_tx_failed):
    """Epoch initialisation must revert throughout the warm-up period and until
    the next epoch boundary, then succeed exactly on the boundary block.

    NOTE(review): the decorator lost its '@pytest.mark' prefix during
    extraction; restored here.
    """
    block_number = tester.get_block_by_number('latest')['number']
    current_epoch = casper.functions.current_epoch().call()
    assert (current_epoch == ((block_number + warm_up_period) // epoch_length))
    next_epoch = (current_epoch + 1)
    # Every attempt during warm-up must revert.
    for _ in range((warm_up_period - 1)):
        assert_tx_failed((lambda : casper.functions.initialize_epoch(next_epoch).transact()))
        tester.mine_block()
    # After warm-up, attempts still revert until the epoch-boundary block.
    next_block_number = (tester.get_block_by_number('latest')['number'] + 1)
    blocks_until_next_start = (epoch_length - (next_block_number % epoch_length))
    for _ in range(blocks_until_next_start):
        assert_tx_failed((lambda : casper.functions.initialize_epoch(next_epoch).transact()))
        tester.mine_block()
    next_block_number = (tester.get_block_by_number('latest')['number'] + 1)
    assert ((next_block_number % epoch_length) == 0)
    casper.functions.initialize_epoch(next_epoch).transact()
    assert (casper.functions.current_epoch().call() == next_epoch)
def test_restructure_cfg_ifelse(task):
    """Restructuring a simple if/else CFG (one true-branch return, one
    fall-through return) must produce a sequence of: init code, a condition
    node guarding the 'equal' branch, then the fall-through return."""
    # Build the input CFG: 0 -> 1; 1 -(true)-> 2; 1 -(false)-> 3.
    task.graph.add_nodes_from((vertices := [BasicBlock(0, instructions=[Assignment(variable(name='i'), Constant(0)), Assignment(variable(name='x'), Constant(42))]), BasicBlock(1, instructions=[Branch(Condition(OperationType.equal, [variable(name='i'), Constant(0)]))]), BasicBlock(2, instructions=[Return([variable(name='x')])]), BasicBlock(3, instructions=[Return([variable(name='i')])])]))
    task.graph.add_edges_from([UnconditionalEdge(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[2]), FalseCase(vertices[1], vertices[3])])
    PatternIndependentRestructuring().run(task)
    # Construct the expected AST: seq(code_0, if(x1) {code_2}, code_3).
    context = LogicCondition.generate_new_context()
    resulting_ast = AbstractSyntaxTree((seq_node := SeqNode(LogicCondition.initialize_true(context))), {LogicCondition.initialize_symbol('x1', context): Condition(OperationType.equal, [variable('i'), Constant(0)])})
    code_node_0 = resulting_ast._add_code_node([Assignment(variable('i'), Constant(0)), Assignment(variable('x'), Constant(42))])
    code_node_2 = resulting_ast._add_code_node([Return([variable('x')])])
    code_node_3 = resulting_ast._add_code_node([Return([variable('i')])])
    condition_node = resulting_ast._add_condition_node_with(LogicCondition.initialize_symbol('x1', context), code_node_2)
    resulting_ast._add_edges_from(((seq_node, code_node_0), (seq_node, condition_node), (seq_node, code_node_3)))
    # Reachability fixes the relative order of the three children under the sequence.
    resulting_ast._code_node_reachability_graph.add_reachability_from(((code_node_0, code_node_2), (code_node_0, code_node_3), (code_node_2, code_node_3)))
    seq_node.sort_children()
    assert (ASTComparator.compare(task.syntax_tree, resulting_ast) and (task.syntax_tree.condition_map == resulting_ast.condition_map))
class IWizardController(Interface):
    """The interface for wizard controllers (page ordering and completion state).

    NOTE(review): the interface-method docstring bodies were stripped from
    this file, leaving `def` statements with no body (a syntax error);
    restored below.
    """

    # The pages of the wizard.
    pages = List(IWizardPage)

    # The page currently being displayed.
    current_page = Instance(IWizardPage)

    # Whether all pages were seen and completed.
    complete = Bool(False)

    def get_first_page(self):
        """Return the first page of the wizard."""

    def get_next_page(self, page):
        """Return the page after *page*, or None if *page* is the last one."""

    def get_previous_page(self, page):
        """Return the page before *page*."""

    def is_first_page(self, page):
        """Return True if *page* is the first page."""

    def is_last_page(self, page):
        """Return True if *page* is the last page."""

    def dispose_pages(self):
        """Dispose of all pages of the wizard."""
class OptionSeriesXrangeSonificationContexttracksMappingFrequency(Options):
    """Option proxy for the xrange sonification context-tracks frequency mapping.

    NOTE(review): same-named getter/setter def pairs — @property / @setter
    decorators appear stripped from this file; code left untouched.
    """

    def mapFunction(self):
        # Getter: current mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored without JS-type conversion.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the mapping is bound to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: data range the mapping operates within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def merge(data, new_data, raise_on_conflict=False):
    """Recursively merge *new_data* into *data* in place.

    Nested mappings are merged key by key; scalar values from *new_data*
    overwrite existing ones unless *raise_on_conflict* is set, in which case a
    differing existing value raises ValueError.

    :raises ValueError: if either argument is not a mapping, or on a value
        conflict when *raise_on_conflict* is True.
    """
    mapping_types = (AttrDict, collections.abc.Mapping)
    if not (isinstance(data, mapping_types) and isinstance(new_data, mapping_types)):
        raise ValueError(f'You can only merge two dicts! Got {data!r} and {new_data!r} instead.')
    for key, value in new_data.items():
        existing_is_mapping = key in data and isinstance(data[key], mapping_types)
        if existing_is_mapping and isinstance(value, mapping_types):
            merge(data[key], value, raise_on_conflict)
        elif key in data and data[key] != value and raise_on_conflict:
            raise ValueError(f'Incompatible data for key {key!r}, cannot be merged.')
        else:
            data[key] = value
def _get_installed_dbt_package_version() -> Optional[str]:
    """Return the installed elementary dbt package's declared version.

    :return: the 'version' field from the package's dbt_project file, or None
        when the package (or its project file) cannot be located.
    """
    package_path = _get_elementary_package_path()
    if (package_path is None):
        return None
    project_path = os.path.join(package_path, _DBT_PROJECT_FILENAME)
    # BUG FIX: the original checked os.path.exists(package_path) — the
    # directory we already located — instead of the project file about to be
    # loaded, so a missing dbt_project file crashed the YAML load.
    if (not os.path.exists(project_path)):
        return None
    project_yaml_dict = OrderedYaml().load(project_path)
    return project_yaml_dict['version']
class OptionPlotoptionsDependencywheelSonificationTracksActivewhen(Options):
    """Option proxy for the dependency-wheel sonification track activation rules.

    NOTE(review): same-named getter/setter def pairs — @property / @setter
    decorators appear stripped from this file; code left untouched.
    """

    def crossingDown(self):
        # Getter: threshold activating the track when crossed downwards.
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        # Getter: threshold activating the track when crossed upwards.
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        # Getter: upper bound of the active range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the active range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Getter: data point property the activation rules refer to.
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class TProtocolNoCtDialogues(Dialogues, ABC):
    """The dialogues class keeps track of all t_protocol_no_ct dialogues."""

    # Terminal states after which a dialogue is considered ended.
    END_STATES = frozenset({TProtocolNoCtDialogue.EndState.END_STATE_1, TProtocolNoCtDialogue.EndState.END_STATE_2, TProtocolNoCtDialogue.EndState.END_STATE_3})

    # Keep dialogues that reached a terminal state (allows post-mortem inspection).
    _keep_terminal_state_dialogues = True

    def __init__(self, self_address: Address, role_from_first_message: Callable[([Message, Address], Dialogue.Role)], dialogue_class: Type[TProtocolNoCtDialogue]=TProtocolNoCtDialogue) -> None:
        """Initialize dialogues, delegating all bookkeeping to the base Dialogues class.

        :param self_address: the address of the entity for whom dialogues are maintained.
        :param role_from_first_message: infers this agent's role from the first message.
        :param dialogue_class: the dialogue class to instantiate.
        """
        Dialogues.__init__(self, self_address=self_address, end_states=cast(FrozenSet[Dialogue.EndState], self.END_STATES), message_class=TProtocolNoCtMessage, dialogue_class=dialogue_class, role_from_first_message=role_from_first_message)
def _mock_spaces_trajectory_record(step_count: int):
    """Build a mock SpacesTrajectoryRecord of *step_count* single-substep steps.

    Step i carries np.array(i) as both observation and action payload, zero
    reward, and done=True only on the final step.
    """
    trajectory = SpacesTrajectoryRecord('test')
    for step_idx in range(step_count):
        substep = SpacesRecord(
            actor_id=ActorID(0, 0),
            observation=dict(observation=np.array(step_idx)),
            action=dict(action=np.array(step_idx)),
            reward=0,
            done=(step_idx == step_count - 1),
        )
        trajectory.step_records.append(StructuredSpacesRecord(substep_records=[substep]))
    return trajectory
class Config():
    """Chat-client runtime configuration backed by a JSON file, with openai
    module defaults as fallbacks for API settings."""

    # Markdown horizontal rule used as a message separator.
    sep = Markdown('---')
    baseDir = os.path.dirname(os.path.realpath(__file__))
    # Default config location next to this module.
    default = os.path.join(baseDir, 'config.json')
    mdSep = '\n\n' + '-' * 10 + '\n'
    # Encodings tried when reading files.
    encodings = ['utf8', 'gbk']

    def __init__(self, file=None) -> None:
        self.cfg = {}
        if file:
            self.load(file)

    def load(self, file):
        """Read *file* as JSON and populate all configuration attributes."""
        with open(file, 'r') as handle:
            self.cfg = json.load(handle)
        c: dict = self.cfg
        # API settings fall back to whatever the openai module already holds.
        self.api_key = c.get('api_key') or openai.api_key
        self.api_base = c.get('api_base') or openai.api_base
        self.api_type = c.get('api_type') or openai.api_type
        self.api_version = c.get('api_version') or openai.api_version
        self.api_organization = c.get('api_organization') or openai.organization
        self.model = c.get('model', 'gpt-3.5-turbo')
        self.prompt = c.get('prompt', [])
        self.stream = c.get('stream', False)
        self.stream_render = c.get('stream_render', False)
        self.context = ContextLevel(c.get('context', 0))
        self.proxy = c.get('proxy', '')
        self.showtokens = c.get('showtokens', False)

    def get(self, key, default=None):
        """Raw lookup into the underlying JSON dict."""
        return self.cfg.get(key, default)
class DefinitionInitConfigReader(ConfigReader):
    """Reader for definition-style config files (filters/actions) that also
    carry an [Init] section of parameter defaults, combined with per-jail
    init options supplied at construction time."""

    # Option template declared by subclasses: (type, name, default) entries.
    _configOpts = []

    def __init__(self, file_, jailName, initOpts, **kwargs):
        ConfigReader.__init__(self, **kwargs)
        # Normalise explicit relative paths so later reads are cwd-independent.
        if file_.startswith('./'):
            file_ = os.path.abspath(file_)
        self.setFile(file_)
        self.setJailName(jailName)
        self._initOpts = initOpts
        self._pOpts = dict()
        self._defCache = dict()

    def setFile(self, fileName):
        """Set the definition file to read; resets cached init options."""
        self._file = fileName
        self._initOpts = {}

    def getFile(self):
        return self._file

    def setJailName(self, jailName):
        self._jailName = jailName

    def getJailName(self):
        return self._jailName

    def read(self):
        """Read the definition file via the (possibly shared) base reader."""
        return ConfigReader.read(self, self._file)

    def readexplicit(self):
        """Read the file directly without shared-config merging."""
        if (not self._cfg):
            self._create_unshared(self._file)
        return SafeConfigParserWithIncludes.read(self._cfg, self._file)

    def getOptions(self, pOpts, all=False):
        """Resolve [Definition] options using parent options *pOpts* merged
        with the per-jail init options; snapshots [Init] defaults under
        'known/<opt>' aliases so substitutions can reference originals."""
        if (not pOpts):
            pOpts = dict()
        if self._initOpts:
            pOpts = _merge_dicts(pOpts, self._initOpts)
        self._opts = ConfigReader.getOptions(self, 'Definition', self._configOpts, pOpts, convert=False)
        self._pOpts = pOpts
        if self.has_section('Init'):
            # Lazy accessor; values only fetched when actually needed below.
            getopt = (lambda opt: self.get('Init', opt))
            for opt in self.options('Init', withDefault=False):
                if (opt == '__name__'):
                    continue
                v = None
                # Remember the untouched [Init] default under a 'known/' alias.
                if (not opt.startswith('known/')):
                    if (v is None):
                        v = getopt(opt)
                    self._initOpts[('known/' + opt)] = v
                # Init value only applies when not already overridden per-jail.
                if (opt not in self._initOpts):
                    if (v is None):
                        v = getopt(opt)
                    self._initOpts[opt] = v
        if (all and self.has_section('Definition')):
            # Optionally pull in every remaining [Definition] option verbatim.
            for opt in self.options('Definition'):
                if ((opt == '__name__') or (opt in self._opts)):
                    continue
                self._opts[opt] = self.get('Definition', opt)

    def convertOptions(self, opts, configOpts):
        """Coerce option values to their declared types; on conversion failure
        warn and fall back to the template default."""
        for (opttype, optname, optvalue) in _OptionsTemplateGen(configOpts):
            conv = CONVERTER.get(opttype)
            if conv:
                v = opts.get(optname)
                if (v is None):
                    continue
                try:
                    opts[optname] = conv(v)
                except ValueError:
                    logSys.warning('Wrong %s value %r for %r. Using default one: %r', opttype, v, optname, optvalue)
                    opts[optname] = optvalue

    def getCombOption(self, optname):
        """Fetch a raw [Definition] option with parent-option interpolation,
        caching the result; returns None when absent or unresolvable."""
        try:
            return self._defCache[optname]
        except KeyError:
            try:
                v = self._cfg.get_ex('Definition', optname, vars=self._pOpts)
            except (NoSectionError, NoOptionError, ValueError):
                v = None
            self._defCache[optname] = v
            return v

    def getCombined(self, ignore=()):
        """Return all options with recursive tag substitution applied;
        conditional options (e.g. 'name?family') are left unresolved."""
        combinedopts = self._opts
        if self._initOpts:
            combinedopts = _merge_dicts(combinedopts, self._initOpts)
        if (not len(combinedopts)):
            return {}
        ignore = set(ignore).copy()
        # Conditional options are resolved later per-context; keep them verbatim.
        for n in combinedopts:
            cond = SafeConfigParserWithIncludes.CONDITIONAL_RE.match(n)
            if cond:
                (n, cond) = cond.groups()
                ignore.add(n)
        opts = substituteRecursiveTags(combinedopts, ignore=ignore, addrepl=self.getCombOption)
        if (not opts):
            raise ValueError('recursive tag definitions unable to be resolved')
        self.convertOptions(opts, self._configOpts)
        return opts

    def convert(self):
        """Subclasses must translate resolved options into concrete commands."""
        raise NotImplementedError
def test_unknowns_multiple_sets(tmpdir_factory, merge_files_manyLR):
    """Two unknown-EFLR sets sharing a set name must both appear under f.unknowns."""
    out_path = str(tmpdir_factory.mktemp('lf').join('two-ch-sets.dlis'))
    parts = [
        'data/chap4-7/eflr/envelope.dlis.part',
        'data/chap4-7/eflr/file-header.dlis.part',
        'data/chap4-7/eflr/unknown.dlis.part',
        'data/chap4-7/eflr/unknown2.dlis.part',
    ]
    merge_files_manyLR(out_path, parts)
    with dlis.load(out_path) as (f, *_):
        assert len(f.unknowns['UNKNOWN_SET']) == 2
def get_header_list(sts):
    """Parse a CRLF-separated HTTP header blob into (name, value) tuples.

    NOTE(review): the original def line was truncated to "def get_" in this
    file (a syntax error); the name below is reconstructed from the body —
    confirm against callers before merging.

    :param sts: raw header text, fields separated by '\\r\\n'.
    :return: list of (name, value) tuples with values left-stripped.
    :raises Http1xHeaderErr: when a field has no ':' separator (or it is first).
    """
    tmplist = sts.split('\r\n')
    results = []
    tmplist = __drop_nul_seq_elements(tmplist)
    for s in tmplist:
        p = s.find(':')
        # ':' must exist and must not be the first character (empty name).
        if (p < 1):
            raise Http1xHeaderErr(('wrong http master:%s' % s))
        name = s[0:p]
        value = s[p + 1:].lstrip()
        results.append((name, value))
    return results
def get_link_pQvw(pb_client, body_id, indices=None, xyzw_in=False):
    """Return positions, orientations, linear and angular velocities of links.

    BUG FIX: the original referenced an undefined name ``xyzw_in`` (NameError
    at runtime); it is now an explicit keyword parameter. With the default
    ``xyzw_in=False`` the pybullet (x, y, z, w) quaternions are reordered via
    quaternion.Q_op; pass True to keep them as returned by pybullet.

    :param pb_client: pybullet client instance.
    :param body_id: body unique id.
    :param indices: link indices; defaults to all joints of the body.
    :param xyzw_in: if True, skip the quaternion component reordering.
    :return: (p, Q, v, w) per link — scalars when a single index was given,
        lists otherwise.
    """
    if (indices is None):
        indices = range(pb_client.getNumJoints(body_id))
    num_indices = len(indices)
    assert (num_indices > 0)
    ls = pb_client.getLinkStates(body_id, indices, computeLinkVelocity=True)
    ps = [np.array(ls[j][0]) for j in range(num_indices)]
    if (not xyzw_in):
        Qs = [quaternion.Q_op(np.array(ls[j][1]), op=['change_order'], xyzw_in=True) for j in range(num_indices)]
    else:
        Qs = [np.array(ls[j][1]) for j in range(num_indices)]
    vs = [np.array(ls[j][6]) for j in range(num_indices)]
    ws = [np.array(ls[j][7]) for j in range(num_indices)]
    if (num_indices == 1):
        return (ps[0], Qs[0], vs[0], ws[0])
    else:
        return (ps, Qs, vs, ws)
class DatasetDriftMetric(WithDriftOptions[DatasetDriftMetricResults]):
    """Dataset-level drift metric: flags drift when the share of drifted
    columns exceeds *drift_share*."""

    # Columns to analyse; None means all columns from the mapping.
    columns: Optional[List[str]]
    # Share of drifted columns above which the whole dataset counts as drifted.
    drift_share: float

    def __init__(self, columns: Optional[List[str]]=None, drift_share: float=0.5, stattest: Optional[PossibleStatTestType]=None, cat_stattest: Optional[PossibleStatTestType]=None, num_stattest: Optional[PossibleStatTestType]=None, text_stattest: Optional[PossibleStatTestType]=None, per_column_stattest: Optional[Dict[(str, PossibleStatTestType)]]=None, stattest_threshold: Optional[float]=None, cat_stattest_threshold: Optional[float]=None, num_stattest_threshold: Optional[float]=None, text_stattest_threshold: Optional[float]=None, per_column_stattest_threshold: Optional[Dict[(str, float)]]=None, options: AnyOptions=None):
        self.columns = columns
        self.drift_share = drift_share
        # Base class stores the stattest/threshold choices as attributes.
        super().__init__(stattest=stattest, cat_stattest=cat_stattest, num_stattest=num_stattest, text_stattest=text_stattest, per_column_stattest=per_column_stattest, stattest_threshold=stattest_threshold, cat_stattest_threshold=cat_stattest_threshold, num_stattest_threshold=num_stattest_threshold, text_stattest_threshold=text_stattest_threshold, per_column_stattest_threshold=per_column_stattest_threshold, options=options)
        # NOTE(review): stored as _drift_options but read below via
        # self.drift_options — presumably a property on WithDriftOptions; confirm.
        self._drift_options = DataDriftOptions(all_features_stattest=self.stattest, cat_features_stattest=self.cat_stattest, num_features_stattest=self.num_stattest, text_features_stattest=self.text_stattest, per_feature_stattest=self.per_column_stattest, all_features_threshold=self.stattest_threshold, cat_features_threshold=self.cat_stattest_threshold, num_features_threshold=self.num_stattest_threshold, text_features_threshold=self.text_stattest_threshold, per_feature_threshold=self.per_column_stattest_threshold)

    def get_parameters(self) -> tuple:
        """Return a hashable parameter fingerprint for metric deduplication."""
        return (self.drift_share, (None if (self.columns is None) else tuple(self.columns)), self.drift_options)

    def calculate(self, data: InputData) -> DatasetDriftMetricResults:
        """Compute per-column drift and roll it up into a dataset-level verdict.

        :raises ValueError: if no reference dataset is provided.
        """
        if (data.reference_data is None):
            raise ValueError('Reference dataset should be present')
        dataset_columns = process_columns(data.reference_data, data.column_mapping)
        result = get_drift_for_columns(current_data=data.current_data, reference_data=data.reference_data, data_drift_options=self.drift_options, drift_share_threshold=self.drift_share, dataset_columns=dataset_columns, columns=self.columns, agg_data=True)
        return DatasetDriftMetricResults(drift_share=self.drift_share, number_of_columns=result.number_of_columns, number_of_drifted_columns=result.number_of_drifted_columns, share_of_drifted_columns=result.share_of_drifted_columns, dataset_drift=result.dataset_drift)
def _gen_write_outputs_str(fused_elementwise_metadata: FusedElementwiseMetaData):
    """Render the kernel source fragment that writes every fused-elementwise output.

    For each output accessor, a strided-address computation is rendered followed
    by the write statement; all fragments are joined with newlines.
    """
    write_outputs = []
    for (output_idx, output_accessor) in enumerate(fused_elementwise_metadata.output_accessors):
        index_variable = 'dense_idx'
        # Mixed jagged/dense indexing addresses outputs through the jagged index.
        if fused_elementwise_metadata.mixed_jagged_dense_indexing:
            index_variable = 'jagged_idx'
        output_name = f'output{output_idx}'
        get_strided_addr_str = GET_STRIDED_ADDRESS_TEMPLATE.render(tensor_accessor=output_accessor, data_ptr=output_name, data_t=fused_elementwise_metadata.data_t, read_t=fused_elementwise_metadata.max_read_t, data_idx=index_variable)
        # When several fused outputs alias one original output, all writes target index 0.
        if ((len(fused_elementwise_metadata.original_outputs) == 1) and (len(fused_elementwise_metadata.outputs) > 1)):
            output_idx = 0
        write_out = KERNEL_WRITE_OUTPUT_TEMPLATE.render(get_strided_address=get_strided_addr_str, output_name=output_name, output_idx=output_idx)
        write_outputs.append(write_out)
    write_outputs_str = '\n'.join(write_outputs)
    return write_outputs_str
class ValidationErrorData(namedtuple('ValidationErrorData', ['datum', 'schema', 'field'])):
    """A single schema-validation failure: the offending datum, the expected
    schema, and the name of the field being validated."""

    def __str__(self):
        """Human-readable description of the mismatch."""
        if self.datum is None:
            return f'Field({self.field}) is None expected {self.schema}'
        return f'{self.field} is <{self.datum}> of type {type(self.datum)} expected {self.schema}'
class TestRefreshToken():
    """Tests for credentials.RefreshToken initialisation and token fetching.

    NOTE(review): the two parametrize decorators below lost their
    '@pytest.mark' prefix during extraction; restored here.
    """

    def test_init_from_file(self):
        credential = credentials.RefreshToken(testutils.resource_filename('refresh_token.json'))
        self._verify_credential(credential)

    def test_init_from_path_like(self):
        path = pathlib.Path(testutils.resource_filename('refresh_token.json'))
        credential = credentials.RefreshToken(path)
        self._verify_credential(credential)

    def test_init_from_dict(self):
        parsed_json = json.loads(testutils.resource('refresh_token.json'))
        credential = credentials.RefreshToken(parsed_json)
        self._verify_credential(credential)

    def test_init_from_nonexisting_file(self):
        with pytest.raises(IOError):
            credentials.RefreshToken(testutils.resource_filename('non_existing.json'))

    def test_init_from_invalid_file(self):
        # A service-account file is not a refresh-token file.
        with pytest.raises(ValueError):
            credentials.RefreshToken(testutils.resource_filename('service_account.json'))

    @pytest.mark.parametrize('arg', [None, 0, 1, True, False, list(), tuple(), dict()])
    def test_invalid_args(self, arg):
        with pytest.raises(ValueError):
            credentials.RefreshToken(arg)

    @pytest.mark.parametrize('key', ['client_id', 'client_secret', 'refresh_token'])
    def test_required_field(self, key):
        # Dropping any required field must be rejected.
        data = {'client_id': 'value', 'client_secret': 'value', 'refresh_token': 'value', 'type': 'authorized_user'}
        del data[key]
        with pytest.raises(ValueError):
            credentials.RefreshToken(data)

    def _verify_credential(self, credential):
        """Shared assertions for a correctly-loaded refresh-token credential."""
        assert (credential.client_id == 'mock.apps.googleusercontent.com')
        assert (credential.client_secret == 'mock-secret')
        assert (credential.refresh_token == 'mock-refresh-token')
        g_credential = credential.get_credential()
        assert isinstance(g_credential, gcredentials.Credentials)
        assert (g_credential.token is None)
        check_scopes(g_credential)
        # Token fetches go through a mocked transport.
        mock_response = {'access_token': 'mock_access_token', 'expires_in': 3600}
        credentials._request = testutils.MockRequest(200, json.dumps(mock_response))
        access_token = credential.get_access_token()
        assert (access_token.access_token == 'mock_access_token')
        assert isinstance(access_token.expiry, datetime.datetime)
def extractGuardianofthespiritBlogspotCom(item):
    """Map a feed item to a release message.

    Returns None for non-chapter posts or previews, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info and preview posts.
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@_packages_ns.route('/edit/<ownername>/<projectname>/<package_name>/')
@_packages_ns.route('/edit/<ownername>/<projectname>/<package_name>/<source_type_text>')
class PackageEdit(Resource):
    """API endpoint for editing an existing package in a copr project.

    NOTE(review): the '@' markers on all six decorators were stripped during
    extraction (leaving bare no-op expressions); restored here — confirm the
    decorator names against the original source.
    """

    @_api_login_required
    @_packages_ns.doc(params=edit_package_docs)
    @_packages_ns.expect(package_edit_input_model)
    @_packages_ns.marshal_with(package_model)
    def post(self, ownername, projectname, package_name, source_type_text=None):
        """Edit *package_name* in ownername/projectname; 404 when it does not exist."""
        copr = get_copr(ownername, projectname)
        data = rename_fields(get_form_compatible_data(preserve=['python_versions']))
        try:
            package = PackagesLogic.get(copr.id, package_name)[0]
            # Keep the package's existing source type when none is in the URL.
            source_type_text = (source_type_text or package.source_type_text)
        except IndexError as ex:
            msg = 'Package {name} does not exists in copr {copr}.'.format(name=package_name, copr=copr.full_name)
            raise ObjectNotFound(msg) from ex
        process_package_add_or_edit(copr, source_type_text, package=package, data=data)
        return to_dict(package)
class BackupDynamoDBS3Test(unittest.TestCase):
    """lambda_handler must scan DynamoDB once and write one backup object to S3."""

    # NOTE(review): these two decorators lost their '@mock.patch' prefix during
    # extraction (a bare tuple with a keyword argument is not even valid
    # syntax); restored assuming the file imports `mock` — confirm the name.
    @mock.patch('backup_dynamodb_s3.app.scan_contacts_table', side_effect=mocked_dynamodb_scan)
    @mock.patch('backup_dynamodb_s3.app.write_to_s3', side_effect=mocked_s3_put)
    def test_build(self, s3_put_mock, dynamo_scan_mock):
        # Bottom-most patch maps to the first mock argument (write_to_s3).
        response = lambda_handler(self.scheduled_event(), '')
        self.assertEqual(dynamo_scan_mock.call_count, 1)
        self.assertEqual(s3_put_mock.call_count, 1)

    def scheduled_event(self):
        """A minimal CloudWatch scheduled-event payload."""
        return {'version': '0', 'id': 'd77bcbc4-0b2b-4d45-9694-b1df99175cfb', 'detail-type': 'Scheduled Event', 'source': 'aws.events', 'account': '', 'time': '2016-09-25T04:55:26Z', 'region': 'us-east-1', 'resources': ['arn:aws:events:us-east-1::rule/test-scheduled-event'], 'detail': {}}
class TestS3MockYAMLWriterNoBucket():
    """Saving a YAML config to a non-existent S3 bucket must raise ValueError."""

    def test_yaml_s3_mock_file_writer_missing_s3(self, monkeypatch, tmp_path, s3):
        with monkeypatch.context() as m:
            (aws_session, s3_client) = s3
            # Drive the builder from a known-good YAML config.
            m.setattr(sys, 'argv', ['', '--config', './tests/conf/yaml/test.yaml'])
            config = ConfigArgBuilder(*all_configs, desc='Test Builder')
            mock_s3_bucket = 'spock-test'
            mock_s3_object = 'fake/test/bucket/'
            # Only 'spock-test' is created; the save below targets bucket 'foo'.
            s3_client.create_bucket(Bucket=mock_s3_bucket)
            now = datetime.datetime.now()
            # Timestamp suffix keeps save names unique across runs.
            curr_int_time = int(f'{now.year}{now.month}{now.day}{now.hour}{now.second}')
            with pytest.raises(ValueError):
                config.save(user_specified_path=f's3://foo/{mock_s3_object}', file_extension='.yaml', file_name=f'pytest.save.{curr_int_time}').generate()
class Underscore(JsPackage):
    """Python wrapper generating JavaScript calls against the underscore.js library."""

    lib_alias = {'js': 'underscore'}
    lib_selector = '_'

    def custom(self, func_name: str, **kwargs):
        """Call an arbitrary underscore function, serialising keyword args to JS."""
        rendered = ['%s=%s' % (key, JsUtils.jsConvertData(val, None)) for key, val in kwargs.items()]
        return JsObjects.JsObject.JsObject.get('%s.%s(%s)' % (self._selector, func_name, ', '.join(rendered)))

    def each(self, data, iteratee, context: dict=None):
        """Generate an ``_.each`` call; *context* is only passed when provided."""
        data = JsUtils.jsConvertData(data, None)
        iteratee = JsUtils.jsConvertData(iteratee, None)
        if context is None:
            return JsObjects.JsObject.JsObject.get('%s.each(%s, %s)' % (self._selector, data, iteratee))
        context = JsUtils.jsConvertData(context, None)
        return JsObjects.JsObject.JsObject.get('%s.each(%s, %s, %s)' % (self._selector, data, iteratee, context))

    def map(self, data, iteratee, context: dict=None):
        """Generate an ``_.map`` call; *context* is only passed when provided."""
        data = JsUtils.jsConvertData(data, None)
        iteratee = JsUtils.jsConvertData(iteratee, None)
        if context is None:
            return JsObjects.JsObject.JsObject.get('%s.map(%s, %s)' % (self._selector, data, iteratee))
        context = JsUtils.jsConvertData(context, None)
        return JsObjects.JsObject.JsObject.get('%s.map(%s, %s, %s)' % (self._selector, data, iteratee, context))
class MessageSettingsList(ResourceList):
    """Admin-only, read-only list endpoint for message settings."""

    def query(self, view_kwargs):
        """Base query: all MessageSettings ordered by id."""
        return db.session.query(MessageSettings).order_by(MessageSettings.id)

    decorators = (api.has_permission('is_admin', methods='GET'),)
    methods = ['GET']
    schema = MessageSettingSchema
    # `query` above must be defined before this reference in the class body.
    data_layer = {'session': db.session, 'model': MessageSettings, 'methods': {'query': query}}
class TestVerifySolidRunPairedEnd(unittest.TestCase):
    """Verification checks against a mock paired-end SOLiD run directory."""

    def setUp(self):
        # Build a throw-away mock run directory for each test.
        self.solid_test_dir = TestUtils().make_solid_dir_paired_end('solid0123__PE_BC')

    def tearDown(self):
        shutil.rmtree(self.solid_test_dir)

    def test_verify(self):
        """An intact run directory verifies successfully."""
        self.assertTrue(SolidRun(self.solid_test_dir).verify())

    def test_verify_missing_csfasta(self):
        """Verification fails once a library's csfasta file is removed."""
        run = SolidRun(self.solid_test_dir)
        os.remove(run.samples[0].libraries[0].csfasta)
        self.assertFalse(SolidRun(self.solid_test_dir).verify())

    def test_verify_missing_qual(self):
        """Verification fails once a library's qual file is removed."""
        run = SolidRun(self.solid_test_dir)
        os.remove(run.samples[0].libraries[0].qual)
        self.assertFalse(SolidRun(self.solid_test_dir).verify())
class FaucetResult(unittest.runner.TextTestResult):
    """TextTestResult that records per-test durations (read from each test's
    tmpdir) and remembers whether any test unexpectedly succeeded."""

    root_tmpdir = None
    # Shared across instances: test id -> duration in seconds.
    test_duration_secs = {}
    unexpected_success = False

    def _test_tmpdir(self, test):
        """Per-test temporary directory beneath root_tmpdir."""
        return os.path.join(self.root_tmpdir, mininet_test_util.flat_test_name(test.id()))

    def _set_test_duration_secs(self, test):
        """Record the test's duration from its tmpdir file, defaulting to 0."""
        test_id = test.id()
        self.test_duration_secs.setdefault(test_id, 0)
        duration_path = os.path.join(self._test_tmpdir(test), 'test_duration_secs')
        try:
            with open(duration_path, encoding='utf-8') as handle:
                self.test_duration_secs[test_id] = int(handle.read())
        except FileNotFoundError:
            # Test never wrote a duration file; keep the 0 default.
            pass

    def stopTest(self, test):
        self._set_test_duration_secs(test)
        super().stopTest(test)

    def addUnexpectedSuccess(self, test):
        self.unexpected_success = True
        super().addUnexpectedSuccess(test)
class OddIntegerTest(AnyTraitTest):
    """Trait test: accepts odd integers (as ints or integral floats), rejects
    everything else."""

    def setUp(self):
        self.obj = OddIntegerTrait()

    _default_value = 99
    # NOTE(review): the large literals below were dropped during extraction
    # (the lists contained bare commas and '(- )' / '.0' fragments — syntax
    # errors); 999999999 restored per the upstream traits test-suite —
    # confirm against the original source.
    _good_values = [1, 3, 5, 7, 9, 999999999, 1.0, 3.0, 5.0, 7.0, 9.0, 999999999.0, (- 1), (- 3), (- 5), (- 7), (- 9), (- 999999999), (- 1.0), (- 3.0), (- 5.0), (- 7.0), (- 9.0), (- 999999999.0)]
    _bad_values = [0, 2, (- 2), 1j, None, '1', [1], (1,), {1: 1}]
def extract3DmumnovelloversWordpressCom(item):
    """Map a feed item to a release message.

    Returns None for non-chapter posts or previews, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info and preview posts.
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [('Dear Love', 'Dear Love', 'translated'), (' Gaga', ' Gaga', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionsImage(Options):
    """Image component options.

    NOTE(review): same-named getter/setter def pairs — @property / @setter
    decorators appear stripped from this file; code left untouched.
    """

    # Properties exposed to the component.
    component_properties = ('color',)

    def background(self):
        # Getter: background color (None when unset).
        return self._config_get(None)

    def background(self, color):
        self._config(color)

    def color(self):
        # Getter: defaults to the last color of the page theme.
        return self._config_get(self.page.theme.colors[(- 1)])

    def color(self, color: str):
        self._config(color)
@mock.patch('aea.cli.fingerprint.fingerprint_package')
class FingerprintCommandTestCase(TestCase):
    """Test `aea fingerprint` CLI command.

    NOTE(review): the class decorator lost its '@mock.patch' prefix during
    extraction (a bare string expression); restored — it patches
    fingerprint_package for every test method, which is why each method
    receives the mock(s) as trailing arguments.
    """

    def setUp(self):
        self.runner = CliRunner()

    def test_fingerprint_positive(self, *mocks):
        """Fingerprinting each package type exits cleanly."""
        public_id = 'author/name:0.1.0'
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'fingerprint', 'connection', public_id], standalone_mode=False)
        self.assertEqual(result.exit_code, 0)
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'fingerprint', 'contract', public_id], standalone_mode=False)
        self.assertEqual(result.exit_code, 0)
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'fingerprint', 'protocol', public_id], standalone_mode=False)
        self.assertEqual(result.exit_code, 0)
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'fingerprint', 'skill', public_id], standalone_mode=False)
        self.assertEqual(result.exit_code, 0)

    def _run_fingerprint_by_path(self):
        """Invoke `fingerprint by-path` and assert success."""
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'fingerprint', 'by-path', 'some_dir'], standalone_mode=False, catch_exceptions=False)
        self.assertEqual(result.exit_code, 0, result.exception)

    def test_by_path_ok(self, fingerprint_mock):
        """by-path fingerprints the single package config found in the dir."""
        with mock.patch('os.listdir', return_value=[DEFAULT_CONNECTION_CONFIG_FILE]):
            self._run_fingerprint_by_path()
        fingerprint_mock.assert_called()

    def test_by_path_exceptions(self, *mocks):
        """by-path rejects directories with zero or multiple config files."""
        with pytest.raises(ClickException, match='No package config file found in `.*`. Incorrect directory?'):
            with mock.patch('os.listdir', return_value=[]):
                self._run_fingerprint_by_path()
        with pytest.raises(ClickException, match='Too many config files in the directory, only one has to present!'):
            with mock.patch('os.listdir', return_value=[DEFAULT_CONNECTION_CONFIG_FILE, DEFAULT_SKILL_CONFIG_FILE]):
                self._run_fingerprint_by_path()
def chain_vm_configuration(fixture: Dict[(str, Any)]) -> Iterable[Tuple[(int, Type[VirtualMachineAPI])]]:
    """Translate a fixture's 'network' name into a chain VM configuration.

    Returns a tuple of (activation_block, VM class) pairs; raises ValueError
    for unknown network names.
    """
    network = fixture['network']

    # Networks with a single VM active from block 0.
    single_fork = {
        'Frontier': FrontierVM,
        'EIP150': TangerineWhistleVM,
        'EIP158': SpuriousDragonVM,
        'Byzantium': ByzantiumVM,
        'Constantinople': ConstantinopleVM,
        'ConstantinopleFix': PetersburgVM,
        'Istanbul': IstanbulVM,
        'Berlin': BerlinVM,
        'London': LondonVM,
        'Merge': ParisVM,
        'Shanghai': ShanghaiVM,
    }
    if network in single_fork:
        return ((0, single_fork[network]),)

    # Homestead variants need a freshly configured class (DAO-fork options).
    if network == 'Homestead':
        return ((0, BaseHomesteadVM.configure(support_dao_fork=False)),)
    if network == 'FrontierToHomesteadAt5':
        homestead = BaseHomesteadVM.configure(support_dao_fork=False)
        return ((0, FrontierVM), (5, homestead))
    if network == 'HomesteadToEIP150At5':
        homestead = BaseHomesteadVM.configure(support_dao_fork=False)
        return ((0, homestead), (5, TangerineWhistleVM))
    if network == 'HomesteadToDaoAt5':
        dao_vm = MainnetDAOValidatorVM.configure(support_dao_fork=True, _dao_fork_block_number=5)
        return ((0, dao_vm),)

    # Two-fork transition networks.
    if network == 'EIP158ToByzantiumAt5':
        return ((0, SpuriousDragonVM), (5, ByzantiumVM))
    if network == 'ByzantiumToConstantinopleFixAt5':
        return ((0, ByzantiumVM), (5, PetersburgVM))
    if network == 'BerlinToLondonAt5':
        return ((0, BerlinVM), (5, LondonVM))
    if network == 'ArrowGlacierToMergeAtDiffC0000':
        # Transition at block 6 despite the "AtDiff" name.
        return ((0, GrayGlacierVM), (6, ParisVM))
    if network == 'MergeToShanghaiAtTime15k':
        return ((0, ParisVM), (5, ShanghaiVM))

    raise ValueError(f'Network {network} does not match any known VM rules')
# Generated OpenAPI "allOf" model for a WAF rule revision's response data.
# NOTE(review): several decorator lines in this block look mangled — the bare
# `_property` statements are likely stripped `@cached_property` decorators and
# `_js_args_to_python_args` a stripped `@convert_js_args_to_python_args`;
# confirm against the code generator's original output.
class WafRuleRevisionResponseDataAllOf(ModelNormal):
# No enum-restricted or range-validated attributes on this model.
allowed_values = {}
validations = {}
_property
def additional_properties_type():
# Lazy import avoids circular model imports at module load time.
lazy_import()
# Any of these types may appear as an undeclared (additional) property.
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
lazy_import()
# Declared attribute name -> accepted type tuple.
return {'relationships': (RelationshipWafRule,)}
_property
def discriminator():
# No polymorphic discriminator for this schema.
return None
# Python attribute name -> JSON field name.
attribute_map = {'relationships': 'relationships'}
read_only_vars = {}
_composed_schemas = {}
_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
# Deserialization entry point used when building the model from API payloads.
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
# Track composed-schema traversal to avoid infinite recursion.
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
# Optionally drop unknown keys when configuration says to discard them.
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
_js_args_to_python_args
def __init__(self, *args, **kwargs):
# User-facing constructor; mirrors _from_openapi_data but rejects read-only attributes.
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
# NOTE(review): with the flattened indentation it is unclear whether this
# read-only guard sits inside the loop above (as generated models usually
# have it, before setattr); confirm against the original.
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def redirect_using_back_to(func: Callable) -> Callable:
    """Decorator for route handlers that fall back to a redirect.

    If the wrapped handler returns a response, it is passed through
    unchanged; otherwise the user is redirected to the `back_to` query
    parameter when present, and to the portal page as a last resort.
    """
    from functools import wraps  # local import keeps the decorator self-contained

    # Restored: the bare "(func)" line in the original was a stripped @wraps decorator.
    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Response:
        response = await func(*args, **kwargs)
        if response is not None:
            return response
        # Honour an explicit ?back_to=... redirect target when provided.
        if (back_to_url := request.args.get('back_to')) is not None:
            return redirect(back_to_url)
        return redirect(url_for('portal.portal'))

    return wrapper
def extractRomanticDreamersSanctuary(item):
    """Map a feed item from Romantic Dreamer's Sanctuary to a release message.

    Returns None for previews or items without chapter/volume/fragment info,
    a release message for known tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol or frag):
        return None
    known_series = (
        ('An Interview With a Playboy', 'An Interview With a Playboy', 'translated'),
    )
    for tag, series, tl_type in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extract_docstring_from_source(source):
    """Split *source* into (docstring, remaining_source).

    Scans tokens for the first string literal that starts at column 0 and
    treats it as the module docstring.  Returns ('', stripped source) when
    there is none.
    """
    import ast

    reader = StringIO(source)
    for ttype, _tok, tstart, tend, _line in tokenize.generate_tokens(reader.readline):
        if token.tok_name[ttype] == 'STRING' and tstart[1] == 0:
            break
    else:
        # No module-level string literal: nothing to extract.
        return ('', source.strip())
    source_lines = source.splitlines()
    # literal_eval (rather than eval) safely decodes the string literal's
    # quoting and escape sequences without executing arbitrary code.
    docstring = ast.literal_eval('\n'.join(source_lines[tstart[0] - 1:tend[0]]))
    source_lines = source_lines[:tstart[0] - 1] + source_lines[tend[0]:]
    return (docstring, '\n'.join(source_lines).strip())
# NOTE(review): the two lines below look like stripped decorators — most
# likely a blueprint route (`@...route('/mod_page', methods=[...])`) and a
# role-restriction decorator (`@..._role_restrict(...)`); recover the exact
# names from the original source.
('/mod_page', methods=['GET', 'POST'])
_role_restrict(roles.ROLE_ADMIN)
def mod_pages():
"""Admin view: list existing pages and handle the add-page form."""
pages = page_service.get_all_pages()
page_types = page_service.get_page_types()
add_page_form = AddPageForm(request.form)
add_page_form.type.choices = [(i, i) for i in page_types]
add_page_messages = []
if ((request.method == 'POST') and add_page_form.validate()):
title = add_page_form.title.data
link_name = add_page_form.link.data
page_type = add_page_form.type.data
page = PageModel.from_title_link_type(title, link_name, page_type)
try:
page = page_service.create_page(page)
flash('Page created')
# On success, jump to the single-page editor for the new page.
return redirect(url_for('.mod_page', page=page))
except ArgumentError as e:
# Surface service-level validation errors back into the form view.
add_page_messages.append(e.message)
return render_template('mod_pages.html', pages=pages, page_types=page_types, add_page_form=add_page_form, add_page_messages=add_page_messages)
def fetch_real_name(app_dir, flavours):
    """Return the app's display name from its manifest, or None.

    Walks the candidate manifests, parses each XML one, and resolves the
    <application android:label="..."> attribute to a concrete string.
    """
    label_attr = XMLNS_ANDROID + 'label'
    for path in manifest_paths(app_dir, flavours):
        if path.suffix != '.xml' or not path.is_file():
            continue
        logging.debug('fetch_real_name: Checking manifest at %s', path)
        try:
            xml = parse_xml(path)
        except (XMLElementTree.ParseError, ValueError):
            logging.warning(_("Problem with xml at '{path}'").format(path=path))
            continue
        app = xml.find('application')
        if app is None or label_attr not in app.attrib:
            continue
        # The label may be a resource reference; resolve it to a string.
        result = retrieve_string_singleline(app_dir, app.attrib[label_attr])
        if result:
            return result.strip()
    return None
def test_extract_return(task):
"""Restructure a CFG where both branch arms end in returns and check the resulting AST shape."""
var_i0 = Variable('var_i', Integer(32, False), ssa_name=Variable('rax_1', Integer(32, False), 0))
var_i1 = Variable('var_i', Integer(32, False), ssa_name=Variable('rax_1', Integer(32, False), 1))
var_x0 = Variable('var_x', Integer(32, False), ssa_name=Variable('var_c', Integer(32, False), 0))
var_x1 = Variable('var_x', Integer(32, False), ssa_name=Variable('var_c', Integer(32, False), 1))
# Build a 5-block CFG: block 0 branches to 1/2, block 1 branches to 3/4;
# blocks 2, 3 and 4 all end in returns.
task.graph.add_nodes_from((vertices := [BasicBlock(0, instructions=[Assignment(var_i0, Constant(0)), Assignment(var_x0, Constant(42)), Branch(Condition(OperationType.equal, [var_i0, Constant(0)]))]), BasicBlock(1, instructions=[Assignment(var_x1, Constant(5)), Branch(Condition(OperationType.equal, [var_x1, Constant(0)]))]), BasicBlock(2, instructions=[Return([var_x0])]), BasicBlock(3, instructions=[Return([var_i0])]), BasicBlock(4, instructions=[Assignment(var_i1, Constant(2)), Return([var_i1])])]))
task.graph.add_edges_from([TrueCase(vertices[0], vertices[1]), FalseCase(vertices[0], vertices[2]), TrueCase(vertices[1], vertices[3]), FalseCase(vertices[1], vertices[4])])
PatternIndependentRestructuring().run(task)
# Expected AST: code / if / code / if / code — each branch condition becomes
# an else-less ConditionNode whose true branch is the returning block.
assert (isinstance((seq_node := task._ast.root), SeqNode) and (len(seq_node.children) == 5))
assert (isinstance((node_0 := seq_node.children[0]), CodeNode) and (node_0.instructions == vertices[0].instructions[:(- 1)]))
assert (isinstance((cond_1 := seq_node.children[1]), ConditionNode) and (cond_1.false_branch is None))
assert (isinstance((node_1 := seq_node.children[2]), CodeNode) and (node_1.instructions == vertices[1].instructions[:(- 1)]))
assert (isinstance((cond_2 := seq_node.children[3]), ConditionNode) and (cond_2.false_branch is None))
assert (isinstance((node_4 := seq_node.children[4]), CodeNode) and (node_4.instructions == vertices[4].instructions))
# First condition is the negation of block 0's branch condition (false edge
# led to the return in block 2); second maps directly to block 1's condition.
assert ((cond := cond_1.condition).is_negation and (task._ast.condition_map[(~ cond)] == vertices[0].instructions[(- 1)].condition))
assert isinstance((branch := cond_1.true_branch_child), CodeNode)
assert (branch.instructions == vertices[2].instructions)
assert ((cond := cond_2.condition).is_symbol and (task._ast.condition_map[cond] == vertices[1].instructions[(- 1)].condition))
assert isinstance((branch := cond_2.true_branch_child), CodeNode)
assert (branch.instructions == vertices[3].instructions)
# NOTE(review): the bare tuple line in the original was a stripped class
# decorator; restored as GtkTemplate(...) based on the template usage below —
# confirm the decorator name and signature against the original source.
@GtkTemplate('ui', 'widgets', 'filter_dialog.ui')
class FilterDialog(Gtk.Dialog):
    """Dialog hosting a FilterWidget plus name/limit/random/match-any controls."""

    __gtype_name__ = 'FilterDialog'

    # Template children bound from the .ui file.
    (name_entry, filter, match_any, random, lim_check, lim_spin) = GtkTemplate.Child.widgets(6)

    def __init__(self, title, parent, criteria):
        Gtk.Dialog.__init__(self, title=title, transient_for=parent)
        self.init_template()
        self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT, Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT)
        # Build the criteria widget, sorted by localized criterion label.
        f = FilterWidget(sorted(criteria, key=(lambda k: _(k[0]))))
        f.add_criteria_row()
        f.set_border_width(5)
        f.show_all()
        # Swap the template placeholder for the live filter widget.
        self.filter = gtk_widget_replace(self.filter, f)

    def on_add_button_clicked(self, *args):
        self.filter.add_criteria_row()

    def on_lim_check_toggled(self, *args):
        # The spin button is only meaningful while the limit checkbox is on.
        self.lim_spin.set_sensitive(self.lim_check.get_active())

    def set_limit(self, limit):
        """Set the result limit; any negative value means 'no limit'."""
        if limit > -1:
            self.lim_check.set_active(True)
            self.lim_spin.set_value(limit)
        else:
            self.lim_check.set_active(False)

    def get_limit(self):
        """Return the configured limit, or -1 when unlimited."""
        if self.lim_check.get_active():
            return int(self.lim_spin.get_value())
        else:
            return -1

    def get_random(self):
        return self.random.get_active()

    def set_random(self, random):
        self.random.set_active(random)

    def get_match_any(self):
        return self.match_any.get_active()

    def set_match_any(self, any):
        self.match_any.set_active(any)

    def get_name(self):
        return self.name_entry.get_text()

    def set_name(self, name):
        self.name_entry.set_text(name)

    def get_result(self):
        # Delegate to the embedded FilterWidget.
        return self.filter.get_result()

    def get_state(self):
        return self.filter.get_state()

    def set_state(self, state):
        self.filter.set_state(state)
def test_ajax_fk_multi():
"""End-to-end check of AjaxSelectMultipleField on a many-to-many relationship."""
(app, db, admin) = setup()
with app.app_context():
class Model1(db.Model):
__tablename__ = 'model1'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(20))
def __str__(self):
return self.name
# Association table backing the many-to-many relation below.
table = db.Table('m2m', db.Model.metadata, db.Column('model1_id', db.Integer, db.ForeignKey('model1.id')), db.Column('model2_id', db.Integer, db.ForeignKey('model2.id')))
class Model2(db.Model):
__tablename__ = 'model2'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(20))
model1_id = db.Column(db.Integer(), db.ForeignKey(Model1.id))
model1 = db.relationship(Model1, backref='models2', secondary=table)
db.create_all()
# form_ajax_refs turns the relation into an AJAX-backed multi-select field.
view = CustomModelView(Model2, db.session, url='view', form_ajax_refs={'model1': {'fields': ['name']}})
admin.add_view(view)
assert (u'model1' in view._form_ajax_refs)
model = Model1(name=u'first')
db.session.add_all([model, Model1(name=u'foo')])
db.session.commit()
form = view.create_form()
assert (form.model1.__class__.__name__ == u'AjaxSelectMultipleField')
with app.test_request_context('/admin/view/'):
assert (u'data-json="[]"' in form.model1())
form.model1.data = [model]
# NOTE(review): both sides of this `or` are identical — one side was most
# likely an HTML-escaped (&quot;) variant that got mangled; restore it from
# the original source.
assert ((u'data-json="[[1, "first"]]"' in form.model1()) or (u'data-json="[[1, "first"]]"' in form.model1()))
# Posting the related id must persist the m2m link.
client = app.test_client()
client.post('/admin/view/new/', data={u'model1': as_unicode(model.id)})
mdl = db.session.query(Model2).first()
assert (mdl is not None)
assert (mdl.model1 is not None)
assert (len(mdl.model1) == 1)
def cluster_responses(responses: npt.NDArray[np.float64], nr_components: int) -> npt.NDArray[np.int_]:
    """Cluster the columns of *responses* by Spearman rank correlation.

    Computes the pairwise Spearman correlation of the columns, builds a
    single-linkage hierarchy over the correlation matrix, and returns the
    flat cluster label for each column.

    Note: the annotation previously used ``np.float_``, which was removed in
    NumPy 2.0 — replaced with ``np.float64`` (same meaning, import-safe).
    """
    correlation = spearmanr(responses).statistic
    # With exactly two columns spearmanr returns a scalar; rebuild the 2x2
    # correlation matrix so linkage() always receives a 2-D array.
    if isinstance(correlation, np.float64):
        correlation = np.array([[1, correlation], [correlation, 1]])
    # NOTE(review): the correlation matrix is passed to linkage() as an
    # observation matrix (rows treated as points in "correlation space"),
    # not as a condensed distance matrix — confirm this is intended.
    linkage_matrix = linkage(correlation, 'single', 'euclidean')
    clusters = fcluster(linkage_matrix, nr_components, criterion='inconsistent', depth=2)
    return clusters
# NOTE(review): the bare `.asyncio` line below looks like a stripped
# `@pytest.mark.asyncio` (or `@mark.asyncio`) class decorator — confirm.
.asyncio
class TestWorkspaceManagerCreate():
"""Integration tests for WorkspaceManager.create."""
async def test_db_error(self, mocker: MockerFixture, workspace_create: WorkspaceCreate, workspace_manager: WorkspaceManager, main_session: AsyncSession):
# A failing migration must abort creation and leave no workspace row behind.
workspace_db_mock = mocker.patch.object(workspace_manager, 'workspace_db')
workspace_db_mock.migrate.side_effect = WorkspaceDatabaseConnectionError('An error occured')
with pytest.raises(WorkspaceDatabaseConnectionError):
(await workspace_manager.create(workspace_create))
workspace_repository = WorkspaceRepository(main_session)
workspace = (await workspace_repository.get_one_or_none(select(Workspace).where((Workspace.name == workspace_create.name))))
assert (workspace is None)
async def test_valid_db(self, workspace_create: WorkspaceCreate, workspace_manager: WorkspaceManager, workspace_engine_manager: WorkspaceEngineManager):
# Successful creation seeds the workspace schema with default objects.
workspace = (await workspace_manager.create(workspace_create))
assert (workspace.domain == 'burgundy.localhost:8000')
assert (workspace.alembic_revision is not None)
async with get_workspace_session(workspace, workspace_engine_manager) as session:
tenant_repository = TenantRepository(session)
tenants = (await tenant_repository.all())
assert (len(tenants) == 1)
tenant = tenants[0]
assert tenant.default
client_repository = ClientRepository(session)
clients = (await client_repository.all())
assert (len(clients) == 1)
client = clients[0]
assert client.first_party
assert (client.tenant_id == tenant.id)
email_template_repository = EmailTemplateRepository(session)
email_templates = (await email_template_repository.all())
assert (len(email_templates) == len(EmailTemplateType))
theme_repository = ThemeRepository(session)
theme = (await theme_repository.get_default())
assert (theme is not None)
async def test_user_id(self, workspace_create: WorkspaceCreate, workspace_manager: WorkspaceManager, workspace_admin_user: User, main_session: AsyncSession):
# The creating user becomes a member of the new workspace.
workspace = (await workspace_manager.create(workspace_create, workspace_admin_user.id))
workspace_user_repository = WorkspaceUserRepository(main_session)
workspace_user = (await workspace_user_repository.get_by_workspace_and_user(workspace.id, workspace_admin_user.id))
assert (workspace_user is not None)
async def test_added_redirect_uri(self, workspace_create: WorkspaceCreate, workspace_manager: WorkspaceManager, workspace_admin_user: User, main_fief_client: Client):
workspace = (await workspace_manager.create(workspace_create, workspace_admin_user.id))
# NOTE(review): the line below is garbled (unterminated f-string); it
# presumably asserted that a redirect URI built from workspace.domain was
# registered on the main client — recover it from the original source.
assert (f' in main_fief_client.redirect_uris)
async def test_default_parameters(self, workspace_create: WorkspaceCreate, workspace_manager: WorkspaceManager, workspace_engine_manager: WorkspaceEngineManager):
# Explicit defaults override the generated domain and client credentials.
workspace = (await workspace_manager.create(workspace_create, default_domain='foobar.fief.dev', default_client_id='CLIENT_ID', default_client_secret='CLIENT_SECRET', default_encryption_key='ENCRYPTION_KEY'))
assert (workspace.domain == 'foobar.fief.dev')
async with get_workspace_session(workspace, workspace_engine_manager) as session:
client_repository = ClientRepository(session)
clients = (await client_repository.all())
client = clients[0]
assert (client.encrypt_jwk == 'ENCRYPTION_KEY')
assert (client.client_id == 'CLIENT_ID')
assert (client.client_secret == 'CLIENT_SECRET')
async def test_avoid_domain_collision(self, workspace_create: WorkspaceCreate, workspace_manager: WorkspaceManager):
# A name colliding with an existing domain gets a random suffix appended.
workspace_create.name = 'Bretagne'
workspace = (await workspace_manager.create(workspace_create))
assert re.match('bretagne-\\w+\\.localhost', workspace.domain)
class OptionPlotoptionsSplineTooltip(Options):
    """Tooltip options for Highcharts spline series.

    Each option is exposed as a property pair: the getter returns the
    configured value (falling back to the shown default) and the setter
    records the value.  NOTE(review): the original block defined each
    getter/setter as two plain methods with the same name (the setter
    silently shadowed the getter) — the `@property`/`@<name>.setter`
    decorators appear to have been stripped and are restored here.
    """

    @property
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    @clusterFormat.setter
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsSplineTooltipDatetimelabelformats':
        # Sub-configuration object; created lazily on first access.
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsSplineTooltipDatetimelabelformats)

    @property
    def distance(self):
        return self._config_get(16)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def followPointer(self):
        return self._config_get(False)

    @followPointer.setter
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def followTouchMove(self):
        return self._config_get(True)

    @followTouchMove.setter
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def footerFormat(self):
        return self._config_get('')

    @footerFormat.setter
    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def headerFormat(self):
        return self._config_get(None)

    @headerFormat.setter
    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointFormat(self):
        return self._config_get(None)

    @pointFormat.setter
    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointFormatter(self):
        return self._config_get(None)

    @pointFormatter.setter
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valuePrefix(self):
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    @property
    def xDateFormat(self):
        return self._config_get(None)

    @xDateFormat.setter
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def extractSummertimewaterlilyCom(item):
    """Map a feed item from summertimewaterlily.com to a release message.

    Returns None for previews or items without chapter/volume info, a
    release message for known tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_series = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSonificationDefaultspeechoptionsMappingPitch(Options):
    """Pitch-mapping options for sonification speech tracks.

    NOTE(review): the original block defined each getter/setter as two plain
    methods with the same name (the setter silently shadowed the getter) —
    the `@property`/`@<name>.setter` decorators appear to have been stripped
    and are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def value(self):
        return self._config_get(None)

    @value.setter
    def value(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.