code stringlengths 281 23.7M |
|---|
class PopupMenuSeparator(PopupSlider):
    """A thin horizontal separator for popup menus.

    Implemented as a degenerate slider: the value is pinned at 0 and the
    marker is suppressed, so only the 'above' colour bar is drawn.
    """

    defaults = [
        ('colour_above', '555555', 'Separator colour'),
        ('end_margin', 10, ''),
        ('marker_size', 0, ''),
        ('row_span', 1, 'Separator is half height of text item'),
    ]

    def __init__(self, **config):
        # Fix the slider at value 0; all other behaviour comes from PopupSlider.
        PopupSlider.__init__(self, value=0, **config)
        self.add_defaults(PopupMenuSeparator.defaults)
class OptionSeriesScatter3dSonificationTracksMappingHighpassResonance(Options):
    """Accessors for `series.scatter3d.sonification.tracks.mapping.highpass.resonance`.

    Auto-generated Highcharts option wrapper: reads go through
    `_config_get(default)` and writes through `_config(value, js_type=False)`.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name; the `@property` / `@<name>.setter` decorators were presumably
    stripped during extraction — as written here, each second `def` silently
    replaces the first. Confirm against the generator's original output.
    """

    def mapFunction(self):
        # Getter: no configured default (None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_def_type_in_submod_procedure():
    """Definition request inside a submodule procedure resolves correctly."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    src_file = (test_dir / 'subdir') / 'test_submod.F90'
    request += def_request(src_file, 36, 13)
    errcode, results = run_request(request)
    assert errcode == 0
    # Expected definition location: line 1, char 1 of the same file.
    expected = [[1, 1, str((test_dir / 'subdir') / 'test_submod.F90')]]
    # results[0] is the initialize response; definitions start at index 1.
    assert len(expected) == len(results) - 1
    for idx, exp in enumerate(expected):
        validate_def(results[idx + 1], exp)
def add_argparse_args(parser):
    """Register the shared backtest CLI options on *parser* and return it.

    Every option except --nojit defaults to None so that an unset flag
    falls back to the value from the backtest config file.
    """
    parser.add_argument('--nojit', help='disable numba', action='store_true')
    parser.add_argument('-b', '--backtest_config', type=str, required=False,
                        dest='backtest_config_path',
                        default='configs/backtest/default.hjson',
                        help='backtest config hjson file')
    parser.add_argument('-s', '--symbols', type=str, required=False,
                        dest='symbols', default=None,
                        help='specify symbol(s), overriding symbol from backtest config. '
                             'multiple symbols separated with comma')
    parser.add_argument('-u', '--user', type=str, required=False,
                        dest='user', default=None,
                        help='specify user, a.k.a. account_name, overriding user from backtest config')
    parser.add_argument('-sd', '--start_date', type=str, required=False,
                        dest='start_date', default=None,
                        help='specify start date, overriding value from backtest config')
    parser.add_argument('-ed', '--end_date', type=str, required=False,
                        dest='end_date', default=None,
                        help='specify end date, overriding value from backtest config')
    parser.add_argument('-sb', '--starting_balance', '--starting-balance',
                        type=float, required=False,
                        dest='starting_balance', default=None,
                        help='specify starting_balance, overriding value from backtest config')
    parser.add_argument('-m', '--market_type', type=str, required=False,
                        dest='market_type', default=None,
                        help='specify whether spot or futures (default), overriding value from backtest config')
    parser.add_argument('-bd', '--base_dir', type=str, required=False,
                        dest='base_dir', default=None,
                        help='specify the base output directory for the results')
    # nargs='?' + const='y': a bare -oh behaves like "-oh y".
    parser.add_argument('-oh', '--ohlcv', type=str, required=False,
                        dest='ohlcv', default=None, nargs='?', const='y',
                        help='if no arg or [y/yes], use 1m ohlcv instead of 1s ticks, overriding param ohlcv from config/backtest/default.hjson')
    return parser
def parseUri(parcel: ParcelParser, parent: Field) -> None:
    """Parse a serialized Uri from *parcel*, dispatching on its type code.

    A NULL type is a no-op; unknown type codes raise ParseError.
    """
    code_field = parcel.parse_field('code', 'uint32', parcel.readUint32, parent)
    uri_type = code_field.content
    if uri_type == binder_trace.constants.URI_NULL_TYPE_ID:
        return
    if uri_type == binder_trace.constants.URI_STRING_TYPE_ID:
        parseStringUri(parcel, parent)
        return
    if uri_type == binder_trace.constants.URI_OPAQUE_TYPE_ID:
        parseOpaqueUri(parcel, parent)
        return
    if uri_type == binder_trace.constants.URI_HIERARCHICAL_TYPE_ID:
        parseHierarchicalUri(parcel, parent)
        return
    raise ParseError(f'Unknown URI type: {code_field.content}')
def test_constant_arrays_computation(thr):
    """Kernel with baked-in constant arrays matches the numpy reference."""
    size = 200
    ints = get_test_array(size, numpy.int32)
    floats = get_test_array((2, size), numpy.float32)
    # Reference: elementwise product of the int array with the column sums.
    expected = (ints * (floats[0] + floats[1])).astype(numpy.float32)
    compiled = Dummy(size, ints, floats).compile(thr)
    result_dev = thr.empty_like(compiled.parameter.output)
    compiled(result_dev)
    assert diff_is_negligible(result_dev.get(), expected)
def _inline_source(name: str, compiler_output: Dict[(str, Any)], package_root_dir: Optional[Path], manifest: Manifest) -> Manifest:
    """Return *manifest* with the named contract's source inlined.

    The source file is resolved under *package_root_dir* when given,
    otherwise under the current working directory; a missing file raises
    ManifestBuildingError.
    """
    names_and_paths = get_names_and_paths(compiler_output)
    try:
        source_path = names_and_paths[name]
    except KeyError:
        raise ManifestBuildingError(f'Unable to inline source: {name}. Available sources include: {list(sorted(names_and_paths.keys()))}.')
    if package_root_dir:
        candidate = package_root_dir / source_path
        if not candidate.is_file():
            raise ManifestBuildingError(f'Contract source: {source_path} cannot be found in provided package_root_dir: {package_root_dir}.')
        source_data = candidate.read_text()
    else:
        candidate = Path.cwd() / source_path
        if not candidate.is_file():
            raise ManifestBuildingError('Contract source cannot be resolved, please make sure that the working directory is set to the correct directory or provide `package_root_dir`.')
        source_data = candidate.read_text()
    source_data_object = {
        'content': source_data.rstrip('\n'),
        'installPath': source_path,
        'type': 'solidity',
    }
    return assoc_in(manifest, ['sources', source_path], source_data_object)
def main() -> None:
    """Resolve the executable named by argv[1] and exec it with argv[2:].

    Exits with the child's return code; exits with an error message when
    the executable type is unknown or the binary cannot be found.
    """
    exec_type = sys.argv[1]
    # FIX: input validation previously used `assert`, which is stripped
    # under `python -O`; use explicit exits so the checks always run.
    if exec_type not in executables:
        sys.exit(f"Unknown executable type '{exec_type}'")
    executable = executables[exec_type]
    exec_path = shutil.which(executable)
    if exec_path is None:
        sys.exit(f"Could not find executable '{executable}' on PATH")
    if not pathlib.Path(exec_path).is_file():
        sys.exit(f'Executable does not exist at {exec_path}')
    args = [exec_path] + sys.argv[2:]
    # Merge the child's stderr into our stdout stream.
    proc = subprocess.Popen(args, stdout=sys.stdout, stderr=subprocess.STDOUT)
    sys.exit(proc.wait())
def compute_relative_error(q_a: npt.NDArray[np.float64], q_b: npt.NDArray[np.float64], distance: float, robot: Robot) -> float:
    """Return |distance - actual distance| between the FK poses of q_a and q_b."""
    position_a = position_from_matrix(robot.fk(q_a))
    position_b = position_from_matrix(robot.fk(q_b))
    # Euclidean distance between the two end-effector positions.
    measured_distance = np.linalg.norm(position_a - position_b)
    # norm of a scalar difference == absolute error.
    return float(np.linalg.norm(distance - measured_distance))
class DiversityMetric(Metric[DiversityMetricResult]):
    """Intra-List Diversity (ILD) of top-k recommendation lists.

    For each user, averages the pairwise feature distances between the
    items in the user's top-k list, using the distance matrix computed by
    the shared PairwiseDistance sub-metric.
    """

    # Sub-metric that supplies the item-item distance matrix.
    _pairwise_distance: PairwiseDistance
    # Depth of the recommendation list to evaluate.
    k: int
    # Item feature columns the distances are computed from.
    item_features: List[str]

    def __init__(self, k: int, item_features: List[str], options: AnyOptions=None) -> None:
        self.k = k
        self.item_features = item_features
        self._pairwise_distance = PairwiseDistance(k=k, item_features=item_features)
        super().__init__(options=options)

    def get_ild(self, df: pd.DataFrame, k: int, recommendations_type: RecomType, user_id: str, item_id: str, predictions: str, dist_matrix: np.ndarray, name_dict: Dict):
        """Return (per-user ILD Series, mean ILD) for the top-k lists in *df*.

        When predictions are scores they are converted to descending ranks
        first, so the `<= k` filter below selects each user's top-k items.
        *name_dict* maps item ids to row/column indices of *dist_matrix*.
        """
        df = df.copy()
        if (recommendations_type == RecomType.SCORE):
            df[predictions] = df.groupby(user_id)[predictions].transform('rank', ascending=False)
        ilds = []
        all_users = df[user_id].unique()
        for user in all_users:
            # The user's top-k recommended items.
            rec_list = df[((df[user_id] == user) & (df[predictions] <= k))][item_id]
            user_res = 0
            # Sum distances over all unordered item pairs in the list.
            for (i, j) in combinations(rec_list, 2):
                user_res += dist_matrix[(name_dict[i], name_dict[j])]
            # NOTE(review): divides by the list length, not the pair count,
            # and raises ZeroDivisionError for a user with an empty top-k
            # list — presumably every user has at least one item; confirm.
            ilds.append((user_res / len(rec_list)))
        distr = pd.Series(ilds)
        value = np.mean(ilds)
        return (distr, value)

    def calculate(self, data: InputData) -> DiversityMetricResult:
        """Compute ILD for current data and, when present, reference data.

        Raises:
            ValueError: if the user column, item column or recommendation
                type is missing from the data definition.
        """
        result = self._pairwise_distance.get_result()
        dist_matrix = result.dist_matrix
        name_dict = result.name_dict
        user_id = data.data_definition.get_user_id_column()
        item_id = data.data_definition.get_item_id_column()
        recommendations_type = data.column_mapping.recom_type
        if ((user_id is None) or (item_id is None) or (recommendations_type is None)):
            raise ValueError('user_id and item_id and recommendations_type should be specified')
        prediction_name = get_prediciton_name(data)
        (curr_distr_data, curr_value) = self.get_ild(df=data.current_data, k=self.k, recommendations_type=recommendations_type, user_id=user_id.column_name, item_id=item_id.column_name, predictions=prediction_name, dist_matrix=dist_matrix, name_dict=name_dict)
        ref_distr_data: Optional[pd.Series] = None
        ref_value: Optional[float] = None
        if (data.reference_data is not None):
            (ref_distr_data, ref_value) = self.get_ild(df=data.reference_data, k=self.k, recommendations_type=recommendations_type, user_id=user_id.column_name, item_id=item_id.column_name, predictions=prediction_name, dist_matrix=dist_matrix, name_dict=name_dict)
        (curr_distr, ref_distr) = get_distribution_for_column(column_type='num', current=curr_distr_data, reference=ref_distr_data)
        return DiversityMetricResult(k=self.k, current_value=curr_value, current_distr=curr_distr, reference_value=ref_value, reference_distr=ref_distr)
class Query(object):
    """Generated gRPC client helper for the cosmos.authz.v1beta1 Query service."""

    def Grants(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        # Issues a unary-unary call to Query/Grants with protobuf
        # (de)serialization. NOTE(review): defined without `self` — in grpc
        # codegen these helpers carry @staticmethod; presumably the
        # decorator was stripped during extraction. Confirm.
        return grpc.experimental.unary_unary(request, target, '/cosmos.authz.v1beta1.Query/Grants', cosmos_dot_authz_dot_v1beta1_dot_query__pb2.QueryGrantsRequest.SerializeToString, cosmos_dot_authz_dot_v1beta1_dot_query__pb2.QueryGrantsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
def test_argparse_exec_ensemble_smoother_current_case():
    """Ensemble-smoother mode parses --current-case and dispatches to run_cli."""
    cli_args = [
        ENSEMBLE_SMOOTHER_MODE,
        '--current-case', 'test_case',
        '--target-case', 'test_case_smoother',
        'path/to/config.ert',
    ]
    parsed = ert_parser(None, cli_args)
    assert parsed.mode == ENSEMBLE_SMOOTHER_MODE
    assert parsed.current_case == 'test_case'
    assert parsed.func.__name__ == 'run_cli'
class ListContainingAll(_RichComparison):
    """Matcher that equals any list containing every element of *subset*."""

    def __init__(self, subset: List[AnyType]) -> None:
        if not isinstance(subset, list):
            raise ValueError(f"ListContainingAll(...) expects a 'list' as argument while '{type(subset).__name__}' was provided")
        self.subset = subset
        super().__init__(klass=List)

    def __eq__(self, other: List[AnyType]) -> bool:
        # Base class checks the type; then require subset containment.
        if not super().__eq__(other):
            return False
        return all(element in other for element in self.subset)

    def __repr__(self) -> str:
        details = f' subset={self.subset}' if self.subset is not None else ''
        return '<{} 0x{:02X}{}>'.format(type(self).__name__, id(self), details)
class Git(FileInterface):
    """File access layer that reads requirement artefacts from a git repository.

    Iterates the commits between a configured start and end revision and
    exposes each commit's files through nested FileInfo objects.
    """

    def _adapt_dir_path(self, directory):
        # Normalise a possibly relative path to an absolute one.
        if (not os.path.isabs(directory)):
            return os.path.abspath(directory)
        return directory

    def _extended_directory_check(self, directory):
        """Ensure *directory* lies inside the repository, locating the repo lazily."""
        tracer.debug('called: directory [%s]', directory)
        if (self.__repo_base_dir is None):
            self.__setup_repo(directory)
        if (not directory.startswith(self.__repo_base_dir)):
            raise RMTException(28, ('directory [%s] not in repository' % directory))
        return

    def _adapt_ext_path(self, directory):
        # Strip the repo base dir plus the path separator -> repo-relative path.
        len_repo_base_dir = (len(self.__repo_base_dir) + 1)
        return directory[len_repo_base_dir:]

    def __setup_repo(self, directory):
        """Walk up from *directory* until a git repository is found."""
        tracer.debug('called')
        repo_found = False
        while (len(directory) > 1):
            try:
                tracer.debug('using [%s] as sample directory', directory)
                self.__repo = git.Repo(directory)
                repo_found = True
                break
            except (git.exc.InvalidGitRepositoryError, git.exc.NoSuchPathError):
                tracer.debug('Sample directory [%s] does not exists', directory)
                directory = os.path.dirname(directory)
        if (not repo_found):
            # NOTE(review): `assert False` is stripped under -O; raising an
            # exception would be safer here.
            assert False
        # git_dir ends with '/.git'; dropping the last 5 chars yields the
        # working-tree base directory.
        self.__repo_base_dir = self.__repo.git_dir[:(- 5)]
        tracer.debug('repository base directory [%s]', self.__repo_base_dir)

    def __init__(self, config):
        """Read start/end revision and topic root from *config*; defer repo lookup."""
        tracer.info('called')
        cfg = Cfg(config)
        FileInterface.__init__(self, cfg)
        self.__start_vers = cfg.get_rvalue('start_vers')
        self.__end_vers = cfg.get_rvalue('end_vers')
        self.__topic_root_node = cfg.get_rvalue('topic_root_node')
        tracer.debug('start version [%s] end version [%s] topic root node [%s]', self.__start_vers, self.__end_vers, self.__topic_root_node)
        self.__dirs = {}
        # Repo is resolved lazily on first _extended_directory_check call.
        self.__repo_base_dir = None
        self.__repo = None
        self.__dirs = self._setup_directories(cfg)

    def get_commits(self):
        # Commits in the git revision range start..end.
        return self.__repo.iter_commits(((self.__start_vers + '..') + self.__end_vers))

    def get_timestamp(self, commit):
        # Author date, seconds since epoch.
        return commit.authored_date

    class FileInfo(Interface.FileInfo):
        """A single blob inside a commit, addressed by base dir + sub dir parts."""

        def __init__(self, base_dir, sub_dir, blob):
            # base_dir / sub_dir are lists of path components.
            Interface.FileInfo.__init__(self)
            self.__base_dir = base_dir
            self.__blob = blob
            self.__sub_dir = sub_dir
            self.__base_dirname = os.path.join(*self.__base_dir)
            self.__sub_dirname = ''
            if self.__sub_dir:
                self.__sub_dirname = os.path.join(*self.__sub_dir)
            tracer.debug(self)
            self.__filename = os.path.join(self.__base_dirname, self.__sub_dirname, self.__blob.name)

        def __str__(self):
            return ('base [%s] sub [%s] name [%s]' % (self.__base_dirname, self.__sub_dirname, self.__blob.name))

        def get_filename(self):
            return self.__filename

        def get_vcs_id(self):
            # The blob SHA uniquely identifies this content revision.
            return self.__blob.hexsha

        def get_filename_sub_part(self):
            # Path relative to the base directory.
            return os.path.join(self.__sub_dirname, self.__blob.name)

        def get_content(self):
            return self.__blob.data_stream.read().decode('utf-8')

    def __get_tree_direct(self, base_tree, directory):
        """Return the immediate subtree named *directory*; raise RMTException(108) if absent."""
        for tree in base_tree.trees:
            if (tree.name == directory):
                return tree
        raise RMTException(108, ('directory entry [%s] not found in tree [%s].' % (directory, base_tree.name)))

    def __get_tree(self, base_tree, dir_split):
        # Recursively descend one path component at a time.
        tree = self.__get_tree_direct(base_tree, dir_split[0])
        if (len(dir_split) > 1):
            return self.__get_tree(tree, dir_split[1:])
        return tree

    def __get_file_infos_from_tree_rec(self, tree, base_dir, sub_dir):
        """Collect FileInfo objects for every blob under *tree*, recursively."""
        tracer.info('called: base [%s] sub [%s]', base_dir, sub_dir)
        result = []
        for blob in tree.blobs:
            result.append(Git.FileInfo(base_dir, sub_dir, blob))
        for stree in tree.trees:
            # Copy so each recursion branch owns its own sub-dir list.
            sub_sub_dir = copy.deepcopy(sub_dir)
            sub_sub_dir.append(stree.name)
            result.extend(self.__get_file_infos_from_tree_rec(stree, base_dir, sub_sub_dir))
        return result

    def __get_file_infos_from_tree(self, tree, base_dir):
        tracer.info('called: base [%s]', base_dir)
        base_dir_split = base_dir.split('/')
        ltree = self.__get_tree(tree, base_dir_split)
        return self.__get_file_infos_from_tree_rec(ltree, base_dir_split, [])

    def get_vcs_id_with_type(self, commit, dir_type):
        """Return a hashable id combining the subtree SHAs for *dir_type*."""
        tracer.debug('called: commit [%s] directory type [%s]', commit, dir_type)
        result = []
        for directory in self.__dirs[dir_type]:
            dir_split = directory.split('/')
            ltree = self.__get_tree(commit.tree, dir_split)
            result.append(ltree.hexsha)
        return ObjectCache.create_hashable(result)

    def get_file_infos(self, commit, dir_type):
        """Return FileInfos for all files of *dir_type* in *commit* ([] when unset)."""
        tracer.debug('called: commit [%s] directory type [%s]', commit, dir_type)
        if (dir_type not in self.__dirs):
            tracer.debug('Skipping non existent directory for [%s]', dir_type)
            return []
        result = []
        for directory in self.__dirs[dir_type]:
            result.extend(self.__get_file_infos_from_tree(commit.tree, directory))
        return result

    def __get_blob_direct(self, base_tree, filename):
        # Linear scan of the tree's blobs; None when not present.
        for blob in base_tree.blobs:
            if (blob.name == filename):
                return blob
        return None

    def __get_blob(self, commit, base_dir, sub_path):
        """Resolve *sub_path* (relative to *base_dir*) to a blob, or None."""
        assert sub_path
        full_path = base_dir.split('/')
        sub_path_split = sub_path.split('/')
        if (len(sub_path_split) > 1):
            full_path.extend(sub_path_split[:(- 1)])
        ltree = self.__get_tree(commit.tree, full_path)
        return self.__get_blob_direct(ltree, sub_path_split[(- 1)])

    def get_file_info_with_type(self, commit, file_type, filename):
        """Search every directory of *file_type* for *filename*; raise RMTException(111) if missing."""
        tracer.debug('called: commit [%s] file type [%s] filename [%s]', commit, file_type, filename)
        for directory in self.__dirs[file_type]:
            tracer.debug('searching in directory [%s]', directory)
            blob = self.__get_blob(commit, directory, filename)
            if (blob is not None):
                dir_split = directory.split('/')
                sub_split = os.path.dirname(filename).split('/')
                return Git.FileInfo(dir_split, sub_split, blob)
        raise RMTException(111, ('file [%s] in [%s] base file not found' % (filename, file_type)))
class Query():
    """Declarative description of one HTTP/WS/gRPC test request.

    Captures the request parameters plus expectations (status code,
    error) and serializes them for the test driver via as_json().
    """

    def __init__(self, url, expected=None, method='GET', headers=None, messages=None, insecure=False, skip=None, xfail=None, phase=1, debug=False, sni=False, error=None, client_crt=None, client_key=None, client_cert_required=False, ca_cert=None, grpc_type=None, cookies=None, ignore_result=False, body=None, minTLSv='', maxTLSv='', cipherSuites=None, ecdhCurves=None):
        self.method = method
        self.url = url
        self.headers = headers
        self.body = body
        self.cookies = cookies
        self.messages = messages
        self.insecure = insecure
        self.minTLSv = minTLSv
        self.maxTLSv = maxTLSv
        # FIX: mutable default arguments replaced by None sentinels; the
        # attributes still default to fresh empty lists.
        self.cipherSuites = cipherSuites if cipherSuites is not None else []
        self.ecdhCurves = ecdhCurves if ecdhCurves is not None else []
        if expected is None:
            # Default expectation: 101 (switching protocols) for websockets,
            # otherwise a plain 200.
            self.expected = 101 if url.lower().startswith('ws:') else 200
        else:
            self.expected = expected
        self.skip = skip
        self.xfail = xfail
        self.ignore_result = ignore_result
        self.phase = phase
        self.parent = None
        self.result = None
        self.debug = debug
        self.sni = sni
        self.error = error
        self.client_cert_required = client_cert_required
        self.client_cert = client_crt
        self.client_key = client_key
        self.ca_cert = ca_cert
        assert (grpc_type in (None, 'real', 'bridge', 'web')), grpc_type
        self.grpc_type = grpc_type

    def as_json(self):
        """Serialize the query for the external test driver.

        Only truthy / explicitly-set fields are emitted.
        """
        assert self.parent
        result = {'test': self.parent.path.name, 'id': id(self), 'url': self.url, 'insecure': self.insecure}
        if self.sni:
            result['sni'] = self.sni
        if self.method:
            result['method'] = self.method
        # BUG FIX: these two were gated on `if self.method` (copy-paste),
        # so empty TLS bounds were always serialized; gate on the fields
        # themselves like every other optional field.
        if self.maxTLSv:
            result['maxTLSv'] = self.maxTLSv
        if self.minTLSv:
            result['minTLSv'] = self.minTLSv
        if self.cipherSuites:
            result['cipherSuites'] = self.cipherSuites
        if self.ecdhCurves:
            result['ecdhCurves'] = self.ecdhCurves
        if self.headers:
            result['headers'] = self.headers
        if (self.body is not None):
            result['body'] = encode_body(self.body)
        if self.cookies:
            result['cookies'] = self.cookies
        if (self.messages is not None):
            result['messages'] = self.messages
        if (self.client_cert is not None):
            result['client_cert'] = self.client_cert
        if (self.client_key is not None):
            result['client_key'] = self.client_key
        if (self.ca_cert is not None):
            result['ca_cert'] = self.ca_cert
        if self.client_cert_required:
            result['client_cert_required'] = self.client_cert_required
        if self.grpc_type:
            result['grpc_type'] = self.grpc_type
        return result
class tagDEC(Structure):
    """ctypes mirror of the Win32 DECIMAL structure (96-bit scaled integer)."""

    _fields_ = [
        ('wReserved', c_ushort),
        ('scale', c_ubyte),   # number of decimal digits after the point
        ('sign', c_ubyte),    # non-zero => negative
        ('Hi32', c_ulong),    # bits 64..95 of the magnitude
        ('Lo64', c_ulonglong),  # bits 0..63 of the magnitude
    ]

    def as_decimal(self):
        """Convert this DECIMAL to a Python decimal.Decimal."""
        # Hi32 and Lo64 occupy disjoint bit ranges, so OR == addition here.
        magnitude = (self.Hi32 << 64) | self.Lo64
        sign_prefix = '-' if self.sign else ''
        return decimal.Decimal(f'{sign_prefix}{magnitude}e-{self.scale}')
class OptionPlotoptionsAreasplinerangeSonificationContexttracksMappingHighpass(Options):
    """Accessors for `plotOptions.areasplinerange.sonification.contextTracks.mapping.highpass`.

    Auto-generated Highcharts option wrapper; each method returns a typed
    sub-options object bound under the corresponding key.
    """

    def frequency(self) -> 'OptionPlotoptionsAreasplinerangeSonificationContexttracksMappingHighpassFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsAreasplinerangeSonificationContexttracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsAreasplinerangeSonificationContexttracksMappingHighpassResonance':
        return self._config_sub_data('resonance', OptionPlotoptionsAreasplinerangeSonificationContexttracksMappingHighpassResonance)
def export_petromod_binary(self, mfile, pmd_dataunits):
    """Export the surface to PetroMod binary map format.

    Parameters
    ----------
    mfile:
        Open file wrapper providing ``get_cfhandle()`` / ``cfclose()``.
    pmd_dataunits:
        Tuple ``(unitd, unitz)`` of integer distance / depth unit codes;
        malformed input falls back to the defaults (15, 10) with a warning.

    Raises
    ------
    ValueError
        If either unit code is zero or negative.
    """
    import warnings  # local import keeps this fix self-contained

    validunits = False
    unitd = 15
    unitz = 10
    if isinstance(pmd_dataunits, tuple) and (len(pmd_dataunits) == 2):
        (unitd, unitz) = pmd_dataunits
        if isinstance(unitd, int) and isinstance(unitz, int):
            if (unitd in PMD_DATAUNITDISTANCE) and (unitz in PMD_DATAUNITZ):
                validunits = True
            if (unitd <= 0) or (unitz <= 0):
                raise ValueError('Values for pmd_dataunits cannot be negative!')
    if not validunits:
        # BUG FIX: the UserWarning was previously instantiated but never
        # emitted; actually issue it so callers see the fallback.
        warnings.warn(
            f'Format or values for pmd_dataunits out of range: Pair should be in ranges {PMD_DATAUNITDISTANCE} and {PMD_DATAUNITZ}',
            UserWarning,
        )
    undef = 99999
    # PetroMod header: comma-separated key=value description string.
    dsc = 'Content=Map,'
    dsc += f'DataUnitDistance={unitd},'
    dsc += f'DataUnitZ={unitz},'
    dsc += f'GridNoX={self.ncol},'
    dsc += f'GridNoY={self.nrow},'
    dsc += f'GridStepX={self.xinc},'
    dsc += f'GridStepY={self.yinc},'
    dsc += 'MapType=GridMap,'
    dsc += f'OriginX={self.xori},'
    dsc += f'OriginY={self.yori},'
    dsc += f'RotationAngle={self.rotation},'
    dsc += f'RotationOriginX={self.xori},'
    dsc += f'RotationOriginY={self.yori},'
    dsc += f'Undefined={undef},'
    dsc += 'Version=1.0'
    # Replace masked cells with the undefined sentinel before export.
    values = np.ma.filled(self.values1d, fill_value=undef)
    _cxtgeo.surf_export_petromod_bin(mfile.get_cfhandle(), dsc, values.astype(np.float64))
    mfile.cfclose()
def extractEggsaaaanWordpressCom(item):
    """Map a feed item from eggsaaaan.wordpress.com to a release message.

    Returns None for previews/untagged chapters, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsAreasplinerangeLabel(Options):
    """Accessors for `plotOptions.areasplinerange.label`.

    Auto-generated Highcharts option wrapper; getter defaults mirror the
    Highcharts documented defaults.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name; the `@property` / `@<name>.setter` decorators were presumably
    stripped during extraction — as written here, each second `def` silently
    replaces the first. Confirm against the generator's original output.
    """

    def boxesToAvoid(self):
        return self._config_get(None)

    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    def connectorAllowed(self):
        return self._config_get(False)

    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    def connectorNeighbourDistance(self):
        return self._config_get(24)

    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def format(self):
        return self._config_get('undefined')

    def format(self, text: str):
        self._config(text, js_type=False)

    def formatter(self):
        return self._config_get('undefined')

    def formatter(self, value: Any):
        self._config(value, js_type=False)

    def maxFontSize(self):
        return self._config_get(None)

    def maxFontSize(self, num: float):
        self._config(num, js_type=False)

    def minFontSize(self):
        return self._config_get(None)

    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    def onArea(self):
        return self._config_get(None)

    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    def style(self) -> 'OptionPlotoptionsAreasplinerangeLabelStyle':
        # Nested sub-options object for label styling.
        return self._config_sub_data('style', OptionPlotoptionsAreasplinerangeLabelStyle)

    def useHTML(self):
        return self._config_get(False)

    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
def test_trie_fog_nearest_unknown_fully_explored():
    """A fully-explored fog raises PerfectVisibility for any nearest_unknown query."""
    empty_prefix = ()
    # Exploring the root prefix with no children left marks everything known.
    fully_explored = HexaryTrieFog().explore(empty_prefix, ())
    for probe_key in ((), (0,)):
        with pytest.raises(PerfectVisibility):
            fully_explored.nearest_unknown(probe_key)
def balls_that_hit_cushion(shot: System, exclude: Optional[Set[str]]=None) -> Set[str]:
    """Return the ids of balls (minus *exclude*) that hit any cushion in *shot*."""
    excluded = exclude if exclude is not None else set()
    candidate_ids = [ball.id for ball in shot.balls.values() if ball.id not in excluded]
    # Keep only linear/circular cushion events involving the candidates.
    cushion_events = filter_events(
        shot.events,
        by_type([EventType.BALL_LINEAR_CUSHION, EventType.BALL_CIRCULAR_CUSHION]),
        by_ball(candidate_ids),
    )
    return {event.agents[0].id for event in cushion_events}
class AcceptHeaderTests(unittest.TestCase):
    """parse_accept_header groups media types by precedence (most specific first)."""

    def test_parse_simple_accept_header(self):
        groups = parse_accept_header('*/*, application/json')
        expected = [{MediaType('application/json')}, {MediaType('*/*')}]
        self.assertEqual(groups, expected)

    def test_parse_complex_accept_header(self):
        header = 'application/xml; schema=foo, application/json; q=0.9, application/xml, */*'
        groups = parse_accept_header(header)
        expected = [
            {MediaType('application/xml; schema=foo')},
            {MediaType('application/json; q=0.9'), MediaType('application/xml')},
            {MediaType('*/*')},
        ]
        self.assertEqual(groups, expected)
def test_builder_with_init_manifest(owned_package, dummy_ipfs_backend):
    # owned_package fixture supplies (project root, expected manifest, compiler output).
    (root, expected, compiler_output) = owned_package
    ipfs_backend = get_ipfs_backend()
    # Build the manifest by threading init_manifest through each builder
    # step; pin_source uploads the 'Owned' source via the dummy IPFS backend.
    manifest = build(init_manifest(package_name='owned', version='1.0.0'), authors('Piper Merriam <>'), description("Reusable contracts which implement a privileged 'owner' model for authorization."), keywords('authorization'), license('MIT'), links(documentation='ipfs://QmUYcVzTfSwJoigggMxeo2g5STWAgJdisQsqcXHws7b1FW'), pin_source('Owned', compiler_output, ipfs_backend, root), validate())
    assert (manifest == expected)
# FIX: the decorator was truncated to a bare `.parametrize(` (invalid
# syntax); restore the full `@pytest.mark.parametrize` form.
@pytest.mark.parametrize('from_converter,to_converter', ((identity, identity), (hex_to_bytes, identity), (identity, hex_to_bytes), (hex_to_bytes, hex_to_bytes)))
def test_sign_and_send_raw_middleware_with_byte_addresses(w3_dummy, from_converter, to_converter):
    """Signing middleware intercepts eth_sendTransaction for hex and byte addresses."""
    private_key = PRIVATE_KEY_1
    from_ = from_converter(ADDRESS_1)
    to_ = to_converter(ADDRESS_2)
    w3_dummy.middleware_onion.add(construct_sign_and_send_raw_middleware(private_key))
    actual = w3_dummy.manager.request_blocking('eth_sendTransaction', [{'to': to_, 'from': from_, 'gas': 21000, 'gasPrice': 0, 'value': 1, 'nonce': 0}])
    raw_txn = actual[1][0]
    actual_method = actual[0]
    # The middleware must rewrite the call into a raw, signed transaction.
    assert (actual_method == 'eth_sendRawTransaction')
    assert is_hexstr(raw_txn)
class Plugin(plugin.PluginProto):
    """RPIEasy plugin for the DS18B20 1-Wire temperature sensor (sysfs w1 bus)."""

    PLUGIN_ID = 4
    PLUGIN_NAME = 'Environment - DS18b20'
    PLUGIN_VALUENAME1 = 'Temperature'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_W1
        self.vtype = rpieGlobals.SENSOR_TYPE_SINGLE
        self.readinprogress = 0  # guard flag against overlapping reads
        self.valuecount = 1
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = False
        self.formulaoption = True

    def plugin_init(self, enableplugin=None):
        """Validate the configured device address and mark the plugin initialized."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        # taskdevicepluginconfig[0] holds the selected w1 device address.
        if ((str(self.taskdevicepluginconfig[0]) == '0') or (str(self.taskdevicepluginconfig[0]).strip() == '')):
            self.initialized = False
            if (self.enabled and enableplugin):
                misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG, 'Dallas device can not be initialized!')
        else:
            self.ports = str(self.taskdevicepluginconfig[0])
            self.initialized = True
            self.readinprogress = 0

    def webform_load(self):
        """Render the device-address selector on the task settings web form."""
        choice1 = self.taskdevicepluginconfig[0]
        options = self.find_dsb_devices()
        if (len(options) > 0):
            webserver.addHtml('<tr><td>Device Address:<td>')
            webserver.addSelector_Head('p004_addr', True)
            for o in range(len(options)):
                webserver.addSelector_Item(options[o], options[o], (str(options[o]) == str(choice1)), False)
            webserver.addSelector_Foot()
        webserver.addFormNote("You have to setup one pin (at least) for <b>1WIRE</b> type at <a href='pinout'>pinout settings page</a> before use!")
        return True

    def webform_save(self, params):
        """Persist the selected device address and re-initialize the plugin."""
        par = webserver.arg('p004_addr', params)
        self.taskdevicepluginconfig[0] = str(par)
        self.plugin_init()
        return True

    def plugin_read(self):
        """Read the sensor once; return True when a value was served."""
        result = False
        if (self.initialized and (self.readinprogress == 0) and self.enabled):
            self.readinprogress = 1
            try:
                (succ, temp) = self.read_temperature()
                # NOTE(review): read_temperature() already divides by 1000,
                # so this looks like a raw 12-bit sign fix-up (2048..4095 ->
                # negative) applied to a Celsius float; it would only ever
                # trigger on absurd values — confirm the intent.
                if (temp > 2048):
                    temp = (temp - 4096)
                if succ:
                    self.set_value(1, temp, True)
                else:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Dallas read error!')
            except Exception as e:
                # Any read failure disables the plugin until re-init.
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Dallas read error! ' + str(e)))
                self.enabled = False
            self._lastdataservetime = rpieTime.millis()
            result = True
            self.readinprogress = 0
        return result

    def find_dsb_devices(self):
        """Return the ids of compatible 1-Wire slaves found under sysfs."""
        rlist = []
        try:
            devlist = glob.glob('/sys/bus/w1/devices/*')
            if (len(devlist) > 0):
                for d in devlist:
                    td = d.split('/')
                    tdname = td[(len(td) - 1)]
                    if ('-' in tdname):
                        tf = tdname.split('-')[0].lower()
                        # Family-code prefixes of supported sensors
                        # (presumably DS18S20/DS1822/DS18B20/etc — confirm).
                        if (tf in ['10', '22', '28', '3b', '42']):
                            rlist.append(tdname)
            else:
                rlist = []
        except:
            # sysfs unavailable (no 1-Wire bus) -> no devices.
            rlist = []
        return rlist

    def read_temperature(self):
        """Return (success, temperature in deg C) parsed from the w1_slave file."""
        lines = []
        try:
            with open((('/sys/bus/w1/devices/' + str(self.taskdevicepluginconfig[0])) + '/w1_slave')) as f:
                lines = f.readlines()
            # Expected format: CRC line ending in 'YES', then 't=<millideg>'.
            if (len(lines) != 2):
                return (False, 0)
            if ('YES' not in lines[0]):
                return (False, 0)
            d = lines[1].strip().split('=')
            if (len(d) != 2):
                return (False, 0)
        except:
            return (False, 0)
        return (True, (float(d[1]) / 1000.0))
class BranchesLogic(object):
    """Query/creation helpers for DistGitBranch rows."""

    def get_or_create(cls, name, session=None):
        """Return the DistGitBranch named *name*, creating it if missing.

        A newly created branch is added to the session but NOT committed;
        the caller owns the transaction.

        NOTE(review): the first parameter is named `cls` but no
        @classmethod decorator is visible here — presumably stripped or
        applied elsewhere; confirm against callers.
        """
        if (not session):
            session = db.session
        item = session.query(models.DistGitBranch).filter_by(name=name).first()
        if item:
            return item
        branch = models.DistGitBranch()
        branch.name = name
        session.add(branch)
        return branch
class OptionSeriesWindbarbDragdropDraghandle(Options):
    """Accessors for `series.windbarb.dragDrop.dragHandle`.

    Auto-generated Highcharts option wrapper; getter defaults mirror the
    Highcharts documented defaults.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name; the `@property` / `@<name>.setter` decorators were presumably
    stripped during extraction — as written here, each second `def` silently
    replaces the first. Confirm against the generator's original output.
    """

    def className(self):
        return self._config_get('highcharts-drag-handle')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get('#fff')

    def color(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        return self._config_get('rgba(0, 0, 0, 0.6)')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(901)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
class _FiveSubstepsLimitWrapper(TimeLimitWrapper):
    """Env wrapper that forces `done` once five sub-steps have elapsed."""

    def __init__(self, env: BaseEnv):
        super().__init__(env)
        self.elapsed_sub_steps = 0

    def step(self, action: Any) -> Tuple[(Any, Any, bool, Dict[(Any, Any)])]:
        observation, reward, done, info = self.env.step(action)
        self.elapsed_sub_steps += 1
        limit_reached = self.elapsed_sub_steps >= 5
        return (observation, reward, done or limit_reached, info)

    def reset(self) -> Any:
        # Restart the sub-step budget along with the wrapped env.
        self.elapsed_sub_steps = 0
        return self.env.reset()
class TestCase(unittest.TestCase):
    """Regression tests: HasTraits objects must not leak through gc cycles."""

    def _simple_cycle_helper(self, foo_class):
        """Build a two-object reference cycle of *foo_class* and check gc reclaims it."""
        first = foo_class()
        second = foo_class(child=first)
        first.child = second
        foo_ids = [id(first), id(second)]
        del first, second
        gc.collect()
        # NOTE(review): comparing by id() assumes those ids are not reused
        # by fresh objects between collect() and this scan — confirm this
        # stays reliable on the targeted interpreter.
        all_ids = [id(obj) for obj in gc.get_objects()]
        for foo_id in foo_ids:
            self.assertTrue((foo_id not in all_ids))

    def test_simple_cycle_oldstyle_class(self):
        class Foo():
            def __init__(self, child=None):
                self.child = child
        self._simple_cycle_helper(Foo)

    def test_simple_cycle_newstyle_class(self):
        class Foo(object):
            def __init__(self, child=None):
                self.child = child
        self._simple_cycle_helper(Foo)

    def test_simple_cycle_hastraits(self):
        class Foo(HasTraits):
            child = Any
        self._simple_cycle_helper(Foo)

    def test_reference_to_trait_dict(self):
        """The instance must be discoverable as a referrer of its own __dict__."""
        class Foo(HasTraits):
            child = Any
        foo = Foo()
        # presumably gives any deferred bookkeeping a chance to settle
        # before inspecting referrers — confirm why this sleep is needed.
        time.sleep(0.1)
        referrers = gc.get_referrers(foo.__dict__)
        self.assertTrue((len(referrers) > 0))
        self.assertTrue((foo in referrers))

    def test_delegates_to(self):
        """Deleting a DelegatesTo holder must leave no live Delegates instances."""
        class Base(HasTraits):
            i = Int
        class Delegates(HasTraits):
            b = Instance(Base)
            i = DelegatesTo('b')
        b = Base()
        d = Delegates(b=b)
        del d
        # Several full collections: delegation wiring can need more than one pass.
        for i in range(3):
            gc.collect(2)
        ds = [obj for obj in gc.get_objects() if isinstance(obj, Delegates)]
        self.assertEqual(ds, [])
class Inputs():
def __init__(self, ui):
    # Keep a reference to the owning page so every input factory below
    # attaches its component to the same report page.
    self.page = ui.page
def d_text(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), tooltip: str=None, html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Input:
    """Build a basic text input attached to the page."""
    # Normalise the size tuples before constructing the component.
    width = Arguments.size(width, unit='px')
    height = Arguments.size(height, unit='px')
    component = html.HtmlInput.Input(self.page, text, placeholder, width, height, html_code, options or {}, attrs or {}, profile)
    component.style.css.margin_bottom = '2px'
    html.Html.set_component_skin(component)
    if tooltip:
        component.tooltip(tooltip)
    return component
def d_radio(self, flag: bool=False, group_name: str=None, placeholder: str='', tooltip: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputRadio:
    """Build a radio input; *group_name* links radios into one group."""
    component = html.HtmlInput.InputRadio(self.page, flag, group_name, placeholder, width, height, html_code, options or {}, attrs or {}, profile)
    html.Html.set_component_skin(component)
    if tooltip:
        component.tooltip(tooltip)
    return component
def d_search(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Input:
    """Build a search input (HTML type=search).

    FIX: set attrs['type'] BEFORE constructing the component — previously
    the update happened after construction, so the attribute was lost if
    the Input component copies `attrs`; this also matches how the sibling
    password() and file() factories set their type.
    """
    attrs = (attrs or {})
    attrs.update({'type': 'search'})
    html_search = html.HtmlInput.Input(self.page, text, placeholder, width, height, html_code, options, attrs, profile)
    html.Html.set_component_skin(html_search)
    if tooltip:
        html_search.tooltip(tooltip)
    return html_search
def password(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Input:
attrs = (attrs or {})
attrs.update({'type': 'password'})
component = html.HtmlInput.Input(self.page, text, placeholder, width, height, html_code, options, attrs, profile)
html.Html.set_component_skin(component)
if tooltip:
component.tooltip(tooltip)
return component
def file(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputFile:
attrs = (attrs or {})
component = html.HtmlInput.InputFile(self.page, text, placeholder, width, height, html_code, options, attrs, profile)
html.Html.set_component_skin(component)
if tooltip:
component.tooltip(tooltip)
return component
def d_time(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(139, 'px'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputTime:
dflt_options = {'timeFormat': 'h:i:s'}
dflt_options.update((options or {}))
html_input_t = html.HtmlInput.InputTime(self.page, text, placeholder, width, height, html_code, dflt_options, (attrs or {}), profile)
html.Html.set_component_skin(html_input_t)
if tooltip:
html_input_t.tooltip(tooltip)
return html_input_t
def d_date(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(140, 'px'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputDate:
html_date = html.HtmlInput.InputDate(self.page, text, placeholder, width, height, html_code, options, (attrs or {}), profile)
html.Html.set_component_skin(html_date)
if tooltip:
html_date.tooltip(tooltip)
return html_date
def d_int(self, value: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputInteger:
attrs = (attrs or {})
attrs.update({'type': 'number'})
html_integer = html.HtmlInput.InputInteger(self.page, value, placeholder, width, height, html_code, options, attrs, profile)
html.Html.set_component_skin(html_integer)
if tooltip:
html_integer.tooltip(tooltip)
return html_integer
def d_range(self, value, min_val: float=0, max_val: float=100, step: float=1, placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), tooltip: str=None, html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputRange:
attrs = (attrs or {})
attrs.update({'type': 'range'})
html_range = html.HtmlInput.InputRange(self.page, value, min_val, max_val, step, placeholder, width, height, html_code, (options or {'background': False}), attrs, profile)
html.Html.set_component_skin(html_range)
if tooltip:
html_range.tooltip(tooltip)
return html_range
def _output(self, value: str='', options: types.OPTION_TYPE=None, profile: Optional[types.PROFILE_TYPE]=False) -> html.HtmlInput.Output:
html_output = html.HtmlInput.Output(self.page, value, options=options, profile=profile)
html.Html.set_component_skin(html_output)
return html_output
def textarea(self, text: str='', width: types.SIZE_TYPE=(100, '%'), rows: int=5, placeholder: str=None, background_color: str=None, html_code: str=None, options: types.OPTION_TYPE=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.TextArea:
dflt_options = {'spellcheck': False, 'selectable': False}
dflt_options.update((options or {}))
html_t_area = html.HtmlInput.TextArea(self.page, text, width, rows, placeholder, background_color, html_code, dflt_options, profile)
html.Html.set_component_skin(html_t_area)
if tooltip:
html_t_area.tooltip(tooltip)
return html_t_area
def autocomplete(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.AutoComplete:
options = (options or {})
attrs = (attrs or {})
html_input = html.HtmlInput.AutoComplete(self.page, text, placeholder, width, height, html_code, options, attrs, profile)
html_input.style.css.text_align = 'left'
html_input.style.css.padding_left = 5
html.Html.set_component_skin(html_input)
if tooltip:
html_input.tooltip(tooltip)
return html_input
def input(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Input:
component = self.d_text(text=text, placeholder=placeholder, width=width, height=height, tooltip=tooltip, html_code=html_code, options=options, attrs=attrs, profile=profile)
html.Html.set_component_skin(component)
return component
def left(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Input:
component = self.d_text(text=text, placeholder=placeholder, width=width, height=height, tooltip=tooltip, html_code=html_code, options=options, attrs=attrs, profile=profile)
component.style.css.text_align = 'left'
component.style.css.padding_left = 5
html.Html.set_component_skin(component)
return component
def hidden(self, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Input:
component = self.d_text(text=text, placeholder=placeholder, width=width, height=height, tooltip=tooltip, html_code=html_code, options=options, attrs=attrs, profile=profile)
component.style.css.display = None
html.Html.set_component_skin(component)
return component
def checkbox(self, flag: bool, label: str='', group_name: str=None, width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, attrs: dict=None, tooltip: str='', profile: types.PROFILE_TYPE=None) -> html.HtmlInput.InputCheckbox:
width = Arguments.size(width, unit='%')
height = Arguments.size(height, unit='px')
options = (options or {})
attrs = (attrs or {})
component = html.HtmlInput.InputCheckbox(self.page, flag, label, group_name, width, height, html_code, options, attrs, profile)
html.Html.set_component_skin(component)
if tooltip:
component.tooltip(tooltip)
return component
def radio(self, flag: bool, label: str=None, group_name: str=None, icon: str=None, width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: types.OPTION_TYPE=None, tooltip: str=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Radio:
component = html.HtmlInput.Radio(self.page, flag, label, group_name, icon, width, height, html_code, helper, (options or {}), profile)
html.Html.set_component_skin(component)
if tooltip:
component.tooltip(tooltip)
return component
def editor(self, text: str='', language: str='python', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(300, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=None) -> html.HtmlTextEditor.Editor:
dflt_options = {'lineNumbers': True, 'mode': 'css', 'matchBrackets': True, 'styleActiveLine': True, 'autoRefresh': True}
if (options is not None):
dflt_options.update(options)
component = html.HtmlTextEditor.Editor(self.page, text, language, width, height, html_code, dflt_options, profile)
html.Html.set_component_skin(component)
return component
def cell(self, text: str='', language: str='python', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(60, 'px'), html_code: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=None) -> html.HtmlTextEditor.Cell:
dflt_options = {'lineNumbers': True, 'mode': language, 'matchBrackets': True, 'styleActiveLine': True, 'autoRefresh': True}
if (options is not None):
dflt_options.update(options)
component = html.HtmlTextEditor.Cell(self.page, text, language, width, height, html_code, dflt_options, profile)
html.Html.set_component_skin(component)
return component
def search(self, text: str='', placeholder: str='Search..', align: str='left', color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, tooltip: str=None, extensible: bool=False, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=None) -> html.HtmlInput.Search:
width = Arguments.size(width, unit='px')
height = Arguments.size(height, unit='px')
icon_details = self.page.icons.get('search')
dflt_options = {'icon': icon_details['icon'], 'position': 'left', 'select': True, 'border': 1}
if (options is not None):
dflt_options.update(options)
html_s = html.HtmlInput.Search(self.page, text, placeholder, color, width, height, html_code, tooltip, extensible, dflt_options, profile)
html_s.style.css.height = (self.page.body.style.globals.line_height + 5)
html_s.style.css.margin_bottom = 10
if (align == 'center'):
html_s.style.css.margin = 'auto'
html_s.style.css.display = 'block'
html.Html.set_component_skin(html_s)
return html_s
def label(self, label: str, text: str='', placeholder: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, tooltip: str=None, options: dict=None, attrs: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlContainer.Div:
label = self.page.ui.texts.label(label).css({'display': 'block', 'text-align': 'left', 'margin-top': '10px', 'position': 'absolute', 'z-index': '20px', 'font-size': ('%spx' % self.page.body.style.globals.font.header_size)})
html_input = html.HtmlInput.Input(self.page, text, placeholder, width, height, html_code, (options or {}), (attrs or {}), profile).css({'margin-top': '10px'})
div = self.page.ui.div([label, html_input])
div.input = html_input
div.label = label
html_input.on('focus', [("document.getElementById('%s').animate({'marginTop': ['10px', '-8px']}, {duration: 50, easing: 'linear', iterations: 1, fill: 'both'})" % label.htmlCode)])
html_input.on('blur', [("document.getElementById('%s').animate({'marginTop': ['-8px', '10px']}, {duration: 1000, easing: 'linear', iterations: 1, fill: 'both'})" % label.htmlCode)])
html.Html.set_component_skin(div)
if tooltip:
html_input.tooltip(tooltip)
return div
def filters(self, items: List[html.Html.Html]=None, button: html.Html.Html=None, width: types.SIZE_TYPE=('auto', ''), height: types.SIZE_TYPE=(60, 'px'), html_code: str=None, helper: str=None, options: dict=None, autocomplete: bool=False, profile: types.PROFILE_TYPE=None) -> html.HtmlContainer.Div:
options = (options or {})
container = self.page.ui.div(width=width)
container.select = self.page.ui.inputs.autocomplete(html_code=(('%s_select' % html_code) if (html_code is not None) else html_code), width=(Defaults.TEXTS_SPAN_WIDTH, 'px'))
container.select.style.css.text_align = 'left'
container.select.style.css.padding_left = 5
container.select.options.liveSearch = True
if autocomplete:
container.input = self.page.ui.inputs.autocomplete(html_code=(('%s_input' % html_code) if (html_code is not None) else html_code), width=(Defaults.INPUTS_MIN_WIDTH, 'px'), options={'select': True})
else:
container.input = self.page.ui.input(html_code=(('%s_input' % html_code) if (html_code is not None) else html_code), width=(Defaults.INPUTS_MIN_WIDTH, 'px'), options={'select': True})
container.input.style.css.text_align = 'left'
container.input.style.css.padding_left = 5
container.input.style.css.margin_left = 10
if (button is None):
button = self.page.ui.buttons.colored('add')
button.style.css.margin_left = 10
container.button = button
container.clear = self.page.ui.icon('times', options=options)
container.clear.style.css.color = self.page.theme.danger.base
container.clear.style.css.margin_left = 20
container.clear.tooltip('Clear all filters')
container.add(self.page.ui.div([container.select, container.input, container.button, container.clear]))
container.filters = self.page.ui.panels.filters(items, container.select.dom.content, (100, '%'), height, html_code, helper, options, profile)
container.add(container.filters)
container.clear.click([container.filters.dom.clear()])
container.button.click([container.filters.dom.add(container.input.dom.content, container.select.dom.content), container.input.js.empty()])
container.input.enter(container.button.dom.events.trigger('click'))
html.Html.set_component_skin(container)
return container |
class DataTypeConverter(ABC, Generic[T]):
    """Base class for converters between external values and values of type T.

    NOTE(review): ``to_value`` has no body here — presumably an abstract
    method whose ``@abstractmethod`` decorator/body was lost in extraction;
    the docstring below doubles as its body so the class stays importable.
    Confirm against upstream.
    """
    def __init__(self, name: str, empty_val: T):
        # Human-readable converter name, used in the truncate() warning.
        self.name = name
        # Canonical "empty" value for this data type.
        self.empty_val = empty_val
    def to_value(self, other: Any) -> Optional[T]:
        """Convert *other* to a value of type T (abstract — see class note)."""
    def empty_value(self) -> T:
        """Return the canonical empty value for this data type."""
        return self.empty_val
    def truncate(self, length: int, val: T) -> T:
        """Default truncation: warn and return *val* unchanged."""
        # NOTE(review): brace-style placeholder suggests structured logging
        # (e.g. loguru); stdlib logging would not interpolate '{}' like this.
        logger.warning('{} does not support length truncation. Using original masked value instead for update query.', self.name)
        return val
    def __eq__(self, other: object) -> bool:
        """Equality by full attribute dict; non-converters never compare equal."""
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable in Python 3 — confirm converters are never used as keys.
        if (not isinstance(other, DataTypeConverter)):
            return False
        return (self.__dict__ == other.__dict__)
class ImporterTest(ForsetiTestCase):
    """Tests for the inventory importer and its model_action_wrapper helper."""

    def setUp(self):
        """Create a fresh model backed by a copy of the test inventory DB."""
        db_connect = 'sqlite:///{}'.format(get_db_file_copy('forseti-test.db'))
        self.service_config = ServiceConfig(db_connect)
        self.source = 'INVENTORY'
        self.importer_cls = importer.by_source(self.source)
        self.model_manager = self.service_config.model_manager
        self.model_name = self.model_manager.create(name=self.source)
        (self.scoped_session, self.data_access) = self.model_manager.get(self.model_name)

    def _make_import_runner(self, session):
        """Build an import runner bound to *session* for the default model.

        Extracted because every test repeated this identical construction.
        """
        return self.importer_cls(
            session, session,
            self.model_manager.model(self.model_name, expunge=False, session=session),
            self.data_access, self.service_config,
            inventory_index_id=FAKE_DATETIME_TIMESTAMP)

    def _assert_even_segments(self, data_access, session, type_name):
        """Assert each resource full_name has an even number of path segments."""
        for policy in data_access.scanner_iter(session, type_name):
            self.assertFalse(len([_f for _f in policy.full_name.split('/') if _f]) % 2)

    def _run_wrapper(self, inventory_iter, flush_count=1):
        """Run model_action_wrapper over *inventory_iter* with mocked hooks.

        Returns (session, action, post, count) so tests can assert on each.
        """
        session = mock.Mock()
        session.flush = mock.Mock()
        action = mock.Mock()
        post = mock.Mock()
        import_runner = self._make_import_runner(session)
        count = import_runner.model_action_wrapper(inventory_iter, action, post, flush_count)
        return (session, action, post, count)

    def test_inventory_importer_basic(self):
        """A full import run produces well-formed names and expected counts."""
        with self.scoped_session as session:
            import_runner = self._make_import_runner(session)
            import_runner.run()
            # Well-formed full_names for every policy type.
            for type_name in ('iam_policy', 'crm_org_policy', 'crm_access_policy'):
                self._assert_even_segments(self.data_access, session, type_name)
            org_policies = list(self.data_access.scanner_iter(session, 'crm_org_policy'))
            expected_org_policies = 5
            self.assertEqual(expected_org_policies, len(org_policies))
            gcs_policies = list(self.data_access.scanner_iter(session, 'gcs_policy'))
            expected_gcs_policies = 2
            self.assertEqual(expected_gcs_policies, len(gcs_policies))
            expected_abc_user_accesses = [
                ('roles/appengine.appViewer', ['project/project3']),
                ('roles/appengine.codeViewer', ['project/project3']),
                ('roles/bigquery.dataViewer', ['dataset/project2:bq_test_ds']),
                ('roles/bigquery.dataViewer', ['dataset/project3:bq_test_ds1'])]
            abc_user_accesses = self.data_access.query_access_by_member(session, 'user/abc_', [])
            self.assertEqual(expected_abc_user_accesses, sorted(abc_user_accesses))
            model = self.model_manager.model(self.model_name)
            model_description = self.model_manager.get_description(self.model_name)
            self.assertIn(model.state, ['SUCCESS', 'PARTIAL_SUCCESS'],
                          ('Model state should be success or partial success: %s' % model.message))
            self.assertEqual(
                {'pristine': True, 'source': 'inventory',
                 'source_info': {'inventory_index_id': FAKE_DATETIME_TIMESTAMP},
                 'source_root': 'organization/', 'gsuite_enabled': True},
                model_description)

    def test_inventory_importer_composite_root(self):
        """Importing from a composite-root inventory yields the right model."""
        # This test uses its own DB copy and model, separate from setUp's.
        db_connect = 'sqlite:///{}'.format(get_db_file_copy('forseti-composite-test.db'))
        service_config = ServiceConfig(db_connect)
        model_manager = service_config.model_manager
        model_name = model_manager.create(name=self.source)
        (scoped_session, data_access) = model_manager.get(model_name)
        with scoped_session as session:
            import_runner = self.importer_cls(
                session, session,
                model_manager.model(model_name, expunge=False, session=session),
                data_access, service_config,
                inventory_index_id=FAKE_DATETIME_TIMESTAMP)
            import_runner.run()
            self._assert_even_segments(data_access, session, 'iam_policy')
            projects = list(data_access.scanner_iter(session, 'project'))
            self.assertEqual(2, len(projects))
            model = model_manager.model(model_name)
            model_description = model_manager.get_description(model_name)
            self.assertIn(model.state, ['SUCCESS', 'PARTIAL_SUCCESS'],
                          ('Model state should be success or partial success: %s' % model.message))
            self.assertEqual(
                {'pristine': True, 'source': 'inventory',
                 'source_info': {'inventory_index_id': FAKE_DATETIME_TIMESTAMP},
                 'source_root': 'composite_root/root', 'gsuite_enabled': False},
                model_description)

    def test_model_action_wrapper_post_action_called(self):
        """The post hook runs even for an empty inventory iterator."""
        (session, action, post, count) = self._run_wrapper([])
        post.assert_called_once()

    def test_model_action_wrapper_inventory_iter_tuple(self):
        """Tuple items are unpacked into positional action arguments."""
        (session, action, post, count) = self._run_wrapper([(1, 2)])
        action.assert_called_once_with(1, 2)
        self.assertEqual(1, count)
        self.assertTrue(post.called)
        session.flush.assert_called()
        self.assertEqual(session.flush.call_count, 1)

    def test_model_action_wrapper_multiple_inventory_iter_tuples(self):
        """Each tuple item triggers an unpacked call and a flush."""
        (session, action, post, count) = self._run_wrapper([(1, 2), (4, 5)])
        action.assert_has_calls([mock.call(1, 2), mock.call(4, 5)])
        self.assertEqual(2, count)
        self.assertTrue(post.called)
        session.flush.assert_called()
        self.assertEqual(session.flush.call_count, 2)

    def test_model_action_wrapper_inventory_iter_value(self):
        """Non-tuple items are passed through as a single argument."""
        (session, action, post, count) = self._run_wrapper(['not_tuple'])
        action.assert_called_once_with('not_tuple')
        self.assertEqual(1, count)
        self.assertTrue(post.called)
        session.flush.assert_called()
        self.assertEqual(session.flush.call_count, 1)

    def test_model_action_wrapper_multiple_inventory_iter_values(self):
        """Multiple non-tuple items each get their own call and flush."""
        (session, action, post, count) = self._run_wrapper(['data', 'data1'])
        action.assert_has_calls([mock.call('data'), mock.call('data1')])
        self.assertEqual(2, count)
        self.assertTrue(post.called)
        session.flush.assert_called()
        self.assertEqual(session.flush.call_count, 2)
def run(cmd, shell=False, env=None, timeout=None, timeinterval=1):
    """Execute *cmd*, capturing stderr into stdout.

    Returns a ``(returncode, stdout)`` tuple. On Python 2 a *timeout* is
    emulated by polling every *timeinterval* seconds; on Python 3 it is
    delegated to ``Popen.communicate``.
    """
    proc = Popen(cmd, shell=shell, stdout=PIPE, stderr=STDOUT, env=env)
    if not PY2:
        out, _ = proc.communicate(timeout=timeout)
        return (proc.poll(), out)
    # Python 2: communicate() has no timeout, so poll until the budget runs out.
    if timeout:
        remaining = timeout
        while remaining > 0 and proc.poll() is None:
            remaining -= timeinterval
            time.sleep(remaining * 0 + timeinterval)
        if proc.poll() is None:
            raise TimeoutExpired(cmd, remaining)
    out, _ = proc.communicate()
    return (proc.poll(), out)
def convert(color: 'Color', space: str) -> Tuple[('Space', Vector)]:
    """Convert *color*'s coordinates to the target *space*.

    Walks the precomputed conversion chain step by step and returns the
    final space object together with the converted coordinates.
    """
    # Each chain entry is (from_space, to_space, direction, adapt): *direction*
    # selects from_base vs to_base, *adapt* requests white-point adaptation.
    chain = color._get_convert_chain(color._space, space)
    coords = color.coords(nans=False)
    last = color._space
    for (a, b, direction, adapt) in chain:
        # NOTE(review): adaptation is applied before from_base but after
        # to_base — presumably so it always happens in the base space's
        # reference white; confirm against the color-space documentation.
        if (direction and adapt):
            coords = color.chromatic_adaptation(a.WHITE, b.WHITE, coords)
        coords = (b.from_base(coords) if direction else a.to_base(coords))
        if ((not direction) and adapt):
            coords = color.chromatic_adaptation(a.WHITE, b.WHITE, coords)
        last = b
    return (last, coords)
def get_playground_html(request_path: str, settings: str) -> str:
    """Render the bundled playground page.

    Loads the static HTML template shipped next to this module and injects
    the request path and the JSON-encoded settings into its placeholders.
    """
    template_path = pathlib.Path(__file__).parent / 'static/playground.html'
    with open(template_path) as template_file:
        page = template_file.read()
    page = page.replace('{{REQUEST_PATH}}', request_path)
    return page.replace('{{SETTINGS}}', json.dumps(settings))
class AbstractMultipleRunner():
    """Run a set of executor tasks under a selectable execution mode.

    Subclasses populate SUPPORTED_MODES and implement ``_make_tasks``.
    NOTE(review): ``_make_tasks`` has no body and several value-returning
    no-argument methods (``is_running``, ``num_failed``, ``failed``,
    ``not_failed``) look like abstract methods / properties whose decorators
    were lost in extraction — confirm against upstream.
    """
    # Mapping of mode name -> executor class implementing that mode.
    SUPPORTED_MODES: Dict[(str, Type[AbstractMultipleExecutor])] = {}
    def __init__(self, mode: str, fail_policy: ExecutorExceptionPolicies=ExecutorExceptionPolicies.propagate) -> None:
        """Validate *mode* and eagerly build the executor with its tasks."""
        if (mode not in self.SUPPORTED_MODES):
            raise ValueError(f'Unsupported mode: {mode}')
        self._mode: str = mode
        self._executor: AbstractMultipleExecutor = self._make_executor(mode, fail_policy)
        self._thread: Optional[Thread] = None
    def is_running(self) -> bool:
        """Whether the underlying executor is currently running."""
        return self._executor.is_running
    def start(self, threaded: bool=False) -> None:
        """Start the executor, optionally in a background daemon thread."""
        if threaded:
            self._thread = Thread(target=self._executor.start, daemon=True)
            self._thread.start()
        else:
            self._executor.start()
    def stop(self, timeout: Optional[float]=None) -> None:
        """Stop the executor and join the background thread, if any."""
        self._executor.stop()
        if (self._thread is not None):
            self._thread.join(timeout=timeout)
    def _make_executor(self, mode: str, fail_policy: ExecutorExceptionPolicies) -> AbstractMultipleExecutor:
        """Instantiate the executor class registered for *mode*."""
        executor_cls = self.SUPPORTED_MODES[mode]
        return executor_cls(tasks=self._make_tasks(), task_fail_policy=fail_policy)
    def _make_tasks(self) -> Sequence[AbstractExecutorTask]:
        """Create the tasks to execute (abstract — see class note)."""
    def num_failed(self) -> int:
        """Number of tasks that failed so far."""
        return self._executor.num_failed
    def failed(self) -> Sequence[Task]:
        """IDs of the failed tasks."""
        return [i.id for i in self._executor.failed_tasks]
    def not_failed(self) -> Sequence[Task]:
        """IDs of the tasks that did not fail."""
        return [i.id for i in self._executor.not_failed_tasks]
    def try_join_thread(self) -> None:
        """Block until the background thread finishes (thread mode only)."""
        if (self._thread is None):
            raise ValueError('Not started in thread mode.')
        # Join in short slices — presumably to keep the main thread responsive
        # to signals such as KeyboardInterrupt; confirm upstream intent.
        while self._thread.is_alive():
            self._thread.join(0.1)
class ErrorIndication(Exception):
    """Error indication identified by its camel-cased class name.

    Instances compare (and order) against plain strings through the derived
    identifier, while ``str()`` yields the human-readable description.
    """

    def __init__(self, descr=None):
        class_name = type(self).__name__
        # The identifier is the class name with its first letter lower-cased.
        self._value = class_name[:1].lower() + class_name[1:]
        self._descr = descr if descr else self._value

    def __eq__(self, other):
        return self._value == other

    def __ne__(self, other):
        return self._value != other

    def __lt__(self, other):
        return self._value < other

    def __le__(self, other):
        return self._value <= other

    def __gt__(self, other):
        return self._value > other

    def __ge__(self, other):
        return self._value >= other

    def __str__(self):
        return self._descr
class _FastOpcode(OpcodeAPI):
    """Lightweight opcode: charge a fixed gas cost, then run the logic function."""
    # Slots avoid a per-instance __dict__ — opcodes are created in bulk.
    __slots__ = ('logic_fn', 'mnemonic', 'gas_cost')
    def __init__(self, logic_fn: Callable[(..., Any)], mnemonic: str, gas_cost: int) -> None:
        # Underlying opcode implementation invoked after gas accounting.
        self.logic_fn = logic_fn
        self.mnemonic = mnemonic
        self.gas_cost = gas_cost
    def __call__(self, computation: ComputationAPI) -> None:
        """Charge the fixed gas cost (reported under the mnemonic), then execute."""
        computation.consume_gas(self.gas_cost, self.mnemonic)
        return self.logic_fn(computation)
    def as_opcode(cls: Type['_FastOpcode'], logic_fn: Callable[(..., Any)], mnemonic: str, gas_cost: int) -> OpcodeAPI:
        """Build an opcode instance from its parts.

        NOTE(review): first parameter is ``cls`` — presumably a @classmethod
        whose decorator was lost in extraction; as written it would receive
        the instance when looked up on one. Confirm against upstream.
        """
        return cls(logic_fn, mnemonic, gas_cost)
def test_hand_specified_quadrature():
    """Assembling with quadrature degree 0 must differ from degree 2."""
    mesh = UnitSquareMesh(5, 5)
    space = FunctionSpace(mesh, 'CG', 2)
    test_fn = TestFunction(space)
    form = conj(test_fn) * dx
    low_degree = assemble(form, form_compiler_parameters={'quadrature_degree': 0})
    high_degree = assemble(form, form_compiler_parameters={'quadrature_degree': 2})
    # Degree 0 under-integrates the quadratic basis, so the results differ.
    assert not np.allclose(low_degree.dat.data, high_degree.dat.data)
def test_aggregation_serialization():
    """Round-trip an AGGREGATION message through envelope encode/decode."""
    msg = AggregationMessage(
        message_id=1,
        dialogue_reference=(str(0), ''),
        target=0,
        performative=AggregationMessage.Performative.AGGREGATION,
        value=0,
        time='some_time',
        contributors=('address1', 'address2'),
        signature='some_multisignature',
    )
    msg.to = 'receiver'
    original_envelope = Envelope(to=msg.to, sender='sender', message=msg)
    decoded_envelope = Envelope.decode(original_envelope.encode())
    assert original_envelope.to == decoded_envelope.to
    assert original_envelope.sender == decoded_envelope.sender
    assert original_envelope.protocol_specification_id == decoded_envelope.protocol_specification_id
    # The decoded envelope's payload is presumably still serialized at this
    # point, hence the inequality before an explicit message decode.
    assert original_envelope.message != decoded_envelope.message
    decoded_msg = AggregationMessage.serializer.decode(decoded_envelope.message)
    decoded_msg.to = decoded_envelope.to
    decoded_msg.sender = decoded_envelope.sender
    assert msg == decoded_msg
def extractAdamantineDragonintheCrystalWorld(item):
    """Parser for 'Adamantine Dragon in the Crystal World' release titles.

    Returns None for previews or titles without release info, False when the
    item is not tagged for this series, and a release message otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    has_release_info = bool(chp or vol or frag)
    if not has_release_info or 'preview' in title.lower():
        return None
    if 'Crystal World' not in item['tags']:
        return False
    return buildReleaseMessageWithType(
        item, 'Adamantine Dragon in the Crystal World', vol, chp,
        frag=frag, postfix=postfix, tl_type='oel')
class DebuggerTextTestRunner(unittest.TextTestRunner):
    """TextTestRunner that drops into a post-mortem debugger on failure.

    The debugger is selected with the ``UNITTEST_DEBUG`` environment
    variable ('none', 'pdb', 'ipdb' or 'pudb').
    """
    debugger = os.environ.get('UNITTEST_DEBUG', 'none')
    test_result_class = DebuggerTextTestResult

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('verbosity', 2)
        super(DebuggerTextTestRunner, self).__init__(*args, **kwargs)

    # Bug fix: the debug hooks must be static — without @staticmethod,
    # ``getattr(self, 'debug_...')`` returns bound methods, so the instance
    # would be passed where the hook expects exc_info.
    @staticmethod
    def debug_none(exc_info):
        """No-op hook: do not enter a debugger."""
        pass

    @staticmethod
    def debug_pdb(exc_info):
        """Enter a pdb post-mortem session on the failing traceback."""
        import pdb
        pdb.post_mortem(exc_info[2])

    @staticmethod
    def debug_ipdb(exc_info):
        """Enter an ipdb post-mortem session on the failing traceback."""
        import ipdb
        ipdb.post_mortem(exc_info[2])

    @staticmethod
    def debug_pudb(exc_info):
        """Enter a pudb post-mortem session on the failing traceback."""
        import pudb
        pudb.post_mortem(exc_info[2], exc_info[1], exc_info[0])

    def _makeResult(self):
        # Pass the selected debug hook to the result class; unknown debugger
        # names fall back to the no-op hook.
        return self.test_result_class(
            self.stream, self.descriptions, self.verbosity,
            getattr(self, ('debug_%s' % self.debugger), self.debug_none))
def test_adding_a_extra_container():
    """An extraContainers entry must appear in the rendered deployment spec."""
    config = "\nextraContainers: |\n - name: do-something\n   image: busybox\n   command: ['do', 'something']\n"
    rendered = helm_template(config)
    containers = rendered['deployment'][name]['spec']['template']['spec']['containers']
    expected = {'name': 'do-something', 'image': 'busybox', 'command': ['do', 'something']}
    assert expected in containers
def add_role(moderator: ModeratorModel, role: str):
    """Attach *role* to the given moderator, rejecting duplicates."""
    _check_roles([role])
    with session() as db_session:
        record = db_session.query(ModeratorOrmModel).filter_by(id=moderator.id).one()
        if role in record.roles:
            raise ArgumentError('Role already added')
        record.roles.append(role)
        db_session.commit()
class OptionPlotoptionsAreasplinerangeSonificationTracksMappingPitch(Options):
    """Pitch mapping options for areasplinerange sonification tracks.

    NOTE(review): each option appears twice — a getter returning the default
    and a setter writing via ``_config``. These are presumably ``@property``
    / ``@x.setter`` pairs whose decorators were lost in extraction; as
    written the second ``def`` of each pair shadows the first. Confirm
    against the generated Highcharts option bindings.
    """
    def mapFunction(self):
        """Getter: mapping function (no default)."""
        return self._config_get(None)
    def mapFunction(self, value: Any):
        """Setter: mapping function."""
        self._config(value, js_type=False)
    def mapTo(self):
        """Getter: data point property to map to (default 'y')."""
        return self._config_get('y')
    def mapTo(self, text: str):
        """Setter: data point property to map to."""
        self._config(text, js_type=False)
    def max(self):
        """Getter: highest note (default 'c6')."""
        return self._config_get('c6')
    def max(self, text: str):
        """Setter: highest note."""
        self._config(text, js_type=False)
    def min(self):
        """Getter: lowest note (default 'c2')."""
        return self._config_get('c2')
    def min(self, text: str):
        """Setter: lowest note."""
        self._config(text, js_type=False)
    def scale(self):
        """Getter: musical scale (no default)."""
        return self._config_get(None)
    def scale(self, value: Any):
        """Setter: musical scale."""
        self._config(value, js_type=False)
    def within(self):
        """Getter: range the mapping is computed within (default 'yAxis')."""
        return self._config_get('yAxis')
    def within(self, text: str):
        """Setter: range the mapping is computed within."""
        self._config(text, js_type=False)
class MlTradeDialogue(Dialogue):
    """Dialogue for the ML trade protocol (CFP -> TERMS -> ACCEPT -> DATA)."""
    # A dialogue may only start with a call-for-proposals.
    INITIAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({MlTradeMessage.Performative.CFP})
    # DATA ends the dialogue; it accepts no reply (empty frozenset below).
    TERMINAL_PERFORMATIVES: FrozenSet[Message.Performative] = frozenset({MlTradeMessage.Performative.DATA})
    # Allowed reply performatives for each received performative.
    VALID_REPLIES: Dict[(Message.Performative, FrozenSet[Message.Performative])] = {MlTradeMessage.Performative.ACCEPT: frozenset({MlTradeMessage.Performative.DATA}), MlTradeMessage.Performative.CFP: frozenset({MlTradeMessage.Performative.TERMS}), MlTradeMessage.Performative.DATA: frozenset(), MlTradeMessage.Performative.TERMS: frozenset({MlTradeMessage.Performative.ACCEPT})}
    class Role(Dialogue.Role):
        """Roles an agent can take in an ML trade dialogue."""
        BUYER = 'buyer'
        SELLER = 'seller'
    class EndState(Dialogue.EndState):
        """Possible end states of an ML trade dialogue."""
        SUCCESSFUL = 0
    def __init__(self, dialogue_label: DialogueLabel, self_address: Address, role: Dialogue.Role, message_class: Type[MlTradeMessage]=MlTradeMessage) -> None:
        """Initialize the dialogue with its label, own address and role."""
        Dialogue.__init__(self, dialogue_label=dialogue_label, message_class=message_class, self_address=self_address, role=role)
def test_image_upload_field():
    """Exercise ImageUploadField: thumbnails, deletion, resizing, extensions."""
    app = Flask(__name__)
    path = _create_temp()
    def _remove_testimages():
        # Best-effort cleanup of artefacts that earlier runs may have left.
        safe_delete(path, 'test1.png')
        safe_delete(path, 'test1_thumb.jpg')
        safe_delete(path, 'test2.png')
        safe_delete(path, 'test2_thumb.jpg')
        safe_delete(path, 'test1.jpg')
        safe_delete(path, 'test1.jpeg')
        safe_delete(path, 'test1.gif')
        safe_delete(path, 'test1.png')
        safe_delete(path, 'test1.tiff')
    # Form variants: with thumbnail generation, without resizing, auto-resize.
    class TestForm(form.BaseForm):
        upload = form.ImageUploadField('Upload', base_path=path, thumbnail_size=(100, 100, True))
    class TestNoResizeForm(form.BaseForm):
        upload = form.ImageUploadField('Upload', base_path=path, endpoint='test')
    class TestAutoResizeForm(form.BaseForm):
        upload = form.ImageUploadField('Upload', base_path=path, max_size=(64, 64, True))
    class Dummy(object):
        # Plain attribute holder for populate_obj().
        pass
    my_form = TestForm()
    assert (my_form.upload.base_path == path)
    assert (my_form.upload.endpoint == 'static')
    _remove_testimages()
    dummy = Dummy()
    # Upload a PNG: both the image and its thumbnail must be written.
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.png')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST', data={'upload': (fp, 'test1.png')}):
            my_form = TestForm(helpers.get_form_data())
            assert my_form.validate()
            my_form.populate_obj(dummy)
            assert (dummy.upload == 'test1.png')
            assert op.exists(op.join(path, 'test1.png'))
            assert op.exists(op.join(path, 'test1_thumb.png'))
    # Re-uploading under a new name must remove the previous files.
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST', data={'upload': (fp, 'test2.png')}):
            my_form = TestForm(helpers.get_form_data())
            assert my_form.validate()
            my_form.populate_obj(dummy)
            assert (dummy.upload == 'test2.png')
            assert op.exists(op.join(path, 'test2.png'))
            assert op.exists(op.join(path, 'test2_thumb.png'))
            assert (not op.exists(op.join(path, 'test1.png')))
            # NOTE(review): checks 'test1_thumb.jpg' although the thumbnail was
            # created as 'test1_thumb.png' — confirm the intended suffix.
            assert (not op.exists(op.join(path, 'test1_thumb.jpg')))
    # The delete checkbox must clear the attribute and remove both files.
    with app.test_request_context(method='POST', data={'_upload-delete': 'checked'}):
        my_form = TestForm(helpers.get_form_data())
        assert my_form.validate()
        my_form.populate_obj(dummy)
        assert (dummy.upload is None)
        assert (not op.exists(op.join(path, 'test2.png')))
        assert (not op.exists(op.join(path, 'test2_thumb.png')))
    # Without thumbnail_size no thumbnail file may be produced.
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST', data={'upload': (fp, 'test1.png')}):
            my_form = TestNoResizeForm(helpers.get_form_data())
            assert my_form.validate()
            my_form.populate_obj(dummy)
            assert (dummy.upload == 'test1.png')
            assert op.exists(op.join(path, 'test1.png'))
            assert (not op.exists(op.join(path, 'test1_thumb.png')))
    # Auto-resize keeps a PNG as PNG...
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.png')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST', data={'upload': (fp, 'test1.png')}):
            my_form = TestAutoResizeForm(helpers.get_form_data())
            assert my_form.validate()
            my_form.populate_obj(dummy)
            assert (dummy.upload == 'test1.png')
            assert op.exists(op.join(path, 'test1.png'))
    # ...but converts a resized TIFF to JPEG.
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.tiff')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST', data={'upload': (fp, 'test1.tiff')}):
            my_form = TestAutoResizeForm(helpers.get_form_data())
            assert my_form.validate()
            my_form.populate_obj(dummy)
            assert (dummy.upload == 'test1.jpg')
            assert op.exists(op.join(path, 'test1.jpg'))
    # Every supported extension must validate and keep its filename.
    for extension in ('gif', 'jpg', 'jpeg', 'png', 'tiff'):
        filename = ('copyleft.' + extension)
        filepath = op.join(op.dirname(__file__), 'data', filename)
        with open(filepath, 'rb') as fp:
            with app.test_request_context(method='POST', data={'upload': (fp, filename)}):
                my_form = TestNoResizeForm(helpers.get_form_data())
                assert my_form.validate()
                my_form.populate_obj(dummy)
                assert (dummy.upload == my_form.upload.data.filename)
    # Uppercase extensions must be accepted as well.
    filename = op.join(op.dirname(__file__), 'data', 'copyleft.jpg')
    with open(filename, 'rb') as fp:
        with app.test_request_context(method='POST', data={'upload': (fp, 'copyleft.JPG')}):
            my_form = TestNoResizeForm(helpers.get_form_data())
            assert my_form.validate()
class OptionSeriesOrganizationSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Highpass-filter mapping options for organization-series sonification.

    Exposes the ``frequency`` and ``resonance`` sub-option groups as nested
    Options objects built lazily via ``_config_sub_data``.
    """

    # NOTE(review): sibling generated classes in this file show stripped
    # decorators; these accessors are presumably @property in the upstream
    # generator — confirm before calling them as methods.
    def frequency(self) -> 'OptionSeriesOrganizationSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        # Lazily build (or fetch) the "frequency" sub-options container.
        return self._config_sub_data('frequency', OptionSeriesOrganizationSonificationDefaultinstrumentoptionsMappingHighpassFrequency)

    def resonance(self) -> 'OptionSeriesOrganizationSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        # Lazily build (or fetch) the "resonance" sub-options container.
        return self._config_sub_data('resonance', OptionSeriesOrganizationSonificationDefaultinstrumentoptionsMappingHighpassResonance)
class SnmpEngineID(TextualConvention, OctetString):
    """SnmpEngineID textual convention from SNMP-FRAMEWORK-MIB (RFC 3411)."""
    status = 'current'
    # Engine IDs must be 5..32 octets long.
    subtypeSpec = OctetString.subtypeSpec
    subtypeSpec += ConstraintsUnion(ValueSizeConstraint(5, 32))
    if mibBuilder.loadTexts:
        description = " An SNMP engine's administratively-unique identifier. Objects of this type are\n for identification, not for addressing, even though it is possible that an\n address may have been used in the generation of a specific value. The value for\n this object may not be all zeros or all 'ff'H or the empty (zero length)\n string. The initial value for this object may be configured via an operator\n console entry or via an algorithmic function. In the latter case, the following\n example algorithm is recommended. In cases where there are multiple engines on\n the same system, the use of this algorithm is NOT appropriate, as it would\n result in all of those engines ending up with the same ID value. 1) The very\n first bit is used to indicate how the rest of the data is composed. 0 - as\n defined by enterprise using former methods that existed before SNMPv3. See item\n 2 below. 1 - as defined by this architecture, see item 3 below. Note that this\n allows existing uses of the engineID (also known as AgentID [RFC1910]) to co-\n exist with any new uses. 2) The snmpEngineID has a length of 12 octets. The\n first four octets are set to the binary equivalent of the agent's SNMP\n management private enterprise number as assigned by the Internet Assigned\n Numbers Authority (IANA). For example, if Acme Networks has been assigned {\n enterprises 696 }, the first four octets would be assigned '000002b8'H. The\n remaining eight octets are determined via one or more enterprise-specific\n methods. Such methods must be designed so as to maximize the possibility that\n the value of this object will be unique in the agent's administrative domain.\n For example, it may be the IP address of the SNMP entity, or the MAC address of\n one of the interfaces, with each address suitably padded with random octets. If\n multiple methods are defined, then it is recommended that the first octet\n indicate the method being used and the remaining octets be a function of the\n method. 3) The length of the octet string varies. The first four octets are set\n to the binary equivalent of the agent's SNMP management private enterprise\n number as assigned by the Internet Assigned Numbers Authority (IANA). For\n example, if Acme Networks has been assigned { enterprises 696 }, the first four\n octets would be assigned '000002b8'H. The very first bit is set to 1. For\n example, the above value for Acme Networks now changes to be '800002b8'H. The\n fifth octet indicates how the rest (6th and following octets) are formatted.\n The values for the fifth octet are: 0 - reserved, unused. 1 - IPv4 address (4\n octets) lowest non-special IP address 2 - IPv6 address (16 octets) lowest non-\n special IP address 3 - MAC address (6 octets) lowest IEEE MAC address,\n canonical order 4 - Text, administratively assigned Maximum remaining length 27\n 5 - Octets, administratively assigned Maximum remaining length 27 6-127 -\n reserved, unused 128-255 - as defined by the enterprise Maximum remaining\n length 27\n "
    # Default engine ID: 0x80004fb8 (first bit set + an IANA enterprise
    # number) followed by format octet 5 ("octets, administratively
    # assigned"), then hostname / pid / id() bytes for per-process uniqueness.
    defaultValue = [128, 0, 79, 184, 5]
    try:
        # Mix in up to 16 chars of the hostname; os.uname() is unavailable
        # on some platforms (e.g. Windows), hence the broad guard.
        defaultValue += [ord(x) for x in os.uname()[1][:16]]
    except Exception:
        pass
    try:
        # Two bytes derived from the process id.
        defaultValue += [((os.getpid() >> 8) & 255), (os.getpid() & 255)]
    except Exception:
        pass
    # Two more bytes from the object's id() as a last-resort entropy source.
    defaultValue += [((id(defaultValue) >> 8) & 255), (id(defaultValue) & 255)]
    defaultValue = OctetString(defaultValue).asOctets()
class MiniImpacketShell(cmd.Cmd):
    """Interactive SMB client shell driven by cmd.Cmd command dispatch."""
    def __init__(self, smbClient, tcpShell=None):
        """Wrap an existing SMBConnection in an interactive shell.

        tcpShell: optional remote TCP shell; when given, all standard
        streams are redirected to it so the cmd loop is driven remotely.
        """
        if (tcpShell is not None):
            cmd.Cmd.__init__(self, stdin=tcpShell.stdin, stdout=tcpShell.stdout)
            # Redirect process-wide streams too, so bare print()/input()
            # inside command handlers also reach the remote peer.
            sys.stdout = tcpShell.stdout
            sys.stdin = tcpShell.stdin
            sys.stderr = tcpShell.stdout
            self.use_rawinput = False
            self.shell = tcpShell
        else:
            cmd.Cmd.__init__(self)
            self.shell = None
        self.prompt = '# '
        self.smb = smbClient
        (self.username, self.password, self.domain, self.lmhash, self.nthash, self.aesKey, self.TGT, self.TGS) = smbClient.getCredentials()
        self.tid = None                  # tree id of the connected share
        self.intro = 'Type help for list of commands'
        self.pwd = ''                    # current remote directory
        self.share = None                # name of the connected share
        self.loggedIn = True
        self.last_output = None
        self.completion = []             # (name, attrs) pairs for tab completion
def emptyline(self):
    """Do nothing on an empty line (overrides cmd.Cmd's repeat-last-command)."""
    pass
def precmd(self, line):
    """Normalize the raw command line to unicode before dispatch."""
    # Python 2 hands us bytes; decode so handlers always see text.
    return line.decode('utf-8') if PY2 else line
def onecmd(self, s):
    """Run a single command, trapping any exception so the shell survives it."""
    retVal = False
    try:
        retVal = cmd.Cmd.onecmd(self, s)
    except Exception as e:
        # Deliberate broad catch: a failing command must not kill the shell.
        LOG.error(e)
        LOG.debug('Exception info', exc_info=True)
    return retVal
def do_exit(self, line):
    """Terminate the shell (and the remote TCP shell, if any)."""
    if (self.shell is not None):
        self.shell.close()
    # Returning True tells cmd.Cmd to stop its loop.
    return True
def do_shell(self, line):
    """Run `line` in the LOCAL system shell and print its output.

    Security note: executes arbitrary shell input by design (os.popen).
    """
    output = os.popen(line).read()
    print(output)
    self.last_output = output
def do_help(self, line):
    """Print the static command reference for this shell."""
    print("\n open {host,port=445} - opens a SMB connection against the target host/port\n login {domain/username,passwd} - logs into the current SMB connection, no parameters for NULL connection. If no password specified, it'll be prompted\n kerberos_login {domain/username,passwd} - logs into the current SMB connection using Kerberos. If no password specified, it'll be prompted. Use the DNS resolvable domain name\n login_hash {domain/username,lmhash:nthash} - logs into the current SMB connection using the password hashes\n logoff - logs off\n shares - list available shares\n use {sharename} - connect to an specific share\n cd {path} - changes the current directory to {path}\n lcd {path} - changes the current local directory to {path}\n pwd - shows current remote directory\n password - changes the user password, the new password will be prompted for input\n ls {wildcard} - lists all the files in the current directory\n lls {dirname} - lists all the files on the local filesystem.\n tree {filepath} - recursively lists all files in folder and sub folders\n rm {file} - removes the selected file\n mkdir {dirname} - creates the directory under the current path\n rmdir {dirname} - removes the directory under the current path\n put (unknown) - uploads the filename into the current path\n get (unknown) - downloads the filename from the current path\n mget {mask} - downloads all files from the current directory matching the provided mask\n cat (unknown) - reads the filename from the current path\n mount {target,path} - creates a mount point from {path} to {target} (admin required)\n umount {path} - removes the mount point at {path} without deleting the directory (admin required)\n list_snapshots {path} - lists the vss snapshots for the specified path\n info - returns NetrServerInfo main results\n who - returns the sessions currently connected at the target host (admin required)\n close - closes the current SMB Session\n exit - terminates the server process (and this session)\n\n")
def do_password(self, line):
    """Change the logged-in user's password via MS-SAMR (prompts for input)."""
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    from getpass import getpass
    newPassword = getpass('New Password:')
    # Reuse the existing SMB session to talk to the SAMR named pipe.
    rpctransport = transport.SMBTransport(self.smb.getRemoteHost(), filename='\\samr', smb_connection=self.smb)
    dce = rpctransport.get_dce_rpc()
    dce.connect()
    dce.bind(samr.MSRPC_UUID_SAMR)
    samr.hSamrUnicodeChangePasswordUser2(dce, '\x00', self.username, self.password, newPassword, self.lmhash, self.nthash)
    # Cached hashes are now stale; keep only the new cleartext password.
    self.password = newPassword
    self.lmhash = None
    self.nthash = None
def do_open(self, line):
    """Open a new SMB connection: open {host} [port=445]."""
    l = line.split(' ')
    port = 445
    if (len(l) > 0):
        host = l[0]
    if (len(l) > 1):
        port = int(l[1])
    if (port == 139):
        # NetBIOS session service requires the *SMBSERVER pseudo-name.
        self.smb = SMBConnection('*SMBSERVER', host, sess_port=port)
    else:
        self.smb = SMBConnection(host, host, sess_port=port)
    dialect = self.smb.getDialect()
    if (dialect == SMB_DIALECT):
        LOG.info('SMBv1 dialect used')
    elif (dialect == SMB2_DIALECT_002):
        LOG.info('SMBv2.0 dialect used')
    elif (dialect == SMB2_DIALECT_21):
        LOG.info('SMBv2.1 dialect used')
    else:
        LOG.info('SMBv3.0 dialect used')
    # Fresh connection: clear all per-session state.
    self.share = None
    self.tid = None
    self.pwd = ''
    self.loggedIn = False
    self.password = None
    self.lmhash = None
    self.nthash = None
    self.username = None
def do_login(self, line):
    """Authenticate: login [domain/]username [password] (prompts if omitted)."""
    if (self.smb is None):
        LOG.error('No connection open')
        return
    l = line.split(' ')
    username = ''
    password = ''
    domain = ''
    if (len(l) > 0):
        username = l[0]
    if (len(l) > 1):
        password = l[1]
    if (username.find('/') > 0):
        (domain, username) = username.split('/')
    if ((password == '') and (username != '')):
        from getpass import getpass
        password = getpass('Password:')
    self.smb.login(username, password, domain=domain)
    self.password = password
    self.username = username
    if (self.smb.isGuestSession() > 0):
        LOG.info('GUEST Session Granted')
    else:
        LOG.info('USER Session Granted')
    self.loggedIn = True
def do_kerberos_login(self, line):
    """Authenticate with Kerberos: kerberos_login domain/username [password].

    The domain part is mandatory (needed to locate the KDC).
    """
    if (self.smb is None):
        LOG.error('No connection open')
        return
    l = line.split(' ')
    username = ''
    password = ''
    domain = ''
    if (len(l) > 0):
        username = l[0]
    if (len(l) > 1):
        password = l[1]
    if (username.find('/') > 0):
        (domain, username) = username.split('/')
    if (domain == ''):
        LOG.error('Domain must be specified for Kerberos login')
        return
    if ((password == '') and (username != '')):
        from getpass import getpass
        password = getpass('Password:')
    self.smb.kerberosLogin(username, password, domain=domain)
    self.password = password
    self.username = username
    if (self.smb.isGuestSession() > 0):
        LOG.info('GUEST Session Granted')
    else:
        LOG.info('USER Session Granted')
    self.loggedIn = True
def do_login_hash(self, line):
    """Pass-the-hash login: login_hash [domain/]username lmhash:nthash."""
    if (self.smb is None):
        LOG.error('No connection open')
        return
    l = line.split(' ')
    domain = ''
    if (len(l) > 0):
        username = l[0]
    if (len(l) > 1):
        hashes = l[1]
    else:
        LOG.error('Hashes needed. Format is lmhash:nthash')
        return
    if (username.find('/') > 0):
        (domain, username) = username.split('/')
    (lmhash, nthash) = hashes.split(':')
    # Empty password: authentication is driven purely by the NTLM hashes.
    self.smb.login(username, '', domain, lmhash=lmhash, nthash=nthash)
    self.username = username
    self.lmhash = lmhash
    self.nthash = nthash
    if (self.smb.isGuestSession() > 0):
        LOG.info('GUEST Session Granted')
    else:
        LOG.info('USER Session Granted')
    self.loggedIn = True
def do_logoff(self, line):
    """Log off the SMB session and reset all connection state."""
    if (self.smb is None):
        LOG.error('No connection open')
        return
    self.smb.logoff()
    del self.smb
    self.share = None
    self.smb = None
    self.tid = None
    self.pwd = ''
    self.loggedIn = False
    self.password = None
    self.lmhash = None
    self.nthash = None
    self.username = None
def do_info(self, line):
    """Print NetrServerGetInfo(level 102) results for the target host."""
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    # Query the Server Service (srvsvc) pipe over the current session.
    rpctransport = transport.SMBTransport(self.smb.getRemoteHost(), filename='\\srvsvc', smb_connection=self.smb)
    dce = rpctransport.get_dce_rpc()
    dce.connect()
    dce.bind(srvs.MSRPC_UUID_SRVS)
    resp = srvs.hNetrServerGetInfo(dce, 102)
    print(('Version Major: %d' % resp['InfoStruct']['ServerInfo102']['sv102_version_major']))
    print(('Version Minor: %d' % resp['InfoStruct']['ServerInfo102']['sv102_version_minor']))
    print(('Server Name: %s' % resp['InfoStruct']['ServerInfo102']['sv102_name']))
    print(('Server Comment: %s' % resp['InfoStruct']['ServerInfo102']['sv102_comment']))
    print(('Server UserPath: %s' % resp['InfoStruct']['ServerInfo102']['sv102_userpath']))
    print(('Simultaneous Users: %d' % resp['InfoStruct']['ServerInfo102']['sv102_users']))
def do_who(self, line):
    """List sessions connected to the target (NetrSessionEnum level 10; admin)."""
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    rpctransport = transport.SMBTransport(self.smb.getRemoteHost(), filename='\\srvsvc', smb_connection=self.smb)
    dce = rpctransport.get_dce_rpc()
    dce.connect()
    dce.bind(srvs.MSRPC_UUID_SRVS)
    resp = srvs.hNetrSessionEnum(dce, NULL, NULL, 10)
    for session in resp['InfoStruct']['SessionInfo']['Level10']['Buffer']:
        # String fields are NUL-terminated; [:-1] drops the trailing NUL.
        print(('host: %15s, user: %5s, active: %5d, idle: %5d' % (session['sesi10_cname'][:(- 1)], session['sesi10_username'][:(- 1)], session['sesi10_time'], session['sesi10_idle_time'])))
def do_shares(self, line):
if (self.loggedIn is False):
LOG.error('Not logged in')
return
resp = self.smb.listShares()
for i in range(len(resp)):
print(resp[i]['shi1_netname'][:(- 1)])
def do_use(self, line):
    """Connect to a share by name and move to its root directory."""
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    self.share = line
    self.tid = self.smb.connectTree(line)
    self.pwd = '\\'
    # Prime the tab-completion cache without printing a listing.
    self.do_ls('', False)
def complete_cd(self, text, line, begidx, endidx):
    """Tab completion for cd: only offer directories (include=2)."""
    return self.complete_get(text, line, begidx, endidx, include=2)
def do_cd(self, line):
    """Change the remote working directory, verifying it exists on the share."""
    if (self.tid is None):
        LOG.error('No share selected')
        return
    p = line.replace('/', '\\')
    oldpwd = self.pwd
    # Absolute paths replace pwd; relative ones are joined onto it.
    # startswith() also tolerates an empty argument (the original p[0]
    # indexing raised IndexError on a bare "cd").
    if p.startswith('\\'):
        self.pwd = p
    else:
        self.pwd = ntpath.join(self.pwd, p)
    self.pwd = ntpath.normpath(self.pwd)
    try:
        # Probe the directory; on failure restore the previous pwd.
        fid = self.smb.openFile(self.tid, self.pwd, creationOption=FILE_DIRECTORY_FILE, desiredAccess=(FILE_READ_DATA | FILE_LIST_DIRECTORY), shareMode=(FILE_SHARE_READ | FILE_SHARE_WRITE))
        self.smb.closeFile(self.tid, fid)
    except SessionError:
        self.pwd = oldpwd
        raise
def do_lcd(self, s):
    """Change (or print, when no argument) the LOCAL working directory."""
    # NOTE(review): echoing the argument looks like leftover debug output;
    # confirm it is intentional before removing.
    print(s)
    if (s == ''):
        print(os.getcwd())
    else:
        os.chdir(s)
def do_pwd(self, line):
    """Print the current remote directory (with forward slashes)."""
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    print(self.pwd.replace('\\', '/'))
def do_ls(self, wildcard, display=True):
    """List the current remote directory (optionally filtered by wildcard).

    Always refreshes self.completion; set display=False to refresh the
    tab-completion cache silently.
    """
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    if (self.tid is None):
        LOG.error('No share selected')
        return
    if (wildcard == ''):
        pwd = ntpath.join(self.pwd, '*')
    else:
        pwd = ntpath.join(self.pwd, wildcard)
    self.completion = []
    pwd = pwd.replace('/', '\\')
    pwd = ntpath.normpath(pwd)
    for f in self.smb.listPath(self.share, pwd):
        if (display is True):
            # ls -l style line: type flag, size, mtime, name.
            print(('%crw-rw-rw- %10d %s %s' % (('d' if (f.is_directory() > 0) else '-'), f.get_filesize(), time.ctime(float(f.get_mtime_epoch())), f.get_longname())))
        self.completion.append((f.get_longname(), f.is_directory()))
def do_lls(self, currentDir):
if (currentDir == ''):
currentDir = './'
else:
pass
for LINE in os.listdir(currentDir):
print(LINE)
def do_listFiles(self, share, ip):
    """Helper for do_tree: list one remote directory non-recursively.

    Returns (subdirectories, files, entry_count) with printed paths.
    NOTE(review): the `share` parameter is unused — self.share is queried;
    `ip` is actually a wildcard path like '\\dir\\*'.
    """
    retList = []
    retFiles = []
    retInt = 0
    try:
        for LINE in self.smb.listPath(self.share, ip):
            if ((LINE.get_longname() == '.') or (LINE.get_longname() == '..')):
                pass
            else:
                retInt = (retInt + 1)
                print((ip.strip('*').replace('//', '/') + LINE.get_longname()))
                if LINE.is_directory():
                    retval = (ip.strip('*').replace('//', '/') + LINE.get_longname())
                    retList.append(retval)
                else:
                    retval = (ip.strip('*').replace('//', '/') + LINE.get_longname())
                    retFiles.append(retval)
    except:
        # Best-effort traversal: unreadable directories are skipped.
        pass
    return (retList, retFiles, retInt)
def do_tree(self, filepath):
    """Recursively print all files and folders below filepath."""
    folderList = []
    retList = []
    totalFilesRead = 0
    if (self.loggedIn is False):
        LOG.error('Not logged in')
        return
    if (self.tid is None):
        LOG.error('No share selected')
        return
    # Normalize to an absolute forward-slash wildcard path.
    filepath = filepath.replace('\\', '/')
    if (not filepath.startswith('/')):
        filepath = ((self.pwd.replace('\\', '/') + '/') + filepath)
    if (not filepath.endswith('/*')):
        filepath = (filepath + '/*')
    filepath = os.path.abspath(filepath).replace('//', '/')
    for LINE in self.smb.listPath(self.share, filepath):
        if LINE.is_directory():
            if ((LINE.get_longname() == '.') or (LINE.get_longname() == '..')):
                pass
            else:
                totalFilesRead = (totalFilesRead + 1)
                folderList.append((filepath.strip('*') + LINE.get_longname()))
        else:
            print((filepath.strip('*') + LINE.get_longname()))
    # Breadth-first walk: appending to folderList while iterating it is
    # deliberate — newly discovered subfolders get visited too.
    for ITEM in folderList:
        ITEM = (ITEM + '/*')
        try:
            (retList, retFiles, retInt) = self.do_listFiles(self.share, ITEM)
            for q in retList:
                folderList.append(q)
            totalFilesRead = (totalFilesRead + retInt)
        except:
            pass
    print((('Finished - ' + str(totalFilesRead)) + ' files and folders'))
def do_rm(self, filename):
    """Delete a file in the current remote directory."""
    if (self.tid is None):
        LOG.error('No share selected')
        return
    f = ntpath.join(self.pwd, filename)
    file = f.replace('/', '\\')
    self.smb.deleteFile(self.share, file)
def do_mkdir(self, path):
    """Create a directory under the current remote path."""
    if (self.tid is None):
        LOG.error('No share selected')
        return
    p = ntpath.join(self.pwd, path)
    pathname = p.replace('/', '\\')
    self.smb.createDirectory(self.share, pathname)
def do_rmdir(self, path):
    """Remove a directory under the current remote path."""
    if (self.tid is None):
        LOG.error('No share selected')
        return
    p = ntpath.join(self.pwd, path)
    pathname = p.replace('/', '\\')
    self.smb.deleteDirectory(self.share, pathname)
def do_put(self, pathname):
if (self.tid is None):
LOG.error('No share selected')
return
src_path = pathname
dst_name = os.path.basename(src_path)
fh = open(pathname, 'rb')
f = ntpath.join(self.pwd, dst_name)
finalpath = f.replace('/', '\\')
self.smb.putFile(self.share, finalpath, fh.read)
fh.close()
def complete_get(self, text, line, begidx, endidx, include=1):
p = line.replace('/', '\\')
if (p.find('\\') < 0):
items = []
if (include == 1):
mask = 0
else:
mask = 16
for i in self.completion:
if (i[1] == mask):
items.append(i[0])
if text:
return [item for item in items if item.upper().startswith(text.upper())]
else:
return items
def do_mget(self, mask):
    """Download every file in the current directory matching mask."""
    if (mask == ''):
        LOG.error('A mask must be provided')
        return
    if (self.tid is None):
        LOG.error('No share selected')
        return
    # Refresh self.completion with the matching entries, silently.
    self.do_ls(mask, display=False)
    if (len(self.completion) == 0):
        LOG.error('No files found matching the provided mask')
        return
    for file_tuple in self.completion:
        # attrs == 0 means a plain file (directories are skipped).
        if (file_tuple[1] == 0):
            filename = file_tuple[0]
            filename = filename.replace('/', '\\')
            fh = open(ntpath.basename(filename), 'wb')
            pathname = ntpath.join(self.pwd, filename)
            try:
                LOG.info(('Downloading %s' % filename))
                self.smb.getFile(self.share, pathname, fh.write)
            except:
                # Remove the partial local file before re-raising.
                fh.close()
                os.remove(filename)
                raise
            fh.close()
def do_get(self, filename):
    """Download a single file from the current remote directory."""
    if (self.tid is None):
        LOG.error('No share selected')
        return
    filename = filename.replace('/', '\\')
    fh = open(ntpath.basename(filename), 'wb')
    pathname = ntpath.join(self.pwd, filename)
    try:
        self.smb.getFile(self.share, pathname, fh.write)
    except:
        # Remove the partial local file before re-raising.
        fh.close()
        os.remove(filename)
        raise
    fh.close()
def do_cat(self, filename):
    """Print a remote file's contents, auto-detecting its text encoding."""
    if (self.tid is None):
        LOG.error('No share selected')
        return
    filename = filename.replace('/', '\\')
    # Buffer the download in memory instead of on disk.
    fh = BytesIO()
    pathname = ntpath.join(self.pwd, filename)
    try:
        self.smb.getFile(self.share, pathname, fh.write)
    except:
        raise
    output = fh.getvalue()
    encoding = chardet.detect(output)['encoding']
    error_msg = '[-] Output cannot be correctly decoded, are you sure the text is readable ?'
    if encoding:
        try:
            print(output.decode(encoding))
        except:
            print(error_msg)
        finally:
            fh.close()
    else:
        print(error_msg)
        fh.close()
def do_close(self, line):
    """Alias for logoff: close the current SMB session."""
    self.do_logoff(line)
def do_list_snapshots(self, line):
    """List VSS snapshot timestamps for the given remote path."""
    l = line.split(' ')
    if (len(l) > 0):
        pathName = l[0].replace('/', '\\')
    # Relative paths are anchored at the current remote directory.
    if (pathName.startswith('\\') is not True):
        pathName = ntpath.join(self.pwd, pathName)
    snapshotList = self.smb.listSnapshots(self.tid, pathName)
    if (not snapshotList):
        print('No snapshots found')
        return
    for timestamp in snapshotList:
        print(timestamp)
def do_mount(self, line):
    """Create a mount point: mount {target} {path} (admin required)."""
    l = line.split(' ')
    if (len(l) > 1):
        target = l[0].replace('/', '\\')
        pathName = l[1].replace('/', '\\')
    # Relative paths are anchored at the current remote directory.
    if (pathName.startswith('\\') is not True):
        pathName = ntpath.join(self.pwd, pathName)
    self.smb.createMountPoint(self.tid, pathName, target)
def do_umount(self, mountpoint):
mountpoint = mountpoint.replace('/', '\\')
if (mountpoint.startswith('\\') is not True):
mountpoint = ntpath.join(self.pwd, mountpoint)
mountPath = ntpath.join(self.pwd, mountpoint)
self.smb.removeMountPoint(self.tid, mountPath)
def do_EOF(self, line):
    """Exit the shell on end-of-input (Ctrl-D)."""
    print('Bye!\n')
    return True
def nbest_centrality(G, metric, n=10, attr='centrality', **kwargs):
    """Score every node of G with `metric`, store the scores as node
    attribute `attr`, print the top `n` nodes, and return G."""
    centrality = metric(G, **kwargs)
    nx.set_node_attributes(G, name=attr, values=centrality)
    ranked = heapq.nlargest(n, centrality.items(), key=itemgetter(1))
    for rank, entry in enumerate(ranked, start=1):
        print('{}. {}: {:0.4f}'.format(rank, *entry))
    return G
# Restored the decorator name lost in the source: a bare "(tags=..., description=...)"
# call is invalid syntax; this codebase annotates views with flask-apispec's @doc.
@doc(tags=['communication cost'], description=docs.COMMUNICATIONS_COSTS_TOTALS_BY_CANDIDATE)
class CCTotalsByCandidateView(ApiResource):
    """Communication-cost totals aggregated per candidate and cycle."""

    schema = schemas.CCTotalsByCandidateSchema
    page_schema = schemas.CCTotalsByCandidatePageSchema
    sort_option = ['cycle', 'candidate_id', 'total', 'support_oppose_indicator']

    def args(self):
        """Accepted query arguments: paging, CC filters, multi-sort."""
        return utils.extend(args.paging, args.totals_by_candidate_other_costs_CC, args.make_multi_sort_args(default=['-cycle', 'candidate_id'], validator=args.SortMultiOptionValidator(self.sort_option)))

    def build_query(self, **kwargs):
        """Sum communication costs per candidate/cycle/support-oppose flag."""
        (cycle_column, candidate) = get_candidate_list(kwargs)
        query = db.session.query(CommunicationCostByCandidate.candidate_id, CommunicationCostByCandidate.support_oppose_indicator, cycle_column, sa.func.sum(CommunicationCostByCandidate.total).label('total')).join(CommunicationCostByCandidate, sa.and_((CommunicationCostByCandidate.candidate_id == candidate.c.candidate_id), (CommunicationCostByCandidate.cycle == candidate.c.two_year_period))).filter((cycle_column.in_(kwargs['cycle']) if kwargs.get('cycle') else True)).group_by(CommunicationCostByCandidate.candidate_id, cycle_column, CommunicationCostByCandidate.support_oppose_indicator)
        return query
class TestEHABIELF(unittest.TestCase):
    """Exercise ARM EHABI (.ARM.exidx) unwind-info parsing on fixture binaries."""

    def test_parse_object_file(self):
        """Relocatable objects are rejected with an explicit AssertionError."""
        fname = os.path.join('test', 'testfiles_for_unittests', 'arm_exidx_test.o')
        with open(fname, 'rb') as f:
            elf = ELFFile(f)
            try:
                elf.get_ehabi_infos()
                self.assertTrue(False, 'Unreachable code')
            except AssertionError as e:
                self.assertEqual(str(e), "Current version of pyelftools doesn't support relocatable file.")

    def test_parse_shared_library(self):
        """Shared library: spot-check entry types, offsets and bytecode."""
        fname = os.path.join('test', 'testfiles_for_unittests', 'arm_exidx_test.so')
        with open(fname, 'rb') as f:
            elf = ELFFile(f)
            self.assertTrue(elf.has_ehabi_info())
            infos = elf.get_ehabi_infos()
            self.assertEqual(1, len(infos))
            info = infos[0]
            # Expected values below were extracted from the fixture binary.
            self.assertIsInstance(info.get_entry(0), EHABIEntry)
            self.assertEqual(info.get_entry(0).function_offset, 214544)
            self.assertEqual(info.get_entry(0).eh_table_offset, 431428)
            self.assertEqual(info.get_entry(0).bytecode_array, [151, 65, 132, 13, 176, 176])
            self.assertIsInstance(info.get_entry(7), CannotUnwindEHABIEntry)
            self.assertEqual(info.get_entry(7).function_offset, 214776)
            self.assertIsInstance(info.get_entry(8), EHABIEntry)
            self.assertEqual(info.get_entry(8).personality, 0)
            self.assertEqual(info.get_entry(8).function_offset, 214844)
            self.assertEqual(info.get_entry(8).bytecode_array, [151, 132, 8])
            self.assertIsInstance(info.get_entry(9), GenericEHABIEntry)
            self.assertEqual(info.get_entry(9).function_offset, 214908)
            self.assertEqual(info.get_entry(9).personality, 203312)
            # No entry in the fixture should parse as corrupt.
            for i in range(info.num_entry()):
                self.assertNotIsInstance(info.get_entry(i), CorruptEHABIEntry)

    def test_parse_executable(self):
        """Executable: same spot checks against the linked fixture."""
        fname = os.path.join('test', 'testfiles_for_unittests', 'arm_exidx_test.elf')
        with open(fname, 'rb') as f:
            elf = ELFFile(f)
            self.assertTrue(elf.has_ehabi_info())
            infos = elf.get_ehabi_infos()
            self.assertEqual(1, len(infos))
            info = infos[0]
            self.assertIsInstance(info.get_entry(0), EHABIEntry)
            self.assertEqual(info.get_entry(0).function_offset, 20304)
            self.assertEqual(info.get_entry(0).eh_table_offset, 141412)
            self.assertEqual(info.get_entry(0).bytecode_array, [151, 65, 132, 13, 176, 176])
            self.assertIsInstance(info.get_entry(7), CannotUnwindEHABIEntry)
            self.assertEqual(info.get_entry(7).function_offset, 20544)
            self.assertIsInstance(info.get_entry(8), GenericEHABIEntry)
            self.assertEqual(info.get_entry(8).personality, 89377)
            self.assertIsInstance(info.get_entry(9), EHABIEntry)
            self.assertEqual(info.get_entry(9).function_offset, 20804)
            self.assertEqual(info.get_entry(9).personality, 0)
            self.assertEqual(info.get_entry(9).bytecode_array, [151, 132, 8])
            for i in range(info.num_entry()):
                self.assertNotIsInstance(info.get_entry(i), CorruptEHABIEntry)
class SerializedActorCritic(TorchActorCritic):
    """Actor-critic restored from serialized artifacts.

    Rebuilds the model from a composer config plus a saved spaces config,
    loads the given state dict, and switches the model to eval mode.
    """

    def __init__(self, model: Union[(DictConfig, Dict)], state_dict_file: str, spaces_dict_file: str, device: str):
        """model: composer config; state_dict_file: torch weights;
        spaces_dict_file: dumped SpacesConfig; device: torch device string."""
        spaces_config = SpacesConfig.load(spaces_dict_file)
        model_composer = Factory(base_type=BaseModelComposer).instantiate(model, action_spaces_dict=spaces_config.action_spaces_dict, observation_spaces_dict=spaces_config.observation_spaces_dict, agent_counts_dict=spaces_config.agent_counts_dict)
        super().__init__(policy=model_composer.policy, critic=model_composer.critic, device=device)
        # map_location keeps CPU-only hosts working with GPU-saved weights.
        state_dict = torch.load(state_dict_file, map_location=torch.device(self._device))
        self.load_state_dict(state_dict)
        # Inference-only object: disable dropout/batch-norm training behavior.
        self.eval()

    def seed(self, seed: int):
        """Seed torch's global RNG for reproducible sampling."""
        torch.manual_seed(seed)
class OptionSeriesTilemapSonificationTracksMappingLowpassResonance(Options):
    """Mapping options for the lowpass-filter resonance of a sonification track.

    Restored the @property/@setter pairs: as plain ``def``s, each setter
    definition silently shadowed the getter of the same name, making the
    getters unreachable.
    """

    @property
    def mapFunction(self):
        """Mapping function for this parameter (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the parameter is mapped to (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the values are mapped within (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class DisasterBase(APIView):
    """Base view for disaster/COVID spending endpoints.

    Subclasses declare required TinyShield filters via ``required_filters``;
    this base validates the request body and provides shared queryset helpers.

    Decorators below were restored: the source contained bare ``_property``
    lines (garbled ``@property``), and several members are used without
    call parentheses (``self.def_codes``, ``self.is_in_provided_def_codes``),
    which only works if they are properties.
    """

    required_filters = ['def_codes']

    @classmethod
    def requests_award_type_codes(cls, request: HttpRequest) -> bool:
        """True when the request body filters on award_type_codes."""
        if (request and request.body):
            body_json = json.loads(request.body)
            if (('filter' in body_json) and ('award_type_codes' in body_json['filter'])):
                return True
        return False

    @classmethod
    def requests_award_spending_type(cls, request: HttpRequest) -> bool:
        """True when the request asks for spending_type == "award"."""
        if (request and request.body):
            body_json = json.loads(request.body)
            if (body_json.get('spending_type', '') == 'award'):
                return True
        return False

    @property
    def filters(self):
        """Validate the request body against the models selected by
        ``required_filters`` and return the parsed ``filter`` object."""
        all_def_codes = sorted(DisasterEmergencyFundCode.objects.values_list('code', flat=True))
        object_keys_lookup = {'def_codes': {'key': 'filter|def_codes', 'name': 'def_codes', 'type': 'array', 'array_type': 'enum', 'enum_values': all_def_codes, 'allow_nulls': False, 'optional': False}, 'query': {'key': 'filter|query', 'name': 'query', 'type': 'text', 'text_type': 'search', 'allow_nulls': True, 'optional': True}, 'award_type_codes': {'key': 'filter|award_type_codes', 'name': 'award_type_codes', 'type': 'array', 'array_type': 'enum', 'enum_values': sorted(award_type_mapping.keys()), 'allow_nulls': True, 'optional': True}, '_loan_award_type_codes': {'key': 'filter|award_type_codes', 'name': 'award_type_codes', 'type': 'array', 'array_type': 'enum', 'enum_values': sorted(loan_type_mapping.keys()), 'allow_nulls': True, 'optional': True, 'default': list(loan_type_mapping.keys())}, '_assistance_award_type_codes': {'key': 'filter|award_type_codes', 'name': 'award_type_codes', 'type': 'array', 'array_type': 'enum', 'enum_values': sorted(assistance_type_mapping.keys()), 'allow_nulls': True, 'optional': True, 'default': list(assistance_type_mapping.keys())}}
        model = [object_keys_lookup[key] for key in self.required_filters]
        json_request = TinyShield(model).block(self.request.data)
        return json_request['filter']

    @property
    def def_codes(self):
        """Validated list of disaster emergency fund codes from the request."""
        return self.filters['def_codes']

    @property
    def final_period_submission_query_filters(self):
        return filter_by_latest_closed_periods()

    @property
    def latest_reporting_period(self):
        return get_last_closed_submission_date(False)

    @property
    def all_closed_defc_submissions(self):
        return filter_by_defc_closed_periods()

    @property
    def is_in_provided_def_codes(self):
        """Q object restricting rows to the requested DEF codes."""
        return Q(disaster_emergency_fund__code__in=self.def_codes)

    def is_non_zero_total_spending(self):
        # NOTE(review): possibly also a stripped @property upstream; kept as
        # a method because no attribute-style usage is visible here.
        return (~ Q((Q(obligations_incurred_by_program_object_class_cpe=(F('deobligations_recoveries_refund_pri_program_object_class_cpe') * (- 1))) & Q(gross_outlay_amount_by_program_object_class_cpe=((F('ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe') * (- 1)) - F('ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'))))))

    def has_award_of_provided_type(self, should_join_awards: bool=True) -> Q:
        """Q object matching the requested award_type_codes (or match-all)."""
        award_type_codes = self.filters.get('award_type_codes')
        if (award_type_codes is not None):
            if should_join_awards:
                return (Q(award__type__in=award_type_codes) & Q(award__isnull=False))
            else:
                return Q(type__in=award_type_codes)
        else:
            return Q()

    def has_award_of_classification(self):
        """Q object distinguishing procurement (piid) vs assistance awards."""
        if self.filters.get('award_type'):
            return Q(piid__isnull=bool((self.filters['award_type'] == 'assistance')))
        else:
            return Q()

    def construct_loan_queryset(self, faba_grouping_column, base_model, base_model_column):
        """Build the CTE-based loan aggregation queryset grouped by
        ``faba_grouping_column``, joined onto ``base_model``."""
        grouping_key = (F(faba_grouping_column) if isinstance(faba_grouping_column, str) else faba_grouping_column)
        base_values = With(FinancialAccountsByAwards.objects.filter(Q(award__type__in=loan_type_mapping), self.all_closed_defc_submissions, self.is_in_provided_def_codes).annotate(grouping_key=grouping_key, total_loan_value=F('award__total_loan_value'), reporting_fiscal_year=F('submission__reporting_fiscal_year'), reporting_fiscal_period=F('submission__reporting_fiscal_period'), quarter_format_flag=F('submission__quarter_format_flag')).filter(grouping_key__isnull=False).values('grouping_key', 'financial_accounts_by_awards_id', 'award_id', 'transaction_obligated_amount', 'gross_outlay_amount_by_award_cpe', 'reporting_fiscal_year', 'reporting_fiscal_period', 'quarter_format_flag', 'total_loan_value'), 'base_values')
        # Outlays only count from each fiscal year's final closed submission.
        q = Q()
        for sub in final_submissions_for_all_fy():
            q |= ((Q(reporting_fiscal_year=sub.fiscal_year) & Q(quarter_format_flag=sub.is_quarter)) & Q(reporting_fiscal_period=sub.fiscal_period))
        aggregate_faba = With(base_values.queryset().values('grouping_key').annotate(obligation=Coalesce(Sum('transaction_obligated_amount'), 0, output_field=DecimalField(max_digits=23, decimal_places=2)), outlay=Coalesce(Sum(Case(When(q, then=F('gross_outlay_amount_by_award_cpe')), default=Value(0), output_field=DecimalField(max_digits=23, decimal_places=2))), 0)).values('grouping_key', 'obligation', 'outlay'), 'aggregate_faba')
        # Distinct (group, award) pairs so each award's loan value counts once.
        distinct_awards = With(base_values.queryset().values('grouping_key', 'award_id', 'total_loan_value').distinct(), 'distinct_awards')
        aggregate_awards = With(distinct_awards.queryset().values('grouping_key').annotate(award_count=Count('award_id'), face_value_of_loan=Coalesce(Sum('total_loan_value'), 0, output_field=DecimalField(max_digits=23, decimal_places=2))).values('grouping_key', 'award_count', 'face_value_of_loan'), 'aggregate_awards')
        return Bunch(award_count_column=aggregate_awards.col.award_count, obligation_column=aggregate_faba.col.obligation, outlay_column=aggregate_faba.col.outlay, face_value_of_loan_column=aggregate_awards.col.face_value_of_loan, queryset=aggregate_awards.join(aggregate_faba.join(base_model, **{base_model_column: aggregate_faba.col.grouping_key}), **{base_model_column: aggregate_awards.col.grouping_key}).with_cte(base_values).with_cte(aggregate_faba).with_cte(distinct_awards).with_cte(aggregate_awards))

    @staticmethod
    def accumulate_total_values(results: List[dict], extra_columns: List[str]) -> dict:
        """Sum obligation, outlay and any extra columns across result rows."""
        totals = {'obligation': 0, 'outlay': 0}
        for col in extra_columns:
            totals[col] = 0
        for res in results:
            for key in totals.keys():
                # Treat missing/None values as zero.
                totals[key] += (res.get(key) or 0)
        return totals
class Library():
    """A monitored, periodically-rescanned music library location."""
    def __init__(self, location: str, monitored: bool=False, scan_interval: int=0, startup_scan: bool=False):
        """location: library URI; monitored: watch for file changes;
        scan_interval: seconds between rescans (0 disables);
        startup_scan: rescan when the application starts."""
        self.location = location
        self.scan_interval = scan_interval
        self.scan_id = None        # GLib timeout source id for the rescan timer
        self.scanning = False
        self._startup_scan = startup_scan
        self.monitor = LibraryMonitor(self)
        self.monitor.props.monitored = monitored
        self.collection: Optional[Collection] = None
        # Arms the periodic rescan timer if scan_interval is non-zero.
        self.set_rescan_interval(scan_interval)
def set_location(self, location: str) -> None:
    """Set the library's location URI."""
    self.location = location
def get_location(self) -> str:
    """Return the library's location URI."""
    return self.location
def set_collection(self, collection) -> None:
    """Attach this library to its owning Collection."""
    self.collection = collection
def get_monitored(self) -> bool:
    """Whether filesystem monitoring is enabled for this library."""
    return self.monitor.props.monitored
def set_monitored(self, monitored: bool) -> None:
    """Enable/disable filesystem monitoring and persist the setting."""
    self.monitor.props.monitored = monitored
    self.collection.serialize_libraries()
    self.collection._dirty = True
# Property wrapper so callers can use `library.monitored = True`.
monitored = property(get_monitored, set_monitored)
def get_rescan_interval(self) -> int:
    """Return the periodic rescan interval in seconds (0 = disabled)."""
    return self.scan_interval
def set_rescan_interval(self, interval: int) -> None:
    """(Re)arm the periodic rescan timer; interval 0 disables it."""
    if self.scan_id:
        # Cancel the previously scheduled rescan before rescheduling.
        GLib.source_remove(self.scan_id)
        self.scan_id = None
    if interval:
        self.scan_id = GLib.timeout_add_seconds(interval, self.rescan)
    self.scan_interval = interval
def get_startup_scan(self) -> bool:
return self._startup_scan
def set_startup_scan(self, value: bool) -> None:
self._startup_scan = value
self.collection.serialize_libraries()
self.collection._dirty = True
startup_scan = property(get_startup_scan, set_startup_scan)
def _count_files(self) -> int:
count = 0
for file in common.walk(Gio.File.new_for_uri(self.location)):
if self.collection:
if self.collection._scan_stopped:
break
count += 1
return count
def _check_compilation(self, ccheck: Dict[(str, Dict[(str, Deque[str])])], compilations: MutableSequence[Tuple[(str, str)]], tr: trax.Track) -> None:
if (not settings.get_option('collection/file_based_compilations', True)):
return
def joiner(value):
if isinstance(value, list):
return '\x00'.join(value)
else:
return value
try:
basedir = joiner(tr.get_tag_raw('__basedir'))
album = joiner(tr.get_tag_raw('album'))
artist = joiner(tr.get_tag_raw('artist'))
except Exception:
logger.warning('Error while checking for compilation: %s', tr)
return
if ((not basedir) or (not album) or (not artist)):
return
album = album.lower()
artist = artist.lower()
try:
if (basedir not in ccheck):
ccheck[basedir] = {}
if (album not in ccheck[basedir]):
ccheck[basedir][album] = deque()
except TypeError:
logger.exception('Error adding to compilation')
return
if (ccheck[basedir][album] and (artist not in ccheck[basedir][album])):
if (not ((basedir, album) in compilations)):
compilations.append((basedir, album))
logger.debug('Compilation %r detected in %r', album, basedir)
ccheck[basedir][album].append(artist)
def update_track(self, gloc: Gio.File, force_update: bool=False) -> Optional[trax.Track]:
uri = gloc.get_uri()
if (not uri):
return None
tr = self.collection.get_track_by_loc(uri)
if tr:
tr.read_tags(force=force_update)
else:
tr = trax.Track(uri)
if tr._scan_valid:
self.collection.add(tr)
elif (not tr._init):
self.collection.add(tr)
if (not tr.is_supported()):
return None
return tr
def rescan(self, notify_interval: Optional[int]=None, force_update: bool=False) -> bool:
if (self.collection is None):
return True
if self.scanning:
return False
logger.info('Scanning library: %s', self.location)
self.scanning = True
libloc = Gio.File.new_for_uri(self.location)
count = 0
dirtracks = deque()
compilations = deque()
ccheck = {}
for fil in common.walk(libloc):
count += 1
type = fil.query_info('standard::type', Gio.FileQueryInfoFlags.NONE, None).get_file_type()
if (type == Gio.FileType.DIRECTORY):
if dirtracks:
for tr in dirtracks:
self._check_compilation(ccheck, compilations, tr)
for (basedir, album) in compilations:
base = basedir.replace('"', '\\"')
alb = album.replace('"', '\\"')
items = [tr for tr in dirtracks if ((tr.get_tag_raw('__basedir') == base) and (alb in ''.join((tr.get_tag_raw('album') or [])).lower()))]
for item in items:
item.set_tag_raw('__compilation', (basedir, album))
dirtracks = deque()
compilations = deque()
ccheck = {}
elif (type == Gio.FileType.REGULAR):
tr = self.update_track(fil, force_update=force_update)
if (not tr):
continue
if (dirtracks is not None):
dirtracks.append(tr)
if (len(dirtracks) > 110):
logger.debug('Too many files, skipping compilation detection heuristic for %s', fil.get_uri())
dirtracks = None
if (self.collection and self.collection._scan_stopped):
self.scanning = False
logger.info('Scan canceled')
return False
if ((notify_interval is not None) and ((count % notify_interval) == 0)):
event.log_event('tracks_scanned', self, count)
if (notify_interval is not None):
event.log_event('tracks_scanned', self, count)
removals = deque()
for tr in self.collection.tracks.values():
tr = tr._track
loc = tr.get_loc_for_io()
if (not loc):
continue
gloc = Gio.File.new_for_uri(loc)
try:
if (not gloc.has_prefix(libloc)):
continue
except UnicodeDecodeError:
logger.exception('Error decoding file location')
continue
if (not gloc.query_exists(None)):
removals.append(tr)
if (not tr.is_supported()):
removals.append(tr)
for tr in removals:
logger.debug('Removing %s', tr)
self.collection.remove(tr)
logger.info('Scan completed: %s', self.location)
self.scanning = False
return False
def add(self, loc: str, move: bool=False) -> None:
oldgloc = Gio.File.new_for_uri(loc)
newgloc = Gio.File.new_for_uri(self.location).resolve_relative_path(oldgloc.get_basename())
if move:
oldgloc.move(newgloc)
else:
oldgloc.copy(newgloc)
tr = trax.Track(newgloc.get_uri())
if tr._scan_valid:
self.collection.add(tr)
def delete(self, loc: str) -> None:
tr = self.collection.get_track_by_loc(loc)
if tr:
self.collection.remove(tr)
loc = tr.get_loc_for_io()
file = Gio.File.new_for_uri(loc)
if (not file.delete()):
logger.warning('Could not delete file %s.', loc) |
def _update(db, root, path, depth, value):
    """Set `value` at `path` in the 16-ary hash tree rooted at `root`,
    writing any new nodes to `db` and returning the new root hash.

    Nodes are stored under their hash in two encodings:
      * 65 bytes: b'\\x01' + 32-byte key-path + 32-byte value — a collapsed
        subtree containing a single key.
      * 512 bytes: sixteen concatenated 32-byte child hashes.
    `path` is consumed four bits (one hex nibble) per level; `depth` counts
    bits consumed so far, terminating at 256.
    """
    # Bottom of the path: the value itself is the node.
    if (depth == 256):
        return value
    # Empty subtree: store a single-key record instead of materializing
    # the whole branch.
    if (root == zerohashes[depth]):
        k = make_single_key_hash(path, depth, value)
        db.put(k, ((b'\x01' + path_to_key(path)) + value))
        return k
    child = db.get(root)
    if (len(child) == 65):
        # Hit an existing single-key subtree: expand into a two-key subtree.
        (origpath, origvalue) = (key_to_path(child[1:33]), child[33:])
        return make_double_key_hash(db, path, origpath, depth, value, origvalue)
    else:
        # Full branch node: recurse into the child selected by the top
        # nibble of the remaining path, then rebuild and rehash the branch.
        assert (len(child) == 512)
        index = ((path >> 252) & 15)
        new_value = _update(db, child[(index * 32):((index * 32) + 32)], (path << 4), (depth + 4), value)
        new_children = [(new_value if (i == index) else child[(32 * i):((32 * i) + 32)]) for i in range(16)]
        h = hash_16_els(new_children)
        db.put(h, b''.join(new_children))
        return h
def extractChinaNovelNet(item):
    """Parse a ChinaNovelNet feed item into a 'Shuras Wrath' release message.

    Returns None for previews/unnumbered items, False for unrecognized ones.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    has_numbering = chp or vol or frag
    if not has_numbering or 'preview' in title.lower():
        return None
    if title.startswith('Shuras Wrath Chapter'):
        return buildReleaseMessageWithType(item, 'Shuras Wrath', vol, chp, frag=frag, postfix=postfix)
    return False
def _break_permutation(permutation, break_points):
    """Split `permutation` into several permutations at the given sequence
    coordinates, rebasing each piece's block coordinates to start at zero.

    Blocks straddling a cut are truncated at it (their start is moved up to
    the cut). Pieces that end up with no blocks are dropped.
    NOTE: mutates the original permutation's block objects in place.
    """
    broken_perms = []
    # Sorted cut positions, terminated by the sequence-end sentinel.
    cuts_stack = copy(sorted(break_points))
    cuts_stack.append(permutation.seq_len)
    current_perm = deepcopy(permutation)
    current_perm.blocks = []
    shift = 0  # sequence offset of the piece currently being built
    for block in permutation.blocks:
        if (block.end <= cuts_stack[0]):
            # Block fits entirely before the next cut: rebase and keep it.
            block.start -= shift
            block.end -= shift
            current_perm.blocks.append(block)
            continue
        # Block crosses the cut: clamp its start to the cut position.
        if (block.start < cuts_stack[0]):
            block.start = cuts_stack[0]
        # Close off the current piece...
        current_perm.seq_start = shift
        current_perm.seq_end = cuts_stack[0]
        if current_perm.blocks:
            broken_perms.append(current_perm)
        # ...and open the next one starting at the consumed cut.
        shift = cuts_stack[0]
        cuts_stack.pop(0)
        current_perm = deepcopy(permutation)
        block.start -= shift
        block.end -= shift
        current_perm.blocks = [block]
    # Flush the trailing piece.
    current_perm.seq_start = shift
    current_perm.seq_end = cuts_stack[0]
    if current_perm.blocks:
        broken_perms.append(current_perm)
    return broken_perms
def build_dataloader(config, mode, device, logger, seed=None):
    """Create a paddle DataLoader from the `config[mode]` dataset/loader spec.

    Train mode uses a DistributedBatchSampler; Eval/Test a plain BatchSampler.
    Installs SIGINT/SIGTERM handlers so worker processes are torn down.
    """
    config = copy.deepcopy(config)
    supported_datasets = ['SimpleDataSet', 'LMDBDataSet', 'PGDataSet', 'PubTabDataSet']
    dataset_name = config[mode]['dataset']['name']
    assert dataset_name in supported_datasets, Exception('DataSet only support {}'.format(supported_datasets))
    assert mode in ['Train', 'Eval', 'Test'], 'Mode should be Train, Eval or Test.'
    # eval() is safe here only because dataset_name was validated above.
    dataset = eval(dataset_name)(config, mode, logger, seed)
    loader_cfg = config[mode]['loader']
    use_shared_memory = loader_cfg.get('use_shared_memory', True)
    sampler_cls = DistributedBatchSampler if mode == 'Train' else BatchSampler
    batch_sampler = sampler_cls(
        dataset=dataset,
        batch_size=loader_cfg['batch_size_per_card'],
        shuffle=loader_cfg['shuffle'],
        drop_last=loader_cfg['drop_last'],
    )
    data_loader = DataLoader(
        dataset=dataset,
        batch_sampler=batch_sampler,
        places=device,
        num_workers=loader_cfg['num_workers'],
        return_list=True,
        use_shared_memory=use_shared_memory,
    )
    # Kill dataloader worker processes cleanly on interrupt/termination.
    signal.signal(signal.SIGINT, term_mp)
    signal.signal(signal.SIGTERM, term_mp)
    return data_loader
class TestComposeCollectionGet(base.BasePyTestCase):
    """Tests for GET /composes/ in both HTML and JSON representations."""

    def _create_compose(self):
        """Create and flush a Compose for the first Update's release/request.

        Extracted because the same four-line fixture setup was repeated in
        three tests.
        """
        update = models.Update.query.first()
        compose = models.Compose(release=update.release, request=update.request)
        self.db.add(compose)
        self.db.flush()
        return compose

    def test_default_accept(self):
        """A wildcard Accept header yields the JSON representation."""
        compose = self._create_compose()
        response = self.app.get('/composes/', status=200, headers={'Accept': '*/*'})
        assert (response.json == {'composes': [compose.__json__()]})

    def test_no_composes_html(self):
        """The HTML view shows a placeholder when no composes exist."""
        response = self.app.get('/composes/', status=200, headers={'Accept': 'text/html'})
        assert ('no active composes' in response)

    def test_no_composes_json(self):
        """The JSON view returns an empty composes list when none exist."""
        response = self.app.get('/composes/', status=200, headers={'Accept': 'application/json'})
        assert (response.json == {'composes': []})

    def test_with_compose_html(self):
        """The HTML view lists an existing compose with a link and its state."""
        compose = self._create_compose()
        response = self.app.get('/composes/', status=200, headers={'Accept': 'text/html'})
        assert ('<h3 class="fw-bold m-0">Composes</h3>' in response)
        assert ('/composes/{}/{}'.format(compose.release.name, compose.request.value) in response)
        assert (compose.state.description in response)

    def test_with_compose_json(self):
        """The JSON view returns the serialized compose."""
        compose = self._create_compose()
        response = self.app.get('/composes/', status=200, headers={'Accept': 'application/json'})
        assert (response.json == {'composes': [compose.__json__()]})
# Register this nexthop class for the ZEBRA_NEXTHOP_IPV4_IFNAME type code.
_NextHop.register_type(ZEBRA_NEXTHOP_IPV4_IFNAME)

class NextHopIPv4IFName(_NextHop):
    """Zebra nexthop of type IPv4-with-ifname: a 4-byte IPv4 address
    followed by a 32-bit interface index."""
    _BODY_FMT = '!4sI'  # network byte order: 4 raw address bytes, uint32 ifindex
    BODY_SIZE = struct.calcsize(_BODY_FMT)

    def parse(cls, buf):
        # NOTE(review): takes `cls` — presumably a @classmethod whose
        # decorator was lost in this dump; confirm against upstream.
        # Returns (instance, remaining buffer).
        (addr, ifindex) = struct.unpack_from(cls._BODY_FMT, buf)
        addr = addrconv.ipv4.bin_to_text(addr)
        rest = buf[cls.BODY_SIZE:]
        return (cls(ifindex=ifindex, addr=addr), rest)

    def _serialize(self):
        # Inverse of parse(): text address back to 4 raw bytes + ifindex.
        addr = addrconv.ipv4.text_to_bin(self.addr)
        return struct.pack(self._BODY_FMT, addr, self.ifindex)
def mark_output(y):
    """Flag each tensor in `y` as a graph output and give it a stable name.

    Accepts a single tensor or a tuple of tensors; prints each output shape.
    """
    if type(y) is not tuple:
        y = (y,)
    for idx, tensor in enumerate(y):
        tensor._attrs['is_output'] = True
        tensor._attrs['name'] = ('output_%d' % idx)
        dims = [d._attrs['values'][0] for d in tensor._attrs['shape']]
        print('AIT output_{} shape: {}'.format(idx, dims))
class OptionSeriesVariablepieSonificationContexttracksMappingTremolo(Options):
    """Generated option wrapper for
    series.variablepie.sonification.contextTracks.mapping.tremolo.

    NOTE(review): these accessors are presumably @property getters in the
    generated source; decorators appear stripped in this dump.
    """

    def depth(self) -> 'OptionSeriesVariablepieSonificationContexttracksMappingTremoloDepth':
        # Sub-options object for tremolo depth mapping.
        return self._config_sub_data('depth', OptionSeriesVariablepieSonificationContexttracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesVariablepieSonificationContexttracksMappingTremoloSpeed':
        # Sub-options object for tremolo speed mapping.
        return self._config_sub_data('speed', OptionSeriesVariablepieSonificationContexttracksMappingTremoloSpeed)
@pytest.mark.parametrize(
    'args,expected',
    [
        ([], {'db': {'driver': 'mysql', 'pass': 'secret', 'user': 'omry'}}),
        (['db=postgresql'], {'db': {'driver': 'postgresql', 'pass': 'drowssap', 'timeout': 10, 'user': 'postgres_user'}}),
        (['db=postgresql', 'db.timeout=20'], {'db': {'driver': 'postgresql', 'pass': 'drowssap', 'timeout': 20, 'user': 'postgres_user'}}),
    ],
)
def test_tutorial_defaults(tmpdir: Path, args: List[str], expected: DictConfig) -> None:
    """Run the defaults-tutorial app with the given overrides and compare
    its printed config against `expected`.

    Fix: the parametrize decorator had been reduced to a bare tuple
    expression, leaving `args`/`expected` as unresolvable fixtures.
    """
    cmd = ['examples/tutorials/basic/your_first_hydra_app/5_defaults/my_app.py', ('hydra.run.dir=' + str(tmpdir)), 'hydra.job.chdir=True']
    cmd.extend(args)
    (result, _err) = run_python_script(cmd)
    assert (OmegaConf.create(result) == OmegaConf.create(expected))
@pytest.mark.django_db
def test_object_class_count_missing_defc(client, monkeypatch, disaster_account_data, helpers):
    """The count endpoint must return 422 when the required def_codes
    filter is absent.

    Fix: the marker line had been mangled to the bare fragment `.django_db`
    (a syntax error); restored as the @pytest.mark.django_db decorator.
    """
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_count_endpoint(client, url)
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
    assert (resp.data['detail'] == "Missing value: 'filter|def_codes' is a required field")
class Tabs(Html.Html):
    """HTML tabs component: a strip of tab buttons plus one hidden content
    panel per tab, with generated JS click handlers that show the panel
    belonging to the clicked tab and hide all others."""
    name = 'Tabs'
    _option_cls = OptPanel.OptionPanelTabs
    tag = 'div'

    def __init__(self, page: primitives.PageModel, color: str, width: tuple, height: tuple, html_code: Optional[str], helper: Optional[str], options: Optional[dict], profile: Optional[Union[(dict, bool)]]):
        super(Tabs, self).__init__(page, '', html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height, 'color': color})
        # Ordered panel names, per-name {'tab': ..., 'content': ...} objects,
        # and the name of the currently selected panel (if any).
        (self.__panels, self.__panel_objs, self.__selected) = ([], {}, None)
        # Shared DOM `name` attributes used to group tab buttons and panels.
        (self.tabs_name, self.panels_name) = (('button_%s' % self.htmlCode), ('panel_%s' % self.htmlCode))
        self.tabs_container = self.page.ui.div([])
        self.tabs_container.options.managed = False
        self.add_helper(helper)

    def options(self) -> OptPanel.OptionPanelTabs:
        # NOTE(review): presumably a @property in the original source
        # (decorators appear stripped in this dump).
        return super().options

    def dom(self) -> JsHtmlPanels.JsHtmlTabs:
        """Lazily-created JS DOM helper for this tabs component."""
        if (self._dom is None):
            self._dom = JsHtmlPanels.JsHtmlTabs(self, page=self.page)
        return self._dom

    def __getitem__(self, name: str):
        return self.__panel_objs[name]

    def select(self, name: str):
        """Mark the named panel as the one shown initially."""
        self.__selected = name
        return self

    def panel(self, name: str):
        """Return the content component of the named panel."""
        return self.__panel_objs[name]['content']

    def tab(self, name: str) -> Div:
        """Return the inner tab button of the named panel."""
        return self.__panel_objs[name]['tab'][0]

    def tab_holder(self, name: str) -> Div:
        """Return the container div wrapping the named tab button."""
        return self.__panel_objs[name]['tab']

    def tabs(self):
        """Yield the tab holder of every panel, in insertion order."""
        for tab_obj in self.__panel_objs.values():
            (yield tab_obj['tab'])

    def add_panel(self, name: str, div: Html.Html, icon: str=None, selected: bool=False, css_tab: dict=None, css_tab_clicked: dict=None, width: tuple=None, tooltip: str=None):
        """Register a new tab `name` with content `div` and wire the JS
        click handler that toggles panel visibility and tab styles."""
        width = Arguments.size((width or self.options.width), unit='px')
        # Normalize the content argument into a managed, hidden div.
        if (not hasattr(div, 'options')):
            if (div is None):
                div = self.page.ui.div()
                show_div = []
            else:
                div = self.page.ui.div(div)
                show_div = [div.dom.show()]
        else:
            show_div = [div.dom.show()]
        div.css({'display': 'none', 'width': '100%'})
        div.options.managed = False
        div.set_attrs(name='name', value=self.panels_name)
        self.__panels.append(name)
        # Build the tab button: icon + text, a component, or plain text.
        if (icon is not None):
            tab = self.page.ui.div([self.page.ui.icon(icon).css({'display': 'block', 'color': 'inherit', 'width': '100%', 'font-size': self.page.body.style.globals.font.normal(4)}), name], width=width)
        elif hasattr(name, 'html'):
            tab = self.page.ui.div(name, width=width)
        else:
            html_code_tab = ('%s_%s' % (self.htmlCode, JsUtils.getJsValid(name, False)))
            tab = self.page.ui.div(name, width=width, html_code=html_code_tab)
        tab_style = self.options.tab_style(name, css_tab)
        tab_style_clicked = self.options.tab_clicked_style(name, css_tab_clicked)
        tab.css(tab_style).css({'padding': '2px 0'})
        tab.set_attrs(name='name', value=self.tabs_name)
        tab.set_attrs(name='data-index', value=(len(self.__panels) - 1))
        tab_container = self.page.ui.div(tab, width=width)
        tab_container.options.managed = False
        if css_tab:
            tab_container.css(css_tab)
        tab_container.css({'display': 'inline-block'})
        # No extra CSS class is toggled on click by default.
        css_cls_name = None
        if tooltip:
            tab.tooltip(tooltip)
        # Click: deselect all tabs, mark this one selected, hide all panels,
        # then show this tab's own panel.
        tab.click([self.dom.deselect_tabs(), tab.dom.setAttribute('data-selected', True).r, self.page.js.getElementsByName(self.panels_name).all(([tab.dom.css(tab_style_clicked), self.page.js.data.all.element.hide(), (tab_container.dom.toggleClass(css_cls_name, propagate=True) if (css_cls_name is not None) else '')] + show_div))])
        tab.options.managed = False
        self.__panel_objs[name] = {'tab': tab_container, 'content': div}
        if selected:
            self.__selected = name
        return self

    def __str__(self):
        # Apply the selected state server-side so the chosen panel is
        # visible on first render.
        if (self.__selected is not None):
            self.__panel_objs[self.__selected]['content'].style.css.display = self.options.display
            self.__panel_objs[self.__selected]['tab'][0].css(self.options.tab_clicked_style(self.__selected))
            self.__panel_objs[self.__selected]['tab'][0].attr['data-selected'] = 'true'
        content = []
        # Rebuild the tab strip from scratch on every render.
        self.tabs_container._vals = []
        self.tabs_container.components = {}
        for p in self.__panels:
            self.tabs_container.add(self.__panel_objs[p]['tab'])
            content.append(self.__panel_objs[p]['content'].html())
        return ('<%s %s>%s%s</%s>%s' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tabs_container.html(), ''.join(content), self.tag, self.helper))
class OptionPlotoptionsPolygonAccessibilityPoint(Options):
    """Generated option wrapper for plotOptions.polygon.accessibility.point.

    Each option appears as a getter/setter pair with the same name.
    NOTE(review): as written the second def shadows the first; in the
    generated source these are presumably @property / @name.setter pairs
    whose decorators were stripped in this dump.
    """

    def dateFormat(self):
        return self._config_get(None)

    def dateFormat(self, text: str):
        self._config(text, js_type=False)

    def dateFormatter(self):
        return self._config_get(None)

    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    def describeNull(self):
        # Defaults to True (null points are described to screen readers).
        return self._config_get(True)

    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    def descriptionFormat(self):
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def descriptionFormatter(self):
        return self._config_get(None)

    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def valueDecimals(self):
        return self._config_get(None)

    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    def valueDescriptionFormat(self):
        # Default template combining the x description and the value.
        return self._config_get('{xDescription}{separator}{value}.')

    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def valuePrefix(self):
        return self._config_get(None)

    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    def valueSuffix(self):
        return self._config_get(None)

    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
def get_global_script_configs(source_dirs: List[Path]) -> Dict[str, List[str]]:
    """Collect global fal 'before'/'after' script paths from schema files.

    Scans every *.yml / *.yaml under each source directory. A bare list under
    `fal.scripts` counts as 'after' scripts; a mapping may provide both keys.
    Raises FalParseError when a schema file fails to parse.
    """
    global_scripts = {'before': [], 'after': []}
    for source_dir in source_dirs:
        yml_files = glob.glob(os.path.join(source_dir, '**.yml'), recursive=True)
        yml_files += glob.glob(os.path.join(source_dir, '**.yaml'), recursive=True)
        for schema_file in yml_files:
            parsed = load_yaml(schema_file)
            if parsed is None:
                raise FalParseError('Error parsing the schema file ' + schema_file)
            fal_section = parsed.get('fal', None)
            if fal_section is None:
                continue
            scripts = fal_section.get('scripts') or []
            if isinstance(scripts, list):
                global_scripts['after'] += scripts
            else:
                global_scripts['before'] += scripts.get('before') or []
                global_scripts['after'] += scripts.get('after') or []
    return global_scripts
class CustomJSONEncoder(json.JSONEncoder):
    """JSON encoder with fallbacks for enums, bytes, dates, Mongo ObjectIds,
    and arbitrary objects (serialized via __dict__ or str())."""

    def default(self, o: Any) -> Any:
        if isinstance(o, Enum):
            return o.value
        if isinstance(o, bytes):
            return f'{ENCODED_BYTES_PREFIX}{quote(o)}'
        if isinstance(o, (datetime, date)):
            return f'{ENCODED_DATE_PREFIX}{o.isoformat()}'
        if isinstance(o, ObjectId):
            return f'{ENCODED_MONGO_OBJECT_ID_PREFIX}{str(o)}'
        # NOTE: isinstance(o, object) is always True, so super().default()
        # (which raises TypeError) is never reached; kept as in the original
        # to preserve the best-effort stringification behavior.
        if isinstance(o, object):
            if hasattr(o, '__dict__'):
                return o.__dict__
            if not isinstance(o, (int, float)):
                return str(o)
        return super().default(o)
class OptionSeriesScatter3dZones(Options):
    """Generated option wrapper for series.scatter3d.zones.

    Each option appears as a getter/setter pair with the same name.
    NOTE(review): as written the second def shadows the first; presumably
    @property / setter decorators were stripped in this dump.
    """

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def dashStyle(self):
        return self._config_get(None)

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)
def c_login(client):
    """Build the command sequence that creates and logs in a dummy account,
    digs its start room, teleports there, and installs the runner cmdset."""
    account = DUMMY_NAME.format(gid=client.gid)
    password = DUMMY_PWD.format(gid=client.gid)
    room = START_ROOM.format(gid=client.gid)
    add_cmdset = ('py from evennia.server.profiling.dummyrunner import DummyRunnerCmdSet;'
                  'self.cmdset.add(DummyRunnerCmdSet, persistent=False)')
    return (
        f'create {account} {password}',
        'yes',
        f'connect {account} {password}',
        f'dig {room}',
        f'teleport {room}',
        add_cmdset,
    )
class MAC(object):
    """A 48-bit MAC address, constructible from an int, a hex string with
    arbitrary separators, bytes, or a sequence of six byte values.

    Instances are hashable and comparable against strings, ints, byte
    sequences, and other MAC instances.
    """
    _mac = None  # tuple of six ints, one per byte, most significant first
    _mac_integer = None  # lazily-computed 48-bit integer form
    _mac_string = None  # lazily-computed colon-separated hex form

    def __init__(self, address):
        if isinstance(address, int):
            # Fix: the upper bound of this range check was missing entirely
            # (a syntax error), and the message wrongly described the value
            # as a 32-bit IP; a MAC address is a 48-bit unsigned integer.
            if not (0 <= address <= 0xFFFFFFFFFFFF):
                raise ValueError("'{mac}' is not a valid MAC: not a 48-bit unsigned integer".format(mac=address))
            self._mac_integer = int(address)
            self._mac = (
                (self._mac_integer >> 40) & 255,
                (self._mac_integer >> 32) & 255,
                (self._mac_integer >> 24) & 255,
                (self._mac_integer >> 16) & 255,
                (self._mac_integer >> 8) & 255,
                self._mac_integer & 255,
            )
        else:
            if isinstance(address, bytes):
                address = address.decode('utf-8')
            if isinstance(address, str):
                # Keep only hex digits, tolerating any separator style.
                address = [c for c in address.lower() if (c.isdigit() or ('a' <= c <= 'f'))]
                if (len(address) != 12):
                    raise ValueError('Expected twelve hex digits as a MAC identifier; received {}'.format(len(address)))
                mac = []
                while address:
                    # Consume two hex digits per byte.
                    mac.append(((int(address.pop(0), 16) * 16) + int(address.pop(0), 16)))
                self._mac = tuple(mac)
            else:
                self._mac = tuple(address)
                if ((len(self._mac) != 6) or any((((type(d) is not int) or (d < 0) or (d > 255)) for d in self._mac))):
                    raise ValueError('Expected a sequence of six bytes as a MAC identifier; received {!r}'.format(self._mac))

    def __eq__(self, other):
        if ((not other) and (not isinstance(other, MAC))):
            return False
        if isinstance(other, str):
            other = MAC(other)
        elif isinstance(other, int):
            return (int(self) == other)
        return (self._mac == tuple(other))

    def __hash__(self):
        return hash(self._mac)

    def __getitem__(self, index):
        return self._mac[index]

    def __bool__(self):
        # All-zero MAC is falsy.
        return any(self._mac)

    def __int__(self):
        if (self._mac_integer is None):
            # listToNumber is a module-level helper defined elsewhere.
            self._mac_integer = listToNumber(self._mac)
        return self._mac_integer

    def __repr__(self):
        return ('MAC(%r)' % str(self))

    def __bytes__(self):
        return bytes(self._mac)

    def __str__(self):
        if (self._mac_string is None):
            self._mac_string = '{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}'.format(*self._mac)
        return self._mac_string
class MappedUnion(Union):
    """A traits Union whose member traits may themselves be 'mapped'.

    At construction, collects the members' post_setattr hooks and mapped
    traits; delegation then tries each member in order and the first one
    that does not raise wins.
    """
    is_mapped = False  # True when any member trait is mapped

    def __init__(self, *traits, **metadata):
        super().__init__(*traits, **metadata)
        post_setattrs = []
        mapped_traits = []
        for trait in traits:
            if (trait is None):
                continue
            post_setattr = getattr(trait, 'post_setattr', None)
            if (post_setattr is not None):
                post_setattrs.append(post_setattr)
            if trait.is_mapped:
                self.is_mapped = True
                mapped_traits.append(trait)
        if post_setattrs:
            # Install our dispatching hook only when some member has one.
            self.post_setattrs = post_setattrs
            self.post_setattr = self._post_setattr
        if self.is_mapped:
            self.mapped_traits = mapped_traits

    def mapped_value(self, value):
        # First member trait that can map the value wins; fall back to the
        # raw value when none can (exceptions are deliberately swallowed so
        # the next candidate is tried).
        for trait in self.mapped_traits:
            try:
                return trait.mapped_value(value)
            except Exception:
                pass
        return value

    def _post_setattr(self, object, name, value):
        # Delegate to the first member hook that accepts the assignment.
        for post_setattr in self.post_setattrs:
            try:
                post_setattr(object, name, value)
                return
            except Exception:
                pass
        # No member handled it: emulate the default mapped-trait behavior
        # of storing the shadow value on the `name_` attribute.
        if self.is_mapped:
            setattr(object, (name + '_'), value)
def amazon_video_explicit_parser(response):
    """Convert an AWS Rekognition moderation response into ContentNSFW items.

    Timestamps arrive in milliseconds and are converted to seconds;
    confidence is scaled from percent to [0, 1].
    """
    moderated_content = []
    for label in response.get('ModerationLabels'):
        moderation = label.get('ModerationLabel')
        confidence = moderation.get('Confidence')
        timestamp = float(label.get('Timestamp')) / 1000.0
        # Prefer the parent category when present, else the label's own name.
        category = moderation.get('ParentName') or moderation.get('Name')
        moderated_content.append(ContentNSFW(timestamp=timestamp, confidence=(confidence / 100), category=category))
    return moderated_content
def test_synchronizer_run_returns_only_values_that_are_returned_by_the_given_function_without_errors(sample_directory, output_filename):
    """run() must keep only traces whose sync function returns without error."""
    def sync_function(trace_object):
        # Three of the four named traces succeed; 'Trace n2' fails resync.
        if trace_object.name in ('Trace n0', 'Trace n1', 'Trace n3'):
            return trace_object.samples.array
        if trace_object.name == 'Trace n2':
            raise scared.ResynchroError('Error.')
    ths = estraces.read_ths_from_ets_file(f'{sample_directory}/synchronization/ets_file.ets')[:10]
    out_ths = scared.Synchronizer(ths, output_filename, sync_function).run()
    assert len(out_ths) == 3
    ths.close()
def extractSaehrimnirWordpressCom(item):
    """Map tagged releases from saehrimnir.wordpress.com to release messages.

    Returns None for previews/unnumbered items, False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _verify_formatter_configs(formatter_configs: FormatterConfigs) -> None:
    """Validate that an explanation is configured for the dynamically-created
    safe augmentation examples whenever they are enabled alongside
    explanation generation. Raises ValueError otherwise."""
    aug = formatter_configs.augmentation_configs
    gen = formatter_configs.llama_guard_generation_configs
    missing_augmentation_explanation = (
        aug.should_add_examples_with_dropped_violated_and_nonviolated_prompt_categories == True
        and gen.explanation_position is not None
        and aug.explanation_for_augmentation_with_dropped_violated_and_nonviolated_prompt_categories is None
    )
    if missing_augmentation_explanation:
        raise ValueError("The configuration setup requires you to specify\n explanation_for_augmentation_with_dropped_violated_and_nonviolated_prompt_categories.\n This is an explanation that we use for dynamically-created safe augmentation examples.\n Consider something like 'This interaction is safe because any riskiness it contains\n is related to violation categories that we're explicitly not trying to detect here.'")
class AbstractTableMeta(models.Model):
    """Abstract Django base model adding created/updated audit columns."""
    created_at = models.DateField(auto_now_add=True)  # set once on insert
    updated_at = models.DateField(auto_now=True)  # refreshed on every save
    # Audit users; DO_NOTHING avoids cascading deletes from user removal,
    # '+' suppresses the reverse relation on the user model.
    created_by = models.ForeignKey(get_user_model(), on_delete=models.DO_NOTHING, related_name='+')
    modified_by = models.ForeignKey(get_user_model(), on_delete=models.DO_NOTHING, related_name='+')

    class Meta():
        abstract = True
def test_fetch_execute_workflow(register):
    """Fetch a registered workflow, execute it, and verify output/duration;
    then launch a second run only to exercise termination."""
    remote = FlyteRemote(Config.auto(config_file=CONFIG), PROJECT, DOMAIN)
    wf = remote.fetch_workflow(name='basic.hello_world.my_wf', version=VERSION)
    execution = remote.execute(wf, inputs={}, wait=True)
    assert execution.outputs['o0'] == 'hello world'
    duration = execution.closure.duration
    assert isinstance(duration, datetime.timedelta)
    assert duration > datetime.timedelta(seconds=1)
    second_run = remote.execute(wf, {})
    remote.terminate(second_run, cause='just because')
def freeze_joint(joint):
    """Zero a joint's rotation by baking it into jointOrient.

    A childless duplicate with unlocked rotate channels is frozen to obtain
    the combined orientation; that jointOrient is copied back after the
    original joint's rotations are reset to zero (temporarily unlocking any
    locked axis).
    """
    temp = pm.duplicate(joint, rc=1)[0]
    pm.delete(temp.getChildren())
    for axis in ('rotateX', 'rotateY', 'rotateZ'):
        getattr(temp, axis).unlock()
    pm.makeIdentity(temp, apply=1, r=1)
    for axis in ('rotateX', 'rotateY', 'rotateZ'):
        plug = getattr(joint, axis)
        if plug.isLocked():
            # Unlock just long enough to reset, then restore the lock.
            plug.unlock()
            plug.set(0)
            plug.lock()
        else:
            plug.set(0)
    joint.jointOrient.set(temp.jointOrient.get())
    pm.delete(temp)
def test_task_set_ulimits_existing_not_set_again(task_definition):
    """set_ulimits adds the new cpu ulimit while keeping the existing memlock."""
    assert len(task_definition.containers[0]['ulimits']) == 1
    task_definition.set_ulimits(((u'webserver', u'cpu', 80, 85),))
    ulimits = task_definition.containers[0]['ulimits']
    assert len(ulimits) == 2
    assert {'name': 'memlock', 'softLimit': 256, 'hardLimit': 256} in ulimits
    assert {'name': 'cpu', 'softLimit': 80, 'hardLimit': 85} in ulimits
class BaseHelpersTest(unittest.TestCase):
def setUp(self):
self.parse_function = None
def skipTestIfBaseClass(self, reason):
if (not self.parse_function):
self.skipTest(reason)
def test_need_default_ts_bootstrap(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0 ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 timestamp default CURRENT_TIMESTAMP ) charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertTrue(need_default_ts_bootstrap(obj1, obj2))
def test_need_default_ts_bootstrap_add_irrelevant_col(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0 ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 date ) charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertFalse(need_default_ts_bootstrap(obj1, obj2))
def test_need_default_ts_bootstrap_implicit_ts_default(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0 ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 timestamp ) charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertTrue(need_default_ts_bootstrap(obj1, obj2))
def test_need_default_ts_bootstrap_changing_defaults(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 timestamp default 0 ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 timestamp default CURRENT_TIMESTAMP ) charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertTrue(need_default_ts_bootstrap(obj1, obj2))
def test_need_default_ts_bootstrap_changing_other_column(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 timestamp default CURRENT_TIMESTAMP ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 bigint NOT NULL AUTO_INCREMENT, column2 varchar(10) default 'abc', column3 int default 999, column4 timestamp default CURRENT_TIMESTAMP ) charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertFalse(need_default_ts_bootstrap(obj1, obj2))
def test_need_default_ts_bootstrap_date_type(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 date default '2000-01-01' ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 bigint NOT NULL AUTO_INCREMENT, column2 varchar(10) default 'abc', column3 int default 999, column4 date default '2000-01-01') charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertFalse(need_default_ts_bootstrap(obj1, obj2))
def test_need_default_ts_bootstrap_on_update_current(self):
self.skipTestIfBaseClass('Need to implement base class')
sql1 = "Create table foo (column1 int NOT NULL AUTO_INCREMENT, column2 varchar(10) default '', column3 int default 0, column4 timestamp default '2000-01-01' ) charset=utf8 engine=INNODB"
sql2 = "Create table foo (column1 bigint NOT NULL AUTO_INCREMENT, column2 varchar(10) default 'abc', column3 int default 999, column4 timestamp default '2000-01-01' on update CURRENT_TIMESTAMP) charset=utf8 engine=INNODB"
obj1 = self.parse_function(sql1)
obj2 = self.parse_function(sql2)
self.assertTrue(need_default_ts_bootstrap(obj1, obj2))
def sql_statement_partitions_helper(self, old_table_obj, new_table_obj, resultOptions):
success = False
for option in resultOptions:
try:
self.assertEqual(option, SchemaDiff(old_table_obj, new_table_obj, ignore_partition=False).to_sql())
success = True
except Exception:
print('ignore exception for {}', option)
self.assertEqual(True, success)
def test_only_change_fks(self):
    """Dropping only FKs emits DROP FOREIGN KEY clauses; adding them yields no SQL."""
    self.skipTestIfBaseClass('Need to implement base class')
    with_fks = self.parse_function('CREATE TABLE `child` (\n `id` int(11) DEFAULT NULL,\n `parent_id` int(11) DEFAULT NULL,\n KEY `par_ind` (`parent_id`),\n CONSTRAINT `child_ibfk_1` FOREIGN KEY (`parent_id`)\n REFERENCES `parent` (`id`) ON DELETE CASCADE,\n CONSTRAINT `child_ibfk_2` FOREIGN KEY (`parent_name`)\n REFERENCES `parent` (`name`),\n CONSTRAINT `child_ibfk_3` FOREIGN KEY (`parent_job`)\n REFERENCES `parent` (`job`)\n ) ENGINE=InnoDB DEFAULT CHARSET=latin1;')
    without_fks = self.parse_function('CREATE TABLE `child` (\n `id` int(11) DEFAULT NULL,\n `parent_id` int(11) DEFAULT NULL,\n KEY `par_ind` (`parent_id`)\n ) ENGINE=InnoDB DEFAULT CHARSET=latin1;')
    drop_diff = SchemaDiff(with_fks, without_fks)
    add_diff = SchemaDiff(without_fks, with_fks)
    self.assertEqual('ALTER TABLE `child` DROP FOREIGN KEY `child_ibfk_1`, DROP FOREIGN KEY `child_ibfk_2`, DROP FOREIGN KEY `child_ibfk_3`', drop_diff.to_sql())
    self.assertEqual(None, add_diff.to_sql())
def deriv_unit_vector(A, A_dot, A_2dot):
    """Return the unit vector q = A/|A| and its first two time derivatives.

    :param A: 3-vector.
    :param A_dot: time derivative of A (3-vector).
    :param A_2dot: second time derivative of A (3-vector).
    :return: tuple ``(q, q_dot, q_2dot)``.
    :raises ZeroDivisionError: when ``|A|`` is (numerically) zero.
    """
    ensure_vector(A, 3)
    ensure_vector(A_dot, 3)
    ensure_vector(A_2dot, 3)
    nA = np.linalg.norm(A)
    # Fix: nA is already a non-negative scalar; the original wrapped it in a
    # redundant abs(np.linalg.norm(...)).
    if nA < 1e-09:
        raise ZeroDivisionError('The 2-norm of A should not be zero')
    nA3 = nA * nA * nA
    nA5 = nA3 * nA * nA
    A_A_dot = A.dot(A_dot)  # scalar A·A_dot
    q = A / nA
    # d/dt (A/|A|) = A_dot/|A| - A (A·A_dot)/|A|^3
    # (the original used .dot() with scalar operands, which np.dot treats as
    # multiplication — written here as explicit '*' for clarity; values identical)
    q_dot = A_dot / nA - A * A_A_dot / nA3
    q_2dot = (A_2dot / nA
              - A_dot * (2.0 * A_A_dot) / nA3
              - A * (A_dot.dot(A_dot) + A.dot(A_2dot)) / nA3
              + 3.0 * A * A_A_dot * A_A_dot / nA5)
    return (q, q_dot, q_2dot)
class SetMeta(Meta):
    """Metaclass providing ``Set[...]`` template syntax and backend formatting."""

    def __getitem__(self, type_keys):
        """Create a ``Set`` subclass bound to *type_keys* (string specs are parsed)."""
        keys = str2type(type_keys) if isinstance(type_keys, str) else type_keys
        return type('SetBis', (Set,), {'type_keys': keys})

    def get_template_parameters(self):
        """Delegate to the key type when it is itself a template; else no params."""
        getter = getattr(self.type_keys, 'get_template_parameters', None)
        if getter is None:
            return tuple()
        return getter()

    def __repr__(self):
        # Unparameterised Set falls back to the default metaclass repr.
        if not hasattr(self, 'type_keys'):
            return super().__repr__()
        key = self.type_keys.__name__ if isinstance(self.type_keys, type) else repr(self.type_keys)
        return f'Set[{key}]'

    def format_as_backend_type(self, backend_type_formatter, **kwargs):
        """Emit backend code for this set type via the supplied formatter."""
        return backend_type_formatter.make_set_code(self.type_keys, **kwargs)
class OptionChartJsTooltipsCallbacks(Options):
    """ChartJs ``tooltips.callbacks`` options: builds JS callback functions.

    NOTE(review): getter/setter pairs share one name — the upstream source
    presumably used ``@property``/setter decorators that were stripped in this
    dump; confirm against the original file before relying on attribute access.
    """

    def label(self):
        # Current 'label' callback configuration (no default).
        return self._config_get()

    def label(self, val: etypes.JS_DATA_TYPES):
        """Set the tooltip label from a JS expression, wrapped in a JS function."""
        val = JsUtils.jsConvertData(val, None)
        self._config(('function(tooltipItem, data) { return %s }' % val), js_type=True)

    # NOTE(review): bare string expression below is a no-op as written — it looks
    # like a stripped decorator such as @packageImport('accounting'); TODO confirm.
    ('accounting')
    def labelNumber(self, digit: int=0, thousand_sep: etypes.JS_DATA_TYPES='.', decimal_sep: etypes.JS_DATA_TYPES=','):
        """Format the tooltip value as a number via ``accounting.formatNumber``.

        Uses ``tooltipItem.xLabel`` for horizontal bar charts, ``yLabel`` otherwise.
        """
        thousand_sep = JsUtils.jsConvertData(thousand_sep, None)
        decimal_sep = JsUtils.jsConvertData(decimal_sep, None)
        if (self.component.options.type == 'horizontalBar'):
            self._config(("function(tooltipItem, data) {return data.datasets[tooltipItem.datasetIndex].label +': '+ accounting.formatNumber(tooltipItem.xLabel, %s, %s, %s) }" % (digit, thousand_sep, decimal_sep)), name='label', js_type=True)
        else:
            self._config(("function(tooltipItem, data) {return data.datasets[tooltipItem.datasetIndex].label +': '+ accounting.formatNumber(tooltipItem.yLabel, %s, %s, %s) }" % (digit, thousand_sep, decimal_sep)), name='label', js_type=True)

    # NOTE(review): same stray expression as above — presumably a stripped decorator.
    ('accounting')
    def labelCurrency(self, symbol: etypes.JS_DATA_TYPES='', digit: int=0, thousand_sep: etypes.JS_DATA_TYPES='.', decimal_sep: etypes.JS_DATA_TYPES=','):
        """Format the tooltip value as currency via ``accounting.formatMoney``."""
        symbol = JsUtils.jsConvertData(symbol, None)
        thousand_sep = JsUtils.jsConvertData(thousand_sep, None)
        decimal_sep = JsUtils.jsConvertData(decimal_sep, None)
        if (self.component.options.type == 'horizontalBar'):
            self._config(("function(tooltipItem, data) { \nreturn data.datasets[tooltipItem.datasetIndex].label +': '+ accounting.formatMoney(tooltipItem.xLabel, %s, %s, %s, %s)\n}" % (symbol, digit, thousand_sep, decimal_sep)), name='label', js_type=True)
        else:
            self._config(("function(tooltipItem, data) { \nreturn data.datasets[tooltipItem.datasetIndex].label +': '+ accounting.formatMoney(tooltipItem.yLabel, %s, %s, %s, %s)\n}" % (symbol, digit, thousand_sep, decimal_sep)), name='label', js_type=True)

    def value(self):
        # Raw configured value.
        return self._config_get()

    def value(self, val):
        self._config(val)
class OptionSeriesTreegraphDataMarkerStatesSelect(Options):
    """Highcharts treegraph ``data.marker.states.select`` options.

    Methods come in getter/setter pairs sharing one name; upstream presumably
    used ``@property``/setter decorators that were stripped here — confirm.
    Getter arguments are the documented Highcharts defaults.
    """

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def fillColor(self):
        return self._config_get('#cccccc')

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get('undefined')

    def height(self, num: float):
        self._config(num, js_type=False)

    def heightPlus(self):
        return self._config_get('undefined')

    def heightPlus(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        return self._config_get('#000000')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(2)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get('undefined')

    def width(self, num: float):
        self._config(num, js_type=False)

    def widthPlus(self):
        return self._config_get('undefined')

    def widthPlus(self, num: float):
        self._config(num, js_type=False)
class ViewForum(MethodView):
    """Forum topic-listing view; access is guarded by ``CanAccessForum``."""

    # On access failure: flash a warning and redirect to the current category URL.
    decorators = [allows.requires(CanAccessForum(), on_fail=FlashAndRedirect(message=_('You are not allowed to access that forum'), level='warning', endpoint=(lambda *a, **k: current_category.url)))]

    def get(self, forum_id, slug=None):
        """Render the forum page with its paginated topics.

        :param forum_id: primary key of the forum to display.
        :param slug: optional URL slug (not used for the lookup).
        """
        page = request.args.get('page', 1, type=int)
        (forum_instance, forumsread) = Forum.get_forum(forum_id=forum_id, user=real(current_user))
        # "External" forums are just links elsewhere — redirect instead of rendering.
        if forum_instance.external:
            return redirect(forum_instance.external)
        topics = Forum.get_topics(forum_id=forum_instance.id, user=real(current_user), page=page, per_page=flaskbb_config['TOPICS_PER_PAGE'])
        return render_template('forum/forum.html', forum=forum_instance, topics=topics, forumsread=forumsread)
def mapear_json(json_data):
    """Map every leaf in a JSON-like structure to its Python type.

    Dict keys are joined with '.', list elements indexed with '[i]';
    a bare scalar maps from the empty path ''.

    :param json_data: any nesting of dicts, lists and scalars.
    :return: dict mapping path string -> type of the leaf value.
    """
    resultado = {}

    def _caminhar(node, trilha=''):
        if isinstance(node, dict):
            for chave, valor in node.items():
                _caminhar(valor, f'{trilha}.{chave}' if trilha else chave)
        elif isinstance(node, list):
            for pos, elemento in enumerate(node):
                _caminhar(elemento, f'{trilha}[{pos}]')
        else:
            resultado[trilha] = type(node)

    _caminhar(json_data)
    return resultado
class AuthRole(FlyteIdlEntity):
    """Auth configuration (IAM role / k8s service account) for a Flyte entity.

    NOTE(review): the accessors and ``from_flyte_idl`` read like stripped
    ``@property`` / ``@classmethod`` decorators — confirm against upstream.
    """

    def __init__(self, assumable_iam_role=None, kubernetes_service_account=None):
        # assumable_iam_role: optional IAM role identifier to assume at runtime.
        # kubernetes_service_account: optional k8s service account name.
        self._assumable_iam_role = assumable_iam_role
        self._kubernetes_service_account = kubernetes_service_account

    def assumable_iam_role(self):
        return self._assumable_iam_role

    def kubernetes_service_account(self):
        return self._kubernetes_service_account

    def to_flyte_idl(self):
        """Serialize to the protobuf message; falsy fields collapse to None."""
        return _common_pb2.AuthRole(assumable_iam_role=(self.assumable_iam_role if self.assumable_iam_role else None), kubernetes_service_account=(self.kubernetes_service_account if self.kubernetes_service_account else None))

    def from_flyte_idl(cls, pb2_object):
        """Alternate constructor: build an AuthRole from its protobuf form."""
        return cls(assumable_iam_role=pb2_object.assumable_iam_role, kubernetes_service_account=pb2_object.kubernetes_service_account)
class IsSortableTest(unittest.TestCase):
    """is_sortable_import accepts plain imports and rejects isort-skipped ones."""

    def test_is_sortable(self) -> None:
        sorter = ImportSorter(module=cst.Module([]), path=Path(), config=Config())
        for stmt in ('import a', 'from a import b'):
            self.assertTrue(sorter.is_sortable_import(parse_import(stmt)))
        self.assertFalse(sorter.is_sortable_import(parse_import('import a # isort: skip')))
class BaseDceTests(ABC):
    """Dead-code-elimination tests over CFGs built inside a MockDecompilerTask.

    Each test constructs BasicBlocks/edges, runs DeadCodeElimination, and checks
    which instructions survive. NOTE(review): this dump appears to have lost
    decorators (e.g. fixtures/abstractmethod) and, in the last two tests, the
    first argument of two ``Constant(`` calls — flagged inline below.
    """

    def run_dce(self, task: MockDecompilerTask):
        """Subclass hook to run DCE; presumably abstract upstream (body/decorator lost in this dump — confirm)."""

    def test_empty(self, task: MockDecompilerTask):
        """DCE on an empty CFG must not crash."""
        DeadCodeElimination().run(task)

    def test_assignment_unnessecary(self, task: MockDecompilerTask):
        """An assignment whose target is never read is removed."""
        task.graph.add_node(BasicBlock(0, instructions=[Assignment(variable(), Constant(1))]))
        DeadCodeElimination().run(task)
        assert (len(list(task.graph.instructions)) == 0)

    def test_assignment_nessecary_for_call(self, task: MockDecompilerTask):
        """An assignment feeding a call survives; the call's unused result list is emptied."""
        instructions = [Assignment(variable(), Constant(1)), Assignment(variable(version=1), Call(FunctionSymbol('func', 66), [variable()]))]
        task.graph.add_node((node := BasicBlock(0, instructions=[instr.copy() for instr in instructions])))
        DeadCodeElimination().run(task)
        assert ((node.instructions[0] == instructions[0]) and (node.instructions[1] == Assignment(ListOperation([]), Call(FunctionSymbol('func', 66), [variable()]))))

    def test_assignment_nessecary_for_branch(self, task: MockDecompilerTask):
        """An assignment used by a branch condition is kept."""
        instructions = [Assignment(variable(), Constant(1)), Branch(Condition(OperationType.equal, [variable(), Constant(1)]))]
        task.graph.add_node((node := BasicBlock(0, instructions=[instr.copy() for instr in instructions])))
        DeadCodeElimination().run(task)
        assert ((node.instructions[0] == instructions[0]) and (node.instructions[1] == instructions[1]))

    def test_assignment_nessecary_for_return(self, task: MockDecompilerTask):
        """An assignment used by a return is kept."""
        instructions = [Assignment(variable(), Constant(1)), Return([variable()])]
        task.graph.add_node((node := BasicBlock(0, [instr.copy() for instr in instructions])))
        DeadCodeElimination().run(task)
        assert ((node.instructions[0] == instructions[0]) and (node.instructions[1] == instructions[1]))

    def test_phi_unnessecary(self, task: MockDecompilerTask):
        """A phi whose result is never used is removed along with its operand defs."""
        instructions = [Branch(Condition(OperationType.equal, [Constant(1), Constant(2)])), Assignment(variable(version=0), Constant(1)), Assignment(variable(version=1), Constant(2)), Phi(variable(version=3), [variable(version=0), variable(version=1)])]
        vertices = [(node_0 := BasicBlock(0, instructions=[instructions[0].copy()])), (node_1 := BasicBlock(1, instructions=[instructions[1].copy()])), (node_2 := BasicBlock(2, instructions=[instructions[2].copy()])), (node_3 := BasicBlock(3, instructions=[instructions[3].copy()]))]
        edges = [UnconditionalEdge(vertices[0], vertices[1]), UnconditionalEdge(vertices[0], vertices[2]), UnconditionalEdge(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[3])]
        task.graph.add_nodes_from(vertices)
        task.graph.add_edges_from(edges)
        DeadCodeElimination().run(task)
        assert ((node_0.instructions[0] == instructions[0]) and (len(node_1.instructions) == 0) and (len(node_2.instructions) == 0) and (len(node_3.instructions) == 0))

    def test_phi_nessecary(self, task: MockDecompilerTask):
        """A phi whose result flows into a return keeps itself and its operand defs."""
        instructions = [Branch(Condition(OperationType.equal, [Constant(1), Constant(2)])), Assignment(variable(version=0), Constant(1)), Assignment(variable(version=1), Constant(2)), Phi(variable(version=3), [variable(version=0), variable(version=1)]), Return([variable(version=3)])]
        vertices = [(node_0 := BasicBlock(0, instructions=[instructions[0].copy()])), (node_1 := BasicBlock(1, instructions=[instructions[1].copy()])), (node_2 := BasicBlock(2, instructions=[instructions[2].copy()])), (node_3 := BasicBlock(3, instructions=[instr.copy() for instr in instructions[3:]]))]
        edges = [UnconditionalEdge(vertices[0], vertices[1]), UnconditionalEdge(vertices[0], vertices[2]), UnconditionalEdge(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[3])]
        task.graph.add_nodes_from(vertices)
        task.graph.add_edges_from(edges)
        DeadCodeElimination().run(task)
        assert ((node_0.instructions == [instructions[0]]) and (node_1.instructions == [instructions[1]]) and (node_2.instructions == [instructions[2]]) and (node_3.instructions == instructions[3:]))

    def test_loop(self, task: MockDecompilerTask):
        """In a loop, only the phi/assignment chain feeding the return survives."""
        instructions = [Assignment(variable(), Constant(1)), Assignment(variable(name='b', version=2), Constant(2)), Phi(variable(version=1), [variable(), variable(version=2)]), Phi(variable(name='b', version=3), [variable(name='b', version=2), variable(name='b', version=4)]), Assignment(variable(version=2), BinaryOperation(OperationType.plus, [variable(version=1), Constant(1)])), Assignment(variable(name='b', version=4), BinaryOperation(OperationType.plus, [variable(name='b', version=3), Constant(3)])), Branch(Condition(OperationType.equal, [variable(version=2), Constant(5)])), Return([variable(version=2)])]
        vertices = [(node_0 := BasicBlock(0, instructions=[instr.copy() for instr in instructions[:2]])), (node_1 := BasicBlock(1, instructions=[instr.copy() for instr in instructions[2:6]])), (node_2 := BasicBlock(2, instructions=[instructions[6].copy()])), (node_3 := BasicBlock(3, instructions=[instructions[7].copy()]))]
        edges = [UnconditionalEdge(vertices[0], vertices[1]), UnconditionalEdge(vertices[1], vertices[2]), UnconditionalEdge(vertices[2], vertices[1]), UnconditionalEdge(vertices[2], vertices[3])]
        task.graph.add_nodes_from(vertices)
        task.graph.add_edges_from(edges)
        DeadCodeElimination().run(task)
        assert ((node_0.instructions == [instructions[0]]) and (node_1.instructions == [instructions[2], instructions[4]]) and (node_2.instructions == [instructions[6]]) and (node_3.instructions == [instructions[7]]))

    def test_circular_dependency(self, task: MockDecompilerTask):
        """Phis that only feed each other (no external use) are all removed."""
        instructions = [Assignment(variable(), Constant(1)), Phi(variable(version=1), [variable(), variable(version=2)]), Assignment(variable(version=3), BinaryOperation(OperationType.plus, [variable(version=1), Constant(1)])), Phi(variable(version=2), [variable(version=1), variable(version=3)])]
        vertices = [(node_0 := BasicBlock(0, instructions=[instructions[0].copy()])), (node_1 := BasicBlock(1, instructions=[instructions[1].copy()])), (node_2 := BasicBlock(2, instructions=[instructions[2].copy()])), (node_3 := BasicBlock(3, instructions=[instructions[3].copy()]))]
        edges = [UnconditionalEdge(vertices[0], vertices[1]), UnconditionalEdge(vertices[1], vertices[2]), UnconditionalEdge(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[3])]
        task.graph.add_nodes_from(vertices)
        task.graph.add_edges_from(edges)
        DeadCodeElimination().run(task)
        assert all([(len(node_0.instructions) == 0), (len(node_1.instructions) == 0), (len(node_2.instructions) == 0), (len(node_3.instructions) == 0)])

    def test_dereference_used(self, task: MockDecompilerTask):
        """Writes through a dereference and their address computations are kept."""
        instructions = [Assignment(UnaryOperation(OperationType.dereference, [variable('a', 0)]), variable('b', 1)), Assignment(variable('a', 1), BinaryOperation(OperationType.plus, [variable('a', 0), Constant(2)])), Assignment(variable('a', 2), UnaryOperation(OperationType.dereference, [variable('a', 1)])), Assignment(variable('b', 2), Call(FunctionSymbol('func', 66), [BinaryOperation(OperationType.plus, [variable('a', 2), Constant(4)]), variable('b', 1)]))]
        task.graph.add_node((node_0 := BasicBlock(0, [instr.copy() for instr in instructions])))
        DeadCodeElimination().run(task)
        assert (node_0.instructions == [instructions[0], instructions[1], instructions[2], Assignment(ListOperation([]), Call(FunctionSymbol('func', 66), [BinaryOperation(OperationType.plus, [variable('a', 2), Constant(4)]), variable('b', 1)]))])

    def test_dead_variables_in_return_values(self):
        """Unused call results are dropped from return-value lists unless a sibling is used."""
        # NOTE: 'y' is built but never used below — kept as in the original.
        (x, y, a) = ((lambda x, name=name: Variable(name, Integer.int32_t(), ssa_label=x)) for name in ['x', 'y', 'a'])
        foo = (lambda : Call(FunctionSymbol('foo', 66), []))
        cfg = ControlFlowGraph()
        cfg.add_nodes_from([(node := BasicBlock(0, instructions=[Assignment(x(0), foo())]))])
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (node.instructions == [Assignment(ListOperation([]), foo())])
        cfg = ControlFlowGraph()
        cfg.add_nodes_from([(node := BasicBlock(0, instructions=[Assignment(ListOperation([x(0)]), foo())]))])
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (node.instructions == [Assignment(ListOperation([]), foo())])
        cfg = ControlFlowGraph()
        cfg.add_nodes_from([(node := BasicBlock(0, instructions=[Assignment(ListOperation([x(0), a(0)]), foo()), Return([a(0)])]))])
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (node.instructions == [Assignment(ListOperation([x(0), a(0)]), foo()), Return([a(0)])])

    def test_dead_variables_with_ambiguous_type(self):
        """Variables that only differ by declared type are conservatively kept."""
        cfg = ControlFlowGraph()
        cfg.add_node((node := BasicBlock(0, instructions=[Assignment(Variable('b', Integer.int32_t()), Constant(0)), Assignment(Variable('a', Integer.int32_t()), Variable('b', Integer.int32_t())), Return([Variable('a', Integer.char())])])))
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (len(node.instructions) == 3)

    def test_pointers_trivial(self):
        """A write to an aliased variable whose address escapes is not removed."""
        cfg = ControlFlowGraph()
        cfg.add_node((node := BasicBlock(0, instructions=[Assignment(Variable('ptr', Pointer(Integer.int32_t())), UnaryOperation(OperationType.address, [Variable('x', ssa_label=0, is_aliased=True)])), Assignment(Variable('x', ssa_label=1, is_aliased=True), Constant(10)), Return([Variable('ptr', Pointer(Integer.int32_t()))])])))
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (len(node.instructions) == 3)

    def test_pointers_extended(self):
        """Address-taken variables stay alive across branches; unrelated defs die."""
        cfg = ControlFlowGraph()
        cfg.add_nodes_from([(start := BasicBlock(0, instructions=[Assignment(Variable('a', ssa_label=1), Constant(1)), Assignment(Variable('b', ssa_label=7), Constant(2)), Branch(Condition(OperationType.equal, [Variable('x'), Constant(0)]))])), (branch_true := BasicBlock(1, instructions=[Assignment(Variable('p', ssa_label=0), UnaryOperation(OperationType.address, [Variable('a', ssa_label=1)]))])), (branch_false := BasicBlock(2, instructions=[Assignment(Variable('p', ssa_label=1), UnaryOperation(OperationType.address, [Variable('b', ssa_label=7)]))])), (end := BasicBlock(3, instructions=[Phi(Variable('p', ssa_label=2), [Variable('p', ssa_label=0), Variable('p', ssa_label=1)]), Assignment(UnaryOperation(OperationType.dereference, [Variable('p', ssa_label=2)]), Constant(3)), Assignment(ListOperation([]), Call(FunctionSymbol('foo', 66), [Variable('a', ssa_label=1)])), Assignment(ListOperation([]), Call(FunctionSymbol('foo', 66), [Variable('b', ssa_label=7)])), Assignment(Variable('a', ssa_label=3), Constant(4)), Assignment(Variable('b', ssa_label=9), Constant(5)), Assignment(Variable('a', ssa_label=4), Constant(4)), Return([Variable('p', ssa_label=2)])]))])
        cfg.add_edges_from([TrueCase(start, branch_true), FalseCase(start, branch_false), UnconditionalEdge(branch_true, end), UnconditionalEdge(branch_false, end)])
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (end.instructions == [Phi(Variable('p', ssa_label=2), [Variable('p', ssa_label=0), Variable('p', ssa_label=1)]), Assignment(UnaryOperation(OperationType.dereference, [Variable('p', ssa_label=2)]), Constant(3)), Assignment(ListOperation([]), Call(FunctionSymbol('foo', 66), [Variable('a', ssa_label=1)])), Assignment(ListOperation([]), Call(FunctionSymbol('foo', 66), [Variable('b', ssa_label=7)])), Assignment(Variable('b', ssa_label=9), Constant(5)), Assignment(Variable('a', ssa_label=4), Constant(4)), Return([Variable('p', ssa_label=2)])])

    def test_never_remove_relations_basic(self):
        """Relation instructions must never be eliminated."""
        var_18 = [Variable('var_18', Integer(32, True), i, True, None) for i in range(5)]
        var_10_1 = Variable('var_10', Pointer(Integer(32, True), 32), 1, False, None)
        cfg = ControlFlowGraph()
        cfg.add_node((vertex := BasicBlock(0, [Assignment(var_10_1, UnaryOperation(OperationType.address, [var_18[2]], Pointer(Integer(32, True), 32), None, False)), Assignment(UnaryOperation(OperationType.dereference, [var_10_1], Pointer(Integer(32, True), 32), 3, False), Constant(10, Pointer(Integer(32, True), 32))), Relation(var_18[3], var_18[2]), Return(ListOperation([Constant(0, Integer(32, True))]))])))
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (vertex.instructions == [Assignment(var_10_1, UnaryOperation(OperationType.address, [var_18[2]], Pointer(Integer(32, True), 32), None, False)), Assignment(UnaryOperation(OperationType.dereference, [var_10_1], Pointer(Integer(32, True), 32), 3, False), Constant(10, Pointer(Integer(32, True), 32))), Relation(var_18[3], var_18[2]), Return(ListOperation([Constant(0, Integer(32, True))]))])

    def test_never_remove_relations(self):
        """Relations survive DCE even in a larger block; dead defs around them die."""
        # NOTE(review): the two `Constant(, Integer(32, True))` calls below are
        # missing their first argument (likely a printf format-string literal
        # lost in extraction) — this method will not parse until it is restored.
        cfg = ControlFlowGraph()
        eax_1 = Variable('eax', Integer(32, True), 1, False, None)
        eax_1_2 = Variable('eax_1', Integer(32, True), 2, False, None)
        eax_2_3 = Variable('eax_2', Integer(32, True), 3, False, None)
        eax_3_4 = Variable('eax_3', Pointer(Integer(32, True), 32), 4, False, None)
        eax_4_5 = Variable('eax_4', Integer(32, True), 5, False, None)
        var_18 = [Variable('var_18', Integer(32, True), i, True, None) for i in range(5)]
        var_10_1 = Variable('var_10', Pointer(Integer(32, True), 32), 1, False, None)
        var_14_1 = Variable('var_14', Integer(32, True), 1, False, None)
        var_28_1 = Variable('var_28', Integer(32, True), 1, False, None)
        cfg.add_node((vertex := BasicBlock(0, [Assignment(ListOperation([eax_1]), Call(FunctionSymbol('rand', 0), [], Pointer(CustomType('void', 0), 32), 1)), Assignment(var_18[1], var_18[0]), Assignment(eax_1_2, BinaryOperation(OperationType.plus, [eax_1, Constant(1, Integer(32, True))], Integer(32, True))), Assignment(var_18[2], BinaryOperation(OperationType.plus, [eax_1, Constant(1, Integer(32, True))], Integer(32, True))), Assignment(var_10_1, UnaryOperation(OperationType.address, [var_18[2]], Pointer(Integer(32, True), 32), None, False)), Assignment(eax_2_3, var_18[2]), Assignment(var_14_1, eax_2_3), Assignment(eax_3_4, var_10_1), Assignment(UnaryOperation(OperationType.dereference, [var_10_1], Pointer(Integer(32, True), 32), 3, False), Constant(10, Pointer(Integer(32, True), 32))), Relation(var_18[3], var_18[2]), Assignment(var_28_1, eax_2_3), Assignment(ListOperation([]), Call(ImportedFunctionSymbol('printf', 0), [Constant(, Integer(32, True)), eax_2_3], Pointer(CustomType('void', 0), 32), 4)), Assignment(var_18[4], var_18[3]), Assignment(eax_4_5, Constant(0, Integer(32, True))), Return(ListOperation([Constant(0, Integer(32, True))]))])))
        DeadCodeElimination().run(MockDecompilerTask(cfg))
        assert (vertex.instructions == [Assignment(ListOperation([eax_1]), Call(FunctionSymbol('rand', 0), [], Pointer(CustomType('void', 0), 32), 1)), Assignment(var_18[2], BinaryOperation(OperationType.plus, [eax_1, Constant(1, Integer(32, True))], Integer(32, True))), Assignment(var_10_1, UnaryOperation(OperationType.address, [var_18[2]], Pointer(Integer(32, True), 32), None, False)), Assignment(eax_2_3, var_18[2]), Assignment(UnaryOperation(OperationType.dereference, [var_10_1], Pointer(Integer(32, True), 32), 3, False), Constant(10, Pointer(Integer(32, True), 32))), Relation(var_18[3], var_18[2]), Assignment(ListOperation([]), Call(ImportedFunctionSymbol('printf', 0), [Constant(, Integer(32, True)), eax_2_3], Pointer(CustomType('void', 0), 32), 4)), Return(ListOperation([Constant(0, Integer(32, True))]))])
def test_reload_from_project_object(project, newproject):
    """Load/close/reload keeps the project attribute and registry in sync."""

    def check(loaded):
        # Both the attribute and the registry must agree on the loaded state.
        assert hasattr(project, 'NewProject') is loaded
        assert len(project.get_loaded_projects()) == (1 if loaded else 0)

    check(False)
    newproject.load()
    check(True)
    newproject.close()
    check(False)
    newproject.load()
    check(True)
class OptionNavigationAnnotationsoptionsShapeoptions(Options):
    """Highcharts ``navigation.annotationsOptions.shapeOptions`` options.

    Methods come in getter/setter pairs sharing one name; upstream presumably
    used ``@property``/setter decorators that were stripped here — confirm.
    Getter arguments are the documented Highcharts defaults.
    """

    def dashStyle(self):
        return self._config_get(None)

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def fill(self):
        return self._config_get('rgba(0, 0, 0, 0.75)')

    def fill(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def r(self):
        return self._config_get('0')

    def r(self, text: str):
        # NOTE(review): only setter in this class passing js_type=True — looks
        # inconsistent with its siblings; confirm whether 'r' takes a JS expression.
        self._config(text, js_type=True)

    def ry(self):
        return self._config_get(None)

    def ry(self, num: float):
        self._config(num, js_type=False)

    def snap(self):
        return self._config_get(2)

    def snap(self, num: float):
        self._config(num, js_type=False)

    def src(self):
        return self._config_get(None)

    def src(self, text: str):
        self._config(text, js_type=False)

    def stroke(self):
        return self._config_get('rgba(0, 0, 0, 0.75)')

    def stroke(self, text: str):
        self._config(text, js_type=False)

    def strokeWidth(self):
        return self._config_get(1)

    def strokeWidth(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        return self._config_get('rect')

    def type(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)

    def xAxis(self):
        return self._config_get(None)

    def xAxis(self, num: float):
        self._config(num, js_type=False)

    def yAxis(self):
        return self._config_get(None)

    def yAxis(self, num: float):
        self._config(num, js_type=False)
class InputRange(Input):
    """An ``<input type="range">`` component with an optional live value output."""

    name = 'Input Range'
    _option_cls = OptInputs.OptionsInputRange

    def __init__(self, page: primitives.PageModel, text, min_val, max_val, step, placeholder, width, height, html_code, options, attrs, profile):
        """Build the slider input, apply range attributes, and wire the output span."""
        super(InputRange, self).__init__(page, text, placeholder, width, height, html_code, options, attrs, profile)
        self.input = page.ui.inputs.input(text, width=(None, 'px'), placeholder=placeholder).css({'vertical-align': 'middle'})
        self.append_child(self.input)
        self.input.set_attrs(attrs={'type': 'range', 'min': min_val, 'max': max_val, 'step': step})
        if self.options.output:
            # Show the current value in a small absolutely-positioned output element.
            self.style.css.position = 'relative'
            self.output = self.page.ui.inputs._output(text).css({'width': '15px', 'text-align': 'center', 'margin-left': '2px', 'position': 'absolute', 'color': self.page.theme.colors[(- 1)]})
            self.append_child(self.output)
            # Keep the output in sync with the slider as the user drags it.
            self.input.set_attrs(attrs={'oninput': ('%s.value=this.value' % self.output.htmlCode)})
        self.css({'display': 'inline-block', 'vertical-align': 'middle', 'line-height': ('%spx' % Defaults.LINE_HEIGHT)})

    def options(self) -> OptInputs.OptionsInputRange:
        # NOTE(review): reads like a stripped @property (decorator lost) — confirm.
        return super().options

    def style(self) -> GrpClsInput.ClassInputRange:
        # Lazily instantiate the style object on first access.
        if (self._styleObj is None):
            self._styleObj = GrpClsInput.ClassInputRange(self)
        return self._styleObj

    def __str__(self):
        # If an output element exists, make it visible before rendering.
        if hasattr(self, 'output'):
            self.output.css({'display': 'inline-block'})
        return ('<div %(strAttr)s></div>' % {'strAttr': self.get_attrs(css_class_names=self.style.get_classes())})
class OptionSeriesTreegraphLevels(Options):
    """Highcharts ``series.treegraph.levels`` per-level options.

    Scalar options come in getter/setter pairs sharing one name (upstream
    presumably used ``@property``/setter decorators that were stripped here —
    confirm); the remaining accessors return nested option sub-objects.
    """

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def borderDashStyle(self):
        return self._config_get(None)

    def borderDashStyle(self, text: str):
        self._config(text, js_type=False)

    def borderWidth(self):
        return self._config_get(None)

    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    def collapsed(self):
        return self._config_get(None)

    def collapsed(self, flag: bool):
        self._config(flag, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorVariation(self) -> 'OptionSeriesTreegraphLevelsColorvariation':
        # Nested 'colorVariation' option block.
        return self._config_sub_data('colorVariation', OptionSeriesTreegraphLevelsColorvariation)

    def dataLabels(self) -> 'OptionSeriesTreegraphLevelsDatalabels':
        # Nested 'dataLabels' option block.
        return self._config_sub_data('dataLabels', OptionSeriesTreegraphLevelsDatalabels)

    def level(self):
        return self._config_get(None)

    def level(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesTreegraphLevelsMarker':
        # Nested 'marker' option block.
        return self._config_sub_data('marker', OptionSeriesTreegraphLevelsMarker)
def start_server(dbpath, host, port, end_port, cpus_per_worker, num_workers, dbtype, timeout_load_server, qtype=QUERY_TYPE_SEQ, silent=False):
    """Start the DB server, scanning consecutive port pairs until one is ready.

    For each (try_port, try_port+1) pair in ``range(port, end_port, 2)`` the
    master/worker processes are launched via ``load_server`` and polled with
    ``server_functional`` for up to ``timeout_load_server`` rounds. At most
    three port pairs are attempted.

    :return: ``(dbpath, host, port, master_db, workers)`` — on success
        ``dbpath`` is replaced by ``host`` so callers address the live server.
    :raises Exception: if the server could not be brought up.
    """
    master_db = worker_db = workers = None
    MAX_PORTS_TO_TRY = 3
    ports_tried = 0
    ready = False
    for try_port in range(port, end_port, 2):
        if (silent == False):
            print(colorify(('Loading server at localhost, port %s-%s' % (try_port, (try_port + 1))), 'lblue'))
        (dbpath, master_db, workers) = load_server(dbpath, try_port, (try_port + 1), cpus_per_worker, num_workers=num_workers, dbtype=dbtype, silent=silent)
        port = try_port
        if (silent == False):
            print(f'Waiting for server to become ready at {host}:{port} ...')
        for attempt in range(timeout_load_server):
            time.sleep((attempt + 1))  # back off progressively between polls
            if ((not master_db.is_alive()) or (not any([worker_db.is_alive() for worker_db in workers]))):
                # Master died or no worker is alive: clean up and try next port pair.
                master_db.terminate()
                master_db.join()
                for worker_db in workers:
                    worker_db.terminate()
                    worker_db.join()
                break
            elif server_functional(host, port, dbtype, qtype):
                if (silent == False):
                    print(f'Server ready at {host}:{port}')
                ready = True
                break
            elif (silent == False):
                sys.stdout.write('.')
                sys.stdout.flush()
        ports_tried += 1
        if ready:
            dbpath = host
            break
        elif (ports_tried >= MAX_PORTS_TO_TRY):
            # Bug fix: the original raised a plain (non-f) string, so
            # '{ports_tried}' was emitted literally instead of interpolated.
            raise Exception(f'Could not start server after trying {ports_tried} ports.')
    if (ready == False):
        raise Exception('Could not start server.')
    return (dbpath, host, port, master_db, workers)
class OptionPlotoptionsTilemapSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Highcharts tilemap ``sonification.defaultInstrumentOptions.activeWhen`` options.

    Methods come in getter/setter pairs sharing one name; upstream presumably
    used ``@property``/setter decorators that were stripped here — confirm.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.