code stringlengths 281 23.7M |
|---|
class TestWeightedMinHashGenerator(unittest.TestCase):
    """Tests for WeightedMinHashGenerator with dim=2, sample_size=4, seed=1.

    The per-minhash structural checks were identical across every test, so
    they are factored into :meth:`_assert_valid_minhash`.
    """

    def _make_generator(self):
        # Every test uses the same small generator configuration.
        return WeightedMinHashGenerator(2, 4, 1)

    def _assert_valid_minhash(self, m):
        # A valid WeightedMinHash exposes sample_size (4) integer hash values.
        self.assertIsInstance(m, WeightedMinHash)
        self.assertEqual(len(m.hashvalues), 4)
        self.assertEqual(len(m), 4)
        self.assertTrue(m.hashvalues.dtype == int)

    def test_init(self):
        mg = self._make_generator()
        self.assertEqual(len(mg.rs), 4)
        self.assertEqual(len(mg.ln_cs), 4)
        self.assertEqual(len(mg.betas), 4)
        self.assertEqual(mg.seed, 1)
        self.assertEqual(mg.sample_size, 4)

    def test_minhash(self):
        mg = self._make_generator()
        self._assert_valid_minhash(mg.minhash([1, 3]))

    def test_minhash_many_dense_onerow(self):
        mg = self._make_generator()
        m_list = mg.minhash_many(np.array([1, 3]).reshape(1, 2))
        self.assertIsInstance(m_list, list)
        self.assertEqual(len(m_list), 1)
        self._assert_valid_minhash(m_list[0])

    def test_minhash_many_dense_tworows(self):
        mg = self._make_generator()
        m_list = mg.minhash_many(np.array([[1, 3], [1, 3]]))
        self.assertIsInstance(m_list, list)
        self.assertEqual(len(m_list), 2)
        for m in m_list:
            self._assert_valid_minhash(m)

    def test_minhash_many_dense_tworows_with_null(self):
        mg = self._make_generator()
        m_list = mg.minhash_many(np.array([[1, 3], [0, 0]]))
        self.assertIsInstance(m_list, list)
        self.assertEqual(len(m_list), 2)
        self._assert_valid_minhash(m_list[0])
        # An all-zero row carries no weight, so its slot must be None.
        self.assertIs(m_list[1], None)

    def test_minhash_many_sparse_onerow(self):
        mg = self._make_generator()
        X = sp.sparse.csr_matrix(np.array([1, 3]).reshape(1, 2))
        m_list = mg.minhash_many(X)
        self.assertIsInstance(m_list, list)
        self.assertEqual(len(m_list), 1)
        self._assert_valid_minhash(m_list[0])

    def test_minhash_many_sparse_tworows(self):
        mg = self._make_generator()
        X = sp.sparse.csr_matrix(np.array([[1, 3], [1, 3]]))
        m_list = mg.minhash_many(X)
        self.assertIsInstance(m_list, list)
        self.assertEqual(len(m_list), 2)
        for m in m_list:
            self._assert_valid_minhash(m)

    def test_minhash_many_sparse_tworows_with_null(self):
        mg = self._make_generator()
        X = sp.sparse.csr_matrix(np.array([[1, 3], [0, 0]]))
        m_list = mg.minhash_many(X)
        self.assertIsInstance(m_list, list)
        self.assertEqual(len(m_list), 2)
        self._assert_valid_minhash(m_list[0])
        # Same contract as the dense case: an empty row yields None.
        self.assertIs(m_list[1], None)
class table_stats_request(stats_request):
    """OpenFlow table stats request message (version=2, type=18, stats_type=3)."""

    version = 2
    type = 18
    stats_type = 3

    def __init__(self, xid=None, flags=None):
        """
        :param xid: transaction id, or None to leave unset
        :param flags: stats request flags bitmap; defaults to 0
        """
        self.xid = xid
        self.flags = flags if flags is not None else 0

    def pack(self):
        """Serialize this message to wire format and return it as bytes.

        The 16-bit length field at offset 2 is written as a placeholder and
        back-patched once the total message size is known.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # BUG FIX: was the str literal '\x00' * 4, which cannot be joined
        # with the bytes produced by struct.pack on Python 3.
        packed.append(b'\x00' * 4)  # pad
        length = sum(len(x) for x in packed)
        packed[2] = struct.pack('!H', length)
        return b''.join(packed)

    # BUG FIX: unpack takes a reader, not self; it must be a staticmethod to
    # be callable as table_stats_request.unpack(reader).
    @staticmethod
    def unpack(reader):
        """Deserialize a table_stats_request from *reader* and return it.

        :raises AssertionError: if version/type/stats_type do not match
        """
        obj = table_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 2)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message (4 header bytes consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 3)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        return obj

    def __eq__(self, other):
        # Messages are equal when type and all mutable fields match.
        if type(self) != type(other):
            return False
        return self.xid == other.xid and self.flags == other.flags

    def pretty_print(self, q):
        """Write a readable rendering of this message to pretty-printer *q*."""
        q.text('table_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid != None:
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
            q.breakable()
        q.text('}')
def clients(api_settings: Dict) -> Dict:
    """Build one configured boto3 client per supported AWS feature.

    :param api_settings: mapping with AWS credentials and the region keys
        'region_name', 'ressource_region' and 'video-region'
    :return: mapping of feature name -> boto3 client
    """
    def make_client(service: str, region_key: str = 'region_name'):
        # All clients share the same credentials; only the boto3 service
        # name and the settings key holding the region differ.
        return boto3.client(
            service,
            region_name=api_settings[region_key],
            aws_access_key_id=api_settings['aws_access_key_id'],
            aws_secret_access_key=api_settings['aws_secret_access_key'],
        )

    return {
        'speech': make_client('transcribe'),
        # NOTE(review): 'ressource_region' and 'video-region' are distinct,
        # oddly spelled settings keys — kept as-is; confirm they are intended.
        'texttospeech': make_client('polly', 'ressource_region'),
        'image': make_client('rekognition'),
        'textract': make_client('textract'),
        'text': make_client('comprehend'),
        'translate': make_client('translate'),
        'video': make_client('rekognition', 'video-region'),
        'text_classification': make_client('sts'),
        's3': make_client('s3'),
    }
class OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Generated option wrapper for
    ``plotOptions.columnrange.sonification.defaultInstrumentOptions.mapping.lowpass``.
    """

    def frequency(self) -> 'OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        # Sub-configuration object for the lowpass frequency mapping.
        return self._config_sub_data('frequency', OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        # Sub-configuration object for the lowpass resonance mapping.
        return self._config_sub_data('resonance', OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class CardGamesTest(unittest.TestCase):
    """Exercism 'card games' exercise tests.

    BUG FIX: the original chunk contained bare ``.task(taskno=N)`` lines —
    syntax errors, almost certainly ``@pytest.mark.task(taskno=N)``
    decorators mangled during extraction. They are preserved as comments so
    the module parses; restore the decorators when pytest marks are needed.
    """

    # @pytest.mark.task(taskno=1)
    def test_get_rounds(self):
        input_data = [0, 1, 10, 27, 99, 666]
        result_data = [[0, 1, 2], [1, 2, 3], [10, 11, 12], [27, 28, 29], [99, 100, 101], [666, 667, 668]]
        for variant, (number, expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', number=number, expected=expected):
                actual_result = get_rounds(number)
                error_message = f'Called get_rounds({number}). The function returned {actual_result}, but the tests expected rounds {expected} given the current round {number}.'
                self.assertEqual(actual_result, expected, msg=error_message)

    # @pytest.mark.task(taskno=2)
    def test_concatenate_rounds(self):
        input_data = [([], []), ([0, 1], []), ([], [1, 2]), ([1], [2]), ([27, 28, 29], [35, 36]), ([1, 2, 3], [4, 5, 6])]
        result_data = [[], [0, 1], [1, 2], [1, 2], [27, 28, 29, 35, 36], [1, 2, 3, 4, 5, 6]]
        for variant, ((rounds_1, rounds_2), expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', rounds_1=rounds_1, rounds_2=rounds_2, expected=expected):
                actual_result = concatenate_rounds(rounds_1, rounds_2)
                error_message = f'Called concatenate_rounds({rounds_1}, {rounds_2}). The function returned {actual_result}, but the tests expected {expected} as the concatenation of {rounds_1} and {rounds_2}.'
                self.assertEqual(actual_result, expected, msg=error_message)

    # @pytest.mark.task(taskno=3)
    def test_list_contains_round(self):
        input_data = [([], 1), ([1, 2, 3], 0), ([27, 28, 29, 35, 36], 30), ([1], 1), ([1, 2, 3], 1), ([27, 28, 29, 35, 36], 29)]
        result_data = [False, False, False, True, True, True]
        for variant, ((rounds, round_number), expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', rounds=rounds, round_number=round_number, expected=expected):
                actual_result = list_contains_round(rounds, round_number)
                error_message = f"Called list_contains_round({rounds}, {round_number}). The function returned {actual_result}, but round {round_number} {('is' if expected else 'is not')} in {rounds}."
                self.assertEqual(actual_result, expected, msg=error_message)

    # @pytest.mark.task(taskno=4)
    def test_card_average(self):
        input_data = [[1], [5, 6, 7], [1, 2, 3, 4], [1, 10, 100]]
        result_data = [1.0, 6.0, 2.5, 37.0]
        for variant, (hand, expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', hand=hand, expected=expected):
                actual_result = card_average(hand)
                error_message = f'Called card_average({hand}). The function returned {actual_result}, but the tests expected {expected} as the average of {hand}.'
                self.assertEqual(actual_result, expected, msg=error_message)

    # @pytest.mark.task(taskno=5)
    def test_approx_average_is_average(self):
        input_data = [[0, 1, 5], [3, 6, 9, 12, 150], [1, 2, 3, 5, 9], [2, 3, 4, 7, 8], [1, 2, 3], [2, 3, 4], [2, 3, 4, 8, 8], [1, 2, 4, 5, 8]]
        result_data = [False, False, False, False, True, True, True, True]
        for variant, (hand, expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', hand=hand, expected=expected):
                actual_result = approx_average_is_average(hand)
                error_message = f"Called approx_average_is_average({hand}). The function returned {actual_result}, but the hand {hand} {('does' if expected else 'does not')} yield the same approximate average."
                self.assertEqual(actual_result, expected, msg=error_message)

    # @pytest.mark.task(taskno=6)
    def test_average_even_is_average_odd(self):
        input_data = [[5, 6, 8], [1, 2, 3, 4], [1, 2, 3], [5, 6, 7], [1, 3, 5, 7, 9]]
        result_data = [False, False, True, True, True]
        for variant, (input_hand, expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', input_hand=input_hand, expected=expected):
                actual_result = average_even_is_average_odd(input_hand)
                error_message = f"Called average_even_is_average_odd({input_hand}). The function returned {actual_result}, but the hand {('does' if expected else 'does not')} yield the same odd-even average."
                self.assertEqual(actual_result, expected, msg=error_message)

    # @pytest.mark.task(taskno=7)
    def test_maybe_double_last(self):
        input_data = [(1, 2, 11), (5, 9, 11), (5, 9, 10), (1, 2, 3), (1, 11, 8)]
        result_data = [[1, 2, 22], [5, 9, 22], [5, 9, 10], [1, 2, 3], [1, 11, 8]]
        for variant, (hand, expected) in enumerate(zip(input_data, result_data), start=1):
            with self.subTest(f'variation #{variant}', hand=list(hand), expected=expected):
                actual_result = maybe_double_last(list(hand))
                error_message = f'Called maybe_double_last({list(hand)}). The function returned {actual_result}, but the tests expected {expected} as the maybe-doubled version of {list(hand)}.'
                self.assertEqual(actual_result, expected, msg=error_message)
class TCPSocketChannel(IPCChannel):
    """IPC channel implemented over a localhost TCP connection.

    This side plays the server role: a free port is reserved at construction
    time and the peer dials in during :meth:`connect`.

    NOTE(review): ``in_path``/``out_path`` read like properties upstream;
    decorators may have been stripped in extraction — confirm.
    """

    def __init__(self, logger: logging.Logger=_default_logger, loop: Optional[AbstractEventLoop]=None) -> None:
        """
        :param logger: logger used for debug output
        :param loop: event loop; resolved via asyncio.get_event_loop() in
            connect() when None
        """
        self.logger = logger
        self._loop = loop
        self._server = None
        self._connected = None
        self._sock = None
        # Probe for a free ephemeral port, remember it, release the socket.
        # NOTE(review): small race window — another process could take the
        # port before connect() re-binds it.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(('127.0.0.1', 0))
        s.listen(1)
        self._port = s.getsockname()[1]
        s.close()

    async def connect(self, timeout: float=PIPE_CONN_TIMEOUT) -> bool:
        """Start the server and wait up to *timeout* seconds for the peer.

        :return: True once the peer connected, False on timeout
        :raises ValueError: if the started server exposes no sockets
        """
        if (self._loop is None):
            self._loop = asyncio.get_event_loop()
        self._connected = asyncio.Event()
        self._server = (await asyncio.start_server(self._handle_connection, host='127.0.0.1', port=self._port))
        if (self._server.sockets is None):
            raise ValueError('Server sockets is None!')
        # Re-read the actual bound port (authoritative rendezvous point).
        self._port = self._server.sockets[0].getsockname()[1]
        self.logger.debug('socket pipe rdv point: {}'.format(self._port))
        try:
            (await asyncio.wait_for(self._connected.wait(), timeout))
        except asyncio.TimeoutError:
            return False
        # Exactly one peer is expected; stop accepting further connections.
        self._server.close()
        (await self._server.wait_closed())
        return True

    async def _handle_connection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
        """Server callback: wrap the peer's streams and signal connect()."""
        if (self._connected is None):
            raise ValueError('Connected is None!')
        self._connected.set()
        self._sock = TCPSocketProtocol(reader, writer, logger=self.logger, loop=self._loop)

    async def write(self, data: bytes) -> None:
        """Send *data* to the peer.

        :raises ValueError: if no peer is connected yet
        """
        if (self._sock is None):
            raise ValueError('Socket pipe not connected.')
        (await self._sock.write(data))

    async def read(self) -> Optional[bytes]:
        """Receive the next message from the peer.

        :raises ValueError: if no peer is connected yet
        """
        if (self._sock is None):
            raise ValueError('Socket pipe not connected.')
        return (await self._sock.read())

    async def close(self) -> None:
        """Close the underlying protocol.

        :raises ValueError: if no peer is connected yet
        """
        if (self._sock is None):
            raise ValueError('Socket pipe not connected.')
        (await self._sock.close())

    def in_path(self) -> str:
        # Rendezvous point (the TCP port) used for the inbound direction.
        return str(self._port)

    def out_path(self) -> str:
        # Rendezvous point (the TCP port) used for the outbound direction.
        return str(self._port)
class _BoundingBoxDistanceGraphicMatcherInstance(NamedTuple):
    """Immutable state for matching semantic graphics to candidate content
    by bounding-box distance.

    NOTE(review): ``create`` takes neither ``self`` nor ``cls`` — upstream it
    is most likely a ``@staticmethod``; the decorator appears stripped in
    extraction.
    """

    # Bounding boxes of the graphics still to be matched.
    graphic_bounding_box_ref_list: Sequence[BoundingBoxRef]
    # Bounding boxes of the candidate semantic content.
    candidate_bounding_box_ref_list: Sequence[BoundingBoxRef]
    # Maximum accepted euclidean distance for a graphic/candidate pair.
    max_distance: float

    def create(semantic_graphic_list: Sequence[SemanticGraphic], candidate_semantic_content_list: Sequence[SemanticContentWrapper], ignored_graphic_types: Set[str], max_distance: float) -> '_BoundingBoxDistanceGraphicMatcherInstance':
        """Build a matcher instance from graphics and candidates.

        Graphics without a layout graphic, without coordinates, or of an
        ignored graphic type are filtered out.
        """
        graphic_bounding_box_ref_list = [BoundingBoxRef(id(semantic_graphic), bounding_box_list=get_normalized_bounding_box_list_for_layout_graphic(semantic_graphic.layout_graphic), semantic_content=semantic_graphic) for semantic_graphic in semantic_graphic_list if (semantic_graphic.layout_graphic and semantic_graphic.layout_graphic.coordinates and (semantic_graphic.layout_graphic.graphic_type not in ignored_graphic_types))]
        LOGGER.debug('graphic_bounding_box_ref_list: %r', graphic_bounding_box_ref_list)
        candidate_bounding_box_ref_list = [BoundingBoxRef(id(candidate_semantic_content), bounding_box_list=get_normalized_bounding_box_list_for_layout_block(candidate_semantic_content.merged_block), semantic_content=candidate_semantic_content) for candidate_semantic_content in candidate_semantic_content_list]
        LOGGER.debug('candidate_bounding_box_ref_list: %r', candidate_bounding_box_ref_list)
        return _BoundingBoxDistanceGraphicMatcherInstance(graphic_bounding_box_ref_list=graphic_bounding_box_ref_list, candidate_bounding_box_ref_list=candidate_bounding_box_ref_list, max_distance=max_distance)

    def is_accept_distance(self, distance_between: BoundingBoxDistanceBetween) -> bool:
        """True when the pair is close enough to count as a match."""
        return (distance_between.bounding_box_distance.euclidean_distance < self.max_distance)

    def with_graphic_bounding_box_ref_list(self, graphic_bounding_box_ref_list: Sequence[BoundingBoxRef]) -> '_BoundingBoxDistanceGraphicMatcherInstance':
        """Return a copy of this instance with a different graphic list."""
        return self._replace(graphic_bounding_box_ref_list=graphic_bounding_box_ref_list)

    def with_candidate_bounding_box_ref_list(self, candidate_bounding_box_ref_list: Sequence[BoundingBoxRef]) -> '_BoundingBoxDistanceGraphicMatcherInstance':
        """Return a copy of this instance with a different candidate list."""
        return self._replace(candidate_bounding_box_ref_list=candidate_bounding_box_ref_list)

    def get_sorted_distances_between_for_graphic_bounding_box_ref(self, graphic_bounding_box_ref: BoundingBoxRef) -> Sequence[BoundingBoxDistanceBetween]:
        """Distances from one graphic to every candidate, best first."""
        return sorted([BoundingBoxDistanceBetween(bounding_box_distance=graphic_bounding_box_ref.get_distance_to(candidate_bounding_box_ref), bounding_box_ref_1=graphic_bounding_box_ref, bounding_box_ref_2=candidate_bounding_box_ref) for candidate_bounding_box_ref in self.candidate_bounding_box_ref_list], key=BoundingBoxDistanceBetween.get_sort_key)

    def get_best_distance_between_batch(self) -> Sequence[BoundingBoxDistanceBetween]:
        """Select at most one accepted match per candidate.

        Each graphic proposes its closest candidate; a candidate keeps only
        the best proposal it receives.
        """
        best_distance_between_by_candidate_key: Dict[(int, BoundingBoxDistanceBetween)] = {}
        for graphic_bounding_box_ref in self.graphic_bounding_box_ref_list:
            sorted_distances_between = self.get_sorted_distances_between_for_graphic_bounding_box_ref(graphic_bounding_box_ref)
            if (not sorted_distances_between):
                # No candidates left to match against.
                continue
            best_distance_between = sorted_distances_between[0]
            if (not self.is_accept_distance(best_distance_between)):
                LOGGER.debug('not accepting distance: %r', best_distance_between)
                continue
            LOGGER.debug('sorted_distances_between: %r', sorted_distances_between)
            candidate_key = best_distance_between.bounding_box_ref_2.key
            previous_best_distance_between = best_distance_between_by_candidate_key.get(candidate_key)
            if (not best_distance_between.is_better_than(previous_best_distance_between)):
                LOGGER.debug('found better previous best distance between: %r > %r', previous_best_distance_between, best_distance_between)
                continue
            if previous_best_distance_between:
                LOGGER.debug('found better best distance between: %r < %r', previous_best_distance_between, best_distance_between)
            LOGGER.debug('accept candidate: %r -> %r', candidate_key, best_distance_between)
            best_distance_between_by_candidate_key[candidate_key] = best_distance_between
        return list(best_distance_between_by_candidate_key.values())

    def iter_remaining_candidate_bounding_box_refs(self, best_distance_between_batch: Iterable[BoundingBoxDistanceBetween]) -> Iterable[BoundingBoxRef]:
        """Yield matched candidates extended with their graphic's boxes so
        later rounds can match further graphics near the same area."""
        for best_distance_between in best_distance_between_batch:
            (yield best_distance_between.bounding_box_ref_2.with_extended_bounding_box_list(best_distance_between.bounding_box_ref_1.bounding_box_list))

    def iter_graphic_matches(self) -> Iterable[GraphicMatch]:
        """Repeatedly match graphics to candidates until no progress is made,
        returning the sorted accumulated matches."""
        graphic_matches: List[GraphicMatch] = []
        matcher = self
        while matcher.graphic_bounding_box_ref_list:
            best_distance_between_batch = matcher.get_best_distance_between_batch()
            graphic_matches.extend([get_graphic_match_for_distance_between(distance_between) for distance_between in best_distance_between_batch])
            matched_graphic_keys = {distance_between.bounding_box_ref_1.key for distance_between in best_distance_between_batch}
            LOGGER.debug('matched_graphic_keys: %r', matched_graphic_keys)
            remaining_graphic_bounding_box_ref_list = [graphic_bounding_box_ref for graphic_bounding_box_ref in matcher.graphic_bounding_box_ref_list if (graphic_bounding_box_ref.key not in matched_graphic_keys)]
            LOGGER.debug('remaining_graphic_bounding_box_ref_list: %r', remaining_graphic_bounding_box_ref_list)
            if (len(remaining_graphic_bounding_box_ref_list) == len(matcher.graphic_bounding_box_ref_list)):
                # Nothing got matched this round: stop to avoid looping forever.
                break
            remaining_candidate_bounding_box_ref_list = list(matcher.iter_remaining_candidate_bounding_box_refs(best_distance_between_batch))
            LOGGER.debug('remaining_candidate_bounding_box_ref_list: %r', remaining_candidate_bounding_box_ref_list)
            matcher = matcher.with_candidate_bounding_box_ref_list(remaining_candidate_bounding_box_ref_list).with_graphic_bounding_box_ref_list(remaining_graphic_bounding_box_ref_list)
        return get_sorted_graphic_matches(graphic_matches)
def extractWwwNekoseireiCom(item):
    """Parser for www.nekoseirei.com release titles.

    Returns a release message for known series, None for previews or items
    without a volume/chapter, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    # (site tag, canonical series name, translation type)
    tagmap = [
        ('Godly Alchemist', 'Godly Alchemist', 'translated'),
        ('Kein no Zenkou', 'Kein no Zenkou', 'translated'),
        ('God of Music', 'God of Music', 'translated'),
        ('king of skill', 'King of Skill', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def save_user_config(filename: Optional[Union[(PathLike, str)]]=None, user_config: Optional[Config]=None, compact: bool=False, indent: int=4, encoding: Optional[str]=None):
    """Persist *user_config* to *filename*, resolving module defaults.

    Any of *filename*, *user_config* and *encoding* left as None is replaced
    by its module-level default before delegating to save_config().
    """
    target = default_user_config_filepath if filename is None else filename
    config = get_user_config() if user_config is None else user_config
    enc = default_encoding if encoding is None else encoding
    save_config(target, config, compact, indent, enc)
class OptionSeriesSankeyDataEvents(Options):
    """Generated option wrapper for ``series.sankey.data.events``.

    Each event has a getter/setter pair of the same name: the first reads the
    configured handler (default None), the second stores a new one.
    NOTE(review): upstream generated code defines these as @property /
    @<name>.setter pairs; the decorators appear stripped in this extraction,
    so as written the second def of each name shadows the first — confirm
    against the generator before relying on this class.
    """

    def click(self):
        # Configured 'click' handler; default None.
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def drag(self):
        # Configured 'drag' handler; default None.
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        # Configured 'dragStart' handler; default None.
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        # Configured 'drop' handler; default None.
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        # Configured 'mouseOut' handler; default None.
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        # Configured 'mouseOver' handler; default None.
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        # Configured 'remove' handler; default None.
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        # Configured 'select' handler; default None.
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        # Configured 'unselect' handler; default None.
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        # Configured 'update' handler; default None.
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsNetworkgraphZones(Options):
    """Generated option wrapper for ``plotOptions.networkgraph.zones``.

    NOTE(review): getter/setter pairs share a name; upstream they are
    @property / setter pairs whose decorators appear stripped in extraction.
    """

    def className(self):
        # Configured zone class name; default None.
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Configured zone color; default None.
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def dashStyle(self):
        # Configured zone dash style; default None.
        return self._config_get(None)

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def fillColor(self):
        # Configured zone fill color; default None.
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)
# BUG FIX: the original line here was the bare "(scope='function')" — a
# syntax error, almost certainly "@pytest.fixture(scope='function')" mangled
# during extraction. Preserved as a comment; restore the decorator when
# merging back into the pytest suite:
# @pytest.fixture(scope='function')
def postgres_integration_db(postgres_integration_session):
    """Seed the integration Postgres session with sample data, yield it,
    and drop the database afterwards (fixture-style teardown)."""
    postgres_integration_session = seed_postgres_data(postgres_integration_session, './src/fides/data/sample_project/postgres_sample.sql')
    yield postgres_integration_session
    drop_database(postgres_integration_session.bind.url)
# NOTE(review): bare name expression — likely the remnant of a decorator
# (e.g. a jax pytree-node registration) whose '@' was lost in extraction;
# confirm against the upstream source before relying on pytree registration.
_pytree_node_class
class JaxStructureStaticMedium(AbstractJaxStructure, JaxObject):
    """Jax-compatible structure pairing a differentiable geometry with a
    regular, non-differentiable tidy3d medium."""

    # Differentiable, jax-traced geometry (jax_field=True).
    geometry: JaxGeometryType = pd.Field(..., title='Geometry', description='Geometry of the structure, which is jax-compatible.', jax_field=True, discriminator=TYPE_TAG_STR)
    # Static medium, excluded from differentiation (jax_field=False).
    medium: MediumType = pd.Field(..., title='Medium', description='Regular ``tidy3d`` medium of the structure, non differentiable. Supports dispersive materials.', jax_field=False, discriminator=TYPE_TAG_STR)
    # Only 'geometry' participates in gradient computation.
    _differentiable_fields = ('geometry',)
class OptionSeriesTimelineSonificationContexttracksMappingTremoloSpeed(Options):
    """Generated option wrapper for
    ``series.timeline.sonification.contextTracks.mapping.tremolo.speed``.

    NOTE(review): getter/setter pairs share a name; upstream they are
    @property / setter pairs whose decorators appear stripped in extraction.
    """

    def mapFunction(self):
        # Configured mapping function; default None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Configured point property to map to; default None.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Configured upper bound of the mapped range; default None.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Configured lower bound of the mapped range; default None.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Configured mapping scope; default None.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def show_task_info(task):
    """Log a human-readable status summary of *task* and its jobs.

    Uses numeric log levels (20/24/26/28) plus the module's logindent
    helpers to produce an indented report.
    """
    log.log(26, '')
    set_logindent(1)
    log.log(28, ('(%s) %s' % (color_status(task.status), task)))
    logindent(2)
    # Summarize job counts per status code, e.g. "3(D), 1(R)".
    st_info = ', '.join([('%d(%s)' % (v, k)) for (k, v) in task.job_status.items()])
    log.log(26, ('%d jobs: %s' % (len(task.jobs), st_info)))
    tdir = task.taskid
    tdir = tdir.lstrip('/')
    log.log(20, ('TaskDir: %s' % tdir))
    if (task.status == 'L'):
        # 'L' = lost: jobs look as running but cannot be tracked.
        logindent((- 2))
        log.warning(('Some jobs within the task [%s] are marked as (L)ost, meaning that although they look as running, its execution could not be tracked. NPR will continue execution with other pending tasks.' % task))
        logindent(2)
    logindent(2)
    for j in task.jobs:
        # Done ('D') jobs are logged at a lower level than pending ones.
        if (j.status == 'D'):
            log.log(20, '(%s): %s', j.status, j)
        else:
            log.log(24, '(%s): %s', j.status, j)
    logindent((- 2))
class PrivateKey(BaseKey, LazyBackend):
    """An ECC private key.

    Wraps the raw private key bytes and exposes signing helpers via the
    configured ECC backend. The matching public key is derived eagerly.
    """

    public_key = None  # derived in __init__ via the backend

    def __init__(self, private_key_bytes: bytes, backend: 'Union[BaseECCBackend, Type[BaseECCBackend], str, None]'=None) -> None:
        """
        :param private_key_bytes: the raw private key
        :param backend: backend instance/class/name, or None for the default
        :raises ValidationError: if the key bytes are invalid
        """
        validate_private_key_bytes(private_key_bytes)
        self._raw_key = private_key_bytes
        # BUG FIX: initialize the backend (super().__init__) BEFORE using
        # self.backend to derive the public key; the original derived the
        # public key first, so an explicitly passed backend was not yet set.
        super().__init__(backend=backend)
        self.public_key = self.backend.private_key_to_public_key(self)

    def sign_msg(self, message: bytes) -> 'Signature':
        """Hash *message* with keccak-256 and return a recoverable signature."""
        message_hash = keccak(message)
        return self.sign_msg_hash(message_hash)

    def sign_msg_hash(self, message_hash: bytes) -> 'Signature':
        """Sign a 32-byte message hash, returning a recoverable signature."""
        return self.backend.ecdsa_sign(message_hash, self)

    def sign_msg_non_recoverable(self, message: bytes) -> 'NonRecoverableSignature':
        """Hash *message* with keccak-256 and return a non-recoverable signature."""
        message_hash = keccak(message)
        return self.sign_msg_hash_non_recoverable(message_hash)

    def sign_msg_hash_non_recoverable(self, message_hash: bytes) -> 'NonRecoverableSignature':
        """Sign a 32-byte message hash, returning a non-recoverable signature."""
        return self.backend.ecdsa_sign_non_recoverable(message_hash, self)
def parse_color(tokens: dict[(str, Any)], space: Space) -> (tuple[(Vector, float)] | None):
    """Convert parsed color-function tokens into channel values for *space*.

    Returns ``(channels, alpha)``, or None when the number of provided
    values does not match the space's channel count. When a slash component
    is present, the last value is the alpha channel.
    """
    channel_props = space.CHANNELS
    expected = len(channel_props)
    raw_values = tokens['func']['values']
    has_alpha = tokens['func']['slash']
    provided = len(raw_values) - (1 if has_alpha else 0)
    if provided != expected:
        return None
    alpha = norm_alpha_channel(raw_values[-1]['value']) if has_alpha else 1.0
    channels = [
        norm_color_channel(raw_values[idx]['value'].lower(), prop.span, prop.offset)
        for idx, prop in enumerate(channel_props)
    ]
    return (channels, alpha)
class BasicBlockEdge(GraphEdgeInterface, ABC):
    """Base class for directed edges between basic blocks in a CFG.

    NOTE(review): upstream, ``source``, ``sink`` and ``condition_type`` look
    like ``@property`` / abstract members; the decorators appear stripped in
    extraction, leaving them as plain methods here — confirm before use.
    """

    def __init__(self, source: BasicBlock, sink: BasicBlock):
        """Create an edge from *source* to *sink*."""
        self._source = source
        self._sink = sink

    def source(self) -> BasicBlock:
        # Origin block of the edge.
        return self._source

    def sink(self) -> BasicBlock:
        # Destination block of the edge.
        return self._sink

    def __eq__(self, other):
        # Edges are equal when of the same type with identical state.
        return (isinstance(other, type(self)) and (self.__dict__ == other.__dict__))

    def __hash__(self) -> int:
        # NOTE(review): as written this hashes bound-method objects unless
        # source/sink/condition_type are properties (see class note).
        return hash((self.source, self.sink, self.condition_type))

    def __iter__(self) -> Iterator[Expression]:
        # A plain edge carries no expressions.
        (yield from [])

    def substitute(self, replacee: Expression, repleacement: Expression):
        # Nothing to substitute on a plain edge.
        pass

    def condition_type(self) -> BasicBlockEdgeCondition:
        # Overridden by subclasses; no condition on the base edge.
        pass

    def copy(self, source: Optional[BasicBlock]=None, sink: Optional[BasicBlock]=None) -> BasicBlockEdge:
        """Return a new edge of the same type, optionally overriding endpoints."""
        return self.__class__((source if (source is not None) else self._source), (sink if (sink is not None) else self._sink))
def render_geom(geom_type, geom_size, color=None, T=constants.EYE_T):
    """Render a pybullet geometry via gl_render.

    :param geom_type: one of pybullet.GEOM_{SPHERE,BOX,CAPSULE,CYLINDER}
    :param geom_size: per-type size sequence (e.g. radius, height)
    :param color: RGBA list; defaults to opaque grey [0.5, 0.5, 0.5, 1.0]
    :param T: transform used for placement
    :raises NotImplementedError: for unsupported geometry types
    """
    # BUG FIX: the default was a mutable list literal in the signature
    # (shared across calls); use the None-sentinel idiom instead.
    if color is None:
        color = [0.5, 0.5, 0.5, 1.0]
    if geom_type == pybullet.GEOM_SPHERE:
        gl_render.render_sphere(T, geom_size[0], color=color, slice1=16, slice2=16)
    elif geom_type == pybullet.GEOM_BOX:
        gl_render.render_cube(T, size=[geom_size[0], geom_size[1], geom_size[2]], color=color)
    elif geom_type == pybullet.GEOM_CAPSULE:
        gl_render.render_capsule(T, geom_size[0], geom_size[1], color=color, slice=16)
    elif geom_type == pybullet.GEOM_CYLINDER:
        gl_render.render_cylinder(T, geom_size[0], geom_size[1], color=color, slice=16)
    else:
        raise NotImplementedError()
def fixpoint_graph_fixer(fixer: 'GraphFixer') -> 'GraphFixer':
    """Return a fixer that applies *fixer* repeatedly until a fixpoint.

    Iteration stops when a pass makes no further progress or reports
    errors.

    BUG FIX: the original returned only the FINAL iteration's
    made_progress flag, which at a quiet fixpoint is always False even if
    earlier passes changed the graph; the returned flag now reports
    whether ANY pass made progress. (Annotations are quoted so the fixer
    type names need not be importable at definition time.)
    """
    def fixpoint(bmg: 'BMGraphBuilder') -> 'GraphFixerResult':
        current = bmg
        ever_progressed = False
        while True:
            (current, made_progress, errors) = fixer(current)
            ever_progressed = ever_progressed or made_progress
            # Stop once a pass changes nothing more, or errors were found.
            if (not made_progress) or errors.any():
                return (current, ever_progressed, errors)
    return fixpoint
class OptionSeriesLollipopStatesSelect(Options):
    """Generated option wrapper for ``series.lollipop.states.select``.

    NOTE(review): getter/setter pairs share a name; upstream they are
    @property / setter pairs whose decorators appear stripped in extraction.
    """

    def animation(self) -> 'OptionSeriesLollipopStatesSelectAnimation':
        # Sub-configuration for the select-state animation.
        return self._config_sub_data('animation', OptionSeriesLollipopStatesSelectAnimation)

    def enabled(self):
        # Whether the select state is enabled; default True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionSeriesLollipopStatesSelectHalo':
        # Sub-configuration for the halo around selected points.
        return self._config_sub_data('halo', OptionSeriesLollipopStatesSelectHalo)

    def lineWidth(self):
        # Absolute line width in the select state; default None.
        return self._config_get(None)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def lineWidthPlus(self):
        # Line-width increment applied in the select state; default 1.
        return self._config_get(1)

    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesLollipopStatesSelectMarker':
        # Sub-configuration for point markers in the select state.
        return self._config_sub_data('marker', OptionSeriesLollipopStatesSelectMarker)
class CarModelTestSerializer(ExpandableFieldsMixin, serializers.ModelSerializer):
    """Test serializer for CarModel supporting expansion of the
    manufacturer and the related SKUs."""

    class Meta:
        model = models.CarModel
        fields = ('id', 'name')
        # 'skus' is resolved lazily by dotted path to avoid an import cycle.
        expandable_fields = {
            'manufacturer': ManufacturerTestSerializer,
            'skus': {
                'serializer': '{0}.SkuTestSerializer'.format(MODULE),
                'many': True,
            },
        }
def _re_wrap_quantifier(q, xp, lazy=False):
    """Wrap the XPath fragment *xp* according to the regex quantifier *q*.

    Lazy quantifiers are only supported for '+' and '*'. Unknown
    quantifiers raise ValueError listing the supported options.
    """
    if q is None:
        return xp
    if lazy and q not in (_ONE_PLUS, _ZERO_PLUS):
        raise ValueError
    if q not in _WRAP_Q_LOOKUP:
        keys = ', '.join(_WRAP_Q_LOOKUP.keys())
        raise ValueError(Errors.E011.format(op=q, opts=keys))
    wrapped = _WRAP_Q_LOOKUP[q].format(xp=xp)
    if lazy:
        # Splice a '?' in front of the final character to mark laziness.
        return ''.join([wrapped[:-1], '?', wrapped[-1:]])
    return wrapped
class MappedParameter(parser.ParameterWithValue):
    """A parameter whose command-line value is looked up in a mapping table.

    Each entry of ``values`` is a ``(target, names, description)`` triple:
    any of ``names`` maps to ``target``. Passing the special ``list_name``
    value shows the table of possible values instead of running the command.
    """

    def __init__(self, list_name, values, case_sensitive, **kwargs):
        """
        :param list_name: magic value that triggers listing the options
        :param values: sequence of (target, names, description) triples
        :param case_sensitive: True/False, or None to auto-detect
        """
        super(MappedParameter, self).__init__(**kwargs)
        self.list_name = list_name
        self.case_sensitive = case_sensitive
        self.values = values

    def _uncase_values(self, values):
        # Yield (lowercased name, target) pairs, raising on names that
        # collide once case is folded.
        used = set()
        for (target, names, _) in values:
            for name in names:
                name_ = name.lower()
                if (name_ in used):
                    raise ValueError('Duplicate allowed values for parameter {}: {}'.format(self, name_))
                used.add(name_)
                (yield (name_, target))

    def _ensure_no_duplicate_names(self, values):
        # Pass entries through unchanged, raising on duplicate names
        # (case-sensitive counterpart of _uncase_values).
        used = set()
        for value in values:
            (_, names, _) = value
            for name in names:
                if (name in used):
                    raise ValueError('Duplicate allowed values for parameter {}: {}'.format(self, name))
                used.add(name)
            (yield value)

    # NOTE(review): bare name expression — likely the remnant of a caching
    # decorator (e.g. a property-once helper) whose '@' was lost in
    # extraction; coerce_value below accesses values_table without calling
    # it, which is consistent with such a decorator. Confirm upstream.
    _once
    def values_table(self):
        """Build the name -> target lookup table.

        When ``case_sensitive`` is None, a case-insensitive table is tried
        first; duplicate lowercased names switch the parameter to
        case-sensitive mode and fall through to the sensitive table.
        """
        if (not self.case_sensitive):
            try:
                new_values = dict(self._uncase_values(self.values))
            except ValueError:
                if (self.case_sensitive is not None):
                    raise
                self.case_sensitive = True
            else:
                self.case_sensitive = False
                return new_values
        return dict(((name, target) for (target, names, _) in self._ensure_no_duplicate_names(self.values) for name in names))

    def coerce_value(self, value, ba):
        """Map a command-line *value* to its target.

        :raises _ShowList: when the value equals ``list_name``
        :raises errors.BadArgumentFormat: for unknown values
        """
        table = self.values_table
        key = (value if self.case_sensitive else value.lower())
        if (key == self.list_name):
            raise _ShowList
        try:
            return table[key]
        except KeyError:
            raise errors.BadArgumentFormat(value)

    def read_argument(self, ba, i):
        """Read the argument normally; on _ShowList, rewire the bound call
        so that show_list runs instead of the target function."""
        try:
            super(MappedParameter, self).read_argument(ba, i)
        except _ShowList:
            ba.args[:] = [ba.name]
            ba.kwargs.clear()
            ba.func = self.show_list
            ba.sticky = parser.IgnoreAllArguments()
            ba.posarg_only = True

    def show_list(self, name):
        """Render the table of possible values for this parameter."""
        f = util.Formatter()
        f.append('{name}: Possible values for {self.display_name}:'.format(self=self, name=name))
        f.new_paragraph()
        with f.indent():
            with f.columns() as cols:
                for (_, names, desc) in self.values:
                    cols.append(', '.join(names), desc)
        f.new_paragraph()
        return str(f)

    def help_parens(self):
        """Yield help snippets, temporarily presenting the default by its
        first user-facing name (or UNSET if it is not mapped)."""
        backup = self.default
        try:
            for (arg, keys, _) in self.values:
                if (arg == self.default):
                    self.default = keys[0]
                    break
            else:
                self.default = util.UNSET
            for s in super(MappedParameter, self).help_parens():
                (yield s)
        finally:
            # Always restore the real default value.
            self.default = backup
        if self.list_name:
            (yield 'use "{0}" for options'.format(self.list_name))
class SearchService():
    """Thin wrapper around a Github client for repository search."""

    def __init__(self, github_client: Github):
        """:param github_client: authenticated PyGithub client"""
        self._github_client = github_client

    def search_repositories(self, query, limit):
        """Search repositories by name and return up to *limit* formatted dicts."""
        repositories = self._github_client.search_repositories(query=query, **{'in': 'name'})
        return [self._format_repo(repository) for repository in repositories[:limit]]

    def _format_repo(self, repository: Repository):
        """Project a Repository into a plain dict, including its latest commit."""
        commits = repository.get_commits()
        # NOTE(review): `commits` is a paginated result object; its
        # truthiness may not reflect emptiness and commits[0] on an empty
        # history can raise — confirm against PyGithub's behavior.
        return {'url': repository.html_url, 'name': repository.name, 'owner': {'login': repository.owner.login, 'url': repository.owner.html_url, 'avatar_url': repository.owner.avatar_url}, 'latest_commit': (self._format_commit(commits[0]) if commits else {})}

    def _format_commit(self, commit: Commit):
        """Project a Commit into a plain dict."""
        return {'sha': commit.sha, 'url': commit.html_url, 'message': commit.commit.message, 'author_name': commit.commit.author.name}
def upgrade():
    """Alembic upgrade step.

    Creates the ``access_codes_tickets`` association table (composite primary
    key, cascading deletes on both sides) and drops the legacy ``tickets``
    column from ``access_codes`` that it replaces.
    """
    op.create_table('access_codes_tickets', sa.Column('access_code_id', sa.Integer(), nullable=False), sa.Column('ticket_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['access_code_id'], ['access_codes.id'], ondelete='CASCADE'), sa.ForeignKeyConstraint(['ticket_id'], ['tickets.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('access_code_id', 'ticket_id'))
    # The many-to-many table supersedes the old single column.
    op.drop_column(u'access_codes', 'tickets')
class OptionSeriesVectorSonification(Options):
    """Generated option wrapper for a Highcharts ``series.vector.sonification``
    configuration subtree. Each accessor delegates to the ``Options`` base
    (``_config_sub_data`` / ``_config_get`` / ``_config``).
    """

    def contextTracks(self) -> 'OptionSeriesVectorSonificationContexttracks':
        """Sub-configuration for context tracks."""
        return self._config_sub_data('contextTracks', OptionSeriesVectorSonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionSeriesVectorSonificationDefaultinstrumentoptions':
        """Sub-configuration with default instrument options."""
        return self._config_sub_data('defaultInstrumentOptions', OptionSeriesVectorSonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionSeriesVectorSonificationDefaultspeechoptions':
        """Sub-configuration with default speech options."""
        return self._config_sub_data('defaultSpeechOptions', OptionSeriesVectorSonificationDefaultspeechoptions)

    # NOTE(review): `enabled` is defined twice; at class-creation time the
    # second definition shadows the first, making the getter unreachable.
    # This looks like stripped @property / @enabled.setter decorators from the
    # code generator -- confirm against the original generated source.
    def enabled(self):
        """Getter: whether sonification is enabled (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter: enable/disable sonification."""
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesVectorSonificationPointgrouping':
        """Sub-configuration for point grouping."""
        return self._config_sub_data('pointGrouping', OptionSeriesVectorSonificationPointgrouping)

    def tracks(self) -> 'OptionSeriesVectorSonificationTracks':
        """Sub-configuration for sonification tracks."""
        return self._config_sub_data('tracks', OptionSeriesVectorSonificationTracks)
def div_polys(a, b):
    """Divide polynomial ``a`` by ``b`` over GF(2^m) and return the quotient.

    Coefficients are stored low-order first; multiplication/division of
    coefficients goes through the module-level log/antilog tables
    (``glogtable`` / ``gexptable``). ``a`` is not modified.
    """
    assert len(a) >= len(b)
    remainder = list(a)          # working copy; mutated in place below
    quotient = []
    lead_a = len(remainder) - 1  # index of the current leading coefficient
    lead_b = len(b) - 1
    shift = lead_a - lead_b
    while shift >= 0:
        # Divide leading coefficients via the log tables (the
        # two_to_the_degree_m1 offset keeps the exponent non-negative).
        q = gexptable[(glogtable[remainder[lead_a]] - glogtable[b[lead_b]]) + two_to_the_degree_m1]
        quotient.insert(0, q)
        # Subtract q * b shifted by `shift`; XOR is subtraction in GF(2^m).
        for idx in range(lead_b, -1, -1):
            remainder[shift + idx] ^= gexptable[glogtable[b[idx]] + glogtable[q]]
        lead_a -= 1
        shift -= 1
    return quotient
def get_user_by_payload(payload):
    """Resolve a decoded JWT payload to a user object.

    Raises ``JSONWebTokenError`` when the payload carries no username or when
    the resolved user exists but is inactive. May return ``None`` when the
    lookup handler does.
    """
    username = jwt_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER(payload)
    if not username:
        raise exceptions.JSONWebTokenError(_('Invalid payload'))
    user = jwt_settings.JWT_GET_USER_BY_NATURAL_KEY_HANDLER(username)
    if user is None:
        return user
    # Users without an is_active attribute are treated as active.
    if not getattr(user, 'is_active', True):
        raise exceptions.JSONWebTokenError(_('User is disabled'))
    return user
def add_kid(kid_b64, key_b64):
    """Register a DER-encoded public key under ``kid_b64`` in the global
    ``kids`` mapping as a COSE key.

    RSA keys are stored as PS256 RSA COSE keys, EC keys as ES256 P-256 COSE
    keys; anything else is skipped with a warning on stderr.
    """
    # Decode purely to validate that the key id is well-formed base64 (raises
    # binascii.Error otherwise); the mapping is keyed on the base64 form, so
    # the decoded bytes themselves are not needed. (Fixes an unused local.)
    b64decode(kid_b64)
    asn1data = b64decode(key_b64)
    pub = serialization.load_der_public_key(asn1data)
    if isinstance(pub, RSAPublicKey):
        numbers = pub.public_numbers()
        kids[kid_b64] = CoseKey.from_dict({
            KpKty: KtyRSA,
            KpAlg: Ps256,
            RSAKpE: int_to_bytes(numbers.e),
            RSAKpN: int_to_bytes(numbers.n),
        })
    elif isinstance(pub, EllipticCurvePublicKey):
        numbers = pub.public_numbers()
        kids[kid_b64] = CoseKey.from_dict({
            KpKty: KtyEC2,
            EC2KpCurve: P256,
            KpAlg: Es256,
            # P-256 coordinates are fixed-width 32-byte big-endian values.
            EC2KpX: numbers.x.to_bytes(32, byteorder='big'),
            EC2KpY: numbers.y.to_bytes(32, byteorder='big'),
        })
    else:
        print(f'Skipping unexpected/unknown key type (keyid={kid_b64}, {pub.__class__.__name__}).', file=sys.stderr)
class CleanupPayload(Payload):
    """Payload implementing OSC cleanup mode.

    Collects leftover artifacts from (possibly crashed) online-schema-change
    runs -- temporary files, ``__osc_*`` triggers and tables -- and removes
    them, optionally killing a currently running OSC first. SQL statements
    are queued as ``(sql, db)`` pairs in ``self.sqls_to_execute`` and executed
    in one pass by :meth:`cleanup`.
    """

    def __init__(self, *args, **kwargs):
        super(CleanupPayload, self).__init__(*args, **kwargs)
        self.files_to_clean = []      # filesystem paths scheduled for removal
        self.to_drop = []             # dicts describing triggers/tables to drop
        self.sqls_to_execute = []     # (sql, db) pairs run by cleanup()
        self._current_db = kwargs.get('db')
        self._current_table = kwargs.get('table')
        # NOTE(review): option key is singular 'database' while the attribute
        # is plural -- confirm against the CLI argument definition.
        self.databases = kwargs.get('database')
        self.kill_first = kwargs.get('kill', False)
        self.kill_only = kwargs.get('kill_only', False)
        self.additional_osc_tables = kwargs.get('additional_tables', [])
        self.disable_replication = kwargs.get('disable_replication', True)
        self.print_tables = kwargs.get('print_tables', False)
        self.tables_to_print = []     # (select-sql, db) pairs for debug dumps

    def set_current_table(self, table_name):
        """Record which table cleanup is currently working on."""
        self._current_table = table_name

    def cleanup(self, db='mysql'):
        """Remove queued files, then execute all queued cleanup SQL.

        File removal failures are logged and skipped; SQL failures abort with
        ``OSCError('CLEANUP_EXECUTION_ERROR')`` after unlocking/restarting
        replication as needed.
        """
        # Best-effort file removal: a failure on one file must not block the rest.
        for filepath in self.files_to_clean:
            try:
                if os.path.isfile(filepath):
                    util.rm(filepath, self.sudo)
            except Exception:
                log.exception('Failed to cleanup file: {}'.format(filepath))
        # Reconnect when no connection exists or when spanning multiple databases.
        if ((not self._conn) or (self.databases and (len(self.databases) > 1))):
            self._conn = self.get_conn(db)
        if self.print_tables:
            self.tables_to_print.append(('SELECT * FROM `{}`'.format(self._current_table), db))
            self.print_osc_tables(db)
        self.gen_drop_sqls()
        self.get_mysql_settings()
        self.init_mysql_version()
        if self.disable_replication:
            self.set_no_binlog()
        self.stop_slave_sql()
        # Prefer high-priority DDL when the server supports it; otherwise fall
        # back to an explicit table lock.
        if self.is_high_pri_ddl_supported:
            self.enable_priority_ddl()
        else:
            self.lock_tables(tables=[self.table_name])
        self.execute_sql('USE `{}`'.format(escape(db)))
        current_db = db
        for (stmt, stmt_db) in self.sqls_to_execute:
            cleanupError = False
            try:
                # Switch databases lazily, only when the next statement needs it.
                if (stmt_db != current_db):
                    self.execute_sql('USE `{}`'.format(escape(stmt_db)))
                    current_db = stmt_db
                log.info('Executing db: {} sql: {}'.format(stmt_db, stmt))
                self.execute_sql(stmt)
            except MySQLdb.OperationalError as e:
                (errnum, _) = e.args
                # 1507/1508: dropping a non-existent partition -- safe to skip.
                if (errnum in [1507, 1508]):
                    continue
                cleanupError = True
                error = e
            except Exception as e:
                cleanupError = True
                error = e
            if cleanupError:
                # `error` is always bound here: it is set on every path that
                # sets cleanupError.
                self.sqls_to_execute = []
                if (not self.is_high_pri_ddl_supported):
                    self.unlock_tables()
                self.start_slave_sql()
                log.error('Failed to execute sql for cleanup')
                raise OSCError('CLEANUP_EXECUTION_ERROR', {'sql': stmt, 'msg': str(error)})
        if (not self.is_high_pri_ddl_supported):
            self.unlock_tables()
        self.sqls_to_execute = []
        self.start_slave_sql()

    def print_osc_tables(self, db='mysql'):
        """Log (at debug level) the contents of queued debug tables in ``db``."""
        if ((not self._conn) or (self.databases and (len(self.databases) > 1))):
            self._conn = self.get_conn(db)
        self.execute_sql('USE `{}`'.format(escape(db)))
        for (stmt, stmt_db) in self.tables_to_print:
            if (stmt_db != db):
                continue
            try:
                rows = self.query(stmt)
                for row in rows:
                    log.debug(row)
            except Exception:
                # Table may already be gone; dumping is best-effort only.
                continue

    def add_file_entry(self, filepath):
        """Queue a file path for removal during cleanup."""
        log.debug('Cleanup file entry added: {}'.format(filepath))
        self.files_to_clean.append(filepath)

    def remove_file_entry(self, filepath):
        """Un-queue a previously added file path."""
        log.debug('Cleanup file entry removed: {}'.format(filepath))
        self.files_to_clean.remove(filepath)

    def remove_all_file_entries(self):
        """Drop every queued file entry."""
        log.debug('Removing all cleanup file entries')
        self.files_to_clean = []

    def add_sql_entry(self, sql):
        """Queue a cleanup SQL entry.

        NOTE(review): cleanup() unpacks entries as (sql, db) pairs, but this
        appends ``sql`` as-is -- presumably callers pass a tuple; confirm.
        """
        log.debug('Cleanup SQL entry added: {}'.format(sql))
        self.sqls_to_execute.append(sql)

    def gen_drop_sqls(self):
        """Translate ``self.to_drop`` entries into queued DROP statements.

        Triggers are dropped before tables. RANGE/LIST-partitioned tables are
        dismantled partition by partition (keeping the last partition to fall
        with the table) to avoid one huge drop.
        """
        log.info('Generating drop trigger queries')
        for entry in self.to_drop:
            if (entry['type'] == 'trigger'):
                db = entry['db']
                trigger_name = entry['name']
                sql_query = 'DROP TRIGGER IF EXISTS `{}`'.format(escape(trigger_name))
                self.sqls_to_execute.append((sql_query, db))
        log.info('Generating drop table queries')
        for entry in self.to_drop:
            if (entry['type'] == 'table'):
                db = entry['db']
                table = entry['name']
                partition_method = self.get_partition_method(db, table)
                if (partition_method in ('RANGE', 'LIST')):
                    if entry['partitions']:
                        # Keep the final partition; it goes away with the table.
                        entry['partitions'].pop()
                    log.debug('{}/{} using {} partitioning method'.format(db, table, partition_method))
                    for partition_name in entry['partitions']:
                        sql_query = 'ALTER TABLE `{}` DROP PARTITION `{}`'.format(escape(table), escape(partition_name))
                        self.sqls_to_execute.append((sql_query, db))
                sql_query = 'DROP TABLE IF EXISTS `{}`'.format(table)
                self.sqls_to_execute.append((sql_query, db))
        self.to_drop = []

    def add_drop_table_entry(self, db, table, partitions=None):
        """Queue a table (and optionally its partitions) for dropping."""
        self.to_drop.append({'type': 'table', 'db': db, 'name': table, 'partitions': partitions})
        self.tables_to_print.append(('SELECT * FROM `{}`'.format(table), db))

    def remove_drop_table_entry(self, db, table_name):
        """Remove matching queued table-drop entries by name."""
        for entry in self.to_drop:
            if ((entry['type'] == 'table') and (entry['name'] == table_name)):
                self.to_drop.remove(entry)

    def add_drop_trigger_entry(self, db, trigger_name):
        """Queue a trigger for dropping."""
        self.to_drop.append({'type': 'trigger', 'db': db, 'name': trigger_name})

    def run_ddl(self):
        """Entry point used by the payload framework: run cleanup."""
        self.cleanup()

    def fetch_all_tables(self):
        """Return the names of all OSC-prefixed tables on the server."""
        results = self.query(sql.get_all_osc_tables(), (constant.PREFIX, constant.PREFIX))
        return [row['TABLE_NAME'] for row in results]

    def search_for_tables(self):
        """Find leftover OSC tables (plus any additional configured names)
        and queue them for dropping."""
        if self.databases:
            for db in self.databases:
                results = self.query(sql.get_all_osc_tables(db), (constant.PREFIX, constant.PREFIX, db))
                for row in results:
                    self.add_drop_table_entry(db, row['TABLE_NAME'])
        else:
            results = self.query(sql.get_all_osc_tables(), (constant.PREFIX, constant.PREFIX))
            for row in results:
                self.add_drop_table_entry(row['db'], row['TABLE_NAME'])
        for table in self.additional_osc_tables:
            log.info(f'reading table: {table}')
            results = self.query(sql.get_all_osc_tables(), (table, table))
            for row in results:
                self.add_drop_table_entry(row['db'], row['TABLE_NAME'])

    def search_for_triggers(self):
        """Find leftover OSC triggers and queue them for dropping."""
        if self.databases:
            for db in self.databases:
                results = self.query(sql.get_all_osc_triggers(db), (constant.PREFIX, constant.PREFIX, db))
                for row in results:
                    self.add_drop_trigger_entry(db, row['TRIGGER_NAME'])
        else:
            results = self.query(sql.get_all_osc_triggers(), (constant.PREFIX, constant.PREFIX))
            for row in results:
                self.add_drop_trigger_entry(row['db'], row['TRIGGER_NAME'])

    def search_for_files(self):
        """Scan the MySQL datadir for leftover ``__osc_*`` files and queue them."""
        datadir = self.query(sql.select_as('', 'dir'))[0]['dir']
        for (root, _, files) in os.walk(datadir):
            for fname in files:
                if re.match('__osc_.*\\.[0-9]+', fname):
                    self.add_file_entry(os.path.join(root, fname))

    def kill_osc(self):
        """Kill the connection holding the OSC named lock, if any."""
        result = self.query('SELECT IS_USED_LOCK(%s) as owner_id', (constant.OSC_LOCK_NAME,))
        owner_id = result[0]['owner_id']
        if owner_id:
            log.info('Named lock: {} is held by {}. Killing it to free up the lock'.format(constant.OSC_LOCK_NAME, owner_id))
            self.execute_sql(sql.kill_proc, (owner_id,))
        else:
            log.info('No other OSC is running at the moment')

    def cleanup_all(self):
        """Full cleanup flow: optionally kill a running OSC, discover leftover
        triggers/tables/files, then drop/remove them under the OSC lock."""
        if self.kill_first:
            self.kill_osc()
            log.info('Wait 5 seconds for the running OSC to cleanup its own stuff ')
            time.sleep(5)
        if self.kill_only:
            return
        self.search_for_triggers()
        self.search_for_tables()
        self.search_for_files()
        self.get_osc_lock()
        self.cleanup()
        self.release_osc_lock()
# NOTE(review): the leading '.django_db' line looks like the tail of a
# truncated '@pytest.mark.django_db' decorator -- restore against VCS.
.django_db
def test_fabs_old_submission(client, monkeypatch, fabs_award_with_old_submission, helpers, elasticsearch_award_index):
    """A FABS award attached to an old submission should still be counted by
    the count endpoint (expects exactly one match for agency 'M', period '07')."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Pin "now" so the fiscal-period window is deterministic.
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    resp = helpers.post_for_count_endpoint(client, url, ['M'], ['07'])
    assert (resp.data['count'] == 1)
class NursingTask(Document):
    """Frappe DocType controller for a Nursing Task.

    Manages the task's lifecycle (Requested -> In Progress -> Completed /
    Cancelled) and provides helpers to create tasks from a checklist template.

    NOTE(review): ``create_nursing_tasks_from_template``, ``create_nursing_tasks``
    and ``cancel_nursing_tasks`` take ``cls`` but show no @classmethod
    decorators -- they appear to have been stripped in extraction; confirm.
    """

    def before_insert(self):
        """Compute schedule, title and patient age before first save."""
        self.set_task_schedule()
        self.title = '{} - {}'.format(_(self.patient), _(self.activity))
        self.age = frappe.get_doc('Patient', self.patient).get_age()

    def validate(self):
        # Requested tasks are auto-submitted (docstatus 1).
        if (self.status == 'Requested'):
            self.docstatus = 1

    def on_submit(self):
        self.db_set('status', 'Requested')

    def on_cancel(self):
        """Block cancellation of completed tasks; otherwise mark Cancelled."""
        if (self.status == 'Completed'):
            frappe.throw(_("Not Allowed to cancel Nursing Task with status 'Completed'"))
        self.db_set('status', 'Cancelled')

    def on_update_after_submit(self):
        """Enforce status transition rules and stamp start/end times."""
        if ((self.status == 'Completed') and self.task_doctype and (not self.task_document_name)):
            frappe.throw(_("Not Allowed to 'Complete' Nursing Task without linking Task Document"))
        if (self.status == 'Draft'):
            frappe.throw(_("Nursing Task cannot be 'Draft' after submission"))
        if (self.status == 'In Progress'):
            # Stamp the start time only once.
            if (not self.task_start_time):
                self.db_set('task_start_time', now_datetime())
        elif (self.status == 'Completed'):
            self.db_set({'task_end_time': now_datetime(), 'task_duration': time_diff_in_seconds(self.task_end_time, self.task_start_time)})
        self.notify_update()

    def set_task_schedule(self):
        """Default the requested start/end times and the task date.

        Start falls back to "now" when missing or in the past; end is derived
        from the activity's configured duration.
        """
        if ((not self.requested_start_time) or (get_datetime(self.requested_start_time) < now_datetime())):
            self.requested_start_time = now_datetime()
        if (not self.requested_end_time):
            if (not self.duration):
                self.duration = frappe.db.get_value('Healthcare Activity', self.activity, 'duration')
            self.requested_end_time = add_to_date(self.requested_start_time, seconds=self.duration)
        self.date = getdate(self.requested_start_time)

    def create_nursing_tasks_from_template(cls, template, doc, start_time=None, post_event=True):
        """Create one Nursing Task per task row of a checklist template."""
        tasks = frappe.get_all('Nursing Checklist Template Task', filters={'parent': template}, fields=['*'])
        start_time = (start_time or now_datetime())
        NursingTask.create_nursing_tasks(tasks, doc, start_time, post_event)

    def create_nursing_tasks(cls, tasks, doc, start_time, post_event=True):
        """Insert Nursing Task documents for the given template task rows.

        ``post_event`` controls whether per-task time offsets are applied
        after (positive) or before (negated) ``start_time``.
        """
        for task in tasks:
            medical_department = (doc.get('department') if doc.get('department') else doc.get('medical_department'))
            if (doc.get('doctype') == 'Inpatient Record'):
                # NOTE(review): the trailing comma makes service_unit a 1-tuple
                # on this branch (the else branch yields a scalar) -- looks
                # like a bug; confirm intended value shape.
                service_unit = (frappe.db.get_value('Inpatient Occupancy', {'parent': doc.name, 'left': 0}, 'service_unit'),)
            else:
                service_unit = (doc.get('service_unit') if doc.get('service_unit') else doc.get('healthcare_service_unit'))
            options = {'doctype': 'Nursing Task', 'status': 'Requested', 'company': doc.get('company', get_default_company()), 'service_unit': service_unit, 'medical_department': medical_department, 'reference_doctype': doc.get('doctype'), 'reference_name': doc.get('name'), 'patient': doc.get('patient'), 'activity': task.activity, 'mandatory': task.mandatory, 'duration': task.task_duration, 'task_doctype': task.task_doctype}
            if task.time_offset:
                time_offset = (task.time_offset if (not post_event) else (0 - task.time_offset))
                requested_start_time = add_to_date(start_time, seconds=time_offset)
            else:
                requested_start_time = start_time
            options.update({'requested_start_time': requested_start_time})
            # Drop falsy values so Frappe applies its own field defaults.
            options = {key: value for (key, value) in options.items() if value}
            frappe.get_doc(options).insert()

    def cancel_nursing_tasks(cls, dt, dn):
        """Cancel all non-completed tasks referencing document ``dt``/``dn``.

        NOTE(review): the filter ['!=', 'Completed', 'Cancelled'] is not a
        valid Frappe operator form ('!=' takes a single value); this likely
        intends ['not in', ['Completed', 'Cancelled']] -- confirm.
        """
        tasks = frappe.db.get_all('Nursing Task', filters={'reference_doctype': dt, 'reference_document': dn, 'status': ['!=', 'Completed', 'Cancelled']})
        for task in tasks:
            frappe.get_doc('Nursing Task', task).cancel()
class Player(models.Model):
    """Django model holding a game player's profile and progression state.

    NOTE(review): fields are declared as ``**_('...')`` -- this only works if
    ``_`` returns a kwargs mapping (e.g. {'verbose_name': ..., 'help_text': ...});
    if ``_`` is gettext this would raise TypeError at import. Many string
    literals here are empty, suggesting they were stripped during extraction;
    reconcile with the original source.
    """

    class Meta():
        verbose_name = ''
        verbose_name_plural = ''
        # Custom permission guarding credit changes.
        permissions = (('change_credit', ''),)

    id = models.AutoField(**_('ID'), primary_key=True)
    # One profile per auth user.
    user = models.OneToOneField(authext.models.User, models.CASCADE, **_(''), unique=True)
    name = models.CharField(**_(''), unique=True, max_length=15, validators=[is_name])
    bio = models.CharField(**_(''), blank=True, max_length=150)
    avatar = models.URLField(**_(''), blank=True, max_length=150)
    prefs = models.TextField(**_(''), blank=True)
    level = models.IntegerField(**_(''), default=1)
    exp = models.IntegerField(**_(''), default=0)
    up = models.IntegerField(**_('+1UP'), default=0)
    bomb = models.IntegerField(**_('B'), default=0)
    point = models.IntegerField(**_(''), default=0)
    games = models.IntegerField('', default=0, help_text='')
    drops = models.IntegerField('', default=0, help_text='')
    badges = models.ManyToManyField('badge.Badge', related_name='players', verbose_name='', blank=True, help_text='')
    # Non-symmetrical social graph edges.
    friends = models.ManyToManyField('self', related_name='friended_by', verbose_name='', symmetrical=False, blank=True, help_text='')
    blocks = models.ManyToManyField('self', related_name='blocked_by', verbose_name='', symmetrical=False, blank=True, help_text='')

    def __str__(self):
        """Display players by their in-game name."""
        return self.name
# NOTE(review): the leading '.parametrize' line looks like the tail of a
# truncated '@pytest.mark.parametrize' decorator -- restore against VCS.
.parametrize('with_spiders', [True, pytest.param(False, marks=pytest.mark.slow)])
def test_tmt_aperture(with_spiders):
    """Check the TMT aperture generator against the stored reference pupil,
    with and without secondary-support spiders."""
    name = 'tmt/pupil'
    # Reference files are suffixed when the spiders are omitted.
    name += ('_without_spiders' if (not with_spiders) else '')
    check_aperture(make_tmt_aperture, 30.0, name, check_normalization=True, check_segmentation=True, with_spiders=with_spiders)
class HeaderDB(HeaderDatabaseAPI):
    """Block-header database over a key-value store.

    Persists headers by hash and maintains the canonical chain bookkeeping:
    block-number -> canonical-hash index, cumulative scores (total
    difficulty), chain-gap tracking, and checkpoint headers that must remain
    canonical.

    NOTE(review): many methods take ``cls`` (or a bare ``db``) first but show
    no @classmethod/@staticmethod decorators, and the bare ``_tuple`` lines
    below look like stripped ``@to_tuple`` decorators -- confirm against the
    original source before relying on call forms.
    """

    def __init__(self, db: AtomicDatabaseAPI) -> None:
        self.db = db

    def get_header_chain_gaps(self) -> ChainGaps:
        """Return the currently recorded gaps in the canonical header chain."""
        return self._get_header_chain_gaps(self.db)

    def _get_header_chain_gaps(cls, db: DatabaseAPI) -> ChainGaps:
        try:
            encoded_gaps = db[SchemaV1.make_header_chain_gaps_lookup_key()]
        except KeyError:
            # Nothing persisted yet: start from the genesis gap state.
            return GENESIS_CHAIN_GAPS
        else:
            return rlp.decode(encoded_gaps, sedes=chain_gaps)

    def _update_header_chain_gaps(cls, db: DatabaseAPI, persisted_header: BlockHeaderAPI, base_gaps: ChainGaps=None) -> GapInfo:
        """Fold ``persisted_header`` into the gap bookkeeping and persist any change.

        Returns the (gap_change, gaps) pair from ``fill_gap``.
        """
        if (base_gaps is None):
            base_gaps = cls._get_header_chain_gaps(db)
        (gap_change, gaps) = fill_gap(persisted_header.block_number, base_gaps)
        if (gap_change is not GapChange.NoChange):
            db.set(SchemaV1.make_header_chain_gaps_lookup_key(), rlp.encode(gaps, sedes=chain_gaps))
        return (gap_change, gaps)

    def get_canonical_block_hash(self, block_number: BlockNumber) -> Hash32:
        """Return the canonical block hash at ``block_number`` (raises HeaderNotFound)."""
        return self._get_canonical_block_hash(self.db, block_number)

    def _get_canonical_block_hash(db: DatabaseAPI, block_number: BlockNumber) -> Hash32:
        validate_block_number(block_number)
        number_to_hash_key = SchemaV1.make_block_number_to_hash_lookup_key(block_number)
        try:
            encoded_key = db[number_to_hash_key]
        except KeyError:
            raise HeaderNotFound(f'No canonical header for block number #{block_number}')
        else:
            return rlp.decode(encoded_key, sedes=rlp.sedes.binary)

    def get_canonical_block_header_by_number(self, block_number: BlockNumber) -> BlockHeaderAPI:
        """Return the canonical header at ``block_number``."""
        return self._get_canonical_block_header_by_number(self.db, block_number)

    def _get_canonical_block_header_by_number(cls, db: DatabaseAPI, block_number: BlockNumber) -> BlockHeaderAPI:
        validate_block_number(block_number)
        canonical_block_hash = cls._get_canonical_block_hash(db, block_number)
        return cls._get_block_header_by_hash(db, canonical_block_hash)

    def get_canonical_head(self) -> BlockHeaderAPI:
        """Return the header at the tip of the canonical chain."""
        return self._get_canonical_head(self.db)

    def _get_canonical_head(cls, db: DatabaseAPI) -> BlockHeaderAPI:
        canonical_head_hash = cls._get_canonical_head_hash(db)
        return cls._get_block_header_by_hash(db, canonical_head_hash)

    def _get_canonical_head_hash(cls, db: DatabaseAPI) -> Hash32:
        try:
            return Hash32(db[SchemaV1.make_canonical_head_hash_lookup_key()])
        except KeyError:
            raise CanonicalHeadNotFound('No canonical head set for this chain')

    def get_block_header_by_hash(self, block_hash: Hash32) -> BlockHeaderAPI:
        """Return the header stored under ``block_hash`` (raises HeaderNotFound)."""
        return self._get_block_header_by_hash(self.db, block_hash)

    def _get_block_header_by_hash(db: DatabaseAPI, block_hash: Hash32) -> BlockHeaderAPI:
        validate_word(block_hash, title='Block Hash')
        try:
            header_rlp = db[block_hash]
        except KeyError:
            raise HeaderNotFound(f'No header with hash {encode_hex(block_hash)} found')
        return _decode_block_header(header_rlp)

    def get_score(self, block_hash: Hash32) -> int:
        """Return the cumulative score (total difficulty) at ``block_hash``."""
        return self._get_score(self.db, block_hash)

    def _get_score(db: DatabaseAPI, block_hash: Hash32) -> int:
        try:
            encoded_score = db[SchemaV1.make_block_hash_to_score_lookup_key(block_hash)]
        except KeyError:
            raise HeaderNotFound(f'No header with hash {encode_hex(block_hash)} found')
        return rlp.decode(encoded_score, sedes=rlp.sedes.big_endian_int)

    def header_exists(self, block_hash: Hash32) -> bool:
        """Return True when a header with ``block_hash`` is stored."""
        return self._header_exists(self.db, block_hash)

    def _header_exists(db: DatabaseAPI, block_hash: Hash32) -> bool:
        validate_word(block_hash, title='Block Hash')
        return (block_hash in db)

    def persist_header(self, header: BlockHeaderAPI) -> Tuple[(Tuple[(BlockHeaderAPI, ...)], Tuple[(BlockHeaderAPI, ...)])]:
        """Persist a single header; returns (new_canonical, old_canonical)."""
        return self.persist_header_chain((header,))

    def persist_header_chain(self, headers: Iterable[BlockHeaderAPI], genesis_parent_hash: Hash32=GENESIS_PARENT_HASH) -> Tuple[(Tuple[(BlockHeaderAPI, ...)], Tuple[(BlockHeaderAPI, ...)])]:
        """Persist a contiguous run of headers inside one atomic batch."""
        with self.db.atomic_batch() as db:
            return self._persist_header_chain(db, headers, genesis_parent_hash)

    def persist_checkpoint_header(self, header: BlockHeaderAPI, score: int) -> None:
        """Persist a trusted checkpoint header (atomically) with the given score."""
        with self.db.atomic_batch() as db:
            return self._persist_checkpoint_header(db, header, score)

    def _set_hash_scores_to_db(cls, db: DatabaseAPI, header: BlockHeaderAPI, score: int) -> int:
        """Store the cumulative score for ``header`` and return the new total.

        For zero-difficulty (post-merge style) headers the block number is
        used in place of difficulty.
        """
        difficulty = header.difficulty
        new_score = ((score + difficulty) if (difficulty != 0) else (score + header.block_number))
        db.set(SchemaV1.make_block_hash_to_score_lookup_key(header.hash), rlp.encode(new_score, sedes=rlp.sedes.big_endian_int))
        return new_score

    def _persist_checkpoint_header(cls, db: DatabaseAPI, header: BlockHeaderAPI, score: int) -> None:
        """Write a checkpoint header, make it canonical and repair surroundings."""
        db.set(header.hash, rlp.encode(header))
        # Record the new checkpoint alongside any previous ones.
        previous_checkpoints = cls._get_checkpoints(db)
        new_checkpoints = (previous_checkpoints + (header.hash,))
        db.set(SchemaV1.make_checkpoint_headers_key(), b''.join(new_checkpoints))
        difficulty = header.difficulty
        # Back out this header's own contribution so _set_hash_scores_to_db
        # lands on exactly `score`.
        previous_score = ((score - difficulty) if (difficulty != 0) else (score - header.block_number))
        cls._set_hash_scores_to_db(db, header, previous_score)
        cls._set_as_canonical_chain_head(db, header, GENESIS_PARENT_HASH)
        (_, gaps) = cls._update_header_chain_gaps(db, header)
        # If a mismatching parent is still marked canonical, decanonicalize it.
        parent_block_num = BlockNumber((header.block_number - 1))
        try:
            parent_hash = cls._get_canonical_block_hash(db, parent_block_num)
        except HeaderNotFound:
            pass
        else:
            if (parent_hash != header.parent_hash):
                try:
                    true_parent = cls._get_block_header_by_hash(db, header.parent_hash)
                except HeaderNotFound:
                    cls._decanonicalize_single(db, parent_block_num, gaps)
                else:
                    raise ValidationError(f'Why was a non-matching parent header {parent_hash!r} left as canonical after _set_as_canonical_chain_head() and {true_parent} is available?')
        cls._decanonicalize_descendant_orphans(db, header, new_checkpoints)

    def _decanonicalize_descendant_orphans(cls, db: DatabaseAPI, header: BlockHeaderAPI, checkpoints: Tuple[(Hash32, ...)]) -> None:
        """Remove canonical descendants of ``header`` that no longer link to it.

        Walks upward from header+1, deleting number->hash entries and
        reopening gaps until a linking child (or no child) is found. Refuses
        to decanonicalize a checkpoint.
        """
        new_gaps = starting_gaps = cls._get_header_chain_gaps(db)
        child_number = BlockNumber((header.block_number + 1))
        try:
            child = cls._get_canonical_block_header_by_number(db, child_number)
        except HeaderNotFound:
            next_invalid_child = None
        else:
            if (child.parent_hash != header.hash):
                if (child.hash in checkpoints):
                    raise CheckpointsMustBeCanonical(f'Trying to decanonicalize {child} while making {header} the chain tip')
                else:
                    next_invalid_child = child
            else:
                next_invalid_child = None
        while next_invalid_child:
            db.delete(SchemaV1.make_block_number_to_hash_lookup_key(child_number))
            new_gaps = reopen_gap(child_number, new_gaps)
            child_number = BlockNumber((child_number + 1))
            try:
                next_invalid_child = cls._get_canonical_block_header_by_number(db, child_number)
            except HeaderNotFound:
                break
            else:
                if (next_invalid_child.hash in checkpoints):
                    raise CheckpointsMustBeCanonical(f'Trying to decanonicalize {next_invalid_child} while making {header} the chain tip')
        # Persist gap bookkeeping only when it actually changed.
        if (new_gaps != starting_gaps):
            db.set(SchemaV1.make_header_chain_gaps_lookup_key(), rlp.encode(new_gaps, sedes=chain_gaps))

    def _decanonicalize_single(cls, db: DatabaseAPI, block_num: BlockNumber, base_gaps: ChainGaps) -> ChainGaps:
        """Drop the canonical entry at ``block_num`` and reopen the gap."""
        db.delete(SchemaV1.make_block_number_to_hash_lookup_key(block_num))
        new_gaps = reopen_gap(block_num, base_gaps)
        if (new_gaps != base_gaps):
            db.set(SchemaV1.make_header_chain_gaps_lookup_key(), rlp.encode(new_gaps, sedes=chain_gaps))
        return new_gaps

    def _persist_header_chain(cls, db: DatabaseAPI, headers: Iterable[BlockHeaderAPI], genesis_parent_hash: Hash32) -> Tuple[(Tuple[(BlockHeaderAPI, ...)], Tuple[(BlockHeaderAPI, ...)])]:
        """Persist a contiguous header chain; promote it to canonical when its
        cumulative score beats the current head. Returns (new, old) canonical
        headers (both empty when the head did not move)."""
        headers_iterator = iter(headers)
        try:
            first_header = first(headers_iterator)
        except StopIteration:
            # Nothing to persist.
            return ((), ())
        is_genesis = (first_header.parent_hash == genesis_parent_hash)
        if ((not is_genesis) and (not cls._header_exists(db, first_header.parent_hash))):
            raise ParentNotFound(f'Cannot persist block header ({encode_hex(first_header.hash)}) with unknown parent ({encode_hex(first_header.parent_hash)})')
        if is_genesis:
            score = 0
        else:
            score = cls._get_score(db, first_header.parent_hash)
        curr_chain_head = first_header
        db.set(curr_chain_head.hash, rlp.encode(curr_chain_head))
        score = cls._set_hash_scores_to_db(db, curr_chain_head, score)
        base_gaps = cls._get_header_chain_gaps(db)
        gap_info = cls._update_header_chain_gaps(db, curr_chain_head, base_gaps)
        gaps = cls._handle_gap_change(db, gap_info, curr_chain_head, genesis_parent_hash)
        # Re-chain the first header in front so the sliding window validates
        # every parent/child link.
        orig_headers_seq = concat([(first_header,), headers_iterator])
        for (parent, child) in sliding_window(2, orig_headers_seq):
            if (parent.hash != child.parent_hash):
                raise ValidationError(f'Non-contiguous chain. Expected {encode_hex(child.hash)} to have {encode_hex(parent.hash)} as parent but was {encode_hex(child.parent_hash)}')
            curr_chain_head = child
            db.set(curr_chain_head.hash, rlp.encode(curr_chain_head))
            score = cls._set_hash_scores_to_db(db, curr_chain_head, score)
            gap_info = cls._update_header_chain_gaps(db, curr_chain_head, gaps)
            gaps = cls._handle_gap_change(db, gap_info, curr_chain_head, genesis_parent_hash)
        try:
            previous_canonical_head = cls._get_canonical_head_hash(db)
            head_score = cls._get_score(db, previous_canonical_head)
        except CanonicalHeadNotFound:
            # First chain ever persisted becomes canonical unconditionally.
            return cls._set_as_canonical_chain_head(db, curr_chain_head, genesis_parent_hash)
        if (score > head_score):
            return cls._set_as_canonical_chain_head(db, curr_chain_head, genesis_parent_hash)
        return ((), ())

    def _handle_gap_change(cls, db: DatabaseAPI, gap_info: GapInfo, header: BlockHeaderAPI, genesis_parent_hash: Hash32) -> ChainGaps:
        """React to a gap-filling write by canonicalizing the filling header.

        When the header closes (or shrinks from the right) a gap, the child
        above it must either link to it or be decanonicalized -- unless that
        child is a checkpoint, which may never be displaced.
        """
        (gap_change, gaps) = gap_info
        if (gap_change not in GAP_WRITES):
            return gaps
        if (gap_change in (GapChange.GapFill, GapChange.GapRightShrink)):
            next_child_number = BlockNumber((header.block_number + 1))
            expected_child = cls._get_canonical_block_header_by_number(db, next_child_number)
            if (header.hash != expected_child.parent_hash):
                checkpoints = cls._get_checkpoints(db)
                if (expected_child.hash in checkpoints):
                    raise CheckpointsMustBeCanonical(f'Cannot make {header} canonical, because it is not the parent of declared checkpoint: {expected_child}')
                else:
                    gaps = cls._decanonicalize_single(db, expected_child.block_number, gaps)
        cls._canonicalize_header(db, header, genesis_parent_hash)
        return gaps

    def _canonicalize_header(cls, db: DatabaseAPI, header: BlockHeaderAPI, genesis_parent_hash: Hash32) -> Tuple[(Tuple[(BlockHeaderAPI, ...)], Tuple[(BlockHeaderAPI, ...)])]:
        """Make ``header`` (and any missing ancestors) canonical.

        Returns (newly canonical, displaced) headers; refuses to displace
        checkpoints.
        """
        new_canonical_headers = cast(Tuple[(BlockHeaderAPI, ...)], tuple(reversed(cls._find_new_ancestors(db, header, genesis_parent_hash))))
        old_canonical_headers = cls._find_headers_to_decanonicalize(db, [h.block_number for h in new_canonical_headers])
        checkpoints = cls._get_checkpoints(db)
        attempted_checkpoint_overrides = {old for old in old_canonical_headers if (old.hash in checkpoints)}
        if len(attempted_checkpoint_overrides):
            raise CheckpointsMustBeCanonical(f'Tried to switch chain away from checkpoint(s) {attempted_checkpoint_overrides!r} by inserting new canonical headers {new_canonical_headers}')
        for ancestor in new_canonical_headers:
            cls._add_block_number_to_hash_lookup(db, ancestor)
        if len(new_canonical_headers):
            # Orphans above the new tip must be cleaned up as well.
            cls._decanonicalize_descendant_orphans(db, new_canonical_headers[(- 1)], checkpoints)
        return (new_canonical_headers, old_canonical_headers)

    def _set_as_canonical_chain_head(cls, db: DatabaseAPI, header: BlockHeaderAPI, genesis_parent_hash: Hash32) -> Tuple[(Tuple[(BlockHeaderAPI, ...)], Tuple[(BlockHeaderAPI, ...)])]:
        """Point the canonical head at ``header``; returns (new, old) canonical headers."""
        try:
            current_canonical_head = cls._get_canonical_head_hash(db)
        except CanonicalHeadNotFound:
            current_canonical_head = None
        new_canonical_headers: Tuple[(BlockHeaderAPI, ...)]
        old_canonical_headers: Tuple[(BlockHeaderAPI, ...)]
        if (current_canonical_head and (header.parent_hash == current_canonical_head)):
            # Fast path: the new head directly extends the old one.
            new_canonical_headers = (header,)
            old_canonical_headers = ()
            cls._add_block_number_to_hash_lookup(db, header)
        else:
            (new_canonical_headers, old_canonical_headers) = cls._canonicalize_header(db, header, genesis_parent_hash)
        db.set(SchemaV1.make_canonical_head_hash_lookup_key(), header.hash)
        return (new_canonical_headers, old_canonical_headers)

    def _get_checkpoints(cls, db: DatabaseAPI) -> Tuple[(Hash32, ...)]:
        """Return all checkpoint hashes (stored concatenated as 32-byte words)."""
        concatenated_checkpoints = db.get(SchemaV1.make_checkpoint_headers_key())
        if (concatenated_checkpoints is None):
            return ()
        else:
            return tuple((Hash32(concatenated_checkpoints[index:(index + 32)]) for index in range(0, len(concatenated_checkpoints), 32)))

    # NOTE(review): likely a mangled '@to_tuple' decorator line.
    _tuple
    def _find_headers_to_decanonicalize(cls, db: DatabaseAPI, numbers_to_decanonicalize: Sequence[BlockNumber]) -> Iterable[BlockHeaderAPI]:
        """Yield the canonical headers currently occupying the given numbers."""
        for block_number in numbers_to_decanonicalize:
            try:
                old_canonical_hash = cls._get_canonical_block_hash(db, block_number)
            except HeaderNotFound:
                # Nothing canonical at this height; skip it.
                continue
            else:
                (yield cls._get_block_header_by_hash(db, old_canonical_hash))

    # NOTE(review): likely a mangled '@to_tuple' decorator line.
    _tuple
    def _find_new_ancestors(cls, db: DatabaseAPI, header: BlockHeaderAPI, genesis_parent_hash: Hash32) -> Iterable[BlockHeaderAPI]:
        """Yield ``header`` and its ancestors, newest first, down to the first
        one that is already canonical (or to an unknown/genesis parent)."""
        h = header
        while True:
            try:
                orig = cls._get_canonical_block_header_by_number(db, h.block_number)
            except HeaderNotFound:
                # This height has no canonical entry yet; keep walking down.
                pass
            else:
                if (orig.hash == h.hash):
                    # Found the junction with the existing canonical chain.
                    break
            (yield h)
            if (h.parent_hash == genesis_parent_hash):
                break
            else:
                try:
                    h = cls._get_block_header_by_hash(db, h.parent_hash)
                except HeaderNotFound:
                    break

    def _add_block_number_to_hash_lookup(db: DatabaseAPI, header: BlockHeaderAPI) -> None:
        """Index ``header`` as the canonical hash for its block number."""
        block_number_to_hash_key = SchemaV1.make_block_number_to_hash_lookup_key(header.block_number)
        db.set(block_number_to_hash_key, rlp.encode(header.hash, sedes=rlp.sedes.binary))
def check_on_disk_config(fw):
    """Load and validate the complete on-disk firewalld configuration.

    Reads firewalld.conf, every XML object type (ipsets, helpers, icmptypes,
    services, zones, policies) from both the stock and /etc directories, then
    the direct rules and the lockdown whitelist. Raises FirewallError (with
    the offending path prefixed) or a generic Exception on any invalid file.
    """
    fw_config = FirewallConfig(fw)
    try:
        _firewalld_conf = firewalld_conf(config.FIREWALLD_CONF)
        _firewalld_conf.read()
    except FirewallError as error:
        raise FirewallError(error.code, ("'%s': %s" % (config.FIREWALLD_CONF, error.msg)))
    except IOError:
        # A missing firewalld.conf is acceptable; defaults apply.
        pass
    except Exception as msg:
        raise Exception(("'%s': %s" % (config.FIREWALLD_CONF, msg)))
    fw_config.set_firewalld_conf(_firewalld_conf)
    # One reader/adder/location triple per configuration object type.
    readers = {'ipset': {'reader': ipset_reader, 'add': fw_config.add_ipset, 'dirs': [config.FIREWALLD_IPSETS, config.ETC_FIREWALLD_IPSETS]}, 'helper': {'reader': helper_reader, 'add': fw_config.add_helper, 'dirs': [config.FIREWALLD_HELPERS, config.ETC_FIREWALLD_HELPERS]}, 'icmptype': {'reader': icmptype_reader, 'add': fw_config.add_icmptype, 'dirs': [config.FIREWALLD_ICMPTYPES, config.ETC_FIREWALLD_ICMPTYPES]}, 'service': {'reader': service_reader, 'add': fw_config.add_service, 'dirs': [config.FIREWALLD_SERVICES, config.ETC_FIREWALLD_SERVICES]}, 'zone': {'reader': zone_reader, 'add': fw_config.add_zone, 'dirs': [config.FIREWALLD_ZONES, config.ETC_FIREWALLD_ZONES]}, 'policy': {'reader': policy_reader, 'add': fw_config.add_policy_object, 'dirs': [config.FIREWALLD_POLICIES, config.ETC_FIREWALLD_POLICIES]}}
    for reader in readers.keys():
        for _dir in readers[reader]['dirs']:
            if (not os.path.isdir(_dir)):
                continue
            for file in sorted(os.listdir(_dir)):
                if file.endswith('.xml'):
                    obj = readers[reader]['reader'](file, _dir)
                    readers[reader]['add'](obj)
    # Cross-object validation once everything is loaded.
    fw_config.full_check_config()
    if os.path.isfile(config.FIREWALLD_DIRECT):
        try:
            obj = Direct(config.FIREWALLD_DIRECT)
            obj.read()
            obj.check_config(obj.export_config())
        except FirewallError as error:
            raise FirewallError(error.code, ("'%s': %s" % (config.FIREWALLD_DIRECT, error.msg)))
        except Exception as msg:
            raise Exception(("'%s': %s" % (config.FIREWALLD_DIRECT, msg)))
    if os.path.isfile(config.LOCKDOWN_WHITELIST):
        try:
            obj = LockdownWhitelist(config.LOCKDOWN_WHITELIST)
            obj.read()
            obj.check_config(obj.export_config())
        except FirewallError as error:
            raise FirewallError(error.code, ("'%s': %s" % (config.LOCKDOWN_WHITELIST, error.msg)))
        except Exception as msg:
            raise Exception(("'%s': %s" % (config.LOCKDOWN_WHITELIST, msg)))
class ExtractGrid(FilterBase):
    """Mayavi filter that selects a volume of interest (VOI) and a sample
    ratio from structured datasets (image data, rectilinear or structured
    grids), delegating to the matching tvtk extract filter."""

    # Version of this class, used for state persistence.
    __version__ = 0

    # VOI bounds along each axis; the dynamic low/high traits below are
    # updated from the actual input extents in _update_limits().
    x_min = Range(value=0, low='_x_low', high='_x_high', enter_set=True, auto_set=False, desc='minimum x value of the domain')
    x_max = Range(value=10000, low='_x_low', high='_x_high', enter_set=True, auto_set=False, desc='maximum x value of the domain')
    y_min = Range(value=0, low='_y_low', high='_y_high', enter_set=True, auto_set=False, desc='minimum y value of the domain')
    y_max = Range(value=10000, low='_y_low', high='_y_high', enter_set=True, auto_set=False, desc='maximum y value of the domain')
    z_min = Range(value=0, low='_z_low', high='_z_high', enter_set=True, auto_set=False, desc='minimum z value of the domain')
    z_max = Range(value=10000, low='_z_low', high='_z_high', enter_set=True, auto_set=False, desc='maximum z value of the domain')
    # Sub-sampling ratios along each axis.
    x_ratio = Range(value=1, low='_min_sample', high='_x_s_high', enter_set=True, auto_set=False, desc='sample rate along x')
    y_ratio = Range(value=1, low='_min_sample', high='_y_s_high', enter_set=True, auto_set=False, desc='sample rate along y')
    z_ratio = Range(value=1, low='_min_sample', high='_z_s_high', enter_set=True, auto_set=False, desc='sample rate along z')

    # Actual tvtk filter; replaced in update_pipeline() to match input type.
    filter = Instance(tvtk.Object, tvtk.ExtractVOI(), allow_none=False)

    input_info = PipelineInfo(datasets=['image_data', 'rectilinear_grid', 'structured_grid'], attribute_types=['any'], attributes=['any'])
    output_info = PipelineInfo(datasets=['image_data', 'rectilinear_grid', 'structured_grid'], attribute_types=['any'], attributes=['any'])

    # Private dynamic bounds backing the Range traits above.
    _min_sample = Int(1)
    _x_low = Int(0)
    _x_high = Int(10000)
    _x_s_high = Int(100)
    _y_low = Int(0)
    _y_high = Int(10000)
    _y_s_high = Int(100)
    _z_low = Int(0)
    _z_high = Int(10000)
    _z_s_high = Int(100)

    # Traits UI layout: VOI/sampling controls plus the raw filter editor.
    view = View(Group(Item(label='Select Volume Of Interest'), Item(name='x_min'), Item(name='x_max'), Item(name='y_min'), Item(name='y_max'), Item(name='z_min'), Item(name='z_max'), Item('_'), Item(label='Select Sample Ratio'), Item(name='x_ratio'), Item(name='y_ratio'), Item(name='z_ratio'), label='VOI'), Group(Item(name='filter', style='custom', resizable=True), show_labels=False, label='Filter'), resizable=True)
def __get_pure_state__(self):
    """Return the picklable state, dropping the per-axis UI traits: they
    are re-derived from the input data on restore."""
    state = super(ExtractGrid, self).__get_pure_state__()
    for axis in ('x', 'y', 'z'):
        for suffix in ('_min', '_max', '_ratio'):
            state.pop(axis + suffix, None)
        for private_suffix in ('_low', '_high', '_s_high'):
            state.pop('_' + axis + private_suffix, None)
    return state
def update_pipeline(self):
    """Pick the tvtk extraction filter matching the input dataset type,
    connect it and publish its output."""
    inputs = self.inputs
    if (len(inputs) == 0):
        return
    input = inputs[0].get_output_dataset()
    # Map each supported VTK dataset class to the extraction filter that
    # handles it.
    mapping = {'vtkStructuredGrid': tvtk.ExtractGrid, 'vtkRectilinearGrid': tvtk.ExtractRectilinearGrid, 'vtkImageData': tvtk.ExtractVOI}
    for (key, klass) in mapping.items():
        if input.is_a(key):
            self.filter = klass()
            break
    else:
        # for/else: no supported dataset type matched.
        error(('This filter does not support %s objects' % input.__class__.__name__))
        return
    fil = self.filter
    self.configure_connection(fil, inputs[0])
    # Refresh bounds first so the VOI/sample-rate pushed below are valid.
    self._update_limits()
    self._update_voi()
    self._update_sample_rate()
    fil.update()
    self._set_outputs([fil])
def update_data(self):
    # Upstream data (not the pipeline structure) changed: refresh the
    # axis limits, re-execute the filter and notify listeners.
    self._update_limits()
    fil = self.filter
    fil.update_whole_extent()
    fil.update()
    self.data_changed = True
def _update_limits(self):
    # Derive the per-axis index bounds from the input extents.  VTK <= 7
    # exposes them through the filter's update extent; newer VTK reads
    # them from the filter's input object.
    if (VTK_MAJOR_VERSION <= 7):
        extents = self.filter.get_update_extent()
    else:
        extents = self.filter.input.extent
    # An inverted extent (min > max) means it is not yet valid; fall back
    # to the dataset dimensions.
    if ((extents[0] > extents[1]) or (extents[2] > extents[3]) or (extents[4] > extents[5])):
        dims = self.inputs[0].get_output_dataset().dimensions
        e = extents
        extents = [e[0], (dims[0] - 1), e[2], (dims[1] - 1), e[4], (dims[2] - 1)]
    (self._x_low, self._x_high) = extents[:2]
    (self._y_low, self._y_high) = extents[2:4]
    (self._z_low, self._z_high) = extents[4:]
    # Sample-rate sliders must allow at least 1.
    self._x_s_high = max(1, self._x_high)
    self._y_s_high = max(1, self._y_high)
    self._z_s_high = max(1, self._z_high)
def _x_min_changed(self, val):
    """Trait handler: keep x_min <= x_max, then refresh the VOI."""
    if val <= self.x_max:
        self._update_voi()
    else:
        # Drag the upper bound along; its own handler refreshes the VOI.
        self.x_max = val
def _x_max_changed(self, val):
    """Trait handler: keep x_max >= x_min, then refresh the VOI."""
    if val >= self.x_min:
        self._update_voi()
    else:
        # Drag the lower bound along; its own handler refreshes the VOI.
        self.x_min = val
def _y_min_changed(self, val):
    """Trait handler: keep y_min <= y_max, then refresh the VOI."""
    if val <= self.y_max:
        self._update_voi()
    else:
        self.y_max = val
def _y_max_changed(self, val):
    """Trait handler: keep y_max >= y_min, then refresh the VOI."""
    if val >= self.y_min:
        self._update_voi()
    else:
        self.y_min = val
def _z_min_changed(self, val):
    """Trait handler: keep z_min <= z_max, then refresh the VOI."""
    if val <= self.z_max:
        self._update_voi()
    else:
        self.z_max = val
def _z_max_changed(self, val):
    """Trait handler: keep z_max >= z_min, then refresh the VOI."""
    if val >= self.z_min:
        self._update_voi()
    else:
        self.z_min = val
def _x_ratio_changed(self):
    # Trait handler: re-apply the sample rate when the x ratio changes.
    self._update_sample_rate()
def _y_ratio_changed(self):
    # Trait handler: re-apply the sample rate when the y ratio changes.
    self._update_sample_rate()
def _z_ratio_changed(self):
    # Trait handler: re-apply the sample rate when the z ratio changes.
    self._update_sample_rate()
def _update_voi(self):
    # Push the current min/max bounds into the filter as its VOI and
    # re-execute it.
    f = self.filter
    f.voi = (self.x_min, self.x_max, self.y_min, self.y_max, self.z_min, self.z_max)
    f.update_whole_extent()
    f.update()
    self.data_changed = True
def _update_sample_rate(self):
    # Push the per-axis sub-sampling ratios into the filter and
    # re-execute it.
    f = self.filter
    f.sample_rate = (self.x_ratio, self.y_ratio, self.z_ratio)
    f.update_whole_extent()
    f.update()
    self.data_changed = True
def _filter_changed(self, old, new):
    # Move the render callback from the old tvtk filter to the new one so
    # any filter-trait change triggers a redraw.
    if (old is not None):
        old.on_trait_change(self.render, remove=True)
    new.on_trait_change(self.render)
class OptionSeriesAreasplineSonificationTracksMappingPlaydelay(Options):
    # Option accessors for the areaspline sonification playDelay mapping.
    # NOTE(review): every getter/setter pair below shares one name, so the
    # second ``def`` overwrites the first at class-creation time.  The
    # ``@property`` / ``@<name>.setter`` decorators were presumably lost
    # during extraction — confirm against the original source.
    def mapFunction(self):
        # Default: None.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class PreProcessingObservationConversion(ObservationConversion):
    # Observation conversion exposing the pre-processed observation layout.
    def space(self) -> gym.spaces.space.Space:
        """Return the gym Dict space of the converted observation: a
        (64, 24) float feature series in [0, 1], a (3, 32, 32) image in
        [0, 1], and a scalar categorical feature encoded as a float in
        [0, 11]."""
        return gym.spaces.Dict({'observation_0_feature_series': gym.spaces.Box(low=np.float32(0), high=np.float32(1), shape=(64, 24), dtype=np.float32), 'observation_0_image': gym.spaces.Box(low=0.0, high=1.0, shape=(3, 32, 32), dtype=np.float32), 'observation_1_categorical_feature': gym.spaces.Box(low=np.float32(0), high=np.float32(11), shape=(), dtype=np.float32)})
class ModelTest(unittest.TestCase):
    # Tests running Bean Machine inference over GPyTorch/BoTorch GP models.
    def setUp(self):
        # Small random regression problem shared by all tests.
        x = torch.randn(3, 1)
        y = torch.randn(3)
        mean = ConstantMean(constant_prior=UniformPrior((- 1), 1))
        kernel = kernels.MaternKernel(lengthscale_prior=GammaPrior(0.5, 0.5))
        lik = likelihoods.GaussianLikelihood()
        self.model = SimpleGP(x, y, mean, kernel, lik)
        self.bo_model = BoTorchGP(x, y, mean, kernel, lik)
        self.name_to_rv = make_prior_random_variables(self.model)
        # NOTE(review): the bare name below looks like the remnant of a
        # stripped decorator (e.g. ``@bm.random_variable``) — confirm
        # against the original source.
        _variable
        def y():
            # Likelihood of a model whose priors have been sampled.
            sampled_model = bm_sample_from_prior(self.model.to_pyro_random_module(), self.name_to_rv)
            return sampled_model.likelihood(sampled_model(x))
        self.y = y
    def test_infer(self):
        # Smoke test: NUTS inference over the prior random variables runs.
        self.model.train()
        bm.GlobalNoUTurnSampler().infer(list(self.name_to_rv.values()), {}, num_samples=2, num_chains=1)
    def test_load_and_predict(self):
        # Loading sampled hyperparameters must update the model and allow
        # a normal predictive call.
        self.model.eval()
        d = {'kernel.lengthscale_prior': torch.ones(1), 'mean.mean_prior': torch.tensor(1.0)}
        self.model.bm_load_samples(d)
        assert (self.model.kernel.lengthscale.item() == 1.0)
        assert isinstance(self.model(torch.randn(3, 1)), dist.MultivariateNormal)
    def test_posterior(self):
        # BoTorch posterior works with and without observation noise.
        self.bo_model.eval()
        d = {'kernel.lengthscale_prior': torch.ones(1), 'mean.mean_prior': torch.tensor(1.0)}
        self.bo_model.bm_load_samples(d)
        assert isinstance(self.bo_model.posterior(torch.randn(3, 1)), GPyTorchPosterior)
        obs_noise = torch.ones(1, 1)
        mvn = self.bo_model.posterior(torch.randn(3, 1), obs_noise)
        assert isinstance(mvn, GPyTorchPosterior)
class LogThread(threading.Thread):
    """Background thread that samples attributes of *device* every
    *interval* seconds until :meth:`join` is called.

    Samples are collected in ``self.results`` as
    ``(elapsed_seconds, (attr_value, ...))`` tuples.
    """

    def __init__(self, interval, device, attributes):
        super(LogThread, self).__init__()
        self.interval = interval        # sampling period in seconds
        self.device = device            # object whose attributes are read
        self.attributes = attributes    # iterable of attribute names to log
        self.done = threading.Event()   # set by join() to stop the loop

    def run(self):
        tic = time.time()
        toc = tic
        self.results = []
        # is_set(): Event.isSet() is a deprecated alias.
        while (not self.done.is_set()):
            now = time.time()
            s = ()
            for a in self.attributes:
                s += (getattr(self.device, a),)
            self.results.append(((now - tic), s))
            # Advance toc by whole intervals until the next tick is at
            # least 5 ms in the future, sleeping up to each tick.  The
            # sleep is clamped to zero: if reading the attributes took
            # longer than an interval, ``toc - time.time()`` could be
            # negative and time.sleep() would raise ValueError, killing
            # the thread.
            while (0.005 > (toc - time.time())):
                toc += self.interval
                time.sleep(max(0.0, toc - time.time()))

    def join(self, timeout=None):
        # Signal the sampling loop to stop before waiting for the thread.
        self.done.set()
        super(LogThread, self).join(timeout)
def get_classes(module_label, classnames):
    """Resolve *classnames* from a local app override of *module_label*,
    falling back to the stock ``machina.apps`` implementation."""
    app_label = module_label.split('.')[0]
    app_module_path = _get_app_module_path(module_label)
    if not app_module_path:
        raise AppNotFoundError("No app found matching '{}'".format(module_label))
    if '.' in app_module_path:
        # The app lives inside a package: rebuild the dotted path of the
        # overridden module relative to that package.
        base_package = app_module_path.rsplit('.' + app_label, 1)[0]
        module_path = '{}.{}'.format(base_package, module_label)
    else:
        module_path = app_module_path
    local_module = _import_module(module_path, classnames)
    vanilla_module = None
    if not app_module_path.startswith('machina.apps'):
        vanilla_module = _import_module('{}.{}'.format('machina.apps', module_label), classnames)
    candidates = [mod for mod in (local_module, vanilla_module) if mod is not None]
    if not candidates:
        raise AppNotFoundError("Error importing '{}'".format(module_path))
    return _pick_up_classes(candidates, classnames)
# NOTE(review): the leading ``.parametrize`` call looks like a stripped
# ``@pytest.mark.parametrize`` decorator — confirm against the original.
.parametrize(['operation', 'result'], [(BinaryOperation(OperationType.plus, [var, con_0]), [var]), (BinaryOperation(OperationType.minus, [var, con_0]), [var]), (BinaryOperation(OperationType.multiply, [var, con_0]), [con_0]), (BinaryOperation(OperationType.multiply_us, [var, con_0]), [con_0]), (BinaryOperation(OperationType.multiply, [var, con_1]), [var]), (BinaryOperation(OperationType.multiply_us, [var, con_1]), [var]), (BinaryOperation(OperationType.multiply, [var, con_neg1]), [UnaryOperation(OperationType.negate, [var])]), (BinaryOperation(OperationType.multiply_us, [var, con_neg1]), [UnaryOperation(OperationType.negate, [var])]), (BinaryOperation(OperationType.divide, [var, con_1]), [var]), (BinaryOperation(OperationType.divide_us, [var, con_1]), [var]), (BinaryOperation(OperationType.divide, [var, con_neg1]), [UnaryOperation(OperationType.negate, [var])]), (BinaryOperation(OperationType.divide_us, [var, con_neg1]), [])])
def test_simplify_trivial_arithmetic(operation: Operation, result: list[Expression]):
    # Each trivial arithmetic operation must simplify to the paired list of
    # substitutions (operation -> replacement expression).
    assert (SimplifyTrivialArithmetic().apply(operation) == [(operation, e) for e in result])
def module_combined(output, param, subtree_parameters, module_idx):
    # Build the render-time Module for a combined (multi-parameter) view,
    # forwarding the naming/shape/index metadata the template needs.
    return Module(_module_combined, render_kwds=dict(output=output, VALUE_NAME=VALUE_NAME, shape=param.annotation.type.shape, module_idx=module_idx, disassemble=_snippet_disassemble_combined, connector_ctype=param.annotation.type.ctype, nq_indices=index_cnames_seq(param), q_indices=index_cnames_seq(param, qualified=True), param_cnames_str=param_cnames_seq, subtree_parameters=subtree_parameters, nq_params=param_cnames_seq(subtree_parameters), indices=index_cnames(param.annotation.type.shape)))
# NOTE(review): the bare ``(scope='function')`` is presumably the argument
# list of a stripped ``@pytest.fixture`` decorator — confirm against the
# original source.
(scope='function')
def privacy_experience_privacy_center(db: Session, experience_config_privacy_center) -> Generator:
    # Yield a privacy-center PrivacyExperience row; delete it on teardown.
    privacy_experience = PrivacyExperience.create(db=db, data={'component': ComponentType.privacy_center, 'region': PrivacyNoticeRegion.us_co, 'experience_config_id': experience_config_privacy_center.id})
    (yield privacy_experience)
    privacy_experience.delete(db)
class _MemberSpec(object):
__slots__ = ('name', 'idlflags', 'restype')
def __init__(self, name, idlflags, restype):
self.name = name
self.idlflags = idlflags
self.restype = restype
def is_prop(self):
propflags = ('propget', 'propput', 'propputref')
return any(((f in propflags) for f in self.idlflags)) |
# NOTE(review): ``_meta(...)`` is presumably a stripped class decorator —
# confirm against the original source.
_meta(characters.alice.LittleLegionDollControlCard)
class LittleLegionDollControlCard():
    # UI metadata (name, ray style, text, sound) for the card.
    name = ''
    custom_ray = True
    def effect_string(self, act):
        # NOTE(review): the literal below appears to have lost non-ASCII
        # text during extraction; left byte-identical on purpose.
        (controllee, victim) = act.target_list
        return f'{N.char(act.source)}......,{N.char(controllee)},{N.char(victim)}!'
    def sound_effect(self, act):
        return 'thb-cv-alice_legion_control'
def get_serializable_flyte_workflow(entity: 'FlyteWorkflow', settings: SerializationSettings) -> FlyteControlPlaneEntity:
    """Rewrite the project/domain of every node reference in *entity* to
    match *settings*, in place, and return the mutated workflow.

    NOTE(review): this pokes private ``_project``/``_domain`` attributes of
    the id objects instead of constructing new ids.
    """
    def _mutate_task_node(tn: workflow_model.TaskNode):
        # Point the task reference at the target project/domain.
        tn.reference_id._project = settings.project
        tn.reference_id._domain = settings.domain
    def _mutate_branch_node_task_ids(bn: workflow_model.BranchNode):
        # Recurse into every branch of the if/else ladder.
        _mutate_node(bn.if_else.case.then_node)
        for c in bn.if_else.other:
            _mutate_node(c.then_node)
        if bn.if_else.else_node:
            _mutate_node(bn.if_else.else_node)
    def _mutate_workflow_node(wn: workflow_model.WorkflowNode):
        wn.sub_workflow_ref._project = settings.project
        wn.sub_workflow_ref._domain = settings.domain
    def _mutate_node(n: workflow_model.Node):
        # Dispatch on the node flavour; the first populated field wins.
        if n.task_node:
            _mutate_task_node(n.task_node)
        elif n.branch_node:
            _mutate_branch_node_task_ids(n.branch_node)
        elif n.workflow_node:
            _mutate_workflow_node(n.workflow_node)
    for n in entity.flyte_nodes:
        _mutate_node(n)
    # Finally retarget the workflow's own id.
    entity.id._project = settings.project
    entity.id._domain = settings.domain
    return entity
# NOTE(review): the bare call below is presumably a stripped decorator
# registering this class for the ONF bundle-control experimenter type.
_register_exp_type(ofproto_common.ONF_EXPERIMENTER_ID, ofproto.ONF_ET_BUNDLE_CONTROL)
class ONFBundleCtrlMsg(OFPExperimenter):
    """ONF bundle-control experimenter message."""
    def __init__(self, datapath, bundle_id=None, type_=None, flags=None, properties=None):
        super(ONFBundleCtrlMsg, self).__init__(datapath, ofproto_common.ONF_EXPERIMENTER_ID, ofproto.ONF_ET_BUNDLE_CONTROL)
        self.bundle_id = bundle_id
        self.type = type_
        self.flags = flags
        self.properties = properties
    def _serialize_body(self):
        # Serialize the properties first, then pack the experimenter header
        # and the bundle-control fields ahead of them in the buffer.
        bin_props = bytearray()
        for p in self.properties:
            bin_props += p.serialize()
        msg_pack_into(ofproto.OFP_EXPERIMENTER_HEADER_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.experimenter, self.exp_type)
        msg_pack_into(ofproto.ONF_BUNDLE_CTRL_PACK_STR, self.buf, ofproto.OFP_EXPERIMENTER_HEADER_SIZE, self.bundle_id, self.type, self.flags)
        self.buf += bin_props
    def parser_subtype(cls, super_msg):
        # NOTE(review): first parameter is ``cls`` — presumably a
        # classmethod whose decorator was lost; confirm against the source.
        (bundle_id, type_, flags) = struct.unpack_from(ofproto.ONF_BUNDLE_CTRL_PACK_STR, super_msg.data)
        msg = cls(super_msg.datapath, bundle_id, type_, flags)
        msg.properties = []
        # Parse the trailing TLV properties one by one.
        rest = super_msg.data[ofproto.ONF_BUNDLE_CTRL_SIZE:]
        while rest:
            (p, rest) = OFPBundleProp.parse(rest)
            msg.properties.append(p)
        return msg
def test_unsigned_to_signed_transaction(txn_fixture, transaction_class):
    """Signing an unsigned transaction must recover the signer's address."""
    key = keys.PrivateKey(decode_hex(txn_fixture['key']))
    destination = to_canonical_address(txn_fixture['to']) if txn_fixture['to'] else b''
    unsigned_txn = transaction_class.create_unsigned_transaction(
        nonce=txn_fixture['nonce'],
        gas_price=txn_fixture['gasPrice'],
        gas=txn_fixture['gas'],
        to=destination,
        value=txn_fixture['value'],
        data=decode_hex(txn_fixture['data']),
    )
    signed_txn = unsigned_txn.as_signed_transaction(key)
    assert is_same_address(signed_txn.sender, key.public_key.to_canonical_address())
class Example(flx.HFix):
    # Demo app: three columns of push, radio and toggle buttons, each with
    # a label reflecting the last interaction.
    def init(self):
        with flx.VBox():
            self.b1 = flx.Button(text='apple')
            self.b2 = flx.Button(text='banana')
            self.b3 = flx.Button(text='pear')
            self.buttonlabel = flx.Label(text='...')
        with flx.VBox():
            self.r1 = flx.RadioButton(text='apple')
            self.r2 = flx.RadioButton(text='banana')
            self.r3 = flx.RadioButton(text='pear')
            self.radiolabel = flx.Label(text='...')
        with flx.VBox():
            self.c1 = flx.ToggleButton(text='apple')
            self.c2 = flx.ToggleButton(text='banana')
            self.c3 = flx.ToggleButton(text='pear')
            self.checklabel = flx.Label(text='...')
    # NOTE(review): the bare tuples preceding the three handlers below look
    # like the argument lists of stripped ``@flx.reaction`` decorators —
    # confirm against the original source.
    ('b1.pointer_click', 'b2.pointer_click', 'b3.pointer_click')
    def _button_clicked(self, *events):
        # Only the most recent event matters for the label.
        ev = events[(- 1)]
        self.buttonlabel.set_text(('Clicked on the ' + ev.source.text))
    ('r1.checked', 'r2.checked', 'r3.checked')
    def _radio_changed(self, *events):
        ev = events[(- 1)]
        self.radiolabel.set_text(('Selected the ' + ev.source.text))
    ('c1.checked', 'c2.checked', 'c3.checked')
    def _check_changed(self, *events):
        # Toggle buttons are independent, so show all checked ones.
        selected = [c.text for c in (self.c1, self.c2, self.c3) if c.checked]
        if selected:
            self.checklabel.set_text(('Selected: ' + ', '.join(selected)))
        else:
            self.checklabel.set_text('None selected')
# NOTE(review): the bare call below looks like the argument list of a
# stripped documentation decorator — confirm against the original source.
(tags=['disbursements'], description=docs.SCHEDULE_B_BY_RECIPIENT)
class ScheduleBByRecipientView(AggregateResource):
    # Aggregate endpoint for Schedule B disbursements grouped by recipient.
    model = models.ScheduleBByRecipient
    schema = schemas.ScheduleBByRecipientSchema
    page_schema = schemas.ScheduleBByRecipientPageSchema
    query_args = args.schedule_b_by_recipient
    # Exact-match filters that accept multiple values.
    filter_multi_fields = [('cycle', models.ScheduleBByRecipient.cycle), ('committee_id', models.ScheduleBByRecipient.committee_id)]
    # Full-text-search filters.
    filter_fulltext_fields = [('recipient_name', models.ScheduleBByRecipient.recipient_name)]
def check_ipaddr(ip, prefix, is_ipv6=False):
    """Validate an IP address string together with a prefix length.

    Returns True when *ip* parses as an IPv4 address (or IPv6 when
    *is_ipv6* is set) and *prefix* lies within [0, 32] ([0, 128] for
    IPv6); False otherwise.
    """
    import socket
    family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
    try:
        socket.inet_pton(family, ip)
    # Narrowed from a bare ``except:``.  OSError covers malformed
    # addresses (socket.error is an alias); TypeError/ValueError keep the
    # original best-effort behaviour for non-string input.
    except (OSError, TypeError, ValueError):
        return False
    max_prefix = 128 if is_ipv6 else 32
    return 0 <= prefix <= max_prefix
class ContourGridPlane(Module):
    """Module that slices the data with a grid plane and optionally shows
    iso-contours computed on that plane instead of the raw plane."""
    # Version number of this class (used for persistence).
    __version__ = 0
    # The grid plane component that slices the input dataset.
    grid_plane = Instance(GridPlane, allow_none=False, record=True)
    # Whether to compute contours on the plane instead of showing it raw.
    enable_contours = Bool(True, desc='if contours are generated')
    # The contour component (used only when enable_contours is set).
    contour = Instance(Contour, allow_none=False, record=True)
    # The actor that renders either the plane or the contours.
    actor = Instance(Actor, allow_none=False, record=True)
    input_info = PipelineInfo(datasets=['image_data', 'structured_grid', 'rectilinear_grid'], attribute_types=['any'], attributes=['any'])
    view = View([Group(Item(name='grid_plane', style='custom'), show_labels=False), Group(Item(name='enable_contours')), Group(Item(name='contour', style='custom', enabled_when='object.enable_contours'), Item(name='actor', style='custom'), show_labels=False)])
    def setup_pipeline(self):
        # Create the components; assigning them fires the *_changed
        # handlers below, which wire the objects together.
        self.grid_plane = GridPlane()
        self.contour = Contour(auto_contours=True, number_of_contours=10)
        self.actor = Actor()
    def update_pipeline(self):
        # Connect this module to its source and propagate the current
        # contour setting and LUT.
        mm = self.module_manager
        if (mm is None):
            return
        self.grid_plane.inputs = [mm.source]
        self._enable_contours_changed(self.enable_contours)
        self.actor.set_lut(mm.scalar_lut_manager.lut)
        self.pipeline_changed = True
    def update_data(self):
        self.data_changed = True
    def _filled_contours_changed(self, value):
        # Filled contours produce cell data, so the mapper's scalar mode
        # must follow the setting.
        if value:
            self.actor.mapper.scalar_mode = 'use_cell_data'
        else:
            self.actor.mapper.scalar_mode = 'default'
        self.render()
    def _enable_contours_changed(self, value):
        # Feed the actor either from the contour or straight from the
        # grid plane.
        if (self.module_manager is None):
            return
        if value:
            self.actor.inputs = [self.contour]
            if self.contour.filled_contours:
                self.actor.mapper.scalar_mode = 'use_cell_data'
        else:
            self.actor.inputs = [self.grid_plane]
            self.actor.mapper.scalar_mode = 'default'
        self.render()
    def _grid_plane_changed(self, old, new):
        # Rewire the contour to the new plane and swap the component.
        cont = self.contour
        if (cont is not None):
            cont.inputs = [new]
        self._change_components(old, new)
    def _contour_changed(self, old, new):
        # Move the filled_contours listener and reconnect
        # plane -> contour -> actor around the new contour component.
        if (old is not None):
            old.on_trait_change(self._filled_contours_changed, 'filled_contours', remove=True)
        new.on_trait_change(self._filled_contours_changed, 'filled_contours')
        gp = self.grid_plane
        if (gp is not None):
            new.inputs = [gp]
        actor = self.actor
        if (actor is not None):
            actor.inputs = [new]
        self._change_components(old, new)
    def _actor_changed(self, old, new):
        if (old is None):
            # First-time setup: make rendered lines clearly visible.
            new.property.trait_set(line_width=2.0)
        new.scene = self.scene
        cont = self.contour
        if (cont is not None):
            new.inputs = [cont]
        self._change_components(old, new)
def load_schema_files(files: List[str]) -> Dict[str, FieldNestedEntry]:
    """Read every schema file in *files* and merge the resulting field
    dicts into one mapping."""
    merged: Dict[str, FieldNestedEntry] = {}
    for schema_file in files:
        merged = ecs_helpers.safe_merge_dicts(merged, read_schema_file(schema_file))
    return merged
class OptionSeriesStreamgraphSonificationTracksMappingTime(Options):
    # Option accessors for the streamgraph sonification time mapping.
    # NOTE(review): every getter/setter pair below shares one name, so the
    # second ``def`` overwrites the first at class-creation time.  The
    # ``@property`` / ``@<name>.setter`` decorators were presumably lost
    # during extraction — confirm against the original source.
    def mapFunction(self):
        # Default: None.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class ContextMock():
    """Minimal stand-in for a CLI context object used in tests."""

    cwd = 'cwd'

    def __init__(self, *args, **kwargs):
        self.invoke = Mock()
        self.agent_config = AgentConfigMock(*args, **kwargs)
        self.config: dict = {}
        self.connection_loader = ConfigLoaderMock()
        self.agent_loader = ConfigLoaderMock()
        self.clean_paths: List = []
        self.obj = self
        self.registry_path = 'packages'
        self.cwd = 'cwd'

    def set_config(self, key, value):
        """Store *value* under *key* in the config dict.

        ``self.config`` is a plain dict, so the previous
        ``setattr(self.config, key, value)`` raised AttributeError; item
        assignment is the correct operation.
        """
        self.config[key] = value
def parse_block_2021(lines):
    """Parse one pairwise-alignment output block (2021 format).

    Returns (chain1_name, chain2_name, tmscore1, tmscore2, rmsd,
    chain1_seq, chain2_seq, alignment); the returned sequences are
    ungapped.  The line indices below are fixed positions in the block.
    """
    chain1_name = lines[7].split(':')[1].rstrip().lstrip().split(' ')[0]
    chain2_name = lines[8].split(':')[1].rstrip().lstrip().split(' ')[0]
    print(chain1_name, chain2_name)
    tmscore1 = float(lines[13].lstrip().split(' ')[1])
    tmscore2 = float(lines[14].lstrip().split(' ')[1])
    chain1 = lines[18].rstrip().lstrip()
    aln = lines[19]
    chain2 = lines[20].rstrip().lstrip()
    rmsd = float(re.split('\\s+', lines[12].lstrip().split(', ')[1])[1])
    # Per-column transform of (residue1, aln_char, residue2) triples into
    # the final alignment string via aln_f.
    zlist = list(zip(chain1, aln, chain2))
    alignment = ''.join(list(map(aln_f, zlist)))
    # Strip gap characters to recover the raw sequences.
    chain1 = chain1.replace('-', '')
    chain2 = chain2.replace('-', '')
    return (chain1_name, chain2_name, tmscore1, tmscore2, rmsd, chain1, chain2, alignment)
def check_dangling_end(read, dangling_sequences):
    """Return True when the read carries a dangling-end signature: a
    forward read starting with ``pat_forw`` or a reverse read ending with
    ``pat_rev``."""
    patterns = dangling_sequences
    if 'pat_forw' not in patterns or 'pat_rev' not in patterns:
        # Both patterns must be configured to perform the check.
        return False
    sequence = read.seq.upper()
    if read.is_reverse:
        return sequence.endswith(patterns['pat_rev'])
    return sequence.startswith(patterns['pat_forw'])
def register_bigquery_handlers():
    # Best-effort registration: the BigQuery handlers depend on optional
    # Google packages, so a missing dependency downgrades to an info log
    # instead of an import failure.
    try:
        from .bigquery import ArrowToBQEncodingHandlers, BQToArrowDecodingHandler, BQToPandasDecodingHandler, PandasToBQEncodingHandlers
        StructuredDatasetTransformerEngine.register(PandasToBQEncodingHandlers())
        StructuredDatasetTransformerEngine.register(BQToPandasDecodingHandler())
        StructuredDatasetTransformerEngine.register(ArrowToBQEncodingHandlers())
        StructuredDatasetTransformerEngine.register(BQToArrowDecodingHandler())
    except ImportError:
        logger.info("We won't register bigquery handler for structured dataset because we can't find the packages google-cloud-bigquery-storage and google-cloud-bigquery")
class OptionPlotoptionsGaugeDatalabelsTextpath(Options):
    # Option accessors for the gauge data-label text-path settings.
    # NOTE(review): both getter/setter pairs below share one name, so the
    # second ``def`` overwrites the first at class-creation time.  The
    # ``@property`` / ``@<name>.setter`` decorators were presumably lost
    # during extraction — confirm against the original source.
    def attributes(self):
        # Default: None.
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def enabled(self):
        # Default: False.
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class NewsArticle(object):
    """Container for one crawled news article and its metadata."""

    def __init__(self):
        # Fields are initialised per instance.  The previous version used
        # class-level attributes, so the mutable ``authors`` list was
        # shared by every NewsArticle instance.
        self.authors = []
        self.date_download = None
        self.date_modify = None
        self.date_publish = None
        self.description = None
        self.filename = None
        self.image_url = None
        self.language = None
        self.localpath = None
        self.source_domain = None
        self.maintext = None
        self.text = None
        self.title = None
        self.title_page = None
        self.title_rss = None
        self.url = None

    def get_serializable_dict(self):
        """Like get_dict() but with the datetime fields stringified so the
        result is JSON-serialisable."""
        tmp = self.get_dict()
        tmp['date_download'] = str(tmp['date_download'])
        tmp['date_modify'] = str(tmp['date_modify'])
        tmp['date_publish'] = str(tmp['date_publish'])
        return tmp

    def get_dict(self):
        """Return all article fields as a plain dict."""
        return {'authors': self.authors, 'date_download': self.date_download, 'date_modify': self.date_modify, 'date_publish': self.date_publish, 'description': self.description, 'filename': self.filename, 'image_url': self.image_url, 'language': self.language, 'localpath': self.localpath, 'maintext': self.maintext, 'source_domain': self.source_domain, 'text': self.text, 'title': self.title, 'title_page': self.title_page, 'title_rss': self.title_rss, 'url': self.url}
# NOTE(review): ``.parametrize`` is presumably the tail of a stripped
# ``@pytest.mark.parametrize`` decorator — confirm against the original.
.parametrize('raw_data, encoded_data', ethtest_fixtures_as_pytest_fixtures('RandomRLPTests/example.json'))
def test_ethtest_fixtures_for_successfull_rlp_decoding(raw_data: Bytes, encoded_data: Bytes) -> None:
    # Round-trip: decoding then re-encoding must reproduce the input bytes.
    decoded_data = rlp.decode(encoded_data)
    assert (rlp.encode(decoded_data) == encoded_data)
class ElectionDate(db.Model, FecAppMixin):
    """SQLAlchemy model mapping rows of the trc_election table."""
    __tablename__ = 'trc_election'
    trc_election_id = db.Column(db.Integer, primary_key=True)
    election_state = db.Column(db.String, index=True, doc=docs.STATE)
    election_district = db.Column(db.String, index=True, doc=docs.DISTRICT)
    election_party = db.Column(db.String, index=True, doc=docs.PARTY)
    office_sought = db.Column(db.String, index=True, doc=docs.OFFICE)
    election_date = db.Column(db.Date, index=True, doc=docs.ELECTION_DATE)
    election_notes = db.Column(db.String, index=True)
    election_type_id = db.Column('trc_election_type_id', db.String, index=True, doc=docs.ELECTION_TYPE)
    update_date = db.Column(db.DateTime, index=True, doc=docs.UPDATE_DATE)
    create_date = db.Column(db.DateTime, index=True, doc=docs.CREATE_DATE)
    election_year = db.Column('election_yr', db.Integer, index=True, doc=docs.ELECTION_YEAR)
    primary_general_date = db.Column('pg_date', db.Date, index=True)
    election_status_id = db.Column('trc_election_status_id', db.Integer, index=True, doc=docs.ELECTION_STATUS_ID)
    def election_type_full(self):
        # Human-readable election type looked up from the decoder map.
        # NOTE(review): likely exposed as a @property originally — the
        # decorator may have been lost during extraction; confirm.
        return decoders.election_types.get(self.election_type_id)
def get_line_objs_from_lines(input_lines: List[str], validate_file_exists: bool = True, all_input: bool = False) -> Dict[int, LineBase]:
    """Convert raw input lines into LineBase objects keyed by line index.

    Lines that the parser matches become LineMatch objects; everything
    else becomes a SimpleLine.
    """
    line_objs: Dict[int, LineBase] = {}
    for index, raw_line in enumerate(input_lines):
        # Normalise whitespace: expand tabs, drop the trailing newline.
        cleaned = raw_line.replace('\t', ' ' * 4).replace('\n', '')
        formatted_line = FormattedText(cleaned)
        match = parse.match_line(str(formatted_line), validate_file_exists=validate_file_exists, all_input=all_input)
        if match:
            line_objs[index] = LineMatch(formatted_line, match, index, validate_file_exists=validate_file_exists, all_input=all_input)
        else:
            line_objs[index] = SimpleLine(formatted_line, index)
    return line_objs
def test_local_storage_makedirs_permissionerror(monkeypatch):
    """make_local_storage must surface a PermissionError from os.makedirs
    with a message naming the cache path and the environment variable."""
    def mockmakedirs(path, exist_ok=False):
        # Simulate a filesystem without write permission.
        raise PermissionError('Fake error')
    data_cache = os.path.join(os.curdir, 'test_permission')
    assert (not os.path.exists(data_cache))
    monkeypatch.setattr(os, 'makedirs', mockmakedirs)
    with pytest.raises(PermissionError) as error:
        make_local_storage(path=data_cache, env='SOME_VARIABLE')
    assert ('Pooch could not create data cache' in str(error))
    assert ("'SOME_VARIABLE'" in str(error))
def Follow_path(path):
    """Drive the robot through the waypoints in *path*, setting the
    module-level ``goal_reached`` flag once the final point is pursued."""
    global final_goal_location, goal_reached
    cpath = path
    goal_point = cpath[(- 1)]
    print('Following Path -->', cpath)
    for loc in cpath:
        # Keep steering toward the waypoint until within 0.1 units of it,
        # republishing the markers/path on every iteration.
        while (Distance_compute(robot_location, loc) > 0.1):
            goal_location_marker(final_goal_location)
            points_publisher(cpath)
            go_to_goal(loc)
        if (loc == goal_point):
            goal_reached = True
def get_adr_citations(case_id):
    """Collect the statutory and regulatory citations for every row of the
    ADR citations query for *case_id*."""
    citations = []
    with db.engine.connect() as conn:
        rs = conn.execute(ADR_CITATIONS, case_id)
        for row in rs:
            # Accumulate across rows: the previous version reassigned the
            # list on every iteration, discarding the citations collected
            # from earlier rows.
            citations.extend(parse_statutory_citations(row['statutory_citation'], case_id, row['name']))
            citations.extend(parse_regulatory_citations(row['regulatory_citation'], case_id, row['name']))
    return citations
class Tor():
    """Launches a bundled tor binary and routes this process's sockets
    through its SOCKS5 proxy on 127.0.0.1:9050."""
    TOR_PATH_WIN = '.\\torbundle\\Tor\\tor.exe'
    TOR_PATH_LINUX = './tor_linux/tor'
    def __init__(self):
        self.tor_process = self.start()
        # Globally monkey-patch the socket module so all subsequent
        # connections in this process go through tor's SOCKS proxy.
        socks.set_default_proxy(socks.SOCKS5, '127.0.0.1', 9050)
        socket.socket = socks.socksocket
    def start(self):
        """Start the platform-specific tor binary; exit the program on
        failure and return the Popen handle on success."""
        try:
            if (os.name == 'nt'):
                path = Tor.resource_path(self.TOR_PATH_WIN)
            else:
                path = Tor.resource_path(self.TOR_PATH_LINUX)
                # NOTE(review): chmod is launched without waiting for it
                # to finish before tor starts — possible race.
                sp.Popen(['chmod', '+x', path])
            tor_process = sp.Popen(path, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
            # NOTE(review): .read() blocks until the child closes its
            # pipes — verify this does not hang while tor keeps running.
            print((tor_process.stdout.read() + tor_process.stderr.read()))
        except Exception as error:
            print(str(error))
            sys.exit(1)
        print('Started tor')
        return tor_process
    def resource_path(relative_path):
        # Resolve a path relative to the PyInstaller bundle dir (_MEIPASS)
        # or this file's directory.  Invoked as ``Tor.resource_path(...)``,
        # so it behaves like a static method despite the missing decorator.
        base_path = getattr(sys, '_MEIPASS', os.path.dirname(os.path.abspath(__file__)))
        return os.path.join(base_path, relative_path)
def parse_color(base, string, start=0, second=False):
    """Parse one color (with an optional trailing ratio and '/' separator)
    out of *string* beginning at *start*.

    Returns ``(color, ratio, more)``: *color* is the match object with its
    ``end`` advanced past any consumed ratio/slash (or None on no match),
    *ratio* is the parsed float or None, and *more* says whether further
    input remains — True/False when known, None at end of input.
    """
    length = len(string)
    more = None
    ratio = None
    color = base.match(string, start=start, fullmatch=False)
    if color:
        start = color.end
        if (color.end != length):
            more = True
        # Optional trailing ratio (e.g. a percentage) right after the color.
        m = tools.RE_RATIO.match(string, start)
        if m:
            ratio = float(m.group(1))
            start = m.end(0)
        # Only the first color of a pair (``second`` False) may be
        # followed by a '/' separator, and only when no ratio was given.
        if ((not second) and (not ratio)):
            m = tools.RE_SLASH.match(string, start)
            if (m and (not ratio)):
                start = m.end(0)
                more = (start != length)
            else:
                more = False
        else:
            more = (None if (start == length) else False)
    if color:
        # Record how far parsing actually consumed.
        color.end = start
    return (color, ratio, more)
class LCSTSProcessor(DataProcessor):
    """DataProcessor for the LCSTS dataset (TSV input)."""

    def get_examples(self, data_path):
        """Read the TSV file at *data_path* and build InputExamples."""
        return self._create_examples(self._read_tsv(data_path))

    def _create_examples(self, lines):
        # Column layout per row: [guid, column1, column2]; src is taken
        # from column 2 and tgt from column 1, as in the original code.
        return [
            InputExample(guid=row[0], src=convert_to_unicode(row[2]), tgt=convert_to_unicode(row[1]))
            for row in lines
        ]
class TestNormaliseBarcode(unittest.TestCase):
    """Checks that dual-index separators are stripped from barcodes."""

    def test_normalise_barcode(self):
        cases = (
            ('CGATGT', 'CGATGT'),
            ('CGTGTAGG-GACCTGTA', 'CGTGTAGGGACCTGTA'),
            ('CGTGTAGG+GACCTGTA', 'CGTGTAGGGACCTGTA'),
            ('CGTGTAGGGACCTGTA', 'CGTGTAGGGACCTGTA'),
        )
        for raw, expected in cases:
            self.assertEqual(normalise_barcode(raw), expected)
_tag('forum_conversation/topic_pages_inline_list.html')
def topic_pages_inline_list(topic):
    """Build the template context for the inline page list shown under a
    topic in the topics list."""
    posts_per_page = machina_settings.TOPIC_POSTS_NUMBER_PER_PAGE
    pages_number = ((topic.posts_count - 1) // posts_per_page) + 1
    data_dict = {'topic': topic}
    if pages_number > 5:
        # Too many pages: show the first four plus a shortcut to the last.
        data_dict['first_pages'] = range(1, 5)
        data_dict['last_page'] = pages_number
    elif pages_number > 1:
        data_dict['first_pages'] = range(1, pages_number + 1)
    return data_dict
class Config():
    """Thin wrapper around configparser with best-effort get/set that
    never raises."""

    def __init__(self, path):
        self.path = path
        self.cf = configparser.ConfigParser()
        self.cf.read(self.path)

    def get(self, field, key):
        """Return the option value, or '' when the section/option is
        missing or unreadable."""
        try:
            return self.cf.get(field, key)
        # Narrowed from a bare ``except:``; configparser errors cover
        # missing sections/options and interpolation problems.
        except (configparser.Error, ValueError):
            return ''

    def set(self, filed, key, value):
        """Set section *filed* / option *key* to *value* and persist.

        Returns True on success, False otherwise.  The parameter keeps its
        historical name ``filed`` for keyword-compatibility; the previous
        body referenced the undefined name ``field``, so it always raised
        NameError (swallowed by the bare except) and returned False.
        """
        try:
            self.cf.set(filed, key, value)
            # Context manager closes the file even if writing fails
            # part-way (the old code leaked the handle).
            with open(self.path, 'w', encoding='utf-8') as fh:
                self.cf.write(fh)
        except (configparser.Error, OSError, TypeError):
            return False
        return True
class RunPython():
    """Execute a Python snippet in a subprocess and return its output."""

    def __init__(self, strip_newlines: bool = False, return_err_output: bool = False, workdir: str = '.'):
        self.strip_newlines = strip_newlines        # strip() each stream
        self.return_err_output = return_err_output  # kept for API compat
        self.workdir = workdir                      # cwd for the child

    def run(self, commands: str) -> str:
        """Run *commands* via ``python -c`` with a 40 s timeout and return
        the combined stdout/stderr text ('(empty)' when nothing printed,
        '' for blank input)."""
        if (not commands.strip()):
            return ''
        try:
            # shell=False: combining an argv list with shell=True is wrong
            # on POSIX — only 'python' would be executed and the -c payload
            # dropped, so every call hung until the timeout.
            completed_process = subprocess.run(
                ['python', '-c', strip_quotes(commands)],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=self.workdir,
                timeout=40,
            )
        except subprocess.TimeoutExpired:
            return 'Command timed out, possibly due to asking for input.'
        stdout_output = completed_process.stdout.decode()
        stderr_output = completed_process.stderr.decode()
        if self.strip_newlines:
            stdout_output = stdout_output.strip()
            stderr_output = stderr_output.strip()
        combined_output = trim_extra((stdout_output + '\n') + stderr_output)
        return (combined_output if combined_output.strip() else '(empty)')
_renderer(wrap_type=ColumnInteractionPlot)
class ColumnInteractionPlotRenderer(MetricRenderer):
    """Renders a ColumnInteractionPlot metric as an HTML widget, choosing
    the plot type from the x/y column types and the available data."""

    def render_html(self, obj: ColumnInteractionPlot) -> List[BaseWidgetInfo]:
        metric_result = obj.get_result()
        agg_data = (not obj.get_options().render_options.raw_data)
        # NOTE(review): if none of the branches below matches, ``fig`` is
        # unbound and the final return raises NameError — the upstream
        # metric presumably guarantees one branch applies; confirm.
        if ((metric_result.x_type == ColumnType.Numerical) and (metric_result.y_type == ColumnType.Numerical) and ((metric_result.current_scatter is not None) or (metric_result.current_contour is not None))):
            # isinstance() requires runtime classes: the previous code
            # passed the subscripted generic ``Dict[str, List[Any]]``,
            # which raises TypeError at runtime.
            if (isinstance(metric_result.current_scatter, dict) and isinstance(metric_result.reference_scatter, dict) and ((not agg_data) or (metric_result.current_scatter is not None))):
                fig = plot_num_num_rel(metric_result.current_scatter, metric_result.reference_scatter, obj.y_column, obj.x_column, self.color_options)
            elif (metric_result.current_contour is not None):
                fig = plot_contour(metric_result.current_contour, metric_result.reference_contour, obj.x_column, obj.y_column)
            fig = json.loads(fig.to_json())
        elif ((metric_result.x_type == ColumnType.Categorical) and (metric_result.y_type == ColumnType.Categorical) and (metric_result.current is not None)):
            fig = plot_cat_cat_rel(metric_result.current, metric_result.reference, obj.y_column, obj.x_column, self.color_options)
        elif ((metric_result.x_type == ColumnType.Categorical) and (metric_result.y_type == ColumnType.Numerical) and (metric_result.current_boxes is not None)):
            fig = plot_boxes(metric_result.current_boxes, metric_result.reference_boxes, obj.y_column, obj.x_column, self.color_options)
        elif ((metric_result.x_type == ColumnType.Numerical) and (metric_result.y_type == ColumnType.Categorical) and (metric_result.current_boxes is not None)):
            fig = plot_boxes(metric_result.current_boxes, metric_result.reference_boxes, obj.x_column, obj.y_column, self.color_options, True)
        elif ((metric_result.x_type == ColumnType.Datetime) and (metric_result.y_type == ColumnType.Numerical) and (metric_result.current is not None) and (metric_result.prefix is not None)):
            fig = plot_num_feature_in_time(metric_result.current, metric_result.reference, obj.y_column, obj.x_column, metric_result.prefix, self.color_options)
        elif ((metric_result.y_type == ColumnType.Datetime) and (metric_result.x_type == ColumnType.Numerical) and (metric_result.current is not None) and (metric_result.prefix is not None)):
            fig = plot_num_feature_in_time(metric_result.current, metric_result.reference, obj.x_column, obj.y_column, metric_result.prefix, self.color_options, True)
        elif ((metric_result.x_type == ColumnType.Datetime) and (metric_result.y_type == ColumnType.Categorical) and (metric_result.current is not None) and (metric_result.prefix is not None)):
            fig = plot_cat_feature_in_time(metric_result.current, metric_result.reference, obj.y_column, obj.x_column, metric_result.prefix, self.color_options)
        elif ((metric_result.y_type == ColumnType.Datetime) and (metric_result.x_type == ColumnType.Categorical) and (metric_result.current is not None) and (metric_result.prefix is not None)):
            fig = plot_cat_feature_in_time(metric_result.current, metric_result.reference, obj.x_column, obj.y_column, metric_result.prefix, self.color_options, True)
        return [header_text(label=f"Interactions between '{obj.x_column}' and '{obj.y_column}'"), BaseWidgetInfo(title='', size=2, type='big_graph', params={'data': fig['data'], 'layout': fig['layout']})]
def update_wav_lab_pairs():
    # Report how many .wav files under ./raw have a matching .lab
    # transcript, as a "paired / total" string.
    # NOTE(review): the original indentation of ``tot_count += 1`` was
    # ambiguous in the reviewed copy; here it counts every .wav file —
    # confirm against the original source.
    wav_count = tot_count = 0
    for (root, _, files) in os.walk('./raw'):
        for file in files:
            file_path = os.path.join(root, file)
            if file.lower().endswith('.wav'):
                # A pair exists when a same-named .lab file sits alongside.
                lab_file = (os.path.splitext(file_path)[0] + '.lab')
                if os.path.exists(lab_file):
                    wav_count += 1
                tot_count += 1
    return f'{wav_count} / {tot_count}'
class OptionSeriesBellcurveStatesHoverMarker(Options):
    # Option accessors for the bell-curve hover-state marker settings.
    # NOTE(review): every getter/setter pair below shares one name, so the
    # second ``def`` overwrites the first at class-creation time.  The
    # ``@property`` / ``@<name>.setter`` decorators were presumably lost
    # during extraction — confirm against the original source.
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        # Default: 2.
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        # Default: white.
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Default: 0.
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        # Default: 4.
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
def test_complicated_schema():
    """A nested STRUCT/MAP/ARRAY foundry schema converts to the expected Spark schema."""

    def field(type_, name, nullable=True, **overrides):
        # One foundry field entry with all the boilerplate defaults filled in;
        # keyword overrides replace individual keys.
        entry = {'type': type_, 'name': name, 'nullable': nullable, 'userDefinedTypeClass': None, 'customMetadata': {}, 'arraySubtype': None, 'precision': None, 'scale': None, 'mapKeyType': None, 'mapValueType': None, 'subSchemas': None}
        entry.update(overrides)
        return entry

    foundry_schema = {
        'fieldSchemaList': [
            field('STRUCT', 'url', subSchemas=[field('STRING', 'value')]),
            field('STRING', 'entity_def_id', subSchemas=[]),
            field(
                'MAP',
                'map_column',
                mapKeyType=field('STRING', 'map_key', subSchemas=[]),
                mapValueType=field('INTEGER', 'map_value', nullable=False, subSchemas=[]),
                subSchemas=[],
            ),
            field(
                'ARRAY',
                'activity_entities',
                arraySubtype=field('STRUCT', None, subSchemas=[field('STRING', 'permalink')]),
            ),
        ],
        'primaryKey': None,
        'dataFrameReaderClass': 'com.palantir.foundry.spark.input.ParquetDataFrameReader',
        'customMetadata': {'format': 'parquet', 'options': {}},
    }
    expected_spark_schema = StructType([
        StructField('url', StructType([StructField('value', StringType(), True)])),
        StructField('entity_def_id', StringType(), True),
        StructField('map_column', MapType(StringType(), IntegerType(), valueContainsNull=False), True),
        StructField('activity_entities', ArrayType(StructType([StructField('permalink', StringType(), True)])), True),
    ])
    assert foundry_schema_to_spark_schema(foundry_schema) == expected_spark_schema
def get_110900_data() -> list[dict[str, int]]:
    """Decode record type 110900 from the shared input stream.

    Relies on the module-level ``next_int_len`` reader, which consumes
    bytes from the current stream position, so the exact order of the
    calls below is significant.
    """
    data: list[dict[str, int]] = []

    def read(length: int) -> dict[str, int]:
        # Read one field of `length` bytes and record it.
        entry = next_int_len(length)
        data.append(entry)
        return entry

    def read_run(count: int, length: int) -> None:
        # Read a fixed run of equally sized fields.
        for _ in range(count):
            data.append(next_int_len(length))

    def read_counted(count_len: int) -> None:
        # Read a count field, then that many 2-byte fields.
        count = read(count_len)
        read_run(count['Value'], 2)

    read(4)
    read(2)
    read_run(4, 1)
    read_counted(1)
    read_counted(2)
    read_counted(2)
    read_run(3, 4)
    read_run(4, 2)
    read_run(8, 1)
    read_counted(2)
    read_run(14, 1)
    flags = read(1)
    if flags['Value'] > 0:
        read(2)
        if flags['Value'] != 1:
            read(2)
            if flags['Value'] != 2:
                read(2)
                if flags['Value'] != 3:
                    read(2)
                    if flags['Value'] != 4:
                        # Trailing run whose size depends on the flag value.
                        read_run(flags['Value'] + 4, 2)
    return data
def extractSporadictranslationsWordpressCom(item):
    """Release extractor for sporadictranslations.wordpress.com posts.

    Returns a release message for recognized series tags, None for
    previews or posts without a volume/chapter, and False when no tag
    matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    series_map = (
        ('muscle magic', 'Magic? Muscle Is Much More Important Than Such A Thing!', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in series_map:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def unregisterMethodCallback(path):
    """Remove the web-method handler registered under *path*.

    Returns True when a handler was removed, False (after logging an
    error) when nothing was registered for that path.
    """
    with _web_lock:
        if path not in _web_methods:
            _logger.error("'{}' is not registered".format(path))
            return False
        del _web_methods[path]
        _logger.debug('Unregistered method {}'.format(path))
        return True
class CustomerResponse(ModelComposed):
    """Composed OpenAPI model for a customer API response.

    NOTE(review): the generated decorators were mangled in this class
    (bare ``_property`` / ``_js_args_to_python_args`` expression
    statements, which raise NameError at class-creation time). They are
    restored below as ``@cached_property`` / ``@classmethod`` /
    ``@convert_js_args_to_python_args`` to match the OpenAPI-generator
    Python model template — confirm against the regenerated client.
    """

    # Enumerated values accepted for each constrained attribute.
    allowed_values = {('billing_network_type',): {'PUBLIC': 'public', 'PRIVATE': 'private'}}
    # Per-attribute validation rules (none for this model).
    validations = {}

    @cached_property
    def additional_properties_type():
        # Types accepted for additional (undeclared) properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        # Maps attribute name -> tuple of accepted types.
        lazy_import()
        return {'billing_contact_id': (str, none_type), 'billing_network_type': (str,), 'billing_ref': (str, none_type), 'can_configure_wordpress': (bool, none_type), 'can_reset_passwords': (bool,), 'can_upload_vcl': (bool,), 'force_2fa': (bool,), 'force_sso': (bool,), 'has_account_panel': (bool,), 'has_improved_events': (bool,), 'has_improved_ssl_config': (bool,), 'has_openstack_logging': (bool,), 'has_pci': (bool,), 'has_pci_passwords': (bool,), 'ip_whitelist': (str,), 'legal_contact_id': (str, none_type), 'name': (str,), 'owner_id': (str,), 'phone_number': (str,), 'postal_address': (str, none_type), 'pricing_plan': (str,), 'pricing_plan_id': (str,), 'security_contact_id': (str, none_type), 'technical_contact_id': (str, none_type), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'id': (str,)}

    @cached_property
    def discriminator():
        return None

    # Maps python attribute name -> JSON key in the API payload.
    attribute_map = {'billing_contact_id': 'billing_contact_id', 'billing_network_type': 'billing_network_type', 'billing_ref': 'billing_ref', 'can_configure_wordpress': 'can_configure_wordpress', 'can_reset_passwords': 'can_reset_passwords', 'can_upload_vcl': 'can_upload_vcl', 'force_2fa': 'force_2fa', 'force_sso': 'force_sso', 'has_account_panel': 'has_account_panel', 'has_improved_events': 'has_improved_events', 'has_improved_ssl_config': 'has_improved_ssl_config', 'has_openstack_logging': 'has_openstack_logging', 'has_pci': 'has_pci', 'has_pci_passwords': 'has_pci_passwords', 'ip_whitelist': 'ip_whitelist', 'legal_contact_id': 'legal_contact_id', 'name': 'name', 'owner_id': 'owner_id', 'phone_number': 'phone_number', 'postal_address': 'postal_address', 'pricing_plan': 'pricing_plan', 'pricing_plan_id': 'pricing_plan_id', 'security_contact_id': 'security_contact_id', 'technical_contact_id': 'technical_contact_id', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'id': 'id'}

    # Attributes the server sets; rejected by __init__, allowed by _from_openapi_data.
    read_only_vars = {'can_configure_wordpress', 'can_reset_passwords', 'can_upload_vcl', 'has_improved_ssl_config', 'has_pci_passwords', 'created_at', 'deleted_at', 'updated_at', 'id'}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from server-provided data (read-only attrs permitted)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                # Discard unknown keys when the configuration asks for it.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Initialize from user-supplied data; read-only attributes raise."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    @cached_property
    def _composed_schemas():
        # Composition groups this model participates in.
        lazy_import()
        return {'anyOf': [], 'allOf': [Customer, CustomerResponseAllOf, Timestamps], 'oneOf': []}
def main(args=None):
    """Plot interaction counts of a viewpoint with its surrounding region.

    For each Hi-C matrix given on the command line, extracts the viewpoint
    values over the requested region, plots one curve per matrix into
    ``args.outFileName``, and optionally writes the per-bin interaction
    values to one bedgraph file per matrix.

    Parameters
    ----------
    args : list of str, optional
        Command-line arguments; parsed via parse_arguments() when None.
    """
    args = parse_arguments().parse_args(args)
    # Fonttype 42 (TrueType) keeps text editable in saved PDF output.
    mpl.rcParams['pdf.fonttype'] = 42
    log.warning('This tool is deprecated. Please use chicViewpoint, chicViewpointBackgroundModel and chicPlotViewpoint.')
    if args.region:
        # Normalize the region string: drop separator characters and turn
        # 'chr:start-end' into colon-separated 'chr:start:end'.
        args.region = args.region.replace(',', '')
        args.region = args.region.replace(';', '')
        args.region = args.region.replace('!', '')
        args.region = args.region.replace('-', ':')
        region = args.region.split(':')
        if (len(region) != 3):
            log.error('Region format is invalid {}'.format(args.region))
            # NOTE(review): exits with status 0 on invalid input — confirm a
            # non-zero exit code was not intended here.
            exit(0)
        (chrom, region_start, region_end) = (region[0], int(region[1]), int(region[2]))
    # Same normalization for the reference point ('chr:pos' or 'chr:start:end').
    # NOTE(review): chrom/region_start/region_end are only bound when
    # args.region is set — presumably the argument parser enforces that.
    args.referencePoint = args.referencePoint.replace(',', '')
    args.referencePoint = args.referencePoint.replace(';', '')
    args.referencePoint = args.referencePoint.replace('!', '')
    args.referencePoint = args.referencePoint.replace('-', ':')
    referencePoint = args.referencePoint.split(':')
    data_list = []
    interactions_list = None
    if (args.interactionOutFileName is not None):
        # Only collect per-bin interactions when a bedgraph dump was requested.
        interactions_list = []
    matrix_name_legend = []
    # Extract the viewpoint profile from every input matrix.
    for matrix in args.matrix:
        (view_point_start, view_point_end, view_point_range, data_list_, interactions_list_) = getViewpointValues(matrix, referencePoint, chrom, region_start, region_end, args.interactionOutFileName, args.chromosome)
        data_list.append(data_list_)
        if (args.interactionOutFileName is not None):
            interactions_list.append(interactions_list_)
        matrix_name_legend.append(os.path.basename(matrix))
    fig = plt.figure(figsize=(6.4, 4.8))
    ax = plt.subplot(111)
    matrices_plot_legend = []
    # One curve per matrix, labeled with the matrix file name.
    for (i, data) in enumerate(data_list):
        matrices_plot_legend.append(ax.plot(range(len(data)), data, alpha=0.7, label=matrix_name_legend[i])[0])
    if (len(referencePoint) == 2):
        # Single reference point: ticks at region start, the point, region end.
        log.debug('Single reference point mode: {}'.format(referencePoint))
        log.debug('label 0: {}'.format(((int(referencePoint[1]) - region_start) * (- 1))))
        log.debug('referencePoint[1]: {}'.format(referencePoint[1]))
        log.debug('region_start: {}'.format(region_start))
        log.debug('label 1: {}'.format(((referencePoint[0] + ':') + relabelTicks(int(referencePoint[1])))))
        log.debug('label 2: {}'.format((region_end - int(referencePoint[1]))))
        ax.set_xticks([0, (view_point_start - view_point_range[0]), (view_point_range[1] - view_point_range[0])])
        xticklabels = ([None] * 3)
        # Left label: distance upstream of the point (negative offset).
        xticklabels[0] = relabelTicks(((int(referencePoint[1]) - region_start) * (- 1)))
        xticklabels[1] = ((referencePoint[0] + ':') + relabelTicks(int(referencePoint[1])))
        xticklabels[2] = relabelTicks((region_end - int(referencePoint[1])))
    elif (len(referencePoint) == 3):
        # Reference range: additional tick for the range end.
        log.debug('Range mode: {}'.format(referencePoint))
        ax.set_xticks([0, (view_point_start - view_point_range[0]), (view_point_end - view_point_range[0]), (view_point_range[1] - view_point_range[0])])
        xticklabels = ([None] * 4)
        xticklabels[0] = relabelTicks(((int(referencePoint[1]) - region_start) * (- 1)))
        xticklabels[1] = ((referencePoint[0] + ':') + relabelTicks(int(referencePoint[1])))
        xticklabels[2] = ((referencePoint[0] + ':') + relabelTicks(int(referencePoint[2])))
        xticklabels[3] = relabelTicks((region_end - int(referencePoint[1])))
    ax.set_xticklabels(xticklabels)
    ax.set_ylabel('Number of interactions')
    plt.legend(handles=matrices_plot_legend)
    plt.savefig(args.outFileName, dpi=args.dpi)
    plt.close(fig)
    if (interactions_list is not None):
        # Dump per-bin interactions, one bedgraph file per input matrix.
        for (i, interactions_list_) in enumerate(interactions_list):
            with open((((args.interactionOutFileName + '_') + matrix_name_legend[i]) + '.bedgraph'), 'w') as fh:
                for interaction in interactions_list_:
                    fh.write('{}\t{}\t{}\t{}\t{}\t{}\t{:.12f}\n'.format(toString(interaction[0]), toString(interaction[1]), toString(interaction[2]), toString(interaction[3]), toString(interaction[4]), toString(interaction[5]), float(interaction[6])))
_os(*metadata.platforms)
# NOTE(review): `_os(...)` above looks like a truncated decorator
# (e.g. `@common.requires_os(*metadata.platforms)`) — confirm against the
# original emulation template; kept byte-identical here.
def main():
    """Emulation trigger: run the same AAAA nslookup 15 times in a row."""
    nslookup = 'C:\\Windows\\System32\\nslookup.exe'
    # The repetition count matters — a detection rule watching for bursts of
    # identical DNS lookups is the intended consumer.
    for _ in range(15):
        common.execute([nslookup, '-q=aaaa', 'google.com'], timeout=10)
class OptionSeriesAreasplinerangeMarkerStatesSelect(Options):
    """Marker options for the select state of areasplinerange series points.

    NOTE(review): the ``@property``/``@<name>.setter`` decorators were
    missing, so each setter definition silently shadowed its getter;
    they are restored here.
    """

    @property
    def enabled(self):
        """Whether the select-state marker is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        """Marker fill color (default: '#cccccc')."""
        return self._config_get('#cccccc')

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Marker outline color (default: '#000000')."""
        return self._config_get('#000000')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Marker outline width in pixels (default: 2)."""
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Marker radius in pixels (default: None)."""
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesColumnSonificationDefaultinstrumentoptions(Options):
    """Default instrument options for column-series sonification.

    NOTE(review): the ``@property``/``@<name>.setter`` decorators were
    missing, so each setter definition silently shadowed its getter;
    they are restored here. Sub-option accessors are read-only.
    """

    @property
    def activeWhen(self) -> 'OptionSeriesColumnSonificationDefaultinstrumentoptionsActivewhen':
        """Sub-options object for the activeWhen configuration."""
        return self._config_sub_data('activeWhen', OptionSeriesColumnSonificationDefaultinstrumentoptionsActivewhen)

    @property
    def instrument(self):
        """Instrument name (default: 'piano')."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesColumnSonificationDefaultinstrumentoptionsMapping':
        """Sub-options object for the mapping configuration."""
        return self._config_sub_data('mapping', OptionSeriesColumnSonificationDefaultinstrumentoptionsMapping)

    @property
    def midiName(self):
        """MIDI name option (default: None)."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesColumnSonificationDefaultinstrumentoptionsPointgrouping':
        """Sub-options object for the pointGrouping configuration."""
        return self._config_sub_data('pointGrouping', OptionSeriesColumnSonificationDefaultinstrumentoptionsPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Whether values are rounded to musical notes (default: True)."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Whether the play marker is shown (default: True)."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        """Track type (default: 'instrument')."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def get_app_display_name(app):
    """Pick the best human-readable name from an app metadata dict.

    Preference order: explicit 'Name', a localized name (default locale
    first, then any locale), 'AutoName', and finally the app id.
    """
    explicit = app.get('Name')
    if explicit:
        return explicit
    localized_entries = app.get('localized')
    if localized_entries:
        localized = localized_entries.get(DEFAULT_LOCALE)
        if not localized:
            # Fall back to the first locale entry available.
            localized = next(iter(localized_entries.values()))
        if localized.get('name'):
            return localized['name']
    return app.get('AutoName') or app['id']
class Body(Attrs):
    """Style attribute bag applied to the page body tag."""

    def __init__(self, component: primitives.HtmlModel, page: primitives.PageModel=None):
        """Seed the body style with the theme's default font and no margin."""
        super().__init__(component, page=page)
        self.font_size = component.style.globals.font.normal()
        self.font_family = component.style.globals.font.family
        self.margin = 0
class Collision(object):
    """Swagger-generated model describing a vehicle collision event.

    NOTE(review): the ``@property``/``@<name>.setter`` decorators were
    stripped (only mangled remnants like ``_at.setter`` remained, which
    are bare NameError-raising statements); they are restored below.
    ``six.iteritems`` was also replaced with the Python 3 ``dict.items``.
    """

    # Attribute name -> swagger type, used by to_dict() for serialization.
    swagger_types = {'created_at': 'datetime', 'embedded': 'object', 'front': 'CollisionObjFront', 'id': 'str', 'lateral': 'CollisionObjFront', 'pedestrian': 'bool', 'rear': 'CollisionObjFront', 'roll_over': 'bool', 'updated_at': 'datetime', 'links': 'CollisionLinks'}
    # Attribute name -> JSON key in the API payload.
    attribute_map = {'created_at': 'createdAt', 'embedded': '_embedded', 'front': 'front', 'id': 'id', 'lateral': 'lateral', 'pedestrian': 'pedestrian', 'rear': 'rear', 'roll_over': 'rollOver', 'updated_at': 'updatedAt', 'links': '_links'}

    def __init__(self, created_at=None, embedded=None, front=None, id=None, lateral=None, pedestrian=None, rear=None, roll_over=None, updated_at=None, links=None):
        """Initialize the model; only explicitly passed attributes are set."""
        self._created_at = None
        self._embedded = None
        self._front = None
        self._id = None
        self._lateral = None
        self._pedestrian = None
        self._rear = None
        self._roll_over = None
        self._updated_at = None
        self._links = None
        self.discriminator = None
        if created_at is not None:
            self.created_at = created_at
        if embedded is not None:
            self.embedded = embedded
        if front is not None:
            self.front = front
        if id is not None:
            self.id = id
        if lateral is not None:
            self.lateral = lateral
        if pedestrian is not None:
            self.pedestrian = pedestrian
        if rear is not None:
            self.rear = rear
        if roll_over is not None:
            self.roll_over = roll_over
        if updated_at is not None:
            self.updated_at = updated_at
        if links is not None:
            self.links = links

    @property
    def created_at(self):
        return self._created_at

    @created_at.setter
    def created_at(self, created_at):
        self._created_at = created_at

    @property
    def embedded(self):
        return self._embedded

    @embedded.setter
    def embedded(self, embedded):
        self._embedded = embedded

    @property
    def front(self):
        return self._front

    @front.setter
    def front(self, front):
        self._front = front

    @property
    def id(self):
        return self._id

    @id.setter
    def id(self, id):
        self._id = id

    @property
    def lateral(self):
        return self._lateral

    @lateral.setter
    def lateral(self, lateral):
        self._lateral = lateral

    @property
    def pedestrian(self):
        return self._pedestrian

    @pedestrian.setter
    def pedestrian(self, pedestrian):
        self._pedestrian = pedestrian

    @property
    def rear(self):
        return self._rear

    @rear.setter
    def rear(self, rear):
        self._rear = rear

    @property
    def roll_over(self):
        return self._roll_over

    @roll_over.setter
    def roll_over(self, roll_over):
        self._roll_over = roll_over

    @property
    def updated_at(self):
        return self._updated_at

    @updated_at.setter
    def updated_at(self, updated_at):
        self._updated_at = updated_at

    @property
    def links(self):
        return self._links

    @links.setter
    def links(self, links):
        self._links = links

    def to_dict(self):
        """Return the model's properties as a dict, recursing into sub-models."""
        result = {}
        for (attr, _) in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
            else:
                result[attr] = value
        if issubclass(Collision, dict):
            for (key, value) in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        """Attribute-wise equality against another Collision."""
        if not isinstance(other, Collision):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TestGenericOefSearchHandler(BaseSkillTestCase):
path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'generic_seller')
is_agent_to_agent_messages = False
def setup(cls):
super().setup()
cls.oef_search_handler = cast(GenericOefSearchHandler, cls._skill.skill_context.handlers.oef_search)
cls.strategy = cast(GenericStrategy, cls._skill.skill_context.strategy)
cls.oef_dialogues = cast(OefSearchDialogues, cls._skill.skill_context.oef_search_dialogues)
cls.service_registration_behaviour = cast(GenericServiceRegistrationBehaviour, cls._skill.skill_context.behaviours.service_registration)
cls.register_location_description = Description({'location': Location(51.5194, 0.127)}, data_model=DataModel('location_agent', [Attribute('location', Location, True)]))
cls.list_of_messages_register_location = (DialogueMessage(OefSearchMessage.Performative.REGISTER_SERVICE, {'service_description': cls.register_location_description}, is_incoming=False),)
cls.register_service_description = Description({'key': 'some_key', 'value': 'some_value'}, data_model=DataModel('set_service_key', [Attribute('key', str, True), Attribute('value', str, True)]))
cls.list_of_messages_register_service = (DialogueMessage(OefSearchMessage.Performative.REGISTER_SERVICE, {'service_description': cls.register_service_description}, is_incoming=False),)
cls.register_genus_description = Description({'piece': 'genus', 'value': 'some_value'}, data_model=DataModel('personality_agent', [Attribute('piece', str, True), Attribute('value', str, True)]))
cls.list_of_messages_register_genus = (DialogueMessage(OefSearchMessage.Performative.REGISTER_SERVICE, {'service_description': cls.register_genus_description}, is_incoming=False),)
cls.register_classification_description = Description({'piece': 'classification', 'value': 'some_value'}, data_model=DataModel('personality_agent', [Attribute('piece', str, True), Attribute('value', str, True)]))
cls.list_of_messages_register_classification = (DialogueMessage(OefSearchMessage.Performative.REGISTER_SERVICE, {'service_description': cls.register_classification_description}, is_incoming=False),)
cls.register_invalid_description = Description({'piece': 'classification', 'value': 'some_value'}, data_model=DataModel('some_different_name', [Attribute('piece', str, True), Attribute('value', str, True)]))
cls.list_of_messages_register_invalid = (DialogueMessage(OefSearchMessage.Performative.REGISTER_SERVICE, {'service_description': cls.register_invalid_description}, is_incoming=False),)
cls.unregister_description = Description({'key': 'seller_service'}, data_model=DataModel('remove', [Attribute('key', str, True)]))
cls.list_of_messages_unregister = (DialogueMessage(OefSearchMessage.Performative.UNREGISTER_SERVICE, {'service_description': cls.unregister_description}, is_incoming=False),)
def test_setup(self):
assert (self.oef_search_handler.setup() is None)
self.assert_quantity_in_outbox(0)
def test_handle_unidentified_dialogue(self):
incorrect_dialogue_reference = ('', '')
incoming_message = self.build_incoming_message(message_type=OefSearchMessage, dialogue_reference=incorrect_dialogue_reference, performative=OefSearchMessage.Performative.SEARCH_SERVICES)
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
self.oef_search_handler.handle(incoming_message)
mock_logger.assert_any_call(logging.INFO, f'received invalid oef_search message={incoming_message}, unidentified dialogue.')
def test_handle_success_i(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_register_location[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
with patch.object(self.service_registration_behaviour, 'register_service') as mock_reg:
self.oef_search_handler.handle(incoming_message)
mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
mock_reg.assert_called_once()
def test_handle_success_ii(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_register_service[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
with patch.object(self.service_registration_behaviour, 'register_genus') as mock_reg:
self.oef_search_handler.handle(incoming_message)
mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
mock_reg.assert_called_once()
def test_handle_success_iii(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_register_genus[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
with patch.object(self.service_registration_behaviour, 'register_classification') as mock_reg:
self.oef_search_handler.handle(incoming_message)
mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
mock_reg.assert_called_once()
def test_handle_success_iv(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_register_classification[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
self.oef_search_handler.handle(incoming_message)
mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
mock_logger.assert_any_call(logging.INFO, 'the agent, with its genus and classification, and its service are successfully registered on the SOEF.')
def test_handle_success_v(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_register_invalid[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
self.oef_search_handler.handle(incoming_message)
mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
mock_logger.assert_any_call(logging.WARNING, f'received soef SUCCESS message as a reply to the following unexpected message: {oef_dialogue.get_message_by_id(incoming_message.target)}')
def test_handle_error_i(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_register_location[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.OEF_ERROR, oef_error_operation=OefSearchMessage.OefErrorOperation.SEARCH_SERVICES)
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
self.oef_search_handler.handle(incoming_message)
self.assert_quantity_in_outbox(0)
mock_logger.assert_any_call(logging.INFO, f'received oef_search error message={incoming_message} in dialogue={oef_dialogue}.')
assert (self.service_registration_behaviour.failed_registration_msg == oef_dialogue.get_message_by_id(incoming_message.target))
def test_handle_error_ii(self):
oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_dialogues, messages=self.list_of_messages_unregister[:1])
incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.OEF_ERROR, oef_error_operation=OefSearchMessage.OefErrorOperation.SEARCH_SERVICES)
with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
self.oef_search_handler.handle(incoming_message)
self.assert_quantity_in_outbox(0)
mock_logger.assert_any_call(logging.INFO, f'received oef_search error message={incoming_message} in dialogue={oef_dialogue}.')
assert (self.service_registration_behaviour.failed_registration_msg is None)
def test_handle_invalid(self):
    """An unsupported performative is logged as a warning."""
    invalid_performative = OefSearchMessage.Performative.UNREGISTER_SERVICE
    message = self.build_incoming_message(
        message_type=OefSearchMessage,
        dialogue_reference=('1', ''),
        performative=invalid_performative,
        service_description='some_service_description',
    )
    with patch.object(self.oef_search_handler.context.logger, 'log') as logger_mock:
        self.oef_search_handler.handle(message)
    logger_mock.assert_any_call(
        logging.WARNING,
        f'cannot handle oef_search message of performative={invalid_performative} in dialogue={self.oef_dialogues.get_dialogue(message)}.',
    )
def test_teardown(self):
    """teardown() returns None and leaves the outbox untouched."""
    assert self.oef_search_handler.teardown() is None
    self.assert_quantity_in_outbox(0)
def test_comp7():
    """Completion requests against test_block.f08 all resolve to READ statements."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    target = test_dir / 'test_block.f08'
    # One completion request per cursor position under test.
    for line, char in ((2, 2), (5, 4), (8, 6)):
        request += comp_request(target, line, char)
    errcode, results = run_request(request, ['--use_signature_help'])
    assert errcode == 0
    expected = (
        [9, 'READ', 'STATEMENT'],
        [10, 'READ', 'STATEMENT'],
        [11, 'READ', 'STATEMENT'],
    )
    # results[0] is the initialize response; completions follow one-to-one.
    assert len(expected) == len(results) - 1
    for actual, ref in zip(results[1:], expected):
        validate_comp(actual, ref)
def _log_summary(ep_len, ep_ret, ep_num):
ep_len = str(round(ep_len, 2))
ep_ret = str(round(ep_ret, 2))
print(flush=True)
print(f' Episode #{ep_num} ', flush=True)
print(f'Episodic Length: {ep_len}', flush=True)
print(f'Episodic Return: {ep_ret}', flush=True)
print(f'', flush=True)
print(flush=True) |
class PyperfComparisons():
    """Comparison data parsed from a pyperf "compare_to" style table.

    Maps each non-baseline source column of the table to a
    :class:`PyperfComparison` against the shared baseline.

    NOTE(review): ``parse_table``/``from_table`` take ``cls`` and
    ``baseline``/``bysource``/``table`` are argument-less accessors — these
    look like ``@classmethod``/``@property`` definitions whose decorators
    were lost in extraction; confirm against the original file before
    calling them undecorated.
    """

    # Set only in from_table(); instances built directly via __init__ do not
    # have it — table() turns the resulting AttributeError into NotImplementedError.
    _table: 'PyperfTable'

    def parse_table(cls, text: str, filenames: Optional[Iterable[str]]=None) -> 'PyperfComparisons':
        """Parse the table text, then build comparisons from the result.

        Raises ValueError when the text cannot be parsed as a PyperfTable.
        """
        table = PyperfTable.parse(text, filenames)
        if (table is None):
            raise ValueError('Could not parse table')
        return cls.from_table(table)

    def from_table(cls, table: 'PyperfTable') -> 'PyperfComparisons':
        """Build a PyperfComparisons from an already-parsed PyperfTable.

        The "Geometric mean" row supplies each source's mean; every other
        row contributes one named benchmark value per source column.
        """
        base_byname = {}
        # Per-source {benchmark name: value}, one dict per non-baseline column.
        bysource: Dict[str, Any] = {s: {} for s in table.header.others}
        means = {s: None for s in table.header.others}
        for row in table.rows:
            values = row.valuesdict
            if (row.name == 'Geometric mean'):
                for source in means:
                    means[source] = values[source]
            else:
                base_byname[row.name] = row.baseline
                for (source, byname) in bysource.items():
                    byname[row.name] = values[source]
        for (source, byname) in bysource.items():
            # Source columns are expected to be results filenames with a known suffix.
            assert source.endswith(PyperfResultsFile._SUFFIXES), repr(source)
            # Repack as the (byname, mean) pairs __init__ expects.
            bysource[source] = (byname, means[source])
        self = cls(PyperfComparisonBaseline(table.header.baseline, base_byname), bysource)
        self._table = table
        return self

    def __init__(self, baseline: PyperfComparisonBaseline, bysource: Mapping[str, dict]):
        """baseline: the shared baseline; bysource: {source: (byname, mean)}.

        Raises ValueError when bysource is empty.
        """
        if (not bysource):
            raise ValueError('missing bysource')
        _bysource = {}
        for (source, data) in bysource.items():
            (byname, mean) = data
            _bysource[source] = PyperfComparison(baseline, source, mean, byname)
        self._baseline = baseline
        self._bysource = _bysource

    def __eq__(self, other):
        # Equality is deliberately unsupported.
        raise NotImplementedError

    def baseline(self) -> PyperfComparisonBaseline:
        return self._baseline

    def bysource(self) -> Dict[str, Any]:
        # Shallow copy so callers cannot mutate internal state.
        return dict(self._bysource)

    def table(self) -> 'PyperfTable':
        try:
            return self._table
        except AttributeError:
            # Only instances created via from_table() carry the source table.
            raise NotImplementedError
class TestSequenceFunctions(unittest.TestCase):
    """Exercises BK Hamming-tree distance queries over the TEST_DATA fixture."""

    def __init__(self, *args, **kwargs):
        logSetup.initLogging()
        super().__init__(*args, **kwargs)

    def setUp(self):
        self.buildTestTree()

    def buildTestTree(self):
        # Fresh tree per test, populated from the shared fixture mapping.
        self.tree = hamDb.BkHammingTree()
        for key, item_hash in TEST_DATA.items():
            self.tree.insert(item_hash, key)

    def test_2(self):
        # Dump the neighbourhood of every fixture entry for inspection.
        for key, item_hash in TEST_DATA.items():
            matches = self.tree.getWithinDistance(item_hash, SEARCH_DIST)
            print('{}: {}'.format(key, matches))
        new = ''
        self.assertEqual(self.tree.getWithinDistance(int_bits(new), SEARCH_DIST), {3})
        print('new: {}'.format(self.tree.getWithinDistance(int_bits(new), SEARCH_DIST)))
        ones = '1' * 64
        print('111..: {}'.format(self.tree.getWithinDistance(int_bits(ones), SEARCH_DIST)))
        zeroes = '0' * 64
        print('000..: {}'.format(self.tree.getWithinDistance(int_bits(zeroes), SEARCH_DIST)))
        # All-ones and all-zeroes bit patterns match nothing in the fixture.
        self.assertEqual(self.tree.getWithinDistance(int_bits(ones), SEARCH_DIST), set())
        self.assertEqual(self.tree.getWithinDistance(int_bits(zeroes), SEARCH_DIST), set())
class ExpandFqcnCommand(sublime_plugin.TextCommand):
    """Replace the symbol under the cursor with its fully-qualified class name."""

    def run(self, edit, leading_separator=False):
        view = self.view
        self.region = view.word(view.sel()[0])
        symbol = view.substr(self.region)
        if re.match('\\w', symbol) is None:
            return sublime.status_message('Not a valid symbol "%s" !' % symbol)
        self.namespaces = find_symbol(symbol, view.window())
        self.leading_separator = leading_separator
        candidates = len(self.namespaces)
        if candidates == 1:
            # Single match: replace immediately, no user interaction needed.
            self._replace(0)
        elif candidates > 1:
            # Ambiguous: let the user pick the namespace from a quick panel.
            view.window().show_quick_panel(self.namespaces, self.on_done)

    def on_done(self, index):
        # index is -1 when the quick panel was cancelled.
        if index == -1:
            return
        self._replace(index)

    def _replace(self, index):
        # Dispatch the actual edit to the replace_fqcn text command.
        self.view.run_command('replace_fqcn', {
            'region_start': self.region.begin(),
            'region_end': self.region.end(),
            'namespace': self.namespaces[index][0],
            'leading_separator': self.leading_separator,
        })
def home(request, room_id=None):
    """Render the chat page for a room, or the join page when no user is set."""
    user = request.GET.get('user')
    if not user:
        # No user yet: show the join form for the requested (or default) room.
        return render(request, 'chat/join.html', {'room_id': room_id or 'default'})
    if not room_id:
        # Preserve the query string while redirecting to the default room.
        return redirect('/default?' + request.GET.urlencode())
    last_id = get_current_event_id(['room-{}'.format(room_id)])
    try:
        room = ChatRoom.objects.get(eid=room_id)
        recent = ChatMessage.objects.filter(room=room).order_by('-date')[:50]
        # Newest-first query, reversed so the page shows oldest-first.
        msgs = [msg.to_data() for msg in reversed(recent)]
    except ChatRoom.DoesNotExist:
        msgs = []
    context = {
        'room_id': room_id,
        'last_id': last_id,
        'messages': msgs,
        'user': user,
    }
    return render(request, 'chat/chat.html', context)
def relax():
    """Relax (smooth) the selected poly vertices while keeping them on the
    original surface.

    Averages the selected vertices, then transfers positions back from an
    untouched duplicate of the original shape so the smoothing does not
    shrink the mesh. Restores the original selection afterwards. No-op when
    nothing usable is selected.
    """
    selection = pm.ls(sl=1)
    if not selection:
        return
    verts = pm.ls(pm.polyListComponentConversion(tv=1))
    if not verts:
        return
    shape = verts[0].node()
    # Keep an untouched duplicate to project the averaged verts back onto.
    # (The original also fetched dup.getShape() into an unused local; removed.)
    dup = shape.duplicate()[0]
    pm.polyAverageVertex(verts, i=1, ch=0)
    # Pull vertex positions from the duplicate's surface.
    # NOTE(review): sampleSpace=0 / searchMethod=0 assumed to mean world-space
    # closest-point sampling — confirm against the Maya command docs.
    pm.transferAttributes(
        dup,
        verts,
        transferPositions=True,
        transferNormals=False,
        transferUVs=False,
        transferColors=False,
        sampleSpace=0,
        searchMethod=0,
        flipUVs=False,
        colorBorders=1,
    )
    # Bake the transfer by deleting construction history, then drop the helper.
    pm.delete(shape, ch=1)
    pm.delete(dup)
    pm.select(selection)
class CollectionEntity(db.BaseModel):
    """Locally cached collection record, unique per (local_user, uid).

    NOTE(review): the field names suggest EteSync/Etebase collection sync
    state — confirm against the wider codebase.
    """

    # Owning local user; rows are deleted with the user (ON DELETE CASCADE).
    local_user = pw.ForeignKeyField(User, backref='collections', on_delete='CASCADE')
    # Collection identifier; indexed, and unique per user via Meta.indexes.
    uid = pw.CharField(null=False, index=True)
    # Opaque collection payload blob.
    eb_col = pw.BlobField()
    # Local sync-state flags — presumably: created locally / modified locally /
    # deleted locally; verify against the sync code that reads them.
    new = pw.BooleanField(null=False, default=False)
    dirty = pw.BooleanField(null=False, default=False)
    deleted = pw.BooleanField(null=False, default=False)
    # Sync tokens: stoken vs. local_stoken — presumably server-side token and
    # the token last applied locally; verify against the sync code.
    stoken = pw.CharField(null=True, default=None)
    local_stoken = pw.CharField(null=True, default=None)

    class Meta():
        # Composite unique index on (local_user, uid): True marks it unique.
        indexes = ((('local_user', 'uid'), True),)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.