code stringlengths 281 23.7M |
|---|
class ImageFormat(Entity):
    """SQLAlchemy entity describing an image format: width, height, pixel
    aspect ratio and print resolution.

    NOTE(review): the bare ``('width')`` / ``('height')`` / ``('pixel_aspect')``
    / ``('print_resolution')`` expressions preceding the ``_validate_*`` methods
    look like stripped ``@validates('...')`` SQLAlchemy decorators — confirm
    against the original source. As written they are no-op statements, so the
    validators below are never invoked by the ORM.
    """
    __auto_name__ = False
    __tablename__ = 'ImageFormats'
    __mapper_args__ = {'polymorphic_identity': 'ImageFormat'}
    # Joined-table inheritance: primary key references the base Entities table.
    imageFormat_id = Column('id', Integer, ForeignKey('Entities.id'), primary_key=True)
    width = Column(Integer, doc='The width of this format.\n\n * the width should be set to a positive non-zero integer\n * integers are also accepted but will be converted to float\n * for improper inputs the object will raise an exception.\n ')
    height = Column(Integer, doc='The height of this format\n\n * the height should be set to a positive non-zero integer\n * integers are also accepted but will be converted to float\n * for improper inputs the object will raise an exception.\n ')
    pixel_aspect = Column(Float, default='1.0', doc='The pixel aspect ratio of this format.\n\n * the pixel_aspect should be set to a positive non-zero float\n * integers are also accepted but will be converted to float\n * for improper inputs the object will raise an exception\n ')
    print_resolution = Column(Float, default='300.0', doc='The print resolution of this format\n\n * it should be set to a positive non-zero float or integer\n * integers are also accepted but will be converted to float\n * for improper inputs the object will raise an exception.\n ')

    def __init__(self, width=None, height=None, pixel_aspect=1.0, print_resolution=300, **kwargs):
        """Initialize the format; assignments would pass through the validators once wired via @validates."""
        super(ImageFormat, self).__init__(**kwargs)
        self.width = width
        self.height = height
        self.pixel_aspect = pixel_aspect
        self.print_resolution = print_resolution
    ('width')
    def _validate_width(self, key, width):
        """Validate width: positive int/float required; stored as int."""
        if (not isinstance(width, (int, float))):
            raise TypeError(('%s.width should be an instance of int or float not %s' % (self.__class__.__name__, width.__class__.__name__)))
        if (width <= 0):
            raise ValueError(('%s.width cannot be zero or negative' % self.__class__.__name__))
        return int(width)
    ('height')
    def _validate_height(self, key, height):
        """Validate height: positive int/float required; stored as int."""
        if (not isinstance(height, (int, float))):
            raise TypeError(('%s.height should be an instance of int or float not %s' % (self.__class__.__name__, height.__class__.__name__)))
        if (height <= 0):
            raise ValueError(('%s.height cannot be zero or negative' % self.__class__.__name__))
        return int(height)
    ('pixel_aspect')
    def _validate_pixel_aspect(self, key, pixel_aspect):
        """Validate pixel_aspect: positive int/float required; stored as float."""
        if (not isinstance(pixel_aspect, (int, float))):
            raise TypeError(('%s.pixel_aspect should be an instance of int or float not %s' % (self.__class__.__name__, pixel_aspect.__class__.__name__)))
        if (pixel_aspect <= 0):
            raise ValueError(('%s.pixel_aspect cannot be zero or a negative value' % self.__class__.__name__))
        return float(pixel_aspect)
    ('print_resolution')
    def _validate_print_resolution(self, key, print_resolution):
        """Validate print_resolution: positive int/float required; stored as float."""
        if (not isinstance(print_resolution, (int, float))):
            raise TypeError(('%s.print_resolution should be an instance of int or float not %s' % (self.__class__.__name__, print_resolution.__class__.__name__)))
        if (print_resolution <= 0):
            raise ValueError(('%s.print_resolution cannot be zero or negative' % self.__class__.__name__))
        return float(print_resolution)

    def device_aspect(self):
        """Return the device aspect ratio: (width / height) * pixel_aspect."""
        return ((float(self.width) / float(self.height)) * self.pixel_aspect)

    def __eq__(self, other):
        # Equal when the base Entity compares equal and dimensions/aspect match.
        return (super(ImageFormat, self).__eq__(other) and isinstance(other, ImageFormat) and (self.width == other.width) and (self.height == other.height) and (self.pixel_aspect == other.pixel_aspect))

    def __hash__(self):
        # Hash delegates to the base Entity (kept alongside the custom __eq__).
        return super(ImageFormat, self).__hash__()
# Restored the stripped ``@pytest.mark`` prefix: a leading ``.parametrize(...)``
# is a syntax error, and the signature (parameter ``empty`` + ``ids``) matches
# pytest's parametrize contract.
@pytest.mark.parametrize('empty', ([], (), None), ids=['empty-list', 'empty-tuple', 'None'])
def test_empty_mungers_for_property_with_input_parameters_raises_ValidationError(empty):
    """A property-style Method must reject call parameters for every spelling of 'no mungers'."""
    method = Method(is_property=True, mungers=empty, json_rpc_method='eth_method')
    with pytest.raises(ValidationError, match='Parameters cannot be passed to a property'):
        method.input_munger(object(), [1], {})
def getRPIVer():
    """Detect the Raspberry Pi model from the 'Revision' line of /proc/cpuinfo.

    Returns a dict always containing 'name', 'pins' and 'ram'; capability flags
    ('lan', 'wlan', 'bt') are present only for boards that have them.  Unknown
    or undetectable hardware yields {'name': 'Unknown model', 'pins': '', 'ram': '0'}.
    """
    # Revision-code table. Fixes the original's substring membership tests
    # (e.g. ``hwid in 'b03111,b03112,b03114'``), which also matched partial
    # codes such as '3111,b'; tuples give exact matching.
    models = {
        ('0002', '0003'): {'name': 'Pi 1 Model B', 'ram': '256MB', 'pins': '26R1', 'lan': '1'},
        ('0004', '0005', '0006'): {'name': 'Pi 1 Model B', 'ram': '256MB', 'pins': '26R2', 'lan': '1'},
        ('0007', '0008', '0009'): {'name': 'Pi 1 Model A', 'ram': '256MB', 'pins': '26R1'},
        ('000d', '000e', '000f'): {'name': 'Pi 1 Model B', 'ram': '512MB', 'pins': '26R2', 'lan': '1'},
        ('0010', '0013', '900032'): {'name': 'Pi 1 Model B+', 'ram': '512MB', 'pins': '40', 'lan': '1'},
        ('0011', '0014'): {'name': 'Pi Compute Module 1', 'ram': '512MB', 'pins': '200'},
        ('a020a0',): {'name': 'Pi Compute Module 3', 'ram': '1GB', 'pins': '200'},
        ('0012',): {'name': 'Pi 1 Model A+', 'ram': '256MB', 'pins': '40'},
        ('0015',): {'name': 'Pi 1 Model A+', 'ram': '256/512MB', 'pins': '40'},
        ('900021',): {'name': 'Pi 1 Model A+', 'ram': '512MB', 'pins': '40'},
        ('a01040', 'a01041', 'a21041', 'a22042'): {'name': 'Pi 2 Model B', 'ram': '1GB', 'pins': '40', 'lan': '1'},
        ('900092', '900093', '920092', '920093'): {'name': 'Pi Zero', 'ram': '512MB', 'pins': '40'},
        ('9000c1',): {'name': 'Pi Zero W', 'ram': '512MB', 'pins': '40', 'wlan': '1', 'bt': '1'},
        ('902120',): {'name': 'Pi Zero 2W', 'ram': '512MB', 'pins': '40', 'wlan': '1', 'bt': '1'},
        ('a02082', 'a22082', 'a32082', 'a52082', 'a22083'): {'name': 'Pi 3 Model B', 'ram': '1GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('a020d3',): {'name': 'Pi 3 Model B+', 'ram': '1GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('9020e0',): {'name': 'Pi 3 Model A+', 'ram': '512MB', 'pins': '40', 'wlan': '1', 'bt': '1'},
        ('a03111',): {'name': 'Pi 4 Model B', 'ram': '1GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('b03111', 'b03112', 'b03114'): {'name': 'Pi 4 Model B', 'ram': '2GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('c03111', 'c03112', 'c03114', 'c03115'): {'name': 'Pi 4 Model B', 'ram': '4GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('c03130',): {'name': 'Pi 400 Rev1', 'ram': '4GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('d03114', 'd03115'): {'name': 'Pi 4 Model B', 'ram': '8GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('c04170',): {'name': 'Pi 5', 'ram': '4GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
        ('d04170',): {'name': 'Pi 5', 'ram': '8GB', 'pins': '40', 'wlan': '1', 'lan': '1', 'bt': '1'},
    }
    revision_fields = []
    try:
        with open('/proc/cpuinfo') as f:
            for line in f:
                line = line.strip()
                if line.startswith('Revision'):
                    revision_fields = line.split(':')
                    break
    except OSError:
        # Not a Linux system or cpuinfo unreadable: fall through to the default.
        # (Was a bare ``except:``, which also swallowed unrelated errors.)
        pass
    hwarr = {'name': 'Unknown model', 'pins': '', 'ram': '0'}
    if len(revision_fields) > 1:
        hwid = revision_fields[1].strip().lower()
        # Drop the overvolt/warranty prefix, then the style/flag nibble.
        if hwid[:4] == '1000':
            hwid = hwid[-4:]
        if len(hwid) > 6:
            hwid = hwid[1:]
        for codes, info in models.items():
            if hwid in codes:
                hwarr = info
                break
    return hwarr
def test_populate_response():
    """populate_response echoes results plus fixed page metadata, with and without items."""
    view = instantiate_view_for_tests()
    for items in (['item 1', 'item 2'], []):
        expected = {
            'limit': 5,
            'results': items,
            'page_metadata': {'page': 1, 'hasNext': True},
            'messages': [get_time_period_message()],
        }
        assert view.populate_response(results=items, has_next=True) == expected
class DatabaseContext():
    """Owns the SQLite connection pool and the serialized write dispatcher for one database file."""
    MEMORY_PATH = ':memory:'
    JOURNAL_MODE = JournalModes.WAL
    # NOTE(review): incremented through ``self.`` below, which creates an
    # instance-level counter shadowing this class attribute on first use.
    SQLITE_CONN_POOL_SIZE = 0

    def __init__(self, wallet_path: str) -> None:
        # Normalize on-disk paths to carry the database extension; special
        # (in-memory / URI) paths are used verbatim.
        if not self.is_special_path(wallet_path) and not wallet_path.endswith(DATABASE_EXT):
            wallet_path += DATABASE_EXT
        self._db_path = wallet_path
        self._connection_pool: queue.Queue = queue.Queue()
        self._active_connections: Set = set()
        self._logger = logs.get_logger('sqlite-context')
        self._lock = threading.Lock()
        self._write_dispatcher = SqliteWriteDispatcher(self)

    def acquire_connection(self) -> sqlite3.Connection:
        """Take a connection from the pool, growing the pool on demand."""
        try:
            conn = self._connection_pool.get_nowait()
        except queue.Empty:
            # Pool exhausted: open one more connection, then retry.
            self.increase_connection_pool()
            conn = self._connection_pool.get_nowait()
        self._active_connections.add(conn)
        return conn

    def release_connection(self, connection: sqlite3.Connection) -> None:
        """Return a connection previously obtained via acquire_connection."""
        self._active_connections.remove(connection)
        self._connection_pool.put(connection)

    def increase_connection_pool(self) -> None:
        """Open one more connection, configure it, and add it to the pool."""
        self.SQLITE_CONN_POOL_SIZE += 1
        is_special_path = self.is_special_path(self._db_path)
        # uri=True is required for 'file:...mode=memory' shared-cache paths.
        connection = sqlite3.connect(self._db_path, check_same_thread=False, isolation_level=None, uri=is_special_path)
        connection.execute('PRAGMA busy_timeout=5000;')
        connection.execute('PRAGMA foreign_keys=ON;')
        if not is_special_path:
            # Journal mode only applies to real on-disk databases.
            self._ensure_journal_mode(connection)
        self._connection_pool.put(connection)

    def decrease_connection_pool(self) -> None:
        """Close one pooled (idle) connection."""
        connection = self._connection_pool.get_nowait()
        connection.close()

    def _ensure_journal_mode(self, connection: sqlite3.Connection) -> None:
        """Switch the database to JOURNAL_MODE, retrying with backoff while the database is locked."""
        with self._lock:
            cursor = connection.execute('PRAGMA journal_mode;')
            journal_mode = cursor.fetchone()[0]
            if journal_mode.upper() == self.JOURNAL_MODE.value:
                return
            self._logger.debug('Switching database from journal mode %s to journal mode %s', journal_mode.upper(), self.JOURNAL_MODE.value)
            time_start = time.time()
            attempt = 1
            delay = 0.05
            while True:
                try:
                    cursor = connection.execute(f'PRAGMA journal_mode={self.JOURNAL_MODE.value};')
                except sqlite3.OperationalError:
                    time_delta = time.time() - time_start
                    if time_delta < 10.0:
                        # Exponential backoff, capped by the remaining 10s budget.
                        delay = min(delay, max(0.05, (10.0 - time_delta)))
                        time.sleep(delay)
                        self._logger.warning('Database %s pragma attempt %d at %ds', self.JOURNAL_MODE.value, attempt, time_delta)
                        delay *= 2
                        attempt += 1
                        continue
                    raise
                else:
                    journal_mode = cursor.fetchone()[0]
                    if journal_mode.upper() != self.JOURNAL_MODE.value:
                        self._logger.error('Database unable to switch from journal mode %s to journal mode %s', self.JOURNAL_MODE.value, journal_mode.upper())
                        return
                    break
            self._logger.debug('Database now in journal mode %s', self.JOURNAL_MODE.value)

    def get_path(self) -> str:
        """Return the (possibly extension-suffixed) database path."""
        return self._db_path

    def queue_write(self, write_callback: WriteCallbackType, completion_callback: Optional[CompletionCallbackType]=None, size_hint: int=0) -> None:
        """Hand a write off to the serialized write dispatcher."""
        self._write_dispatcher.put(WriteEntryType(write_callback, completion_callback, size_hint))

    def close(self) -> None:
        """Stop the dispatcher and close every connection; raises if callers leaked any."""
        self._write_dispatcher.stop()
        outstanding_connections = list(self._active_connections)
        # Force-return leaked connections to the pool so they get closed below.
        for conn in outstanding_connections:
            self.release_connection(conn)
        for _ in range(self.SQLITE_CONN_POOL_SIZE):
            self.decrease_connection_pool()
        if len(outstanding_connections) != 0:
            raise LeakedSQLiteConnectionError('There were still outstanding SQLite connections when attempting to close DatabaseContext! Force closed all connections.')
        assert self.is_closed(), f'{self._write_dispatcher.is_stopped()}'

    def is_closed(self) -> bool:
        """True once the pool is drained and the write dispatcher has stopped."""
        return (self._connection_pool.qsize() == 0) and self._write_dispatcher.is_stopped()

    def is_special_path(self, path: str) -> bool:
        """True for in-memory / URI paths that must not be suffixed or journal-switched."""
        if path == self.MEMORY_PATH:
            return True
        if path.startswith('file:') and ('mode=memory' in path):
            return True
        return False

    @classmethod
    def shared_memory_uri(cls, unique_name: str) -> str:
        """Build a shared in-memory database URI.

        Restored the missing @classmethod: the method takes ``cls`` and uses
        no instance state, and calling it on an instance previously shifted
        the arguments.
        """
        return f'file:{unique_name}?mode=memory&cache=shared'
def find_and_replace(path, pattern, replace):
    """Replace every occurrence of regex ``pattern`` in the file at ``path`` with ``replace``.

    Exits the process with status 1 (SystemExit) when the pattern does not
    occur at all, so release scripts fail loudly instead of silently no-opping.
    """
    with open(path, 'r') as f:
        old_data = f.read()
    if re.search(pattern, old_data, flags=re.MULTILINE) is None:
        print(f"Didn't find the pattern {pattern!r} in {path!s}")
        # Same observable behavior as exit(1), but does not depend on the
        # ``site``-injected ``exit`` builtin.
        raise SystemExit(1)
    new_data = re.sub(pattern, replace, old_data, flags=re.MULTILINE)
    # 'w' mode already truncates; the original's explicit f.truncate() was redundant.
    with open(path, 'w') as f:
        f.write(new_data)
class ExtendedNodeStorageSerializer(NodeStorageSerializer):
    """NodeStorageSerializer extended with read-only usage statistics.

    All extra fields are read-only aggregates; the ``source=`` fields expose a
    differently named model attribute/annotation under the serializer name.
    """
    size_vms = s.IntegerField(read_only=True)
    size_snapshots = s.IntegerField(read_only=True)
    size_rep_snapshots = s.IntegerField(read_only=True)
    size_backups = s.IntegerField(read_only=True)
    snapshots = s.IntegerField(read_only=True, source='snapshot_count')
    backups = s.IntegerField(read_only=True, source='backup_count')
    images = s.IntegerField(read_only=True, source='image_count')
    dcs = s.DcsField()
    vms = s.ArrayField(read_only=True)
def plot_error_bias_colored_scatter(curr_scatter_data: RegressionScatter, ref_scatter_data: Optional[RegressionScatter], color_options: ColorOptions):
    """Build an actual-vs-predicted scatter colored by error-bias group.

    One subplot for the current data; a second, legend-suppressed subplot is
    added when reference data is supplied.  Returns the figure as a plain dict
    (round-tripped through plotly's JSON export).
    """
    cols = 1
    subplot_titles: Union[(list, str)] = ''
    if ref_scatter_data is not None:
        cols = 2
        subplot_titles = ['current', 'reference']
    fig = make_subplots(rows=1, cols=cols, shared_yaxes=True, subplot_titles=subplot_titles)

    def _add_scatter_traces(scatter_data, col, showlegend):
        # One trace per group; shared legendgroup links the two subplots'
        # traces so toggling a legend entry hides both.
        for name, value, color in zip(
            ['Underestimation', 'Overestimation', 'Majority'],
            [scatter_data.underestimation, scatter_data.overestimation, scatter_data.majority],
            [color_options.underestimation_color, color_options.overestimation_color, color_options.majority_color],
        ):
            trace = go.Scatter(x=value.actual, y=value.predicted, mode='markers', name=name, legendgroup=name, showlegend=showlegend, marker_color=color)
            fig.add_trace(trace, 1, col)
        fig.update_xaxes(title_text='Actual value', row=1, col=col)

    # The two subplot bodies were duplicated inline; factored into one helper.
    _add_scatter_traces(curr_scatter_data, col=1, showlegend=True)
    if ref_scatter_data is not None:
        _add_scatter_traces(ref_scatter_data, col=2, showlegend=False)
    fig.update_layout(yaxis_title='Predicted value', xaxis=dict(showticklabels=True), yaxis=dict(showticklabels=True))
    return json.loads(fig.to_json())
def test_async_task(test_client_factory):
    """A BackgroundTask attached to a Response runs once the response completes."""
    task_complete = False

    async def async_task():
        nonlocal task_complete
        task_complete = True

    background = BackgroundTask(async_task)

    async def app(scope, receive, send):
        resp = Response('task initiated', media_type='text/plain', background=background)
        await resp(scope, receive, send)

    client = test_client_factory(app)
    response = client.get('/')
    assert response.text == 'task initiated'
    assert task_complete
def test_alpha_int8():
    """alpha_int8 maps the 0..1 alpha channel onto 0..255 and is writable."""
    assert (Color('rgba(0, 0, 0, 1)').alpha_int8 == 255)
    assert (Color('rgba(0, 0, 0, 0)').alpha_int8 == 0)
    # Quantum-depth guard: only check the half-alpha rounding when the build's
    # quantum actually places 0.5 between 127/255 and 128/255.
    # Fixed the color string typo: the original read 'rgba(0,0,0,0.5' with no
    # closing parenthesis.
    if (not (Color('rgb(127,0,0)').red_quantum <= Color('rgba(0,0,0,0.5)').alpha_quantum <= Color('rgb(128,0,0)').red_quantum)):
        return
    assert (127 <= Color('rgba(0, 0, 0, 0.5)').alpha_int8 <= 128)
    c = Color('none')
    c.alpha_int8 = 255
    assert (c.alpha_int8 == 255)
def test_dao_update(dao, default_entity_dict):
    """Updating by query dict changes the targeted field and leaves the rest intact."""
    dao.create(ServeRequest(**default_entity_dict))
    res: ServerResponse = dao.update(
        {'prompt_name': 'my_prompt_1', 'sys_code': 'dbgpt'},
        ServeRequest(prompt_name='my_prompt_2'),
    )
    assert res is not None
    expected = {
        'id': 1,
        'chat_scene': 'chat_data',
        'sub_chat_scene': 'excel',
        'prompt_type': 'common',
        'prompt_name': 'my_prompt_2',
        'content': 'Write a qsort function in python.',
        'user_name': 'zhangsan',
        'sys_code': 'dbgpt',
    }
    for field, value in expected.items():
        assert getattr(res, field) == value
class PopupRelativeLayout(_PopupLayout):
    """Popup layout where control positions and sizes are 0..1 fractions of the popup area."""

    def _place_control(self, control):
        """Convert the control's relative position/size into absolute pixels, once."""
        if control.placed:
            return
        dimensions = (control.pos_x, control.pos_y, control.width, control.height)
        if not all(0 <= value <= 1 for value in dimensions):
            logger.warning('Control %s using non relative dimensions in Relative layout', control)
        control.offsetx = int(self._width * control.pos_x) + self.margin
        control.offsety = int(self._height * control.pos_y) + self.margin
        control.width = int(self._width * control.width)
        control.height = int(self._height * control.height)
        control.placed = True
def test_option_env_variable_interpolation(config, yaml_config_file_3):
    """Environment variables referenced in the YAML file are expanded on load."""
    config.option.from_yaml(yaml_config_file_3)
    section = {'value1': 'test-value', 'value2': 'test-path/path'}
    assert config.option() == {'section1': section}
    assert config.option.section1() == section
    assert config.option.section1.value1() == section['value1']
    assert config.option.section1.value2() == section['value2']
# NOTE(review): these bare calls look like stripped decorator registrations
# (e.g. ``@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_WRITE_ACTIONS)``)
# that originally bound these property-type constants to the class defined
# below — confirm against the original source.  As written, a module-level
# ``_type`` callable must exist for them to run.
_type(ofproto.OFPTFPT_WRITE_ACTIONS)
_type(ofproto.OFPTFPT_WRITE_ACTIONS_MISS)
_type(ofproto.OFPTFPT_APPLY_ACTIONS)
_type(ofproto.OFPTFPT_APPLY_ACTIONS_MISS)
class OFPTableFeaturePropActions(OFPTableFeatureProp):
    """Table-feature property carrying a list of action ids."""

    def __init__(self, type_=None, length=None, action_ids=None):
        # Default handled via None to avoid a shared mutable default list.
        action_ids = action_ids if action_ids else []
        super(OFPTableFeaturePropActions, self).__init__(type_, length)
        self.action_ids = action_ids

    @classmethod
    def parser(cls, buf):
        """Parse the property body into OFPActionId entries and build an instance.

        Restored the missing @classmethod: the method takes ``cls`` and is
        meant to be dispatched on the class, not on an instance.
        """
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            i, rest = OFPActionId.parse(rest)
            ids.append(i)
        return cls(action_ids=ids)

    def serialize_body(self):
        """Serialize each action id and concatenate them into the property body."""
        bin_ids = bytearray()
        for i in self.action_ids:
            bin_ids += i.serialize()
        return bin_ids
class EVENT_TRACE_HEADER(ct.Structure):
    """ctypes layout for the ETW EVENT_TRACE_HEADER record.

    NOTE(review): field order and widths must mirror the Windows SDK
    definition exactly; ``EVENT_TRACE_HEADER_CLASS`` and ``GUID`` are defined
    elsewhere in this module — verify against evntrace.h.
    """
    _fields_ = [('Size', ct.c_ushort), ('HeaderType', ct.c_ubyte), ('MarkerFlags', ct.c_ubyte), ('Class', EVENT_TRACE_HEADER_CLASS), ('ThreadId', ct.c_ulong), ('ProcessId', ct.c_ulong), ('TimeStamp', wt.LARGE_INTEGER), ('Guid', GUID), ('ClientContext', ct.c_ulong), ('Flags', ct.c_ulong)]
def decrypt_combined_nonce_and_message(encrypted_value: str, key: bytes) -> str:
    """Decrypt a base64 payload laid out as nonce || ciphertext using AES-GCM.

    The leading nonce is also supplied as the associated-data argument to
    ``decrypt``, so it is authenticated along with the ciphertext.
    """
    verify_encryption_key(key)
    combined: bytes = base64.b64decode(encrypted_value)
    nonce_len = CONFIG.security.aes_gcm_nonce_length
    nonce: bytes = combined[:nonce_len]
    ciphertext: bytes = combined[nonce_len:]
    plaintext: bytes = AESGCM(key).decrypt(nonce, ciphertext, nonce)
    return plaintext.decode(CONFIG.security.encoding)
def test_slash_after_logout_before_logout_delay(casper, concise_casper, funded_account, validation_key, deposit_amount, induct_validator, send_vote, mk_suggested_vote, mk_slash_votes, new_epoch, fake_hash, logout_validator_via_signed_msg):
    """Slashing a validator that has logged out, before its logout delay has
    elapsed, must pull its end dynasty forward to the next dynasty and move the
    (negative) deposit delta from the old end dynasty to the next one.
    """
    validator_index = induct_validator(funded_account, validation_key, deposit_amount)
    scaled_deposit_size = concise_casper.validators__deposit(validator_index)
    assert (concise_casper.total_curdyn_deposits_in_wei() == deposit_amount)
    # Logout schedules removal at `end_dynasty`; the deposit delta is booked there.
    logout_validator_via_signed_msg(validator_index, validation_key)
    end_dynasty = concise_casper.validators__end_dynasty(validator_index)
    assert (concise_casper.dynasty_wei_delta(end_dynasty) == (- scaled_deposit_size))
    # Advance one epoch (deposit may change by rewards), still before the delay.
    send_vote(mk_suggested_vote(validator_index, validation_key))
    new_epoch()
    new_deposit_size = concise_casper.deposit_size(validator_index)
    new_scaled_deposit_size = concise_casper.validators__deposit(validator_index)
    assert (concise_casper.dynasty() < (end_dynasty - 1))
    assert (concise_casper.dynasty_wei_delta((concise_casper.dynasty() + 1)) == 0)
    assert (concise_casper.dynasty_wei_delta(end_dynasty) == (- new_scaled_deposit_size))
    # Slash with a pair of conflicting votes.
    (vote_1, vote_2) = mk_slash_votes(validator_index, validation_key)
    assert concise_casper.slashable(vote_1, vote_2)
    casper.functions.slash(vote_1, vote_2).transact()
    # The full current deposit is recorded as slashed and the validator now
    # exits at the next dynasty; the delta moves from the old end dynasty.
    assert (concise_casper.total_slashed(concise_casper.current_epoch()) == new_deposit_size)
    assert concise_casper.validators__is_slashed(validator_index)
    assert (concise_casper.validators__end_dynasty(validator_index) == (concise_casper.dynasty() + 1))
    assert (concise_casper.dynasty_wei_delta(end_dynasty) == 0)
    assert (concise_casper.dynasty_wei_delta((concise_casper.dynasty() + 1)) == (- new_scaled_deposit_size))
    assert (concise_casper.validators__total_deposits_at_logout(validator_index) == deposit_amount)
class Environment():
    """Viewing-environment parameters and the derived adaptation matrices.

    NOTE(review): the per-channel gain construction resembles a von Kries style
    chromatic adaptation built from the white point and adapting luminance —
    confirm against the color-appearance model this implements.  ``M`` and
    ``R`` are matrices defined elsewhere in this module.
    """

    def __init__(self, white: VectorLike, adapting_luminance: float, surround: float, discounting: float) -> None:
        # White point converted from xy chromaticity to XYZ.
        self.xyz_w = util.xy_to_xyz(white)
        self.surround = surround
        self.yn = adapting_luminance
        self.d = discounting
        # Forward adaptation matrix and its inverse.
        self.ram = self.calc_ram()
        self.iram = alg.inv(self.ram)

    def calc_ram(self) -> Matrix:
        """Compute the adaptation matrix as R @ diag(a) @ M, with per-channel gains ``a``."""
        lms = alg.matmul(M, self.xyz_w)
        a = []
        s = sum(lms)
        for c in lms:
            # Channel's share of the white point (scaled so an equal split gives 1).
            l = ((3.0 * c) / s)
            p = (((1.0 + alg.nth_root(self.yn, 3)) + l) / ((1.0 + alg.nth_root(self.yn, 3)) + (1.0 / l)))
            # Blend p toward 1.0 by the discounting factor d, then normalize by the channel value.
            a.append(((p + (self.d * (1.0 - p))) / c))
        A = alg.diag(a)
        return alg.multi_dot([R, A, M])
class BackupProcess(FledgeProcess):
    """Fledge process that dumps the Postgres storage layer and packages the
    dump (plus scripts, etc config and installed-software manifest) into a
    .tar.gz backup archive, tracked in the backup status table."""
    _MODULE_NAME = 'fledge_backup_postgres_process'
    _BACKUP_FILE_NAME_PREFIX = 'fledge_backup_'
    _MESSAGES_LIST = {'i000001': 'Execution started.', 'i000002': 'Execution completed.', 'e000000': 'general error', 'e000001': 'cannot initialize the logger - error details |{0}|', 'e000002': 'cannot retrieve the configuration from the manager, trying retrieving from file - error details |{0}|', 'e000003': 'cannot retrieve the configuration from file - error details |{0}|', 'e000004': '...', 'e000005': '...', 'e000006': '...', 'e000007': 'backup failed.', 'e000008': 'cannot execute the backup, either a backup or a restore is already running - pid |{0}|', 'e000009': '...', 'e000010': "directory used to store backups doesn't exist - dir |{0}|", 'e000011': "directory used to store semaphores for backup/restore synchronization doesn't exist - dir |{0}|", 'e000012': 'cannot create the configuration cache file, neither FLEDGE_DATA nor FLEDGE_ROOT are defined.', 'e000013': 'cannot create the configuration cache file, provided path is not a directory - dir |{0}|', 'e000014': "the identified path of backups doesn't exists, creation was tried - dir |{0}| - error details |{1}|", 'e000015': 'The command is not available neither using the unmanaged approach - command |{0}|', 'e000016': 'Postgres command is not executable - command |{0}|', 'e000017': 'The execution of the Postgres command using the -V option produce an error - command |{0}| - output |{1}|', 'e000018': 'It is not possible to read data from Postgres - command |{0}| - exit code |{1}| - output |{2}|', 'e000019': 'The command is not available using the managed approach - command |{0}|'}
    _logger = None

    def __init__(self):
        super().__init__()
        if not self._logger:
            self._logger = logger.setup(self._MODULE_NAME, destination=_LOGGER_DESTINATION, level=_LOGGER_LEVEL)
        self._backup = Backup(self._storage_async)
        self._backup_lib = lib.BackupRestoreLib(self._storage_async, self._logger)
        self._job = lib.Job()
        # The shared lib module uses this process' logger and storage client.
        lib._logger = self._logger
        lib._storage = self._storage_async

    def _generate_file_name(self):
        """Build a timestamped dump file name inside the backups directory."""
        self._logger.debug('{func}'.format(func='_generate_file_name'))
        execution_time = time.strftime('%Y_%m_%d_%H_%M_%S')
        full_file_name = ((self._backup_lib.dir_backups + '/') + self._BACKUP_FILE_NAME_PREFIX) + execution_time
        ext = 'dump'
        _backup_file = '{file}.{ext}'.format(file=full_file_name, ext=ext)
        return _backup_file

    def init(self):
        """Validate paths/configuration and take the backup job semaphore.

        Raises BackupOrRestoreAlreadyRunning when another backup/restore owns
        the semaphore.
        """
        self._logger.debug('{func}'.format(func='init'))
        self._backup_lib.evaluate_paths()
        self._backup_lib.retrieve_configuration()
        self._backup_lib.check_for_execution_backup()
        pid = self._job.is_running()
        if pid == 0:
            pid = os.getpid()
            self._job.set_as_running(self._backup_lib.JOB_SEM_FILE_BACKUP, pid)
        else:
            _message = self._MESSAGES_LIST['e000008'].format(pid)
            self._logger.warning('{0}'.format(_message))
            raise exceptions.BackupOrRestoreAlreadyRunning

    def execute_backup(self):
        """Run the dump, package it as .tar.gz, record status and audit the outcome."""
        self._logger.debug('{func}'.format(func='execute_backup'))
        self._purge_old_backups()
        backup_file = self._generate_file_name()
        (backup_file_tar_base, dummy) = os.path.splitext(backup_file)
        backup_file_tar = backup_file_tar_base + '.tar.gz'
        self._logger.debug('execute_backup - backup_file :{}: backup_file_tar :{}: -'.format(backup_file, backup_file_tar))
        # The status row is keyed by the final tar name, created before the dump runs.
        self._backup_lib.sl_backup_status_create(backup_file_tar, lib.BackupType.FULL, lib.BackupStatus.RUNNING)
        (status, exit_code) = self._run_backup_command(backup_file)
        t = tarfile.open(backup_file_tar, 'w:gz')
        t.add(backup_file, arcname=os.path.basename(backup_file))
        # Include user scripts (if present), etc configuration and a manifest
        # of installed plugins/services alongside the raw dump.
        backup_path = self._backup_lib.dir_fledge_data + '/scripts'
        if os.path.isdir(backup_path):
            t.add(backup_path, arcname=os.path.basename(backup_path))
        t.add(self._backup_lib.dir_fledge_data_etc, arcname=os.path.basename(self._backup_lib.dir_fledge_data_etc))
        data = {'plugins': PluginDiscovery.get_plugins_installed(), 'services': get_service_installed()}
        temp_software_file = '{}/software.json'.format(self._backup_lib.dir_backups)
        with open(temp_software_file, 'w') as outfile:
            json.dump(data, outfile, indent=4)
        t.add(temp_software_file, arcname=os.path.basename(temp_software_file))
        t.close()
        # The raw dump and the manifest are only staging files for the archive.
        os.remove(backup_file)
        os.remove(temp_software_file)
        backup_information = self._backup_lib.sl_get_backup_details_from_file_name(backup_file_tar)
        self._backup_lib.sl_backup_status_update(backup_information['id'], status, exit_code)
        audit = AuditLogger(self._storage_async)
        loop = asyncio.get_event_loop()
        if status != lib.BackupStatus.COMPLETED:
            self._logger.error(self._MESSAGES_LIST['e000007'])
            loop.run_until_complete(audit.information('BKEXC', {'status': 'failed'}))
            raise exceptions.BackupFailed
        else:
            loop.run_until_complete(audit.information('BKEXC', {'status': 'completed'}))

    def _purge_old_backups(self):
        """Delete oldest backups so that, including the new one, at most `retention` remain."""
        backups_info = asyncio.get_event_loop().run_until_complete(self._backup.get_all_backups(self._backup_lib.MAX_NUMBER_OF_BACKUPS_TO_RETRIEVE, 0, None, lib.SortOrder.ASC))
        backups_n = len(backups_info)
        # -1 leaves room for the backup about to be created.
        last_to_delete = backups_n - (self._backup_lib.config['retention'] - 1)
        if last_to_delete > 0:
            backups_to_delete = backups_info[:last_to_delete]
            for row in backups_to_delete:
                backup_id = row['id']
                file_name = row['file_name']
                self._logger.debug('{func} - id |{id}| - file_name |{file}|'.format(func='_purge_old_backups', id=backup_id, file=file_name))
                asyncio.get_event_loop().run_until_complete(self._backup.delete_backup(backup_id))

    def _run_backup_command(self, _backup_file):
        """Execute pg_dump (with retries) into _backup_file; returns (status, exit_code)."""
        self._logger.debug('{func} - file_name |{file}|'.format(func='_run_backup_command', file=_backup_file))
        pg_cmd = self._backup_lib.PG_COMMANDS[self._backup_lib.PG_COMMAND_DUMP]
        cmd = '{cmd} {options} {db} > {file}'.format(cmd=pg_cmd, options='--serializable-deferrable -Fc', db=self._backup_lib.config['database'], file=_backup_file)
        (_exit_code, output) = lib.exec_wait_retry(cmd, output_capture=True, exit_code_ok=0, max_retry=self._backup_lib.config['max_retry'], timeout=self._backup_lib.config['timeout'])
        if _exit_code == 0:
            _status = lib.BackupStatus.COMPLETED
        else:
            _status = lib.BackupStatus.FAILED
        self._logger.debug('{func} - status |{status}| - exit_code |{exit_code}| - cmd |{cmd}| output |{output}| '.format(func='_run_backup_command', status=_status, exit_code=_exit_code, cmd=cmd, output=output))
        return (_status, _exit_code)

    def shutdown(self):
        """Release the backup job semaphore."""
        self._logger.debug('{func}'.format(func='shutdown'))
        self._job.set_as_completed(self._backup_lib.JOB_SEM_FILE_BACKUP)

    def run(self):
        """Entry point: init, execute the backup, always release the job semaphore."""
        self.init()
        try:
            self.execute_backup()
        except Exception as _ex:
            # Bug fix: `_MESSAGES_LIST` and `_logger` were referenced as bare
            # names here, raising NameError on any backup failure.
            _message = self._MESSAGES_LIST['e000002'].format(_ex)
            self._logger.error(_message)
            self.shutdown()
            # NOTE(review): raising RestoreFailed from the *backup* process
            # looks suspicious (BackupFailed?) — kept for caller compatibility.
            raise exceptions.RestoreFailed(_message)
        else:
            self.shutdown()
def find_muscle(name, muscles):
    """Map a free-form name onto a canonical muscle from the *muscles* mapping.

    Strategy: exact substring match of a mapping key inside *name* first, then
    fuzzy matching (SequenceMatcher ratio >= 0.75), returning the best-scoring
    candidate.  Returns 'Unmapped' (and logs an error) when nothing matches.
    """
    for key, muscle in muscles.items():
        if key in name:
            return muscle
    candidates = []
    for key, muscle in muscles.items():
        ratio = SequenceMatcher(None, key, name).ratio()
        if ratio >= 0.75:
            candidates.append((ratio, muscle))
    if not candidates:
        # Bug fix: the error used to be logged on the *success* path; it now
        # fires only when the name is genuinely unmapped.
        app.server.logger.error(f'No matching muscles for: {name}')
        return 'Unmapped'
    # Bug fix: sorted(results)[0] returned the WORST (lowest-ratio) match;
    # the highest-ratio candidate is the intended answer.
    return max(candidates)[1]
# NOTE(review): the bare ('/monitor-target') expression looks like a stripped
# route decorator (e.g. ``@app.get('/monitor-target')``) — confirm against
# the original source; as written it is a no-op statement.
('/monitor-target')
def monitor_target_drift(window_size: int=3000) -> FileResponse:
    """Build a target-drift report comparing the latest `window_size` rows of
    current data against the reference data, and return it as an HTML file response.
    """
    logging.info('Read current data')
    current_data: pd.DataFrame = load_current_data(window_size)
    logging.info('Read reference data')
    reference_data = load_reference_data(columns=DATA_COLUMNS['columns'])
    logging.info('Build report')
    column_mapping: ColumnMapping = get_column_mapping(**DATA_COLUMNS)
    # build_target_drift_report writes the HTML to disk and returns its path.
    report_path: Text = build_target_drift_report(reference_data=reference_data, current_data=current_data, column_mapping=column_mapping)
    logging.info('Return report as html')
    return FileResponse(report_path)
class CalendarScaleSystemTestCase(TicksTestCase):
    """Tests for calendar-based tick scale selection."""

    def test_hourly_scales(self):
        # NOTE(review): this builds the scale list but makes no assertions —
        # it only verifies that construction does not raise; likely incomplete.
        scales = (([TimeScale(seconds=dt) for dt in (1, 5, 15, 30)] + [TimeScale(minutes=dt) for dt in (1, 5, 15, 30)]) + [TimeScale(hours=dt) for dt in (1, 2, 3, 4, 6, 12)])

    def test_yearly_scales(self):
        # Expect exactly one tick per January 1st over 2000-2007.
        ticker = ScaleSystem(TimeScale(month_of_year=1), default_scale=None)
        ticks = ticker.ticks(DTS(2000, 1, 1), DTS(2007, 1, 1), 10)
        desired = list(starmap(DTS, ((2000, 1, 1), (2001, 1, 1), (2002, 1, 1), (2003, 1, 1), (2004, 1, 1), (2005, 1, 1), (2006, 1, 1), (2007, 1, 1))))
        self.check_ticks(ticks, desired)
def find_matching_fn_abi(abi: ABI, abi_codec: ABICodec, fn_identifier: Optional[Union[(str, Type[FallbackFn], Type[ReceiveFn])]]=None, args: Optional[Sequence[Any]]=None, kwargs: Optional[Any]=None) -> ABIFunction:
    """Locate the single ABI function entry matching the identifier and arguments.

    Raises Web3ValidationError (including a diagnosis of why matching failed)
    when zero or multiple candidates survive the name / argument-count /
    encodability filters, and TypeError for a non-string identifier.
    """
    args = args or tuple()
    kwargs = kwargs or dict()
    num_arguments = len(args) + len(kwargs)
    # Fallback and receive functions are identified by sentinel types, not names.
    if fn_identifier is FallbackFn:
        return get_fallback_func_abi(abi)
    if fn_identifier is ReceiveFn:
        return get_receive_func_abi(abi)
    if not is_text(fn_identifier):
        raise TypeError('Unsupported function identifier')
    name_filter = functools.partial(filter_by_name, fn_identifier)
    arg_count_filter = functools.partial(filter_by_argument_count, num_arguments)
    encoding_filter = functools.partial(filter_by_encodability, abi_codec, args, kwargs)
    function_candidates = pipe(abi, name_filter, arg_count_filter, encoding_filter)
    if len(function_candidates) == 1:
        return function_candidates[0]
    # No unique match: assemble a diagnostic error message.
    matching_identifiers = name_filter(abi)
    matching_function_signatures = [abi_to_signature(func) for func in matching_identifiers]
    arg_count_matches = len(arg_count_filter(matching_identifiers))
    encoding_matches = len(encoding_filter(matching_identifiers))
    if arg_count_matches == 0:
        diagnosis = '\nFunction invocation failed due to improper number of arguments.'
    elif encoding_matches == 0:
        diagnosis = '\nFunction invocation failed due to no matching argument types.'
    elif encoding_matches > 1:
        diagnosis = '\nAmbiguous argument encoding. Provided arguments can be encoded to multiple functions matching this call.'
    else:
        # Bug fix: previously `diagnosis` was left unbound on this path (one
        # encodable overload that was filtered out by argument count),
        # raising UnboundLocalError instead of the validation error below.
        diagnosis = '\nFunction invocation failed.'
    collapsed_args = extract_argument_types(args)
    collapsed_kwargs = dict({(k, extract_argument_types([v])) for (k, v) in kwargs.items()})
    message = f'''
Could not identify the intended function with name `{fn_identifier}`, positional arguments with type(s) `{collapsed_args}` and keyword arguments with type(s) `{collapsed_kwargs}`.
Found {len(matching_identifiers)} function(s) with the name `{fn_identifier}`: {matching_function_signatures}{diagnosis}'''
    raise Web3ValidationError(message)
class AgentManager():
    """Process-wide singleton that manages agent templates, agent package
    environments and loaded agent instances."""
    _instance = None

    @classmethod
    def get_instance(cls) -> 'AgentManager':
        """Lazily create and return the singleton.

        Restored the missing @classmethod: the method takes ``cls`` and is the
        canonical singleton accessor (``AgentManager.get_instance()``).
        """
        if cls._instance is None:
            cls._instance = AgentManager()
        return cls._instance

    def __init__(self) -> None:
        # All state is populated by `initial()`.
        self.agent_templete_env: PackageEnv = None
        self.agent_env: PackageEnv = None
        self.db_path: str = None
        self.loaded_agent_instance: Dict[(str, AIAgent)] = None

    async def initial(self) -> bool:
        """Set up template/agent package environments and the message-db path; returns True."""
        system_app_dir = AIStorage.get_instance().get_system_app_dir()
        user_data_dir = AIStorage.get_instance().get_myai_dir()
        self.agent_templete_env = PackageEnvManager().get_env(f'{system_app_dir}/templates/templetes.cfg')
        sys_agent_env: PackageEnv = PackageEnvManager().get_env(f'{system_app_dir}/agents/agents.cfg')
        user_agent_config_path = f'{user_data_dir}/agents/agents.cfg'
        await AIStorage.get_instance().try_create_file_with_default_value(user_agent_config_path, default_agent_cfg)
        # User env takes precedence; system agents are reachable via parent env.
        self.agent_env = PackageEnvManager().get_env(user_agent_config_path)
        self.agent_env.parent_envs.append(sys_agent_env)
        self.db_path = f'{user_data_dir}/messages.db'
        self.loaded_agent_instance = {}
        return True

    async def scan_all_agent(self) -> None:
        pass

    async def get(self, agent_id: str) -> AIAgent:
        """Return the agent for agent_id, loading it from its package media.

        Returns None when the id is unknown or loading fails.
        """
        the_agent = self.loaded_agent_instance.get(agent_id)
        if the_agent:
            return the_agent
        agent_media_info = self.agent_env.load(agent_id)
        if agent_media_info is None:
            return None
        the_agent = await self._load_agent_from_media(agent_media_info)
        if the_agent is None:
            logger.warning(f'load agent {agent_id} from media failed!')
            # Bug fix: previously fell through and dereferenced None below
            # (AttributeError on ``None.chat_db``).
            return None
        the_agent.chat_db = self.db_path
        return the_agent

    def remove(self, agent_id: str) -> int:
        pass

    async def get_templete(self, templete_id) -> AIAgentTemplete:
        """Return the template for templete_id, or None when unknown."""
        template_media_info = self.agent_templete_env.get(templete_id)
        if template_media_info is None:
            return None
        return self._load_templete_from_media(template_media_info)

    def install(self, templete_id) -> PackageInstallTask:
        """Kick off installation of a template package and return the task."""
        installer = self.agent_templete_env.get_installer()
        return installer.install(templete_id)

    def uninstall(self, templete_id) -> int:
        pass

    async def _load_templete_from_media(self, templete_media: PackageMediaInfo) -> AIAgentTemplete:
        pass

    async def _load_agent_from_media(self, agent_media: PackageMediaInfo) -> AIAgent:
        """Read agent.toml from the package media and build an AIAgent; None on any failure."""
        reader = self.agent_env._create_media_loader(agent_media)
        if reader is None:
            logger.error(f'create media loader for {agent_media} failed!')
            return None
        try:
            config_file = await reader.read('agent.toml', 'r')
            if config_file is None:
                logger.error(f'read agent config from {agent_media} failed!')
                return None
            config_data = await config_file.read()
            config = toml.loads(config_data)
            result_agent = AIAgent()
            if result_agent.load_from_config(config) is False:
                logger.error(f'load agent from {agent_media} failed!')
                return None
            return result_agent
        except Exception as e:
            logger.error(f'read agent.toml cfg from {agent_media} failed! unexpected error occurred: {str(e)}')
            return None

    def create(self, template, agent_name, agent_last_name, agent_introduce) -> AIAgent:
        pass
def test_class_variables(additionals):
    """Module and disk attributes must all derive from the configured BASE_DIR."""
    base = additionals.BASE_DIR
    module = additionals.mod
    assert module.name == 'empty'
    assert module.disk.base_dir == Path(base)
    # Name-mangled private class attributes are reached explicitly.
    assert module._MTModule__LOGS == []
    assert module.disk._LocalStorage__LOGS_DIR == f'{base}/logs'
    assert module.disk._LocalStorage__LOGS_FILE == f'{base}/logs/logs.txt'
    # The logs directory must actually exist on disk.
    assert os.path.exists(f'{base}/logs')
class OptionSeriesSunburstSonificationContexttracksMappingLowpassResonance(Options):
    """Generated option wrapper for the lowpass-resonance mapping of a
    sunburst series' sonification context tracks.

    NOTE(review): every option below appears as a duplicate def pair (a
    getter-style and a setter-style definition with the same name). The
    generated original almost certainly used @property / @<name>.setter
    decorators that were stripped; as written only the second (setter)
    definition of each pair survives. Confirm against the generator output.
    """
    def mapFunction(self):
        # Getter: no configured default (None until explicitly set).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store as plain data (js_type=False), not a JS expression.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the next line looks like a stripped decorator — presumably
# `@pytest.mark.django_db`; confirm against the original test module.
.django_db
def test_psc_autocomplete_success(client, psc_data):
    """PSC autocomplete: exact code '8435' yields one hit; text 'FOO' matches two."""
    resp = client.post('/api/v2/autocomplete/psc/', content_type='application/json', data=json.dumps({'search_text': '8435'}))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.data['results']) == 1)
    assert (resp.data['results'][0]['psc_description'] == "FOOTWEAR, WOMEN'S")
    # A description-text search is expected to match multiple PSC records.
    resp = client.post('/api/v2/autocomplete/psc/', content_type='application/json', data=json.dumps({'search_text': 'FOO'}))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.data['results']) == 2)
class OptionPlotoptionsScatterSonificationContexttracksMappingFrequency(Options):
    """Generated option wrapper for the frequency mapping of a scatter
    series' sonification context tracks.

    NOTE(review): each option is defined twice (getter-style then
    setter-style with the same name); @property / setter decorators were
    presumably stripped from the generated source, so only the setter defs
    survive as written. Confirm.
    """
    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: js_type=False stores plain data, not a JS expression.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_geo_group_sim():
    """An STL mesh group extended with a Box still builds a valid Simulation,
    and one custom dataset is tracked per original mesh geometry."""
    mesh_group = td.TriangleMesh.from_stl('tests/data/two_boxes_separate.stl')
    original_geometries = list(mesh_group.geometries)
    extended_group = mesh_group.updated_copy(
        geometries=original_geometries + [td.Box(size=(1, 1, 1))]
    )
    simulation = td.Simulation(
        size=(10, 10, 10),
        grid_spec=td.GridSpec.uniform(dl=0.1),
        sources=[],
        structures=[td.Structure(geometry=extended_group, medium=td.Medium(permittivity=2))],
        monitors=[],
        run_time=1e-12,
        boundary_spec=td.BoundarySpec.all_sides(td.PML()),
    )
    # The Box contributes no dataset; only the mesh geometries do.
    assert len(simulation.custom_datasets) == len(original_geometries)
def show_plotcuts(cuts, buttons=False, extraText=None):
    """Preview cut paths in a matplotlib window.

    cuts: list of polylines, each a list of (x, y) points.
    buttons: when True, show Cut/Cancel buttons; the return value is then
        0 for Cut and 1 for Cancel (closing the window counts as Cancel).
    extraText: optional text rendered beside the plot.

    Returns 2 when matplotlib is unavailable, 3 for an empty path, otherwise
    the dialog result above (always 0 when buttons is False).
    """
    if (plt is None):
        print('Install matplotlib for python to allow graphical display of cuts', file=sys.stderr)
        return 2
    if (cuts == []):
        print('Empty cut path', file=sys.stderr)
        return 3
    # Flatten all polylines to one point list for overall extent and markers.
    xy = sum(cuts, [])
    least = min((min(p[0], p[1]) for p in xy))
    greatest = max((max(p[0], p[1]) for p in xy))
    scale = (greatest - least)
    plt.figure('Sendto Silhouette - Preview')
    # Faint background trace of the whole path, then start (green) / end (red).
    plt.plot(*zip(*sum(cuts, [])), color='lightsteelblue')
    plt.plot(xy[0][0], xy[0][1], 'go')
    plt.plot(xy[(- 1)][0], xy[(- 1)][1], 'ro')
    ncuts = len(cuts)
    maxhue = 0.33
    for (i, xy) in enumerate(cuts):
        # Hue fades across cuts to visualize cutting order; an arrow marks
        # the direction of each cut's final segment.
        plt.plot(*zip(*xy), color=hsv_to_rgb((maxhue * (1.0 - (i / ncuts))), 0.9, 0.7))
        plt.arrow(xy[(- 2)][0], xy[(- 2)][1], (xy[(- 1)][0] - xy[(- 2)][0]), (xy[(- 1)][1] - xy[(- 2)][1]), color='lightblue', length_includes_head=True, head_width=min(3, (scale / 50)))
    # Swap the y limits to flip the vertical axis (screen vs. machine coords).
    plt.axis([plt.axis()[0], plt.axis()[1], plt.axis()[3], plt.axis()[2]])
    plt.gca().set_aspect('equal')
    class Response():
        # Result holder for the button callbacks: default 1 (cancel) when
        # buttons are shown, 0 otherwise.
        returnvalue = (1 if (buttons == True) else 0)
        def pushedcut(self, event):
            self.returnvalue = 0
            plt.close('all')
        def pushedcancel(self, event):
            # Leaves returnvalue at its default (1 = cancel).
            plt.close('all')
    response = Response()
    if buttons:
        bcut = Button(plt.axes([0.7, 0.9, 0.1, 0.075]), 'Cut')
        bcancel = Button(plt.axes([0.81, 0.9, 0.1, 0.075]), 'Cancel')
        bcut.on_clicked(response.pushedcut)
        bcancel.on_clicked(response.pushedcancel)
        # Keyboard shortcuts: Enter = Cut, Escape = Cancel.
        bcut.connect_event('key_press_event', (lambda event: (response.pushedcut(event) if (event.key == 'enter') else None)))
        bcancel.connect_event('key_press_event', (lambda event: (response.pushedcancel(event) if (event.key == 'escape') else None)))
    if extraText:
        plt.text((- 1.3), 0.5, str(extraText), fontsize=8, horizontalalignment='right')
    plt.show()
    return response.returnvalue
class OptionSeriesColumnpyramidSonificationContexttracksMappingPlaydelay(Options):
    """Generated option wrapper for the playDelay mapping of a columnpyramid
    series' sonification context tracks.

    NOTE(review): options appear as duplicate def pairs — @property /
    setter decorators were presumably stripped from the generated code, so
    only the setter definition of each pair survives as written. Confirm.
    """
    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: js_type=False stores plain data, not a JS expression.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Cell_Reference(Name):
    """A by-name reference to a cell, optionally carrying arguments.

    Rendered as ``ident`` when there are no arguments, otherwise as
    ``ident{arg1, arg2, ...}``.
    """

    def __init__(self, n_ident):
        super().__init__()
        assert isinstance(n_ident, Name)
        self.n_ident = n_ident
        self.n_ident.set_parent(self)
        self.l_args = []

    def loc(self):
        # The reference's location is that of the referenced identifier.
        return self.n_ident.loc()

    def add_argument(self, n_arg):
        """Append one argument expression and reparent it to this node."""
        assert isinstance(n_arg, Expression)
        self.l_args.append(n_arg)
        n_arg.set_parent(self)

    def visit(self, parent, function, relation):
        """Standard AST traversal: identifier first, then the arguments."""
        self._visit(parent, function, relation)
        self.n_ident.visit(self, function, 'Name')
        self._visit_list(self.l_args, function, 'Arguments')
        self._visit_end(parent, function, relation)

    def __str__(self):
        if not self.l_args:
            return str(self.n_ident)
        rendered_args = ', '.join(str(arg) for arg in self.l_args)
        return '%s{%s}' % (self.n_ident, rendered_args)
class ChefArray(Processor):
    """AutoPkg processor that renders a Chef array block: a Ruby array
    literal (or a pre-built variable) followed by an ``.each do |item|``
    iterator line."""
    # Bug fix: the original description literal was truncated
    # (unterminated string); completed with a neutral ending.
    description = 'Produces an array that can be used with other Chef blocks. See the Chef documentation for more details.'
    input_variables = {'item_list': {'description': 'Array of items to be put into the array block. This can also be a single string.', 'required': True}, 'no_wrap_quotes': {'description': 'Do not add wrapping quotation marks.', 'required': False}, 'remove_version': {'description': 'Removes the version string from the variable.', 'required': False}}
    output_variables = {'array_block': {'description': 'Chef array block.'}}
    __doc__ = description

    def main(self):
        """Build env['array_block'] from env['item_list'].

        A string input is treated as the name of an existing array variable;
        a list input is rendered as a bracketed, comma-separated literal.
        """
        beginning_bracket = '[\n'
        iterator = 'item'
        end_bracket = ']'
        each_text = ('.each do |%s|\n' % iterator)
        quotes = "'"
        itemlist = list()
        if self.env.get('no_wrap_quotes'):
            quotes = ''
        if isinstance(self.env['item_list'], basestring):
            # Single-string input: optionally strip a "['<version>']" suffix,
            # then append the iterator directly to the variable name.
            # Bug fix: use .get — 'remove_version' is an optional input and
            # self.env['remove_version'] raised KeyError when it was unset.
            if self.env.get('remove_version'):
                version_string = ("['%s']" % self.env['remove_version'])
                if (version_string in self.env['item_list']):
                    self.env['item_list'] = self.env['item_list'].replace(version_string, '')
            self.env['array_block'] = (self.env['item_list'] + each_text)
        else:
            itemlist = self.env['item_list']
            self.env['array_block'] = beginning_bracket
            for item in itemlist:
                self.output(('Item: %s' % item))
                self.env['array_block'] += (' %s%s%s,\n' % (quotes, str(item), quotes))
            self.env['array_block'] += end_bracket
            # Drop the trailing comma before the closing bracket.
            self.env['array_block'] = self.env['array_block'].replace(',\n]', '\n]')
            self.env['array_block'] += each_text
        self.output(('Chef block: \n%s' % self.env['array_block']))
def set_webprofileusername(username):
    """PUT `username` to the web-profile endpoint and return the parsed JSON.

    Best effort: on a request failure the error is printed and None returned.
    """
    endpoint = (config.host + '/profile/username')
    payload = json.dumps({'username': username})
    try:
        response = requests.put(endpoint, headers=headers, data=payload)
        return response.json()
    except requests.exceptions.RequestException as err:
        print('Something went wrong. Could not set webprofile username:', err)
def periodise(m):
    """Return a new mesh with periodic planar coordinates derived from `m`.

    Presumably maps a cylinder-like mesh embedded in 3D to a 2D periodic
    sheet: the first coordinate is the normalized atan2 angle of (x, y)
    scaled by Lx, the second is z scaled by Ly (TODO confirm against caller).
    """
    element = BrokenElement(FiniteElement('CG', cell=m.ufl_cell(), degree=1))
    coord_fs = VectorFunctionSpace(m, element, dim=2)
    old_coordinates = m.coordinates
    new_coordinates = Function(coord_fs)
    domain = '{[i, j]: 0 <= i < old_coords.dofs and 0 <= j < new_coords.dofs}'
    # Per-cell kernel: Y accumulates the cell's y-coordinates so the seam
    # (angle == 0) can be assigned consistently to one side.
    # Bug fix: the pi literal was truncated to '3.' (mangled float);
    # restored to full double precision — it normalizes atan2 into [0, 1).
    instructions = f'''
    <{RealType}> Y = 0
    <float64> pi = 3.141592653589793
    for i
    Y = Y + real(old_coords[i, 1])
    end
    for j
    new_coords[j, 0] = atan2(real(old_coords[j, 1]), real(old_coords[j, 0])) / (pi*2)
    new_coords[j, 0] = new_coords[j, 0] + 1 if real(new_coords[j, 0]) < 0 else new_coords[j, 0]
    new_coords[j, 0] = 1 if (real(new_coords[j, 0]) == 0 and Y < 0) else new_coords[j, 0]
    new_coords[j, 0] = new_coords[j, 0] * Lx[0]
    new_coords[j, 1] = old_coords[j, 2] * Ly[0]
    end
    '''
    # Unit scaling constants (Lx = Ly = 1); read-only inputs to the kernel.
    cLx = Constant(1)
    cLy = Constant(1)
    par_loop((domain, instructions), dx, {'new_coords': (new_coordinates, WRITE), 'old_coords': (old_coordinates, READ), 'Lx': (cLx, READ), 'Ly': (cLy, READ)})
    return Mesh(new_coordinates)
def main():
    """Ansible module entry point for the fortios firewall_ssh_local_ca resource.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, runs the configuration change, and exits with changed/diff
    status (plus a version-mismatch warning when the schema check fails).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_ssh_local_ca': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived options into the resource dict; the mkey
    # (primary key) attribute is forced to required.
    for attribute_name in module_spec['options']:
        fields['firewall_ssh_local_ca']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_ssh_local_ca']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default logging to off when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_ssh_local_ca')
        (is_error, has_changed, result, diff) = fortios_firewall_ssh(module.params, fos, module.check_mode)
    else:
        # No persistent connection available: abort with the standard message.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            # Success, but surface the schema-version warning in the result.
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
# NOTE(review): the next line appears to be a stripped decorator — presumably
# `@ffi.callback('int (*git_transport_certificate_check_cb)(...)')`; confirm
# against the original source.
('int (*git_transport_certificate_check_cb)(git_cert *cert, int valid, const char *host, void *payload)')
def _certificate_cb(cert_i, valid, host, data):
    """C-level bridge for libgit2 certificate checks.

    Looks up the user's 'certificate_cb' callable in the handle `data` and
    maps its outcome to libgit2 codes: 0 accepts, GIT_ECERTIFICATE rejects,
    GIT_EUSER signals a Python exception (stashed in d['exception']).
    """
    d = ffi.from_handle(data)
    try:
        # NOTE(review): the certificate argument is passed as None —
        # presumably the cert object is not wrapped here; verify upstream.
        val = d['certificate_cb'](None, bool(valid), ffi.string(host))
        if (not val):
            return C.GIT_ECERTIFICATE
    except Exception as e:
        # Preserve the exception so the Python caller can re-raise it later.
        d['exception'] = e
        return C.GIT_EUSER
    return 0
def stream(audio_stream: Iterator[bytes]) -> bytes:
    """Pipe an audio byte stream to the `mpv` player while collecting it.

    Each chunk is written to mpv's stdin as it arrives (so playback starts
    immediately) and accumulated; the complete audio is returned once the
    stream ends and mpv exits.

    Raises ValueError when mpv is not installed.
    """
    if (not is_installed('mpv')):
        # Bug fix: the original message literal was truncated (unterminated
        # string); restored with the install hint.
        message = ("mpv not found, necessary to stream audio. "
                   "On mac you can install it with 'brew install mpv'. "
                   "On linux and windows you can install it from https://mpv.io/")
        raise ValueError(message)
    mpv_command = ['mpv', '--no-cache', '--no-terminal', '--', 'fd://0']
    mpv_process = subprocess.Popen(mpv_command, stdin=subprocess.PIPE, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    audio = b''
    for chunk in audio_stream:
        if (chunk is not None):
            mpv_process.stdin.write(chunk)
            # Flush per chunk so playback keeps pace with the stream.
            mpv_process.stdin.flush()
            audio += chunk
    if mpv_process.stdin:
        mpv_process.stdin.close()
    mpv_process.wait()
    return audio
class SDXLSAGAdapter(SAGAdapter[SDXLUNet]):
    """Self-Attention Guidance (SAG) adapter for the SDXL UNet.

    Injecting hooks the UNet's middle block so the self-attention shape and
    attention map are published to the context under the keys
    'middle_block_attn_shape' / 'middle_block_attn_map'.
    """
    def __init__(self, target: SDXLUNet, scale: float=1.0, kernel_size: int=9, sigma: float=1.0) -> None:
        # scale weights the guidance; kernel_size/sigma parameterize the blur
        # configured on the base SAGAdapter.
        super().__init__(target=target, scale=scale, kernel_size=kernel_size, sigma=sigma)
    def inject(self: 'SDXLSAGAdapter', parent: (fl.Chain | None)=None) -> 'SDXLSAGAdapter':
        """Insert the shape/map capture hooks, then perform the base injection."""
        middle_block = self.target.ensure_find(MiddleBlock)
        # Capture the attention shape right after the residual block...
        middle_block.insert_after_type(ResidualBlock, SelfAttentionShape(context_key='middle_block_attn_shape'))
        self_attn = middle_block.ensure_find(fl.SelfAttention)
        # ...and the attention map just before scaled dot-product attention.
        self_attn.insert_before_type(ScaledDotProductAttention, SelfAttentionMap(num_heads=self_attn.num_heads, context_key='middle_block_attn_map'))
        return super().inject(parent)
    def eject(self) -> None:
        """Remove both hooks inserted by inject(), then base-eject."""
        middle_block = self.target.ensure_find(MiddleBlock)
        middle_block.remove(middle_block.ensure_find(SelfAttentionShape))
        self_attn = middle_block.ensure_find(fl.SelfAttention)
        self_attn.remove(self_attn.ensure_find(SelfAttentionMap))
        super().eject()
class DEOK(DE76):
    """Delta-E OK: the DE76 Euclidean distance evaluated in Oklab,
    optionally multiplied by a scalar."""

    NAME = 'ok'
    SPACE = 'oklab'

    def __init__(self, scalar: float=1) -> None:
        # Default multiplier applied when distance() is called without one.
        self.scalar = scalar

    def distance(self, color: 'Color', sample: 'Color', scalar: Optional[float]=None, **kwargs: Any) -> float:
        """Return the (scaled) DE76 distance between `color` and `sample`."""
        factor = self.scalar if scalar is None else scalar
        return factor * super().distance(color, sample)
class OptionPlotoptionsWordcloudSonificationTracksPointgrouping(Options):
    """Generated option wrapper for point grouping of a wordcloud series'
    sonification tracks.

    NOTE(review): each option appears as a duplicate def pair; @property /
    setter decorators were presumably stripped from the generated code, so
    only the setter definitions survive as written. Confirm.
    """
    def algorithm(self):
        # Getter: default grouping algorithm is 'minmax'.
        return self._config_get('minmax')
    def algorithm(self, text: str):
        # Setter: js_type=False stores plain data, not a JS expression.
        self._config(text, js_type=False)
    def enabled(self):
        # Grouping is enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Default group timespan (milliseconds, per Highcharts convention —
        # TODO confirm).
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Default point property used for grouping is 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
def compute_max_saturation(a: float, b: float) -> float:
    """Return the maximum saturation S = C/L for the hue direction (a, b)
    such that the corresponding Oklab color stays inside the sRGB gamut.

    `a` and `b` must be normalized so that a^2 + b^2 == 1.

    Follows Björn Ottosson's gamut-clipping method: pick which sRGB channel
    goes negative first, evaluate a fitted polynomial for an initial S, then
    refine with one Halley step on f(S) = channel(S).

    Bug fix: the numeric coefficients in the original had been truncated to
    values like 0., 1., -0. (mangled float literals — e.g. 0.1254107 and
    -1.1513021 survive from the canonical table); restored from the
    reference implementation.
    """
    if -1.88170328 * a - 0.80936493 * b > 1:
        # Red component goes below zero first.
        k0, k1, k2, k3, k4 = 1.19086277, 1.76576728, 0.59662641, 0.75515197, 0.56771245
        wl, wm, ws = 4.0767416621, -3.3077115913, 0.2309699292
    elif 1.81444104 * a - 1.09649632 * b > 1:
        # Green component goes below zero first.
        k0, k1, k2, k3, k4 = 0.73956515, -0.45954404, 0.08285427, 0.12541070, 0.14503204
        wl, wm, ws = -1.2684380046, 2.6097574011, -0.3413193965
    else:
        # Blue component goes below zero first.
        k0, k1, k2, k3, k4 = 1.35733652, -0.00915799, -1.15130210, -0.50559606, 0.00692167
        wl, wm, ws = -0.0041960863, -0.7034186147, 1.7076147010

    # Polynomial first approximation of S for this hue direction.
    sat = k0 + k1 * a + k2 * b + k3 * a * a + k4 * a * b

    # One Halley-method refinement step, working in non-linear LMS space.
    k_l = +0.3963377774 * a + 0.2158037573 * b
    k_m = -0.1055613458 * a - 0.0638541728 * b
    k_s = -0.0894841775 * a - 1.2914855480 * b

    l_ = 1.0 + sat * k_l
    m_ = 1.0 + sat * k_m
    s_ = 1.0 + sat * k_s

    l = l_ ** 3
    m = m_ ** 3
    s = s_ ** 3

    # First and second derivatives of the cubed LMS terms w.r.t. S.
    l_ds = 3.0 * k_l * (l_ ** 2)
    m_ds = 3.0 * k_m * (m_ ** 2)
    s_ds = 3.0 * k_s * (s_ ** 2)

    l_ds2 = 6.0 * (k_l ** 2) * l_
    m_ds2 = 6.0 * (k_m ** 2) * m_
    s_ds2 = 6.0 * (k_s ** 2) * s_

    f = wl * l + wm * m + ws * s
    f1 = wl * l_ds + wm * m_ds + ws * s_ds
    f2 = wl * l_ds2 + wm * m_ds2 + ws * s_ds2

    sat = sat - (f * f1) / ((f1 ** 2) - (0.5 * f) * f2)
    return sat
def get_facet_closure_nodes(mesh, key, V):
    """Return the nodes of V in the closure of the facets selected by `key`.

    `key` is a (component, sub_domain) pair; explicit marker collections are
    validated against the mesh's known facet markers before delegating to
    dmcommon.facet_closure_nodes.
    """
    _, sub_domain = key
    special_domains = {'on_boundary', 'top', 'bottom'}
    if sub_domain not in special_domains:
        # Union of interior and exterior facet markers known to the mesh.
        valid = set(mesh.interior_facets.unique_markers) | set(mesh.exterior_facets.unique_markers)
        invalid = set(sub_domain) - valid
        if invalid:
            raise LookupError(f"BC construction got invalid markers {invalid}. Valid markers are '{valid}'")
    return dmcommon.facet_closure_nodes(V, sub_domain)
class OptionPlotoptionsCylinderSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Generated option wrapper for the highpass-resonance mapping of a
    cylinder series' default sonification instrument options.

    NOTE(review): every option is defined twice (getter-style then
    setter-style); @property / setter decorators were presumably stripped
    from the generated source, leaving only the setter defs effective as
    written. Confirm.
    """
    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: js_type=False stores plain data, not a JS expression.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_nistems():
    """Round-trip NI stems metadata: write stems with NIStemsWriter, dump the
    'stem' udta atom with MP4Box, and check it equals the default metadata."""
    mp4exc = stempeg.cmds.find_cmd('MP4Box')
    (stems, rate) = stempeg.read_stems(stempeg.example_stem_path())
    with tmp.NamedTemporaryFile(delete=False, suffix='.m4a') as tempfile:
        stempeg.write_stems(tempfile.name, stems, sample_rate=rate, writer=stempeg.NIStemsWriter())
        # Extract the stem user-data atom into <name>_stem.udta via MP4Box.
        callArgs = [mp4exc]
        callArgs.extend(['-dump-udta', '0:stem', tempfile.name])
        sp.check_call(callArgs)
        (root, ext) = os.path.splitext(tempfile.name)
        udtaFile = (root + '_stem.udta')
        with open(stempeg.default_metadata()) as f:
            d_metadata = json.load(f)
        try:
            # The udta payload starts after an 8-byte atom header; skip it.
            fileObj = codecs.open(udtaFile, encoding='utf-8')
            fileObj.seek(8)
            l_metadata = json.load(fileObj)
        except json.decoder.JSONDecodeError:
            # Fallback: some dumps contain the bare JSON with no header.
            with open(udtaFile) as json_file:
                l_metadata = json.load(json_file)
        # Compare order-insensitively.
        assert (ordered(l_metadata) == ordered(d_metadata))
def test_finds_correct_dominator():
    """Common-subexpression elimination must hoist the repeated expr1 into the
    head block — the common dominator of all blocks that use it — binding it
    to c0#0 ahead of the branch."""
    cfg = ControlFlowGraph()
    # Diamond-plus-tail CFG: head -> {branch_body, phi_block} -> phi_block ->
    # {print_branch, foo_branch} -> return_block; expr1 is used in phi_block,
    # both branches, and return_block.
    cfg.add_nodes_from([(head := BasicBlock(0, instructions=[Branch(Condition(OperationType.greater, [Variable('a', ssa_label=0), Constant(0, Integer.int32_t())]))])), (branch_body := BasicBlock(1, instructions=[Assignment(Variable('a', ssa_label=1), UnaryOperation(OperationType.negate, [Variable('a', ssa_label=0)]))])), (phi_block := BasicBlock(2, instructions=[Phi(Variable('a', ssa_label=2), [Variable('a', ssa_label=0), Variable('a', ssa_label=1)]), Phi(Variable('b', ssa_label=3), [Variable('b', ssa_label=1), expr1.copy()]), Branch(Condition(OperationType.equal, [Variable('a', ssa_label=2), Variable('b', ssa_label=3)]))])), (print_branch := BasicBlock(3, instructions=[Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant('%s'), expr1.copy()]))])), (foo_branch := BasicBlock(4, instructions=[Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant('%s'), expr1.copy()]))])), (return_block := BasicBlock(5, instructions=[Assignment(ListOperation([]), Call(function_symbol('bar'), [expr1.copy()])), Return([expr1.copy()])]))])
    cfg.add_edges_from([TrueCase(head, branch_body), FalseCase(head, phi_block), UnconditionalEdge(branch_body, phi_block), TrueCase(phi_block, print_branch), FalseCase(phi_block, foo_branch), UnconditionalEdge(foo_branch, return_block), UnconditionalEdge(print_branch, return_block)])
    _run_cse(cfg)
    # After CSE the head gains the hoisted definition before its branch.
    assert (head.instructions == [Assignment(Variable('c0', ssa_label=0), expr1.copy()), Branch(Condition(OperationType.greater, [Variable('a', ssa_label=0), Constant(0, Integer.int32_t())]))])
class TestArchChecker(unittest.TestCase):
    """Tests for archive similarity checking against a phash test database,
    including path-based negative filtering of candidate matches."""
    maxDiff = None
    def __init__(self, *args, **kwargs):
        logSetup.initLogging()
        super().__init__(*args, **kwargs)
        self.maxDiff = None
    def setUp(self):
        # Fresh test DB per test; cleanup is registered before creation so it
        # runs even if the load/verify below fails.
        self.addCleanup(self.dropDatabase)
        self.db = Tests.basePhashTestSetup.TestDb()
        self.verifyDatabaseLoaded()
    def dropDatabase(self):
        self.db.tearDown()
        self.db.close()
    def _reprDatabase(self, db):
        # Debug helper: dump rows in copy-pasteable form on mismatch.
        for row in db:
            print(('%s, ' % list(row)))
    def verifyDatabaseLoaded(self):
        """Assert the DB contains exactly the expected fixture CONTENTS."""
        expect = list(CONTENTS)
        expect.sort()
        items = list(self.db.getItems())
        items.sort()
        if (items != expect):
            self._reprDatabase(items)
        self.assertEqual(items, expect)
    def test_significantlySimilar_1(self):
        # notQuiteAllArch.zip should match allArch.zip with 5 similar items.
        cwd = os.path.dirname(os.path.realpath(__file__))
        ck = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        ret = ck.getSignificantlySimilarArches(searchDistance=2)
        expect = {5: ['{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)]}
        self.assertEqual(ret, expect)
        del ck
    def test_pathNegativeFiltering_nofilter(self):
        # Baseline: no filter, the similar archive is found.
        cwd = os.path.dirname(os.path.realpath(__file__))
        expect = {5: ['{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)]}
        ck0 = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd))
        ret0 = ck0.getSignificantlySimilarArches(searchDistance=2)
        print('ck0: ', ck0)
        print('ret0: ', ret0)
        self.assertEqual(ret0, expect)
        del ck0
    def test_pathNegativeFiltering_2(self):
        # Filter covering the whole test tree removes all matches.
        cwd = os.path.dirname(os.path.realpath(__file__))
        pf1 = ['{cwd}/test_ptree/'.format(cwd=cwd)]
        ck1 = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd), pathNegativeFilter=pf1)
        ret1 = ck1.getSignificantlySimilarArches(searchDistance=2)
        print('ck1: ', ck1)
        print('ret1: ', ret1)
        self.assertEqual(ret1, {})
        del ck1
    def test_pathNegativeFiltering_3(self):
        # A prefix filter ('.../test') also matches and removes everything.
        cwd = os.path.dirname(os.path.realpath(__file__))
        pf2 = ['{cwd}/test'.format(cwd=cwd)]
        ck2 = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd), pathNegativeFilter=pf2)
        ret2 = ck2.getSignificantlySimilarArches(searchDistance=2)
        print('ck2: ', ck2)
        print('ret2: ', ret2)
        self.assertEqual(ret2, {})
        del ck2
    def test_pathNegativeFiltering_4(self):
        # A non-matching filter leaves the result unchanged.
        cwd = os.path.dirname(os.path.realpath(__file__))
        expect = {5: ['{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)]}
        pf3 = ['{cwd}/testzzzzzzzzzz'.format(cwd=cwd)]
        ck3 = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd), pathNegativeFilter=pf3)
        ret3 = ck3.getSignificantlySimilarArches(searchDistance=2)
        print('ck3: ', ck3)
        print('ret3: ', ret3)
        self.assertEqual(ret3, expect)
        del ck3
    def test_pathNegativeFiltering_5(self):
        # A filter matching only unrelated paths does not affect the result.
        cwd = os.path.dirname(os.path.realpath(__file__))
        expect = {5: ['{cwd}/test_ptree/allArch.zip'.format(cwd=cwd)]}
        pf4 = ['{cwd}/test_ptree/regular-u'.format(cwd=cwd)]
        ck4 = TestArchiveChecker('{cwd}/test_ptree/notQuiteAllArch.zip'.format(cwd=cwd), pathNegativeFilter=pf4)
        ret4 = ck4.getSignificantlySimilarArches(searchDistance=2)
        print('ck4: ', ck4)
        print('ret4: ', ret4)
        self.assertEqual(ret4, expect)
        del ck4
def make_app():
    """App factory: pull config-file/instance-path overrides from the active
    click context's script info (when present) and delegate to create_app."""
    script_info = None
    current_ctx = click.get_current_context(silent=True)
    if current_ctx is not None:
        script_info = current_ctx.obj
    # getattr with default tolerates a missing/foreign context object.
    return create_app(
        getattr(script_info, 'config_file', None),
        getattr(script_info, 'instance_path', None),
    )
class BlackJackTest(unittest.TestCase):
    """Exercism Black Jack exercise tests: card values, comparisons, ace
    valuation, blackjack detection, splitting, and doubling down.

    NOTE(review): the bare `.task(taskno=N)` lines look like stripped
    decorators — presumably `@pytest.mark.task(taskno=N)` from the exercism
    test conventions; confirm against the original.
    """
    .task(taskno=1)
    def test_value_of_card(self):
        # Number cards are face value; J/Q/K are 10; ace defaults to 1 here.
        test_data = [('2', 2), ('5', 5), ('8', 8), ('A', 1), ('10', 10), ('J', 10), ('Q', 10), ('K', 10)]
        for (variant, (card, expected)) in enumerate(test_data, 1):
            with self.subTest(f'variation #{variant}', card=card, expected=expected):
                actual_result = value_of_card(card)
                error_msg = f'Called value_of_card({card}). The function returned {actual_result} as the value of the {card} card, but the test expected {expected} as the {card} card value.'
                self.assertEqual(actual_result, expected, msg=error_msg)
    .task(taskno=2)
    def test_higher_card(self):
        # Ties return both cards as a tuple; otherwise the single higher card.
        test_data = [('A', 'A', ('A', 'A')), ('10', 'J', ('10', 'J')), ('3', 'A', '3'), ('3', '6', '6'), ('Q', '10', ('Q', '10')), ('4', '4', ('4', '4')), ('9', '10', '10'), ('6', '9', '9'), ('4', '8', '8')]
        for (variant, (card_one, card_two, expected)) in enumerate(test_data, 1):
            with self.subTest(f'variation #{variant}', card_one=card_one, card_two=card_two, expected=expected):
                actual_result = higher_card(card_one, card_two)
                error_msg = f'Called higher_card({card_one}, {card_two}). The function returned {actual_result}, but the test expected {expected} as the result for the cards {(card_one, card_two)}.'
                self.assertEqual(actual_result, expected, msg=error_msg)
    .task(taskno=3)
    def test_value_of_ace(self):
        # Ace counts 11 unless that would bust (or an ace is already held).
        test_data = [('2', '3', 11), ('3', '6', 11), ('5', '2', 11), ('8', '2', 11), ('5', '5', 11), ('Q', 'A', 1), ('10', '2', 1), ('7', '8', 1), ('J', '9', 1), ('K', 'K', 1), ('2', 'A', 1), ('A', '2', 1)]
        for (variant, (card_one, card_two, ace_value)) in enumerate(test_data, 1):
            with self.subTest(f'variation #{variant}', card_one=card_one, card_two=card_two, ace_value=ace_value):
                actual_result = value_of_ace(card_one, card_two)
                # NOTE(review): the assertion recomputes value_of_ace instead
                # of using actual_result — harmless but inconsistent.
                error_msg = f'Called value_of_ace({card_one}, {card_two}). The function returned {actual_result}, but the test expected {ace_value} as the value of an ace card when the hand includes {(card_one, card_two)}.'
                self.assertEqual(value_of_ace(card_one, card_two), ace_value, msg=error_msg)
    .task(taskno=4)
    def test_is_blackjack(self):
        # Blackjack is exactly one ace plus one ten-valued card.
        test_data = [(('A', 'K'), True), (('10', 'A'), True), (('10', '9'), False), (('A', 'A'), False), (('4', '7'), False), (('9', '2'), False), (('Q', 'K'), False)]
        for (variant, (hand, expected)) in enumerate(test_data, 1):
            with self.subTest(f'variation #{variant}', hand=hand, expected=expected):
                actual_result = is_blackjack(*hand)
                error_msg = f"Called is_blackjack({hand[0]}, {hand[1]}). The function returned {actual_result}, but hand {hand} {('is' if expected else 'is not')} a blackjack."
                self.assertEqual(actual_result, expected, msg=error_msg)
    .task(taskno=5)
    def test_can_split_pairs(self):
        # Pairs split when both cards have equal value (Q/K both count 10).
        test_data = [(('Q', 'K'), True), (('6', '6'), True), (('A', 'A'), True), (('10', 'A'), False), (('10', '9'), False)]
        for (variant, (hand, expected)) in enumerate(test_data, 1):
            with self.subTest(f'variation #{variant}', input=hand, expected=expected):
                actual_result = can_split_pairs(*hand)
                error_msg = f"Called can_split_pairs({hand[0]}, {hand[1]}). The function returned {actual_result}, but hand {hand} {('can' if expected else 'cannot')} be split into pairs."
                self.assertEqual(actual_result, expected, msg=error_msg)
    .task(taskno=6)
    def test_can_double_down(self):
        # Double down is allowed on totals of 9, 10, or 11.
        test_data = [(('A', '9'), True), (('K', 'A'), True), (('4', '5'), True), (('A', 'A'), False), (('10', '2'), False), (('10', '9'), False)]
        for (variant, (hand, expected)) in enumerate(test_data, 1):
            with self.subTest(f'variation #{variant}', hand=hand, expected=expected):
                actual_result = can_double_down(*hand)
                error_msg = f"Called can_double_down({hand[0]}, {hand[1]}). The function returned {actual_result}, but hand {hand} {('can' if expected else 'cannot')} be doubled down."
                self.assertEqual(actual_result, expected, msg=error_msg)
class UnionParamType(click.ParamType):
    """A click ParamType for typing.Union fields: tries each member type in
    precedence order and returns the first successful conversion.

    Precedence: concrete types first, then STRING-like types, then
    UNPROCESSED — because the latter two accept almost any input and would
    otherwise shadow more specific conversions.
    """

    def __init__(self, types: typing.List[click.ParamType]):
        super().__init__()
        self._types = self._sort_precedence(types)

    @staticmethod
    def _sort_precedence(tp: typing.List[click.ParamType]) -> typing.List[click.ParamType]:
        # Bug fix: this is invoked as `self._sort_precedence(types)` but was
        # defined without `self` and without @staticmethod, which raises
        # TypeError at runtime (the decorator was presumably lost).
        unprocessed = []
        str_types = []
        others = []
        for t in tp:
            if isinstance(t, type(click.UNPROCESSED)):
                unprocessed.append(t)
            elif isinstance(t, type(click.STRING)):
                str_types.append(t)
            else:
                others.append(t)
        return ((others + str_types) + unprocessed)

    def convert(self, value: typing.Any, param: typing.Optional[click.Parameter], ctx: typing.Optional[click.Context]) -> typing.Any:
        """Return the first member type's successful conversion of `value`.

        Raises click.BadParameter when every member type fails.
        """
        for t in self._types:
            try:
                return t.convert(value, param, ctx)
            except Exception as e:
                # Expected for non-matching variants; keep trying the rest.
                logging.debug(f'Ignoring conversion error for type {t} trying other variants in Union. Error: {e}')
        raise click.BadParameter(f'Failed to convert {value} to any of the types {self._types}')
def decorate_name(name, dual_porosity, fracture, date=None):
    """Suffix `name` for dual-porosity ('F' fracture / 'M' matrix) and,
    when given, a date ('_<date>'); return the decorated name."""
    suffix = ''
    if dual_porosity:
        suffix = 'F' if fracture else 'M'
    if date is not None:
        suffix += '_' + str(date)
    return name + suffix
def test_unicode_inside_ascii_range():
    """set_cookie must accept names/values whose characters are all ASCII."""
    resp = falcon.Response()
    cookie_pairs = [
        ('non_unicode_ascii_name_1', 'ascii_value'),
        ('unicode_ascii_name_1', 'ascii_value'),
        ('non_unicode_ascii_name_2', 'unicode_ascii_value'),
        ('unicode_ascii_name_2', 'unicode_ascii_value'),
    ]
    # Each call should succeed without raising.
    for cookie_name, cookie_value in cookie_pairs:
        resp.set_cookie(cookie_name, cookie_value)
def make_gridprop_values(values, grid, fracture):
    """Shape raw property values onto `grid` and mask inactive cells.

    Scalars are expanded to all cells. For dual-porosity grids the values
    are split into matrix/fracture sets (`fracture` selects which) and the
    cells belonging to the other porosity system are zeroed. Returns a
    masked ndarray with the grid's dimensions, Fortran-ordered.
    """
    num_cells = np.prod(grid.dimensions)
    if np.isscalar(values):
        values = expand_scalar_values(values, num_cells, grid.dualporo)
    if grid.dualporo:
        actind = grid.get_dualactnum_indices(fracture=fracture, order='F')
        values = pick_dualporo_values(values, actind, num_cells, fracture)
    else:
        actind = grid.get_actnum_indices(order='F')
        # Values given only for active cells are scattered to the full grid.
        if (len(values) != num_cells):
            values = match_values_to_active_cells(values, actind, num_cells)
    values = values.reshape(grid.dimensions, order='F')
    if grid.dualporo:
        # _dualactnum flags appear to encode 1 = matrix-only, 2 = fracture-
        # only (TODO confirm); zero the cells of the other system.
        if fracture:
            values[(grid._dualactnum.values == 1)] = 0.0
        else:
            values[(grid._dualactnum.values == 2)] = 0.0
    return np.ma.masked_where((grid.get_actnum().values < 1), values)
class String(Atom, Seq):
    """A Lisp string atom that also acts as a sequence (cons/car/cdr).

    NOTE(review): `__init__` shadows the builtin `str` with its parameter
    name.
    """
    def __init__(self, str):
        Atom.__init__(self, str)
    def __repr__(self):
        return repr(self.data)
    def eval(self, env, args=None):
        # Strings are self-evaluating.
        return self
    def cons(self, e):
        # NOTE(review): `Symbol.__class__` is the metaclass (`type`), so the
        # second comparison can never match a Symbol instance — this likely
        # intended `e.__class__ != Symbol`. Confirm before changing; also the
        # two-argument raise passes the class name as a second positional arg.
        if ((e.__class__ != self.__class__) and (e.__class__ != Symbol.__class__)):
            raise UnimplementedFunctionError('Cannot cons a string and a ', e.__class__.__name__)
        # Cons prepends: the new element's data goes in front.
        return String((e.data + self.data))
    def car(self):
        # First character, as a Symbol.
        return Symbol(self.data[0])
    def cdr(self):
        # Remainder of the string, as a String.
        return String(self.data[1:])
class RebuildAllPackagesFormFactory(object):
    """Factory for a 'rebuild all packages' form class.

    Note: __new__ returns a *class*, not an instance — the chroot-aware base
    form extended with a package multi-select (all packages pre-checked) and
    an opt-out flag for per-package chroot configuration.
    """
    def __new__(cls, active_chroots, package_names):
        form_cls = _get_build_form(active_chroots, BaseForm)
        # All packages are offered and selected by default.
        form_cls.packages = MultiCheckboxField('Packages', choices=[(name, name) for name in package_names], default=package_names, validators=[wtforms.validators.DataRequired()])
        form_cls.only_package_chroots = wtforms.BooleanField(label='Respect package-level chroot list configuration', description='The final set of chroot builds submitted for a particular package will be an <strong>intersection</strong> of the chroot list <strong>selected below</strong> and the chroots selected <strong>per package</strong>. If not set, builds for all chroots selected below will be submitted.', default=True, false_values=FALSE_VALUES)
        return form_cls
def export_single_model(model, arch_config, save_path, logger):
    """Convert a dygraph model to static graph and save it for inference.

    Input specs depend on the architecture: SRN takes an image plus four
    int64 side inputs; SAR uses a fixed 48x160 image; all other models get a
    shape inferred from model_type (variable width by default).
    """
    if (arch_config['algorithm'] == 'SRN'):
        max_text_length = arch_config['Head']['max_text_length']
        # Image tensor plus [encoder word pos, gsrm word pos, 2 x attention
        # bias] auxiliary inputs, all sized by max_text_length.
        other_shape = [paddle.static.InputSpec(shape=[None, 1, 64, 256], dtype='float32'), [paddle.static.InputSpec(shape=[None, 256, 1], dtype='int64'), paddle.static.InputSpec(shape=[None, max_text_length, 1], dtype='int64'), paddle.static.InputSpec(shape=[None, 8, max_text_length, max_text_length], dtype='int64'), paddle.static.InputSpec(shape=[None, 8, max_text_length, max_text_length], dtype='int64')]]
        model = to_static(model, input_spec=other_shape)
    elif (arch_config['algorithm'] == 'SAR'):
        other_shape = [paddle.static.InputSpec(shape=[None, 3, 48, 160], dtype='float32')]
        model = to_static(model, input_spec=other_shape)
    else:
        # Default: 3-channel image, height/width free; refined below.
        infer_shape = [3, (- 1), (- 1)]
        if (arch_config['model_type'] == 'rec'):
            infer_shape = [3, 32, (- 1)]
            # TPS transforms cannot handle variable width: pin it to 100.
            if (('Transform' in arch_config) and (arch_config['Transform'] is not None) and (arch_config['Transform']['name'] == 'TPS')):
                logger.info('When there is tps in the network, variable length input is not supported, and the input size needs to be the same as during training')
                infer_shape[(- 1)] = 100
            if (arch_config['algorithm'] == 'NRTR'):
                # NRTR expects grayscale fixed-size input.
                infer_shape = [1, 32, 100]
        elif (arch_config['model_type'] == 'table'):
            infer_shape = [3, 488, 488]
        model = to_static(model, input_spec=[paddle.static.InputSpec(shape=([None] + infer_shape), dtype='float32')])
    paddle.jit.save(model, save_path)
    logger.info('inference model is saved to {}'.format(save_path))
    return
class Listable():
    """Test double mimicking a GitLab manager: canned results for list() and
    attribute-matched lookup for get()."""

    def __init__(self, list_result, get_results=None, archive_result=None):
        self.list_result = list_result
        self.get_results = get_results
        self.archive_result = archive_result

    def list(self, as_list=False, archived=None, top_level_only=False):
        """Return the canned listing, filtered by the `archived` flag."""
        if archived is True:
            return [self.archive_result]
        if archived is None and self.archive_result is not None:
            return [self.list_result, self.archive_result]
        return [self.list_result]

    def get(self, id, lazy=False):
        """Return the last canned result whose id or full_path matches `id`;
        raise GitlabGetError(404) when nothing matches."""
        matches = [node for node in self.get_results if id in (node.id, node.full_path)]
        if not matches:
            raise GitlabGetError(response_code=404)
        return matches[-1]
def setup(logger_name: str=None, destination: int=None, level: int=None, propagate: bool=False) -> logging.Logger:
    """Create and configure a named logger.

    :param logger_name: name passed to ``logging.getLogger`` (None = root)
    :param destination: SYSLOG or CONSOLE; falls back to the module-level
        ``default_destination`` when None
    :param level: optional log level to apply
    :param propagate: whether records bubble up to ancestor loggers
    :raises ValueError: on an unrecognised destination
    """
    log = logging.getLogger(logger_name)
    dest = default_destination if destination is None else destination
    if dest == SYSLOG:
        handler = SysLogHandler(address='/dev/log')
    elif dest == CONSOLE:
        handler = logging.StreamHandler()
    else:
        raise ValueError('Invalid destination {}'.format(dest))
    # Prefix every record with the process name and pid.
    line_format = '{}[%(process)d] %(levelname)s: %(module)s: %(name)s: %(message)s'.format(get_process_name())
    handler.setFormatter(logging.Formatter(fmt=line_format))
    if level is not None:
        log.setLevel(level)
    log.addHandler(handler)
    log.propagate = propagate
    error_override(log)
    return log
class OptionPlotoptionsSplineSonificationContexttracksMapping(Options):
    """Accessors for the ``plotOptions.spline.sonification.contextTracks.mapping``
    option subtree of a Highcharts-style configuration.

    Each accessor lazily materialises (and returns) the corresponding child
    option object via ``_config_sub_data``.

    NOTE(review): ``text`` is defined twice (getter then setter) and the
    second definition shadows the first as written; this looks like stripped
    ``@property`` / ``@text.setter`` decorators -- confirm against the
    original generated source.
    """
    def frequency(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsSplineSonificationContexttracksMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsSplineSonificationContexttracksMappingGapbetweennotes)
    def highpass(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsSplineSonificationContexttracksMappingHighpass)
    def lowpass(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsSplineSonificationContexttracksMappingLowpass)
    def noteDuration(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsSplineSonificationContexttracksMappingNoteduration)
    def pan(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsSplineSonificationContexttracksMappingPan)
    def pitch(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsSplineSonificationContexttracksMappingPitch)
    def playDelay(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsSplineSonificationContexttracksMappingPlaydelay)
    def rate(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsSplineSonificationContexttracksMappingRate)
    # Getter half of a presumed text property (see class NOTE).
    def text(self):
        return self._config_get(None)
    # Setter half of a presumed text property (see class NOTE).
    def text(self, text: str):
        self._config(text, js_type=False)
    def time(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsSplineSonificationContexttracksMappingTime)
    def tremolo(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsSplineSonificationContexttracksMappingTremolo)
    def volume(self) -> 'OptionPlotoptionsSplineSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsSplineSonificationContexttracksMappingVolume)
class KrxHistoricalDailyPriceDataDownloader():
    """Downloads historical daily OHLCV price data from the KRX (Korea
    Exchange) open-data web service via its JSON POST endpoints.

    NOTE(review): several string literals here are truncated
    (``'Origin': '``, ``'Referer': '`` and the ``url = '`` lines) -- the
    URLs appear to have been stripped during extraction and must be
    restored from the original source before this code can run.
    NOTE(review): ``stocks`` / ``stocks_delisted`` are accessed as
    attributes (``self.stocks.index``) although defined as methods, which
    suggests stripped ``@property`` decorators -- confirm upstream.
    """
    def __init__(self):
        # Browser-like headers expected by the KRX endpoint.
        self._headers = {'Accept': 'application/json, text/javascript, */*; q=0.01', 'Accept-Encoding': 'gzip, deflate', 'Accept-Language': 'en-US,en;q=0.9,ko;q=0.8,fr;q=0.7,ja;q=0.6,zh-CN;q=0.5,zh;q=0.4', 'Host': 'data.krx.co.kr', 'Origin': ' 'Referer': ' 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36', 'X-Requested-With': 'XMLHttpRequest'}
        # Lazy caches filled by get_stocks() / get_stocks_delisted().
        self._stocks = None
        self._stocks_delisted = None
        # Endpoint "bld" identifier for the daily price report.
        self._bld = 'dbms/MDC/STAT/standard/MDCSTAT01701'
        # Last queried full code, echoed back as isuCd2 on the next request.
        self._isuCd = ''
        self._calendar = get_calendar('XKRX')
        self._start_date = self._calendar.first_session
    def get_stocks(self):
        """Fetch the table of currently listed stocks, indexed by short code."""
        data = {'mktsel': 'ALL', 'typeNo': '0', 'searchText': '', 'bld': 'dbms/comm/finder/finder_stkisu'}
        url = '
        response = requests.post(url, data, headers=self._headers)
        df = pd.json_normalize(response.json()['block1'])
        df = df.set_index('short_code')
        return df
    def get_stocks_delisted(self):
        """Fetch the table of delisted stocks, indexed by short code."""
        data = {'mktsel': 'ALL', 'searchText': '', 'bld': 'dbms/comm/finder/finder_listdelisu'}
        url = '
        response = requests.post(url, data, headers=self._headers)
        df = pd.json_normalize(response.json()['block1'])
        df = df.set_index('short_code')
        return df
    def stocks(self):
        # Cached accessor (presumed @property, see class NOTE).
        if (self._stocks is None):
            self._stocks = self.get_stocks()
        return self._stocks
    def stocks_delisted(self):
        # Cached accessor (presumed @property, see class NOTE).
        if (self._stocks_delisted is None):
            self._stocks_delisted = self.get_stocks_delisted()
        return self._stocks_delisted
    def get_full_code(self, symbol):
        """Resolve a short symbol to its KRX full code, checking listed
        stocks first, then delisted ones."""
        if (symbol in self.stocks.index):
            return self.stocks.loc[symbol]['full_code']
        if (symbol in self.stocks_delisted.index):
            return self.stocks_delisted.loc[symbol]['full_code']
        raise ValueError(('No full_code found for given symbol %s' % symbol))
    def get_name(self, symbol):
        """Resolve a short symbol to its display name (listed or delisted)."""
        if (symbol in self.stocks.index):
            return self.stocks.loc[symbol]['codeName']
        if (symbol in self.stocks_delisted.index):
            return self.stocks_delisted.loc[symbol]['codeName']
        raise ValueError(('No name found for given symbol %s' % symbol))
    def download(self, symbol, start_date=None, end_date=None):
        """Download daily price rows for *symbol* between *start_date* and
        *end_date* (defaults: first XKRX session .. previous close), returning
        a DataFrame indexed by Date, or None when the service returns no rows.
        """
        if (start_date is None):
            start_date = self._start_date
        if (end_date is None):
            now = pd.Timestamp.now(self._calendar.tz)
            end_date = self._calendar.previous_close(now).astimezone(self._calendar.tz).normalize()
        full_code = self.get_full_code(symbol)
        name = self.get_name(symbol)
        url = '
        data = {'bld': self._bld, 'tboxisuCd_finder_stkisu0_0': '{}/{}'.format(symbol, name), 'isuCd': full_code, 'isuCd2': self._isuCd, 'codeNmisuCd_finder_stkisu0_0': name, 'param1isuCd_finder_stkisu0_0': 'ALL', 'strtDd': start_date.strftime('%Y%m%d'), 'endDd': end_date.strftime('%Y%m%d'), 'share': '1', 'money': '1', 'csvxls_isNo': 'false'}
        self._isuCd = full_code
        response = requests.post(url, data, headers=self._headers)
        output = response.json()['output']
        if (len(output) == 0):
            return None
        df = pd.json_normalize(output)
        # Map the KRX Korean-market column codes to friendly names.
        column_names = {'TRD_DD': 'Date', 'ISU_CD': 'Code', 'ISU_NM': 'Name', 'MKT_NM': 'Market', 'SECUGRP_NM': 'SecuGroup', 'TDD_CLSPRC': 'Close', 'FLUC_TP_CD': 'UpDown', 'CMPPRVDD_PRC': 'Change', 'FLUC_RT': 'ChangeRate', 'TDD_OPNPRC': 'Open', 'TDD_HGPRC': 'High', 'TDD_LWPRC': 'Low', 'ACC_TRDVOL': 'Volume', 'ACC_TRDVAL': 'Amount', 'MKTCAP': 'MarCap', 'CMPPREVDD_PRC': 'Change', 'LIST_SHRS': 'Shares'}
        df.rename(columns=column_names, inplace=True)
        int_columns = ['Close', 'UpDown', 'Change', 'ChangeRate', 'Open', 'High', 'Low', 'Volume', 'Amount', 'MarCap', 'Shares']
        # Numeric fields arrive as comma-grouped strings; coerce bad cells to NaN.
        for col in int_columns:
            if (col in df.columns):
                df[col] = pd.to_numeric(df[col].str.replace(',', ''), errors='coerce')
        df['Date'] = pd.to_datetime(df['Date'])
        df.set_index('Date', inplace=True)
        return df
class Bot(object):
    """A logged-in WeChat bot session built on :class:`itchat.Core`.

    Handles login (QR code), contact retrieval (friends/groups/mps),
    message registration/dispatch and a background listening thread.

    NOTE(review): bare expressions such as ``_encoded_string_output`` and
    ``_response(...)`` appear where decorators would normally sit, and
    ``alive`` is defined twice (getter then setter) -- these look like
    stripped decorator lines (``@property`` / ``@alive.setter`` / response
    wrappers); confirm against the upstream wxpy source before relying on
    this chunk.
    """
    def __init__(self, cache_path=None, console_qr=False, qr_path=None, qr_callback=None, login_callback=None, logout_callback=None, user_agent=None, start_immediately=True):
        # One itchat Core per Bot; register it so itchat can track instances.
        self.core = itchat.Core(user_agent)
        self.user_agent = self.core.user_agent
        itchat.instanceList.append(self)
        enhance_connection(self.core.s)
        # cache_path=True selects the default pickle file name.
        if (cache_path is True):
            cache_path = 'wxpy.pkl'
        self.cache_path = cache_path
        if (console_qr is True):
            console_qr = 2
        try:
            self.core.auto_login(hotReload=bool(cache_path), statusStorageDir=cache_path, enableCmdQR=console_qr, picDir=qr_path, qrCallback=qr_callback, loginCallback=login_callback, exitCallback=logout_callback)
        except FileNotFoundError as e:
            # No GUI image viewer available: hint at console QR mode.
            if ('xdg-open' in e.strerror):
                raise Exception('use `console_qr` arg while under pure console environment')
            raise
        self._sync_check_iterations = int((time.time() * 1000))
        enhance_webwx_request(self)
        # The bot's own account, the built-in file-transfer helper chat,
        # the received-message store and the handler registry.
        self.self = User(self.core.loginInfo['User'], self)
        self.file_helper = Chat(wrap_user_name('filehelper'), self)
        self.messages = Messages()
        self.registered = Registered(self)
        self.puid_map = None
        self.auto_mark_as_read = False
        self.is_listening = False
        self.listening_thread = None
        if PY2:
            from ..compatible.utils import TemporaryDirectory
            self.temp_dir = TemporaryDirectory(prefix='wxpy_')
        else:
            self.temp_dir = tempfile.TemporaryDirectory(prefix='wxpy_')
        if start_immediately:
            self.start()
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _encoded_string_output
    def __repr__(self):
        return '<{}: {}>'.format(self.__class__.__name__, self.self.name)
    def __unicode__(self):
        return '<{}: {}>'.format(self.__class__.__name__, self.self.name)
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response()
    def logout(self):
        """Log this bot out of WeChat."""
        logger.info('{}: logging out'.format(self))
        return self.core.logout()
    # Getter half of a presumed ``alive`` property (see class NOTE).
    def alive(self):
        return self.core.alive
    # Setter half of a presumed ``alive`` property (see class NOTE).
    def alive(self, value):
        self.core.alive = value
    def dump_login_status(self, cache_path=None):
        """Persist the current login status to *cache_path* (defaults to the
        path given at construction)."""
        logger.debug('{}: dumping login status'.format(self))
        return self.core.dump_login_status((cache_path or self.cache_path))
    def enable_puid(self, path='wxpy_puid.pkl', puid_logs=None):
        """Enable persistent unique IDs for chats, backed by *path*."""
        self.puid_map = PuidMap(path, puid_logs)
        return self.puid_map
    def except_self(self, chats_or_dicts):
        """Return *chats_or_dicts* with the bot's own account filtered out."""
        return list(filter((lambda x: (get_user_name(x) != self.self.user_name)), chats_or_dicts))
    def chats(self, update=False):
        """Return all chats: friends + groups + mps."""
        return Chats(((self.friends(update) + self.groups(update)) + self.mps(update)), self)
    def _retrieve_itchat_storage(self, attr):
        # Read an itchat storage attribute under its update lock.
        with self.core.storageClass.updateLock:
            return getattr(self.core.storageClass, attr)
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response(Friend)
    def friends(self, update=False):
        """Return the friend list, optionally refreshing from the server."""
        if update:
            logger.info('{}: updating friends'.format(self))
            return self.core.get_friends(update=update)
        else:
            return self._retrieve_itchat_storage('memberList')
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response(Group)
    def groups(self, update=False, contact_only=False):
        """Return the group list, optionally refreshing / contacts only."""
        if (update or contact_only):
            logger.info('{}: updating groups'.format(self))
            return self.core.get_chatrooms(update=update, contactOnly=contact_only)
        else:
            return self._retrieve_itchat_storage('chatroomList')
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response(MP)
    def mps(self, update=False):
        """Return the official-account (MP) list, optionally refreshing."""
        if update:
            logger.info('{}: updating mps'.format(self))
            return self.core.get_mps(update=update)
        else:
            return self._retrieve_itchat_storage('mpList')
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response(User)
    def user_details(self, user_or_users, chunk_size=50):
        """Fetch detailed info for one user or a list of users, querying the
        server in chunks of *chunk_size*."""
        def chunks():
            total = ensure_list(user_or_users)
            for i in range(0, len(total), chunk_size):
                (yield total[i:(i + chunk_size)])
        # NOTE(review): presumed stripped decorator line -- confirm upstream.
        _response()
        def process_one_chunk(_chunk):
            return self.core.update_friend(userName=get_user_name(_chunk))
        if isinstance(user_or_users, (list, tuple)):
            ret = list()
            for chunk in chunks():
                chunk_ret = process_one_chunk(chunk)
                if isinstance(chunk_ret, list):
                    ret += chunk_ret
                else:
                    ret.append(chunk_ret)
            return ret
        else:
            return process_one_chunk(user_or_users)
    def search(self, keywords=None, **attributes):
        """Search all chats by keywords and/or attribute filters."""
        return self.chats().search(keywords, **attributes)
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response()
    def add_friend(self, user, verify_content=''):
        """Send a friend request (status=2) to *user*."""
        logger.info('{}: adding {} (verify_content: {})'.format(self, user, verify_content))
        user_name = get_user_name(user)
        return self.core.add_friend(userName=user_name, status=2, verifyContent=verify_content, autoUpdate=True)
    # NOTE(review): presumed stripped decorator line -- confirm upstream.
    _response()
    def add_mp(self, user):
        """Follow an official account (status=1)."""
        logger.info('{}: adding {}'.format(self, user))
        user_name = get_user_name(user)
        return self.core.add_friend(userName=user_name, status=1, autoUpdate=True)
    def accept_friend(self, user, verify_content=''):
        """Accept a friend request (status=3) and return the new Friend."""
        logger.info('{}: accepting {} (verify_content: {})'.format(self, user, verify_content))
        # NOTE(review): presumed stripped decorator line -- confirm upstream.
        _response()
        def do():
            return self.core.add_friend(userName=get_user_name(user), status=3, verifyContent=verify_content, autoUpdate=True)
        do()
        # The accepted user should now appear in the friend list.
        for friend in self.friends():
            if (friend == user):
                return friend
    def create_group(self, users, topic=None):
        """Create a group chat with *users* (self excluded automatically)."""
        logger.info('{}: creating group (topic: {}), with users:\n{}'.format(self, topic, pformat(users)))
        # NOTE(review): presumed stripped decorator line -- confirm upstream.
        _response()
        def request():
            return self.core.create_chatroom(memberList=dict_list, topic=(topic or ''))
        dict_list = wrap_user_name(self.except_self(ensure_list(users)))
        ret = request()
        user_name = ret.get('ChatRoomName')
        if user_name:
            return Group(self.core.update_chatroom(userName=user_name), self)
        else:
            from ..utils import decode_text_from_webwx
            ret = decode_text_from_webwx(pformat(ret))
            raise Exception('Failed to create group:\n{}'.format(ret))
    def upload_file(self, path):
        """Upload a local file and return its server-side MediaId."""
        logger.info('{}: uploading file: {}'.format(self, path))
        # NOTE(review): presumed stripped decorator line -- confirm upstream.
        _response()
        def do():
            upload = functools.partial(self.core.upload_file, fileDir=path)
            ext = os.path.splitext(path)[1].lower()
            # Pictures and mp4 videos need dedicated upload flags.
            if (ext in ('.bmp', '.png', '.jpeg', '.jpg', '.gif')):
                return upload(isPicture=True)
            elif (ext == '.mp4'):
                return upload(isVideo=True)
            else:
                return upload()
        return do().get('MediaId')
    def _process_message(self, msg):
        """Dispatch one incoming message to its registered handler (if any)."""
        if (not self.alive):
            return
        config = self.registered.get_config(msg)
        logger.debug('{}: new message (func: {}):\n{}'.format(self, (config.func.__name__ if config else None), msg))
        if config:
            def process():
                try:
                    ret = config.func(msg)
                    # A non-None handler return value is sent back as a reply.
                    if (ret is not None):
                        msg.reply(ret)
                except:
                    logger.exception('an error occurred in {}.'.format(config.func))
                if (self.auto_mark_as_read and (not (msg.type == SYSTEM)) and (msg.sender != self.self)):
                    from .. import ResponseError
                    try:
                        msg.chat.mark_as_read()
                    except ResponseError as e:
                        logger.warning('failed to mark as read: {}'.format(e))
            if config.run_async:
                start_new_thread(process, use_caller_name=True)
            else:
                process()
    def register(self, chats=None, msg_types=None, except_self=True, run_async=True, enabled=True):
        """Decorator factory: register the wrapped function as a message
        handler filtered by chats / message types."""
        def do_register(func):
            self.registered.append(MessageConfig(bot=self, func=func, chats=chats, msg_types=msg_types, except_self=except_self, run_async=run_async, enabled=enabled))
            return func
        return do_register
    def _listen(self):
        # Background loop: drain the core's message queue and dispatch.
        try:
            logger.info('{}: started'.format(self))
            self.is_listening = True
            while (self.alive and self.is_listening):
                try:
                    msg = Message(self.core.msgList.get(timeout=0.5), self)
                except queue.Empty:
                    continue
                if (msg.type != SYSTEM):
                    self.messages.append(msg)
                try:
                    self._process_message(msg)
                except:
                    logger.exception('an error occurred while processing msg:\n{}'.format(msg))
        finally:
            self.is_listening = False
            logger.info('{}: stopped'.format(self))
    def start(self):
        """Start the listening thread (no-op with a warning if dead or
        already listening)."""
        if (not self.alive):
            logger.warning('{} has been logged out!'.format(self))
        elif self.is_listening:
            logger.warning('{} is already running, no need to start again.'.format(self))
        else:
            self.listening_thread = start_new_thread(self._listen)
    def stop(self):
        """Stop the listening thread and wait for it to finish."""
        if self.is_listening:
            self.is_listening = False
            self.listening_thread.join()
        else:
            logger.warning('{} is not running.'.format(self))
    def join(self):
        """Block the caller until the listening thread exits."""
        if isinstance(self.listening_thread, Thread):
            with suppress(KeyboardInterrupt):
                logger.info('{}: joined'.format(self))
                self.listening_thread.join()
    def cleanup(self):
        """Stop listening, optionally dump the login status, and remove the
        temporary directory."""
        if self.is_listening:
            self.stop()
        if (self.alive and self.core.useHotReload):
            self.dump_login_status()
        self.alive = False
        self.temp_dir.cleanup()
class TestSdmCommonParser(unittest.TestCase):
    """Unit tests for SdmCommonParser across several ICD versions.

    NOTE(review): several payload hex strings look truncated (odd length
    for ``binascii.unhexlify``) and most ``timestamp=`` keyword arguments
    lost their values during extraction -- restore from the original test
    file before running.
    """
    parser = SdmCommonParser(parent=None)
    def test_sdm_common_basic_info(self):
        """Exercise sdm_common_basic_info under ICD 4.54 / 4.128 / 5.128 /
        6.34 / 8.0 payload layouts."""
        # ICD 4.54 layout.
        self.parser.icd_ver = (4, 54)
        payload = binascii.unhexlify('cac6d')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 0, MIMO 3, Frequency DL 1840.00 MHz/UL 1745.00 MHz'}
        self.assertDictEqual(result, expected)
        # ICD 4.128 layout adds the Extra field.
        self.parser.icd_ver = (4, 128)
        payload = binascii.unhexlify('cac6d')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 4, MIMO 3, Frequency DL 1840.00 MHz/UL 1745.00 MHz, Extra: 0x'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('dc29c808f9b951f7e7f1a')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 0, MIMO 2, Frequency DL 2630.00 MHz/UL 2510.00 MHz, Extra: 0x1a7f7e1f'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('dc29c808f9b95157e7f1a')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 0, MIMO 2, Frequency DL 2630.00 MHz/UL 2510.00 MHz, Extra: 0x1a7f7e15'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('dc29c808f9b957f1a0000')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 4, MIMO 2, Frequency DL 2630.00 MHz/UL 2510.00 MHz, Extra: 0x00001a7f'}
        self.assertDictEqual(result, expected)
        # ICD 5.128 layout.
        self.parser.icd_ver = (5, 128)
        payload = binascii.unhexlify('')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT WCDMA, Status 5, MIMO 1, Frequency -/-, Extra: 0x'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('c')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT GPRS, Status 2, MIMO 0, Frequency DL 958.40 MHz/UL 913.40 MHz, Extra: 0x'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('170003c0b32e6c001e85660d0a004b')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 0, MIMO 3, Frequency DL 1815.00 MHz/UL 1720.00 MHz, Extra: 0x4b000a0d'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('190000ffffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT UNKNOWN (0x19), Status 0, MIMO 0, Frequency -/-, Extra: 0x'}
        self.assertDictEqual(result, expected)
        # ICD 6.34 layout adds the cell count.
        self.parser.icd_ver = (6, 34)
        payload = binascii.unhexlify('b9fe7fe0c6553a006f30c300ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT WCDMA, Status 4, MIMO 0, Frequency DL 2147.40 MHz/UL UARFCN 9787, Extra: 0xc3306f00, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('eaff30c300ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT GPRS, Status 1, MIMO 0, Frequency -/-, Extra: 0xc3306f00, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('e13820df30c300ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 0, MIMO 3, Frequency DL 954.30 MHz/UL 909.30 MHz, Extra: 0xc3306f00, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('190000ffffffffffffffff006f30c300ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT UNKNOWN (0x19), Status 0, MIMO 0, Frequency -/-, Extra: 0xc3306f00, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('e13820df30c300ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT NR NSA, Status 4, MIMO 3, Frequency DL 954.30 MHz/UL 909.30 MHz, Extra: 0xc3306f00, Num cells: 0'}
        self.assertDictEqual(result, expected)
        # ICD 8.0 layout.
        self.parser.icd_ver = (8, 0)
        payload = binascii.unhexlify('fcc9114700ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT GPRS, Status 4, MIMO 0, Frequency DL 946.80 MHz/UL 901.80 MHz, Extra: 0x4711c900, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('170005c0b32e6ceffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT LTE, Status 0, MIMO 5, Frequency DL 1815.00 MHz/UL 1720.00 MHz, Extra: 0x, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('190000ffffffffffffffffffffffffffffffff00b0c9df00ffffffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT UNKNOWN (0x19), Status 0, MIMO 0, Frequency -/-, Extra: 0xdfc9b000, Num cells: 0'}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('200405c0b32e6ceffffffffff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_BASIC_INFO, payload, timestamp=)
        result = self.parser.sdm_common_basic_info(packet)
        expected = {'stdout': 'Common Basic Info: RAT NR NSA, Status 4, MIMO 5, Frequency DL 1815.00 MHz/UL 1720.00 MHz, Extra: 0x, Num cells: 2 (5, 3)'}
        self.assertDictEqual(result, expected)
    def test_sdm_common_signaling(self):
        """sdm_common_signaling should strip the 2-byte header and return the
        remaining control-plane bytes."""
        payload = binascii.unhexlify('01ffabc10a19d3a136b8240e4b9795537c82010d2fea6dac1e87fff23883fd')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('abc10a19d3a136b8240e4b9795537c82010d2fea6dac1e87fff23883fd')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('01ff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('21ffb2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('bb2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('21ffb771021ec118acacacacacacacacacacacacacaca')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('bb771021ec118acacacacacacacacacacacacacaca')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('20ff')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('20ff0217002d062200f5d97e6de1eae02d2b2b2b2b2b2b2b2b2b2b2b')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('d062200f5d97e6de1eae02d2b2b2b2b2b2b2b2b2b2b2b')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('20ffd55cc805d345e')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('d55cc805d345e')]}
        self.assertDictEqual(result, expected)
        payload = binascii.unhexlify('20ffb611401eca')
        packet = sdmcmd.generate_sdm_packet(160, sdmcmd.sdm_command_group.CMD_COMMON_DATA, sdmcmd.sdm_common_data.COMMON_SIGNALING_INFO, payload, timestamp=0)
        result = self.parser.sdm_common_signaling(packet)
        expected = {'cp': [binascii.unhexlify('b611401eca')]}
        self.assertDictEqual(result, expected)
class BCycle(Gbfs):
    """A GBFS-backed BCycle network, identified by its *uid* in the feed URL."""

    meta = {'system': 'BCycle', 'company': ['BCycle, LLC']}

    def __init__(self, tag, meta, uid, bbox=None):
        # Append the operator attribution to the caller-supplied company
        # list; ``+=`` intentionally extends that list in place.
        if 'company' in meta:
            meta['company'] += BCycle.meta['company']
        url = FEED_URL.format(uid=uid)
        super(BCycle, self).__init__(tag, meta, url, bbox=bbox)
def import_symbol(symbol_path):
    """Import and return a symbol addressed by a dotted path.

    Accepts either ``"package.module:attr"`` (explicit colon separator) or
    ``"package.module.attr"`` (last dot separates module from attribute).

    :param symbol_path: path string in one of the two forms above
    :return: the resolved module attribute
    :raises ImportError: if the module part cannot be imported
    :raises AttributeError: if the attribute is missing from the module
    """
    if ':' in symbol_path:
        # split(':', 1): only the first colon separates module from symbol.
        module_name, symbol_name = symbol_path.split(':', 1)
    else:
        # Everything before the last dot is the module path.
        module_name, _, symbol_name = symbol_path.rpartition('.')
    module = import_module(module_name)
    # BUG FIX: the ':' branch previously called undefined ``xgetattr``,
    # which raised NameError for every colon-style path.
    return getattr(module, symbol_name)
# NOTE(review): the bare ``.integration`` below looks like a stripped
# ``@pytest.mark.integration`` class decorator -- confirm upstream.
.integration
class TestUser():
    """Integration tests for the ``fides user`` CLI subcommands (login,
    create, permissions), driven through a Click test runner and a
    credentials file addressed via FIDES_CREDENTIALS_PATH."""
    def test_user_login_provide_credentials(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """Login succeeds when username and password are passed as flags."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login', '-u', 'root_user', '-p', 'Testpassword1!'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        print(result.output)
        assert (result.exit_code == 0)
    def test_user_login_env_var_failed(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """Login fails with an unknown username supplied via env vars."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login'], env={'FIDES_CREDENTIALS_PATH': credentials_path, 'FIDES__USER__USERNAME': 'fakeuser', 'FIDES__USER__PASSWORD': 'Testpassword1!'})
        print(result.output)
        assert (result.exit_code == 1)
    def test_user_login_env_var_password(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """Login succeeds with -u flag plus password from the environment."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login', '-u', 'root_user'], env={'FIDES_CREDENTIALS_PATH': credentials_path, 'FIDES__USER__PASSWORD': 'Testpassword1!'})
        print(result.output)
        assert (result.exit_code == 0)
    def test_user_login_env_var_credentials(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """Login succeeds with both username and password from the environment."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login'], env={'FIDES_CREDENTIALS_PATH': credentials_path, 'FIDES__USER__USERNAME': 'root_user', 'FIDES__USER__PASSWORD': 'Testpassword1!'})
        print(result.output)
        assert (result.exit_code == 0)
    def test_user_create(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """A newly created user can log in and receives the full owner
        scope set and the OWNER role."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'create', 'newuser', 'Newpassword1!'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        print(result.output)
        assert (result.exit_code == 0)
        test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login', '-u', 'newuser', '-p', 'Newpassword1!'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        credentials = read_credentials_file(credentials_path)
        (total_scopes, roles) = get_user_permissions(credentials.user_id, get_auth_header(), CONFIG.cli.server_url)
        assert (set(total_scopes) == set(SCOPE_REGISTRY))
        assert (roles == [OWNER])
    def test_user_permissions_valid(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """``user permissions`` succeeds for a valid credentials file."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'permissions'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        print(result.output)
        assert (result.exit_code == 0)
    def test_get_self_user_permissions(self, test_config_path, test_cli_runner, credentials_path) -> None:
        """The root client holds every scope and the OWNER role."""
        test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login', '-u', 'root_user', '-p', 'Testpassword1!'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        (total_scopes, roles) = get_user_permissions(CONFIG.security.oauth_root_client_id, get_auth_header(), CONFIG.cli.server_url)
        assert (set(total_scopes) == set(SCOPE_REGISTRY))
        assert (roles == [OWNER])
    # NOTE(review): bare ``.unit`` below looks like a stripped
    # ``@pytest.mark.unit`` decorator -- confirm upstream.
    .unit
    def test_get_self_user_systems(self, test_config_path, test_cli_runner, credentials_path) -> None:
        """The root client manages no systems."""
        test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login', '-u', 'root_user', '-p', 'Testpassword1!'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        systems = get_systems_managed_by_user(CONFIG.security.oauth_root_client_id, get_auth_header(), CONFIG.cli.server_url)
        assert (systems == [])
    def test_get_other_user_perms_and_systems(self, test_config_path, test_cli_runner, credentials_path, system_manager) -> None:
        """A system manager is a VIEWER managing exactly its fixture system."""
        test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'login', '-u', 'root_user', '-p', 'Testpassword1!'], env={'FIDES_CREDENTIALS_PATH': credentials_path})
        (total_scopes, roles) = get_user_permissions(system_manager.id, get_auth_header(), CONFIG.cli.server_url)
        assert (roles == [VIEWER])
        systems = get_systems_managed_by_user(system_manager.id, get_auth_header(), CONFIG.cli.server_url)
        assert (systems == [system_manager.systems[0].fides_key])
    def test_user_permissions_not_found(self, test_config_path: str, test_cli_runner: CliRunner, credentials_path: str) -> None:
        """``user permissions`` fails when the credentials file is missing."""
        print(credentials_path)
        result = test_cli_runner.invoke(cli, ['-f', test_config_path, 'user', 'permissions'], env={'FIDES_CREDENTIALS_PATH': '/root/notarealfile.credentials'})
        print(result.output)
        assert (result.exit_code == 1)
def randu(bound=(0, 10), shape=(5, 5), missingness='mcar', thr=0.2, dtype='int'):
    """Return a uniformly random matrix with simulated missingness.

    :param bound: (lower, upper) sampling bounds
    :param shape: shape of the generated array
    :param missingness: name of the ``Corruptor`` method used to inject
        missing values (e.g. ``'mcar'``)
    :param thr: fraction of values to corrupt
    :param dtype: ``'int'`` or ``'float'`` sampling mode
    :return: the corrupted array produced by the chosen Corruptor method
    :raises ValueError: for an unrecognised *dtype* (previously this fell
        through and crashed with ``NameError: data``)
    """
    if dtype == 'int':
        # randint samples in [low, high); cast to float so NaNs can be inserted.
        data = np.random.randint(bound[0], bound[1], size=shape).astype(float)
    elif dtype == 'float':
        data = np.random.uniform(bound[0], bound[1], size=shape)
    else:
        raise ValueError("dtype must be 'int' or 'float', got {!r}".format(dtype))
    corruptor = Corruptor(data, thr=thr)
    raw_data = getattr(corruptor, missingness)()
    return raw_data
def interpret_token(val: str) -> list[str]:
    """Expand a single token into a list of strings.

    Rules, checked in order:
      * ``'...'`` (single-quoted)   -> the quoted text, unquoted
      * starts with a letter        -> the token itself
      * ``N*<token>``               -> N repetitions of <token>, recursively
      * anything else               -> the raw token

    :param val: the token text to interpret
    :return: the expanded list of strings
    :raises ValueError: if the multiplicand of a ``*`` token is not an int
    """
    # Guard: an empty token previously raised IndexError on val[0].
    if not val:
        return [val]
    if val[0] == "'" and val[-1] == "'":
        return [val[1:-1]]
    if val[0].isalpha():
        return [val]
    if '*' in val:
        # Split only on the first '*' so the repeated part may itself
        # contain '*' (previously a second '*' raised ValueError from
        # tuple unpacking).
        multiplicand, value = val.split('*', 1)
        return interpret_token(value) * int(multiplicand)
    return [val]
class _CSP():
def from_prog_name(cls, prog_name):
csp = Path(prog_name).name.replace('run', '')
if (csp not in ['aws', 'azure']):
raise Exception(f'unknown variant: {csp}')
return cls(csp)
def __init__(self, name):
self.name = name
def config_filename(self):
env_var = (self.name.upper() + 'RUN_CONFIG')
dotfile = (('.' + self.name.lower()) + 'run.yaml')
return os.environ.get(env_var, (Path.home() / dotfile))
def default_command_path(self):
return ('awsrun.commands.' + self.name.lower())
def default_session_provider(self):
return (('awsrun.plugins.creds.' + self.name.lower()) + '.Default') |
class ReadEnv():
    """Reads KPTOP_* environment variables into GlobalAttrs.

    Only KPTOP_PROMETHEUS_SERVER is mandatory; all other variables are
    optional and leave the GlobalAttrs defaults untouched when unset.
    """

    def __init__(self):
        pass

    def read_env(self):
        """Populate GlobalAttrs from the environment.

        Raises SystemExit when the mandatory KPTOP_PROMETHEUS_SERVER is
        missing, or exits when boolean-style variables carry invalid values.
        """
        try:
            GlobalAttrs.env_prometheus_server = os.environ['KPTOP_PROMETHEUS_SERVER']
        except KeyError as e:
            # Mandatory setting: abort, naming the missing variable.
            raise SystemExit(f'''
ERROR -- ENV not found => {e}''')
        try:
            if os.environ['KPTOP_BASIC_AUTH_ENABLED']:
                GlobalAttrs.env_basic_auth_enabled = os.environ['KPTOP_BASIC_AUTH_ENABLED']
                GlobalAttrs.env_prometheus_username = os.environ['KPTOP_PROMETHEUS_USERNAME']
                GlobalAttrs.env_prometheus_password = os.environ['KPTOP_PROMETHEUS_PASSWORD']
            if GlobalAttrs.env_basic_auth_enabled:
                if (GlobalAttrs.env_prometheus_username is None) or (GlobalAttrs.env_prometheus_password is None):
                    raise SystemExit('INFO -- ENV: KPTOP_PROMETHEUS_USERNAME or KPTOP_PROMETHEUS_PASSWORD is missing')
            if GlobalAttrs.env_basic_auth_enabled not in [True, False, 'true', 'false']:
                print("INFO -- KPTOP_BASIC_AUTH_ENABLED > allowed options are: 'true' || 'false'")
                exit(1)
            # NOTE(review): KPTOP_INSECURE is only read further down, so this
            # check only ever validates the pre-existing default -- TODO confirm.
            if GlobalAttrs.env_insecure not in [True, False, 'true', 'false']:
                print("INFO -- KPTOP_INSECURE > allowed options are: 'true' || 'false'")
                exit(1)
        except KeyError:
            # Basic auth is optional; a missing variable keeps the defaults.
            # (Bugfix: the original constructed a SystemExit here without
            # raising it -- dead code that always fell through silently.)
            pass
        # Optional settings below: missing variables keep the GlobalAttrs defaults.
        try:
            GlobalAttrs.node_exporter_node_label = os.environ['KPTOP_NODE_EXPORTER_NODE_LABEL']
        except KeyError:
            pass
        try:
            GlobalAttrs.env_insecure = os.environ['KPTOP_INSECURE']
        except KeyError:
            pass
        try:
            GlobalAttrs.start_graphs_with_zero = os.environ['KPTOP_START_GRAPHS_WITH_ZERO']
        except KeyError:
            pass
        try:
            GlobalAttrs.log_dir = os.environ['KPTOP_LOGGING_DIR']
        except KeyError:
            pass
        try:
            # int() may also fail on a non-numeric value; ignore both cases,
            # matching the original broad-except behavior.
            GlobalAttrs.graphs_width = int(os.environ['KPTOP_GRAPH_WIDTH'])
        except (KeyError, ValueError):
            pass
class Integer(object):
def __init__(self, maximum, increment=256):
self._maximum = maximum
if (increment >= maximum):
increment = maximum
self._increment = increment
self._threshold = (increment // 2)
e = random.randrange((self._maximum - self._increment))
self._bank = list(range(e, (e + self._increment)))
def __repr__(self):
return ('%s(%d, %d)' % (self.__class__.__name__, self._maximum, self._increment))
def __call__(self):
v = self._bank.pop(0)
if (v % self._threshold):
return v
e = (self._bank[(- 1)] + 1)
if (e > self._maximum):
e = 0
self._bank.extend(range(e, (e + self._threshold)))
return v |
def test_inputs_outputs_length():
    """Map tasks over multi-input functions: each functools.partial binding
    removes one input from the mapped (list-typed) interface, and functions
    with multiple outputs are rejected."""

    def many_inputs(a: int, b: str, c: float) -> str:
        return f'{a} - {b} - {c}'

    # No bindings: every input becomes a list; the name embeds an interface hash.
    m = array_node_map_task(many_inputs)
    assert (m.python_interface.inputs == {'a': List[int], 'b': List[str], 'c': List[float]})
    assert (m.name == 'tests.flytekit.unit.core.test_array_node_map_task.map_many_inputs_4ee240ef5cf979dbc133fb30035cb874-arraynode')
    r_m = ArrayNodeMapTask(many_inputs)
    assert (str(r_m.python_interface) == str(m.python_interface))
    # Binding c: it stays scalar while a and b are mapped.
    p1 = functools.partial(many_inputs, c=1.0)
    m = array_node_map_task(p1)
    assert (m.python_interface.inputs == {'a': List[int], 'b': List[str], 'c': float})
    assert (m.name == 'tests.flytekit.unit.core.test_array_node_map_task.map_many_inputs_352fcdea8523a83134b51bbf5793f14e-arraynode')
    # NOTE: set('c') == {'c'} only because the bound name is a single character.
    r_m = ArrayNodeMapTask(many_inputs, bound_inputs=set('c'))
    assert (str(r_m.python_interface) == str(m.python_interface))
    # Binding b on top of p1: only a remains mapped.
    p2 = functools.partial(p1, b='hello')
    m = array_node_map_task(p2)
    assert (m.python_interface.inputs == {'a': List[int], 'b': str, 'c': float})
    assert (m.name == 'tests.flytekit.unit.core.test_array_node_map_task.map_many_inputs_e224ba3a5b00e08083d541a6ca99b179-arraynode')
    r_m = ArrayNodeMapTask(many_inputs, bound_inputs={'c', 'b'})
    assert (str(r_m.python_interface) == str(m.python_interface))
    # Binding all inputs: nothing is mapped any more.
    p3 = functools.partial(p2, a=1)
    m = array_node_map_task(p3)
    assert (m.python_interface.inputs == {'a': int, 'b': str, 'c': float})
    assert (m.name == 'tests.flytekit.unit.core.test_array_node_map_task.map_many_inputs_f080e60be9d6faedeef0c74834d6812a-arraynode')
    r_m = ArrayNodeMapTask(many_inputs, bound_inputs={'a', 'c', 'b'})
    assert (str(r_m.python_interface) == str(m.python_interface))
    # Calling a fully-bound map task with a list argument must fail.
    with pytest.raises(TypeError):
        m(a=[1, 2, 3])

    def many_outputs(a: int) -> (int, str):
        return (a, f'{a}')

    # Multiple outputs are not supported by map tasks.
    with pytest.raises(ValueError):
        _ = array_node_map_task(many_outputs)
(name='testwells')
def fixture_testwells(testpath):
    """Load the battle-field test wells (RMS ASCII format) as xtgeo Wells."""
    names = ['WELL29', 'WELL14', 'WELL30', 'WELL27', 'WELL23', 'WELL32', 'WELL22', 'WELL35', 'WELLX']
    paths = []
    for name in names:
        paths.append(join(testpath, 'wells', 'battle', '1', name + '.rmswell'))
    return xtgeo.wells_from_files(paths, fformat='rms_ascii')
class ReadBed(object):
    """Iterator over a BED file yielding namedtuple intervals.

    The file type (bed3/bedgraph/bed6/bed9/bed12) is guessed from the first
    data line; the file must be sorted by chromosome and start position.
    """

    def __init__(self, file_handle):
        """Guess the file type from the first non-comment line, rewind the
        handle, and build the namedtuple class matching that type."""
        self.file_type = None
        self.file_handle = file_handle
        self.line_number = 0
        fields = self.get_no_comment_line()
        fields = toString(fields)
        fields = fields.split('\t')
        self.guess_file_type(fields)
        self.file_handle.seek(0)
        # Previous interval, used to enforce sort order in __next__.
        self.prev_chrom = None
        self.prev_start = -1
        self.prev_line = None
        self.fields = ['chromosome', 'start', 'end', 'name', 'score', 'strand',
                       'thick_start', 'thick_end', 'rgb', 'block_count',
                       'block_sizes', 'block_starts']
        if self.file_type == 'bed12':
            self.BedInterval = collections.namedtuple('BedInterval', self.fields)
        elif self.file_type == 'bed9':
            self.BedInterval = collections.namedtuple('BedInterval', self.fields[:9])
        else:
            self.BedInterval = collections.namedtuple('BedInterval', self.fields[:6])

    def __iter__(self):
        return self

    def get_no_comment_line(self):
        """Return the next data line, skipping comment/track/browser/blank lines."""
        line = next(self.file_handle)
        line = toString(line)
        if line.startswith('#') or line.startswith('track') or line.startswith('browser') or line.strip() == '':
            line = self.get_no_comment_line()
        self.line_number += 1
        return line

    def guess_file_type(self, line_values):
        """Set and return self.file_type based on the number of fields."""
        if len(line_values) == 3:
            self.file_type = 'bed3'
        elif len(line_values) == 4:
            self.file_type = 'bedgraph'
        elif len(line_values) == 6:
            self.file_type = 'bed6'
        elif len(line_values) == 12:
            self.file_type = 'bed12'
        elif len(line_values) == 9:
            self.file_type = 'bed9'
        elif len(line_values) > 6:
            self.file_type = 'bed6'
            log.debug('Number of fields in BED file is not standard. Assuming bed6.')
        else:
            self.file_type = 'bed3'
            log.debug('Number of fields in BED file is not standard. Assuming bed3.')
        return self.file_type

    def __next__(self):
        """Return the next interval, asserting the file is sorted."""
        line = self.get_no_comment_line()
        bed = self.get_bed_interval(line)
        if self.prev_chrom == bed.chromosome:
            assert (self.prev_start <= bed.start), 'Bed file not sorted. Please use a sorted bed file.\nFile: {}\nPrevious line: {}\n Current line{} '.format(self.file_handle.name, self.prev_line, line)
        self.prev_chrom = bed.chromosome
        self.prev_start = bed.start
        self.prev_line = line
        return bed

    def next(self):
        """Python 2 compatibility alias (was previously a full duplicate of
        __next__'s body)."""
        return self.__next__()

    def get_bed_interval(self, bed_line):
        """Parse one BED line into a BedInterval namedtuple.

        Returns an empty dict when an integer field fails to parse.
        """
        line_data = bed_line.strip()
        line_data = toString(line_data)
        line_data = line_data.split('\t')
        # Bugfix: this previously compared self.file_handle (a file object) to
        # 'bed12', so the bed12 field-count check could never run.
        if self.file_type == 'bed12':
            assert (len(line_data) == 12), 'File type detected is bed12 but line {}: {} does not have 12 fields.'.format(self.line_number, bed_line)
        elif self.file_type == 'bed3':
            assert (len(line_data) == 3), 'File type detected is bed3 but line {}: {} does not have 3 fields.'.format(self.line_number, bed_line)
        elif self.file_type == 'bed6':
            assert (len(line_data) == 6), 'File type detected is bed6 but line {}: {} does not have 6 fields.'.format(self.line_number, bed_line)
        line_values = []
        for idx, r in enumerate(line_data):
            if idx in [0, 3]:
                # chromosome and name stay strings.
                line_values.append(r)
            elif idx == 5:
                # Strand: normalize 1/-1, otherwise fall back to '.'.
                if r not in ['+', '-', '.']:
                    if r == '1':
                        r = '+'
                    elif r == '-1':
                        r = '-'
                    else:
                        # Bugfix: line number and line content were swapped in
                        # this message's format arguments.
                        log.warning("*Warning, invalid strand value found {} for line #{}:\n{}\n Setting strand to '.'\n".format(r, self.line_number, bed_line))
                        r = '.'
                line_values.append(r)
            elif idx in [1, 2, 6, 7, 9]:
                # Coordinate/count fields must be integers.
                try:
                    line_values.append(int(r))
                except ValueError:
                    log.warning('Value: {} in field {} at line {} is not an integer\n'.format(r, idx + 1, self.line_number))
                    return dict()
            elif idx == 8:
                # itemRgb: parse "R,G,B" into a list of ints when well formed.
                r = toString(r)
                rgb = r.split(',')
                if len(rgb) == 3:
                    try:
                        # Bugfix: materialize the map; a lazy map object was
                        # stored before (the ValueError was never triggered
                        # under Python 3).
                        r = list(map(int, rgb))
                    except ValueError as detail:
                        log.debug('Error reading line: #{}. The rgb field {} is not valid.\nError message: {}\n'.format(self.line_number, r, detail))
                line_values.append(r)
            elif idx in [10, 11]:
                # blockSizes/blockStarts: comma-separated ints, ignore trailing ''.
                r = toString(r)
                r_parts = r.split(',')
                try:
                    r = [int(x) for x in r_parts if x != '']
                except ValueError as detail:
                    log.debug('Error reading line #{}. The block field {} is not valid.\nError message: {}\n'.format(self.line_number, r, detail))
                line_values.append(r)
            else:
                # score and any other field: float when possible, else raw.
                try:
                    tmp = float(r)
                except (ValueError, TypeError):
                    tmp = r
                line_values.append(tmp)
        assert (line_values[2] > line_values[1]), 'Start position larger or equal than end for line #{}:\n{}\n'.format(self.line_number, bed_line)
        # Pad/truncate to the detected interval width.
        if self.file_type == 'bed3':
            line_values = line_values[0:3]
            line_values.extend(['.', 0, '.'])
        elif self.file_type == 'bed6':
            line_values = line_values[0:6]
        return self.BedInterval._make(line_values)
def CreateGemmRRRPermOperator(manifest, c_element_op):
    """Register DeviceGemmBiasCPermute_Xdl f16 row/row/row GEMM+permute
    operations for every tile description and GEMM specialization.

    Appends each created operation to *manifest* and returns the list of
    created operations.
    """
    operation_kind = library.GemmKind.GemmPermute
    a_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    b_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    c_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    ds_dtype = [library.DataType.f16]
    e_dtype = library.DataType.f16
    element_op = library.TensorOperation.PassThrough
    tile_descriptions = [gemm.TileDesc(256, 256, 128, 32, 8, 2, 32, 32, 4, 2), gemm.TileDesc(256, 256, 128, 32, 8, 8, 32, 32, 4, 2), gemm.TileDesc(256, 128, 256, 32, 8, 2, 32, 32, 2, 4), gemm.TileDesc(256, 128, 256, 32, 8, 8, 32, 32, 2, 4), gemm.TileDesc(128, 128, 128, 32, 8, 2, 32, 32, 4, 2), gemm.TileDesc(128, 128, 128, 32, 8, 8, 32, 32, 4, 2), gemm.TileDesc(256, 128, 128, 32, 8, 2, 32, 32, 2, 2), gemm.TileDesc(256, 128, 128, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(128, 128, 64, 32, 8, 2, 32, 32, 2, 2), gemm.TileDesc(128, 128, 64, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(128, 64, 128, 32, 8, 2, 32, 32, 2, 2), gemm.TileDesc(128, 64, 128, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(256, 128, 64, 32, 8, 2, 32, 32, 2, 1), gemm.TileDesc(256, 128, 64, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(256, 64, 128, 32, 8, 2, 32, 32, 1, 2), gemm.TileDesc(256, 64, 128, 32, 8, 8, 32, 32, 1, 2)]
    b_block_descriptions = [gemm.BlockTransferDesc([8, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1), gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 4, 8, 1), gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 8, 1), gemm.BlockTransferDesc([8, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1), gemm.BlockTransferDesc([8, 16, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1), gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 8, 1), gemm.BlockTransferDesc([16, 16, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 1, 8, 1), gemm.BlockTransferDesc([8, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0), gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1)]
    # A-side and C-side block-transfer descriptors are derived from each
    # tile's thread-block size and N-per-block.
    a_block_descriptions = []
    c_block_descriptions = []
    for t in tile_descriptions:
        a_block_transfer = None
        c_block_transfer = None
        if t.block_size == 256:
            a_block_transfer = [4, 64, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 1)
        if t.block_size == 128:
            a_block_transfer = [4, 32, 1]
            if t.n_per_block == 128:
                c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 1)
            else:
                c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 4], 1)
        if t.block_size == 64:
            a_block_transfer = [4, 16, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 1)
        # Bugfix: the message string was previously AND-ed into the assert
        # condition (always truthy) instead of being the assert message.
        assert a_block_transfer is not None and c_block_transfer is not None, \
            'Cannot determine block_transfer_size with block_size ' + str(t.block_size)
        a_block_descriptions.append(gemm.BlockTransferDesc(a_block_transfer, [1, 0, 2], [1, 0, 2], 2, 8, 8, 1))
        c_block_descriptions.append(c_block_transfer)
    gemm_specialization = [gemm.GemmSpecialization.GemmDefault, gemm.GemmSpecialization.MNKPadding]
    operations = []
    for gemm_spec in gemm_specialization:
        for (tile_desc, a_block_desc, b_block_desc, c_block_desc) in zip(tile_descriptions, a_block_descriptions, b_block_descriptions, c_block_descriptions):
            new_operation = gemm.GemmOperation(operation_kind=operation_kind, extra_kind=c_element_op, xdl_op_type=gemm.XdlOpType.DeviceGemmBiasCPermute_Xdl, A=a_element_desc, B=b_element_desc, C=c_element_desc, a_elem_op=element_op, b_elem_op=element_op, epilogue_functor=c_element_op, gemm_specialization=gemm_spec, tile_desc=tile_desc, a_block_transfer=a_block_desc, b_block_transfer=b_block_desc, c_block_transfer=c_block_desc, ds_dtype=ds_dtype, e_dtype=e_dtype)
            manifest.append(new_operation)
            operations.append(new_operation)
    return operations
_required
def account():
    """Render the account page with the user's e-mail addresses and, when a
    Stripe customer exists, their cards and active subscription."""
    emails = {
        'verified': (e.address for e in current_user.emails.order_by(Email.registered_on.desc())),
        'pending': filter(bool, request.cookies.get('pending-emails', '').split(',')),
    }
    subscription = None
    cards = {}
    if current_user.stripe_id:
        # Map Stripe card brands to CSS icon names.
        brand_icons = {'Visa': 'cc-visa', 'American Express': 'cc-amex', 'MasterCard': 'cc-mastercard', 'Discover': 'cc-discover', 'JCB': 'cc-jcb', 'Diners Club': 'cc-diners-club', 'Unknown': 'credit-card'}
        try:
            customer = stripe.Customer.retrieve(current_user.stripe_id)
            cards = customer.sources.all(object='card').data
            for card in cards:
                if customer.default_source == card.id:
                    card.default = True
                card.css_name = brand_icons[card.brand]
            if customer.subscriptions.data:
                subscription = customer.subscriptions.data[0]
                subscription.current_period_end = datetime.datetime.fromtimestamp(subscription.current_period_end).strftime('%A, %B %d, %Y')
        except stripe.error.StripeError:
            return render_template('error.html', title='Unable to connect', text=("We're unable to make a secure connection to verify your account details. Please try again in a little bit. If this problem persists, please contact <strong>%s</strong>" % settings.CONTACT_EMAIL))
    return render_template('users/account.html', emails=emails, cards=cards, sub=subscription)
class OSCIGeneralRankingSchema():
    """Column-name schema for the combined OSCI ranking report.

    Derived column names compose base names from OSCIChangeRankingSchema /
    OSCIGrowthSpeedSchema with period suffixes (YTD = year-to-date,
    MTD = month-to-date, DTD = day-to-date).  The double-underscore suffix
    constants are name-mangled class-private helpers used only while the
    class body executes.
    """
    position = OSCIChangeRankingSchema.position
    # Period suffixes used to build the derived column names below.
    __ytd = 'YTD'
    __dtd = 'DTD'
    __mtd = 'MTD'
    change_suffix = 'Change'
    position_change = f'{OSCIChangeRankingSchema.position_change}'
    position_change_ytd = f'{position_change}_{__ytd}'
    position_change_dtd = f'{position_change}_{__dtd}'
    position_growth_speed = OSCIGrowthSpeedSchema.position_growth
    # Commit counts per period.
    commits = 'Commits'
    commits_ytd = f'{commits}_{__ytd}'
    commits_mtd = f'{commits}_{__mtd}'
    # Total contributor counts and their per-period changes.
    total = OSCIChangeRankingSchema.total
    total_ytd = f'{total}_{__ytd}'
    total_mtd = f'{total}_{__mtd}'
    total_dtd = f'{total}_{__dtd}'
    total_change = OSCIChangeRankingSchema.total_change
    total_change_ytd = f'{total_change}_{__ytd}'
    total_change_dtd = f'{total_change}_{__dtd}'
    total_growth_speed = OSCIGrowthSpeedSchema.total_growth
    # Active contributor counts and their per-period changes.
    active = OSCIChangeRankingSchema.active
    active_ytd = f'{active}_{__ytd}'
    active_mtd = f'{active}_{__mtd}'
    active_dtd = f'{active}_{__dtd}'
    active_change = OSCIChangeRankingSchema.active_change
    active_change_ytd = f'{active_change}_{__ytd}'
    active_change_dtd = f'{active_change}_{__dtd}'
    active_growth_speed = OSCIGrowthSpeedSchema.active_growth
    company = OSCIChangeRankingSchema.company
    # Columns that must be present in the final report.
    required = frozenset([position, position_change_ytd, position_change_dtd, position_growth_speed, commits_mtd, commits_ytd, total_ytd, total_mtd, total_dtd, total_change_ytd, total_change_dtd, total_growth_speed, active_ytd, active_mtd, active_dtd, active_change_ytd, active_change_dtd, active_growth_speed, company])
def test_redis5_master_failed_sentinel_failover(pysoa_client: Client) -> None:
    """Kill the Redis master and verify Sentinel promotes a replica while a
    background client worker keeps succeeding before and after the failover."""
    context = _new_context()
    thread = threading.Thread(target=_work, name='test_redis5_planned_demotion', args=(pysoa_client, 'meta', context))
    thread.start()
    try:
        # Let the worker accumulate 50 successful calls before disrupting anything.
        while (len(context['results_before_failover']) < 50):
            time.sleep(0.01)
        assert thread.is_alive()
        assert all(((r is True) for r in context['results_before_failover'])), context['results_before_failover']
        assert (context['unexpected_errors_before_failover'] == [])
        original_master = _get_master_ip('redis5-sentinel1')
        context['failover_initiated'] = True
        _kill_master('redis5-master')
        _progress('/')
        time.sleep(1)
        # Poll (up to ~5s) until the sentinel reports a different master IP.
        tries = 0
        new_master = _get_master_ip('redis5-sentinel1')
        while ((new_master == original_master) and (tries < 5)):
            time.sleep(1)
            tries += 1
            new_master = _get_master_ip('redis5-sentinel1')
        assert (new_master != original_master)
        # Poll (up to ~10s) until one of the replicas has actually been promoted.
        tries = 0
        replica1_role = _get_redis_role('redis5-replica1')
        replica2_role = _get_redis_role('redis5-replica2')
        while (('master' not in (replica1_role, replica2_role)) and (tries < 10)):
            time.sleep(1)
            tries += 1
            replica1_role = _get_redis_role('redis5-replica1')
            replica2_role = _get_redis_role('redis5-replica2')
        assert ('master' in (replica1_role, replica2_role))
        context['failover_completed'] = True
        _progress('/')
        assert (context['unexpected_errors_after_failover_initiated'] == [])
        # The worker must keep succeeding against the newly promoted master.
        while (len(context['results_after_failover_completed']) < 50):
            time.sleep(0.01)
        assert thread.is_alive()
        context['stop'] = True
        thread.join(6)
        # Only the tail is checked: the first post-failover calls may race the
        # promotion settling.
        test = context['results_after_failover_completed'][(- 10):]
        assert all(((r is True) for r in test)), test
        assert (context['unexpected_errors_after_failover_completed'] == [])
        _progress('//')
    finally:
        # Always stop the worker, even if an assertion above failed.
        context['stop'] = True
        thread.join(6)
class OptionSeriesColumnSonificationTracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for series sonification tracks.

    Each option is exposed as a property pair backed by the Options config
    store.  The original block defined every accessor twice (getter shape,
    then setter shape) with no decorators, so the second definition silently
    shadowed the first; the @property / @<name>.setter pairing those
    signatures imply is restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function (or its configured value); None when unset."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Name of the point property mapped to this parameter."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the min/max values apply within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
('/rules1.txt')
def handle_rules1(self):
    """Serve files/rules1.txt as a download to a logged-in user, redirecting
    to /setup during wifi setup and to /login when unauthenticated."""
    if rpieGlobals.wifiSetup:
        return self.redirect('/setup')
    if not isLoggedIn(self.get, self.cookie):
        return self.redirect('/login')
    responsearr = self.get if self.type == 'GET' else self.post
    fname = 'files/rules1.txt'
    if not os.path.isfile(fname):
        return ''
    basename = fname.split('/')[-1]
    self.set_header('Content-Disposition', 'filename="' + str(basename) + '"')
    return self.file(fname)
def split_star_expr_tokens(tokens, is_dict=False):
    """Partition testlist_star_expr tokens on star expressions.

    Returns (groups, has_star, has_comma), where *groups* alternates between
    lists of plain items ((key, value) pairs when *is_dict*) and the raw
    starred tokens separating them.
    """
    groups = [[]]
    saw_star = False
    saw_comma = False
    for grp in tokens:
        if grp == ',':
            saw_comma = True
            continue
        size = len(grp)
        if size == 1:
            # Plain item; only legal outside dict literals.
            internal_assert(not is_dict, 'found non-star non-pair item in dict literal', grp)
            groups[-1].append(grp[0])
        elif size == 2:
            # Star expression: emit it between two plain-item groups.
            internal_assert(not grp[0].lstrip('*'), 'invalid star expr item signifier', grp[0])
            saw_star = True
            groups.append(grp[1])
            groups.append([])
        elif size == 3:
            # key : value pair; only legal inside dict literals.
            internal_assert(is_dict, 'found dict key-value pair in non-dict tokens', grp)
            key, colon, value = grp
            internal_assert(colon == ':', 'invalid colon in dict literal item', colon)
            groups[-1].append((key, value))
        else:
            raise CoconutInternalException('invalid testlist_star_expr tokens', tokens)
    # Drop a trailing empty group left after a final star expression.
    if not groups[-1]:
        groups.pop()
    return (groups, saw_star, saw_comma)
class TwoColumns(Layout):
    """Layout keeping windows balanced across two columns.

    The first parameter optionally chooses which side the first column sits
    on (defaults to LEFT); the second column takes the opposite side.
    """

    def __init__(self, workspace_name: str, params: List[Any]):
        super().__init__(LayoutName.TWO_COLUMNS, workspace_name)
        try:
            self.first_column_position = (HorizontalPosition(params[0]) if (len(params) > 0) else HorizontalPosition.LEFT)
        except ValueError:
            # Invalid parameter value: fall back to LEFT and warn.
            self.first_column_position = HorizontalPosition.LEFT
            self._warn_wrong_parameters(params)
        self.second_column_position = self.first_column_position.opposite()

    def _params(self) -> List[Any]:
        # This layout persists no parameters.
        return []

    def anchor_mark(self) -> Optional[str]:
        return self.mark_main()

    def split_direction(self, context: Context) -> Optional[Direction]:
        # Split vertically only while the columns are still being formed.
        return (Direction.VERTICAL if (len(context.containers) <= 3) else None)

    def stack_direction(self, context: Context) -> Optional[Direction]:
        return Direction.VERTICAL

    def _update(self, context: Context):
        if (len(context.containers) <= 2):
            # With at most two windows, just push the focused one to the
            # second column.
            context.exec(f'[con_id="{context.focused.id}"] move {self.second_column_position.value}')
            return
        sorted_containers = context.sorted_containers()
        # Every other container (offset depends on parity) is a candidate to
        # move so the two columns stay balanced.
        candidates = (sorted_containers[1:(- 1):2] if ((len(context.containers) % 2) == 0) else sorted_containers[:(- 1):2])
        self._move_container_to_lowest(context, candidates)

    def _move_container_to_lowest(self, context: Context, candidates: List[Con]):
        lowest = self._lowest(candidates)
        if (lowest is not None):
            Mover(context).move_to_container(lowest.id)

    # NOTE(review): the `cls` parameter suggests a stripped @classmethod;
    # as written it receives the instance when called as self._lowest(...).
    # TODO confirm against upstream.
    def _lowest(cls, containers: List[Con]) -> Optional[Con]:
        # Pick the container with the largest y coordinate (visually lowest).
        lower_y = 0
        destination = None
        for con in containers:
            if (con.rect.y >= lower_y):
                destination = con
                lower_y = con.rect.y
        return destination

    # NOTE(review): likely a stripped @classmethod/@staticmethod factory;
    # it ignores `cls` and constructs TwoColumns directly. TODO confirm.
    def create(cls, workspace_name: str, params: List[Any]) -> Optional['Layout']:
        return TwoColumns(workspace_name, params)
def export_matlib_to_file(fname: str='matlib.json') -> None:
    """Dump every non-class entry of the material library (all variants of
    each material, as parsed medium JSON) to *fname*."""
    exported = {}
    for mat_name, mat in material_library.items():
        if isinstance(mat, type):
            # Skip class-style (non-instantiated) entries.
            continue
        variants = {var_name: json.loads(var.medium._json_string) for var_name, var in mat.variants.items()}
        exported[f'{mat.name} ("{mat_name}")'] = variants
    with open(fname, 'w') as f:
        json.dump(exported, f)
class SemanticReferenceListTeiElementFactory(SingleElementTeiElementFactory):
    """Builds the TEI element for a SemanticReferenceList."""

    def get_tei_element_for_semantic_content(self, semantic_content: SemanticContentWrapper, context: TeiElementFactoryContext) -> etree.ElementBase:
        """Render *semantic_content* (must be a SemanticReferenceList) as a
        raw TEI reference-list element."""
        LOGGER.debug('semantic_content: %s', semantic_content)
        assert isinstance(semantic_content, SemanticReferenceList)
        return get_tei_raw_reference_list_element(semantic_reference_list=semantic_content, context=context)
class GlobalFFTKernel():
    """One pass of a global (multi-kernel) FFT decomposition.

    The FFT of size `fft_size` is split into `num_passes` radix passes; this
    object describes pass number `pass_num` and can compute launch parameters
    for a given maximum work-group size via `prepare_for`.
    """

    def __init__(self, dtype, device_params, outer_shape, fft_size, curr_size, fft_size_real, inner_shape, pass_num, reverse_direction):
        num_passes = len(get_global_radix_info(fft_size)[0])
        # Only the very last pass of an inverse transform produces the
        # (possibly unpadded) real-size output.
        real_output_shape = ((pass_num == (num_passes - 1)) and reverse_direction)
        self.name = 'fft_global'
        # In-place execution only works when the last pass leaves data in the
        # original buffer (odd number of ping-pong swaps).
        self.inplace_possible = ((pass_num == (num_passes - 1)) and ((num_passes % 2) == 1))
        self.output_shape = ((outer_shape + ((fft_size_real if real_output_shape else fft_size),)) + inner_shape)
        # Padding weights are only needed on the first pass of an inverse
        # transform of a padded (non-power-of-radix) size.
        if ((fft_size != fft_size_real) and (pass_num == 0) and reverse_direction):
            self.kweights = get_kweights(fft_size_real, fft_size)
        else:
            self.kweights = None
        self._fft_size = fft_size
        self._curr_size = curr_size
        self._fft_size_real = fft_size_real
        self._local_mem_size = device_params.local_mem_size
        self._itemsize = dtype.itemsize
        self._inner_batch = helpers.product(inner_shape)
        self._outer_batch = helpers.product(outer_shape)
        self._pass_num = pass_num
        self._last_pass = (pass_num == (num_passes - 1))
        # Render-time keyword arguments that do not depend on the work-group size.
        self._constant_kwds = get_common_kwds(dtype, device_params)
        self._constant_kwds.update(dict(takes_kweights=(self.kweights is not None), input_slices=(len(outer_shape), 1, len(inner_shape)), output_slices=(len(outer_shape), 1, len(inner_shape)), pad_in=((fft_size != fft_size_real) and (pass_num == 0) and (not reverse_direction)), unpad_out=((fft_size != fft_size_real) and self._last_pass and reverse_direction), reverse_direction=reverse_direction, normalize=self._last_pass))

    def prepare_for(self, max_local_size):
        """Compute (global_size, local_size, render_kwds) for this pass given
        the maximum work-group size.

        Raises OutOfResourcesError when the required local memory exceeds the
        device limit, so the caller can retry with a smaller local size.
        """
        kwds = dict(self._constant_kwds)
        (radix_arr, radix1_arr, radix2_arr) = get_global_radix_info(self._fft_size)
        radix = radix_arr[self._pass_num]
        radix1 = radix1_arr[self._pass_num]
        radix2 = radix2_arr[self._pass_num]
        # Strides for reading/writing this pass's data within the whole FFT.
        stride_out = (self._inner_batch * helpers.product(radix_arr[:self._pass_num]))
        stride = (stride_out * radix)
        stride_in = (stride_out * helpers.product(radix_arr[(self._pass_num + 1):]))
        threads_per_xform = radix2
        # Pick a batch size that keeps memory accesses coalesced.
        coalesce_width = kwds['min_mem_coalesce_width']
        local_batch = (max_local_size if (radix2 == 1) else coalesce_width)
        local_batch = min(local_batch, stride_in)
        local_size = min((local_batch * threads_per_xform), max_local_size)
        local_batch = (local_size // threads_per_xform)
        workgroups_num = (helpers.min_blocks(stride_in, local_batch) * self._outer_batch)
        # Local (shared) memory requirements depend on the pass structure.
        if (radix2 == 1):
            lmem_size = 0
        elif (stride_out == 1):
            lmem_size = ((radix + 1) * local_batch)
        else:
            lmem_size = (local_size * radix1)
        if (((lmem_size * self._itemsize) // 2) > self._local_mem_size):
            raise OutOfResourcesError
        # NOTE(review): kwds already starts as a copy of _constant_kwds above,
        # so this update is redundant -- TODO confirm before removing.
        kwds.update(self._constant_kwds)
        kwds.update(dict(fft_size=self._fft_size, curr_size=self._curr_size, fft_size_real=self._fft_size_real, pass_num=self._pass_num, lmem_size=lmem_size, local_batch=local_batch, local_size=local_size, inner_batch=self._inner_batch, radix_arr=radix_arr, radix1_arr=radix1_arr, radix2_arr=radix2_arr, radix1=radix1, radix2=radix2, radix=radix, stride_in=stride_in, stride_out=stride_out, stride=stride, last_pass=self._last_pass))
        return ((workgroups_num * local_size), local_size, kwds)
# NOTE(review): no `self`/`cls` parameter -- this reads like a @staticmethod of
# GlobalFFTKernel whose decorator was lost, or a module-level factory; confirm.
def create_chain(dtype, device_params, outer_shape, fft_size, fft_size_real, inner_shape, reverse_direction):
    """Build the ordered list of GlobalFFTKernel passes for one FFT.

    One kernel is created per radix in the global radix decomposition of
    `fft_size`; the running `curr_size` is divided by each pass's radix.
    """
    (radix_arr, _, _) = get_global_radix_info(fft_size)
    curr_size = fft_size
    kernels = []
    for pass_num in range(len(radix_arr)):
        kernels.append(GlobalFFTKernel(dtype, device_params, outer_shape, fft_size, curr_size, fft_size_real, inner_shape, pass_num, reverse_direction))
        curr_size //= radix_arr[pass_num]
    return kernels
class LGBMClassifierTransformer(LGBMForestTransformer):
    """Transforms a fitted LGBMClassifier into the serialized tree-ensemble
    form, mapping trees to classes for multiclass objectives."""

    def __init__(self, model: LGBMClassifier, feature_names: List[str], classification_labels: List[str], classification_weights: List[float]):
        super().__init__(model.booster_, feature_names, classification_labels, classification_weights)
        self.n_estimators = int(model.n_estimators)
        self.n_classes = int(model.n_classes_)
        # Default the labels to the model's own classes when none were given.
        if (not classification_labels):
            self._classification_labels = [str(x) for x in model.classes_]

    def make_leaf_node(self, tree_id: int, node_id: int, tree_node_json_obj: Dict[(str, Any)]) -> TreeNode:
        """Build a leaf node; for multiclass, trees cycle over classes
        (tree_id % n_classes selects the class this leaf scores)."""
        if (self._objective == 'binary'):
            return super().make_leaf_node(tree_id, node_id, tree_node_json_obj)
        # One-hot leaf value vector: only this tree's class gets the score.
        leaf_val = ([0.0] * self.n_classes)
        leaf_val[(tree_id % self.n_classes)] = float(tree_node_json_obj['leaf_value'])
        return TreeNode(node_idx=node_id, leaf_value=leaf_val, number_samples=(int(tree_node_json_obj['leaf_count']) if ('leaf_count' in tree_node_json_obj) else None))

    def check_model_booster(self) -> None:
        """Reject boosting types the serialized form cannot represent."""
        if (self._model.params['boosting_type'] not in {'gbdt', 'rf', 'dart', 'goss'}):
            raise ValueError(f"boosting type must exist and be of type 'gbdt', 'rf', 'dart', or 'goss', was {self._model.params['boosting_type']!r}")

    def determine_target_type(self) -> str:
        return 'classification'

    def build_aggregator_output(self) -> Dict[(str, Any)]:
        # Class scores are combined through logistic regression aggregation.
        return {'logistic_regression': {}}

    def model_type(self) -> str:
        return TYPE_CLASSIFICATION

    def is_objective_supported(self) -> bool:
        return (self._objective in {'binary', 'multiclass', 'multiclassova'})
def _optimal_param(threshold, num_perm, max_r, xq, false_positive_weight, false_negative_weight):
    """Grid-search the LSH banding parameters (b bands, r rows) that minimize
    the weighted sum of false-positive and false-negative probabilities at the
    given similarity threshold, subject to b * r <= num_perm."""
    best = (0, 0)
    best_error = float('inf')
    for b in range(1, num_perm + 1):
        for r in range(1, max_r + 1):
            if b * r > num_perm:
                continue
            fp = _false_positive_probability(threshold, b, r, xq)
            fn = _false_negative_probability(threshold, b, r, xq)
            err = fp * false_positive_weight + fn * false_negative_weight
            if err < best_error:
                best_error = err
                best = (b, r)
    return best
_metaclass(abc.ABCMeta)
class Activity(object):
def __init__(self, name=None):
    """Base activity; *name* defaults to 'UnknownActivity: <timestamp>'.

    All child-tracking maps are weak-valued so finished children can be
    garbage collected without explicit cleanup.
    """
    if name is None:
        name = 'UnknownActivity: ' + str(time.time())
    self._name = name
    self._child_thread_map = weakref.WeakValueDictionary()
    self._child_activity_map = weakref.WeakValueDictionary()
    self._asso_socket_map = weakref.WeakValueDictionary()
    self._timers = weakref.WeakValueDictionary()
    self._started = False
@property
def name(self):
    """Name of this activity (read-only).

    Restored @property: sibling code treats this as a string attribute
    (``self.name + '_'`` and dictionary keying on ``activity.name``), which
    fails if ``name`` is a plain method.
    """
    return self._name
@property
def started(self):
    """Whether start() has been called and stop() has not (read-only).

    Restored @property: sibling code branches on ``self.started`` /
    ``activity.started`` directly; as a plain method the bound-method object
    is always truthy, which would break those checks.
    """
    return self._started
def _validate_activity(self, activity):
    """Raise ActivityException unless this activity is started and the child
    activity has not been started yet.

    NOTE(review): relies on ``activity.started`` being a property; if it is a
    plain method (stripped decorator) this check is always truthy -- confirm.
    """
    if (not self._started):
        raise ActivityException(desc='Tried to spawn a child activity before Activity was started.')
    if activity.started:
        raise ActivityException(desc='Tried to start an Activity that was already started.')
def _spawn_activity(self, activity, *args, **kwargs):
    """Start *activity* in a new green thread and track both by name."""
    self._validate_activity(activity)
    thread = hub.spawn(activity.start, *args, **kwargs)
    self._child_activity_map[activity.name] = activity
    self._child_thread_map[activity.name] = thread
    return thread
def _spawn_activity_after(self, seconds, activity, *args, **kwargs):
    """Like _spawn_activity, but delay the start by *seconds*."""
    self._validate_activity(activity)
    thread = hub.spawn_after(seconds, activity.start, *args, **kwargs)
    self._child_activity_map[activity.name] = activity
    self._child_thread_map[activity.name] = thread
    return thread
def _validate_callable(self, callable_):
    """Raise ActivityException unless *callable_* is a live callable and this
    activity has been started."""
    if callable_ is None:
        raise ActivityException(desc='Callable cannot be None')
    if not callable(callable_):
        raise ActivityException(desc='Currently only supports instances that have __call__ as callable which is missing in given arg.')
    if not self._started:
        raise ActivityException(desc='Tried to spawn a child thread before this Activity was started.')
def _spawn(self, name, callable_, *args, **kwargs):
    """Run *callable_* in a new green thread tracked under *name*."""
    self._validate_callable(callable_)
    thread = hub.spawn(callable_, *args, **kwargs)
    self._child_thread_map[name] = thread
    return thread
def _spawn_after(self, name, seconds, callable_, *args, **kwargs):
    """Like _spawn, but delay execution by *seconds*."""
    self._validate_callable(callable_)
    thread = hub.spawn_after(seconds, callable_, *args, **kwargs)
    self._child_thread_map[name] = thread
    return thread
def _create_timer(self, name, func, *arg, **kwarg):
    """Create a LoopingCall timer for *func* and register it under *name*."""
    loop = LoopingCall(func, *arg, **kwarg)
    self._timers[name] = loop
    return loop
def _run(self, *args, **kwargs):
    """Main body of the activity; subclasses must override."""
    raise NotImplementedError()
def start(self, *args, **kwargs):
    """Mark the activity started and run its body; always stop on exit.

    Raises ActivityException if already started.  BGPSException from the body
    is logged rather than propagated; the finally block guarantees stop() is
    called when the body finishes or fails while still marked started.

    NOTE(review): the guards rely on ``self.started`` being a property; as a
    plain method (stripped decorator) they are always truthy -- confirm.
    """
    if self.started:
        raise ActivityException(desc='Activity already started')
    self._started = True
    try:
        self._run(*args, **kwargs)
    except BGPSException:
        LOG.error(traceback.format_exc())
    finally:
        if self.started:
            self.stop()
def pause(self, seconds=0):
    """Yield control to the hub for *seconds* (0 = just yield once)."""
    hub.sleep(seconds)
def _stop_child_activities(self, name=None):
    """Stop child activities -- all of them, or only the one called *name*."""
    for child_name, child in list(self._child_activity_map.items()):
        if name is None or name == child_name:
            LOG.debug('%s: Stopping child activity %s ', self.name, child_name)
            if child.started:
                child.stop()
            self._child_activity_map.pop(child_name, None)
def _stop_child_threads(self, name=None):
    """Kill child green threads -- all of them, or only the one called *name*."""
    for thread_name, thread in list(self._child_thread_map.items()):
        if name is not None and thread_name != name:
            continue
        LOG.debug('%s: Stopping child thread %s', self.name, thread_name)
        thread.kill()
        self._child_thread_map.pop(thread_name, None)
def _close_asso_sockets(self):
    """Close every socket associated with this activity."""
    for label, sock in list(self._asso_socket_map.items()):
        LOG.debug('%s: Closing socket %s - %s', self.name, label, sock)
        sock.close()
def _stop_timers(self):
    """Stop every registered looping-call timer."""
    for label, timer in list(self._timers.items()):
        LOG.debug('%s: Stopping timer %s', self.name, label)
        timer.stop()
def stop(self):
    """Tear this activity down: timers, children, threads and sockets.

    Raises ActivityException when the activity is not currently started.
    """
    if (not self.started):
        raise ActivityException(desc='Cannot call stop when activity is not started or has been stopped already.')
    LOG.debug('Stopping activity %s.', self.name)
    self._stop_timers()
    self._stop_child_activities()
    self._stop_child_threads()
    self._close_asso_sockets()
    self._started = False
    # Re-create the weak-value maps so stopped children and closed
    # sockets can be garbage-collected.
    self._asso_socket_map = weakref.WeakValueDictionary()
    self._child_activity_map = weakref.WeakValueDictionary()
    self._child_thread_map = weakref.WeakValueDictionary()
    self._timers = weakref.WeakValueDictionary()
    LOG.debug('Stopping activity %s finished.', self.name)
def _canonicalize_ip(self, ip):
    """Return *ip*, reducing an IPv4-mapped IPv6 address to plain IPv4."""
    addr = netaddr.IPAddress(ip)
    return str(addr.ipv4()) if addr.is_ipv4_mapped() else ip
def get_remotename(self, sock):
    """Return (canonical-ip, port-as-string) of the peer of *sock*."""
    peername = sock.getpeername()
    return (self._canonicalize_ip(peername[0]), str(peername[1]))
def get_localname(self, sock):
    """Return (canonical-ip, port-as-string) of the local end of *sock*."""
    sockname = sock.getsockname()
    return (self._canonicalize_ip(sockname[0]), str(sockname[1]))
def _create_listen_socket(self, family, loc_addr):
s = socket.socket(family)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(loc_addr)
s.listen(1)
return s
def _listen_socket_loop(self, s, conn_handle):
    """Accept connections on *s* forever, spawning *conn_handle* per client.

    Blocks indefinitely; intended to run inside its own green thread.
    Each accepted socket is registered in the association map and handled
    by a freshly spawned thread named after the client address.
    """
    while True:
        (sock, client_address) = s.accept()
        (client_address, port) = self.get_remotename(sock)
        LOG.debug('Connect request received from client for port %s:%s', client_address, port)
        client_name = ((self.name + '_') + client_address)
        self._asso_socket_map[client_name] = sock
        self._spawn(client_name, conn_handle, sock)
def _listen_tcp(self, loc_addr, conn_handle):
    """Listen on *loc_addr* for every address family getaddrinfo offers.

    Creates one bound, listening socket per usable (family, sockaddr)
    resolution of *loc_addr*, then spawns an accept loop for each.
    Returns ``(server, listen_sockets)`` where *server* is the thread
    handle of the last accept loop spawned and *listen_sockets* maps
    sockaddr -> socket.
    """
    info = socket.getaddrinfo(loc_addr[0], loc_addr[1], socket.AF_UNSPEC, socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
    listen_sockets = {}
    for res in info:
        (af, socktype, proto, _, sa) = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            if (af == socket.AF_INET6):
                # Keep the v6 socket v6-only so a v4 socket can coexist
                # on the same port.
                sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
            sock.bind(sa)
            sock.listen(50)
            listen_sockets[sa] = sock
        except socket.error as e:
            # Best-effort: skip families we cannot bind, keep the rest.
            LOG.error('Error creating socket: %s', e)
            if sock:
                sock.close()
    count = 0
    server = None
    for sa in listen_sockets:
        name = ((self.name + '_') + str(sa[0]))
        self._asso_socket_map[name] = listen_sockets[sa]
        if (count == 0):
            # NOTE(review): the first accept loop is spawned directly via
            # eventlet (imported here, locally) instead of self._spawn,
            # unlike every subsequent one -- presumably deliberate, but
            # confirm why the first socket is special-cased.
            import eventlet
            server = eventlet.spawn(self._listen_socket_loop, listen_sockets[sa], conn_handle)
            self._child_thread_map[name] = server
            count += 1
        else:
            server = self._spawn(name, self._listen_socket_loop, listen_sockets[sa], conn_handle)
    return (server, listen_sockets)
def _connect_tcp(self, peer_addr, conn_handler, time_out=None, bind_address=None, password=None):
    """Connect to *peer_addr* over TCP and hand the socket to *conn_handler*.

    :param peer_addr: (host, port) of the remote peer; family is chosen
        by whether the host is a valid IPv4 literal.
    :param conn_handler: callable spawned with the connected socket.
    :param time_out: seconds after which ``socket.error`` is raised via
        the Timeout context manager (None means no timeout).
    :param bind_address: optional local address to bind before connecting.
    :param password: if given, enables TCP-MD5 signature protection for
        this peer before connecting.
    :returns: the connected socket.
    """
    LOG.debug('Connect TCP called for %s:%s', peer_addr[0], peer_addr[1])
    if ip.valid_ipv4(peer_addr[0]):
        family = socket.AF_INET
    else:
        family = socket.AF_INET6
    with Timeout(time_out, socket.error):
        sock = socket.socket(family)
        if bind_address:
            sock.bind(bind_address)
        if password:
            # MD5 option must be set before connect().
            sockopt.set_tcp_md5sig(sock, peer_addr[0], password)
        sock.connect(peer_addr)
    local = self.get_localname(sock)[0]
    remote = self.get_remotename(sock)[0]
    conn_name = ((('L: ' + local) + ', R: ') + remote)
    self._asso_socket_map[conn_name] = sock
    self._spawn(conn_name, conn_handler, sock)
    return sock
def _test_correct_response_of_loans(client):
    """County-level obligation spending for loan awards under DEF codes L/M."""
    resp = post(
        client,
        award_type_codes=list(loan_type_mapping.keys()),
        def_codes=['L', 'M'],
        geo_layer='county',
        geo_layer_filters=['45001', '45005'],
        spending_type='obligation',
    )
    assert resp.status_code == status.HTTP_200_OK, 'Failed to return 200 Response'
    payload = resp.json()
    payload['results'].sort(key=_get_shape_code_for_sort)
    expected_response = {
        'geo_layer': 'county',
        'scope': 'recipient_location',
        'spending_type': 'obligation',
        'results': [
            {
                'amount': 220.0,
                'display_name': 'Charleston',
                'per_capita': 220.0,
                'population': 1,
                'shape_code': '45001',
                'award_count': 2,
            }
        ],
    }
    assert payload == expected_response
def crop_frequencies(arr):
    """Build a Transformation that crops a real-FFT spectrum.

    The output keeps only the first ``shape[1] // 2 + 1`` columns of *arr*
    (the non-redundant half of a Hermitian-symmetric spectrum); threads
    outside that range simply do not store.
    """
    result_arr = Type(arr.dtype, (arr.shape[0], ((arr.shape[1] // 2) + 1)))
    return Transformation([Parameter('output', Annotation(result_arr, 'o')), Parameter('input', Annotation(arr, 'i'))], '\n if (${idxs[1]} < ${input.shape[1] // 2 + 1})\n ${output.store_idx}(${idxs[0]}, ${idxs[1]}, ${input.load_same});\n ', connectors=['input'])
class OptionPlotoptionsPictorialSonificationContexttracksMappingLowpassResonance(Options):
    """Property accessors for the lowpass-resonance mapping options subtree.

    Each option is exposed as a property: the getter reads the current
    configuration value (default ``None``) via ``_config_get`` and the
    setter writes it through ``_config``.

    NOTE(review): the previous revision defined each getter/setter pair as
    two plain same-named methods, so the setter silently shadowed the
    getter and reading the attribute returned a bound setter instead of a
    value.  Restoring ``@property``/``@<name>.setter`` matches the evident
    intent of the getter/setter pairs.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class MyPluginDao(BaseDao):
    """Data-access object for ``MyPluginEntity`` rows.

    Changes over the previous revision: every session is released via
    ``try/finally`` (``raw_update`` used to leak its session and no method
    closed its session on an exception); ``list`` now computes its totals
    on the *filtered* query (previously the count ignored the filters, so
    pagination was wrong whenever a filter was supplied); the duplicated
    per-field filter chains are factored into ``_apply_query_filters``.
    """

    @staticmethod
    def _apply_query_filters(stmt, query: MyPluginEntity):
        """Apply an equality filter for every non-None field of *query*."""
        if query.id is not None:
            stmt = stmt.filter(MyPluginEntity.id == query.id)
        if query.name is not None:
            stmt = stmt.filter(MyPluginEntity.name == query.name)
        if query.tenant is not None:
            stmt = stmt.filter(MyPluginEntity.tenant == query.tenant)
        if query.type is not None:
            stmt = stmt.filter(MyPluginEntity.type == query.type)
        if query.user_code is not None:
            stmt = stmt.filter(MyPluginEntity.user_code == query.user_code)
        if query.user_name is not None:
            stmt = stmt.filter(MyPluginEntity.user_name == query.user_name)
        if query.sys_code is not None:
            stmt = stmt.filter(MyPluginEntity.sys_code == query.sys_code)
        return stmt

    def add(self, engity: MyPluginEntity):
        """Insert a copy of *engity* and return the new primary key."""
        session = self.get_raw_session()
        try:
            my_plugin = MyPluginEntity(
                tenant=engity.tenant,
                user_code=engity.user_code,
                user_name=engity.user_name,
                name=engity.name,
                type=engity.type,
                version=engity.version,
                use_count=(engity.use_count or 0),
                succ_count=(engity.succ_count or 0),
                sys_code=engity.sys_code,
                gmt_created=datetime.now(),
            )
            session.add(my_plugin)
            session.commit()
            return my_plugin.id
        finally:
            session.close()

    def raw_update(self, entity: MyPluginEntity):
        """Merge *entity* into the database and return its primary key."""
        session = self.get_raw_session()
        try:
            updated = session.merge(entity)
            session.commit()
            return updated.id
        finally:
            # Previously this session was never closed (connection leak).
            session.close()

    def get_by_user(self, user: str) -> list[MyPluginEntity]:
        """Return all plugins owned by *user* (all plugins when falsy)."""
        session = self.get_raw_session()
        try:
            stmt = session.query(MyPluginEntity)
            if user:
                stmt = stmt.filter(MyPluginEntity.user_code == user)
            return stmt.all()
        finally:
            session.close()

    def get_by_user_and_plugin(self, user: str, plugin: str) -> MyPluginEntity:
        """Return the first plugin named *plugin*, optionally owned by *user*."""
        session = self.get_raw_session()
        try:
            stmt = session.query(MyPluginEntity)
            if user:
                stmt = stmt.filter(MyPluginEntity.user_code == user)
            stmt = stmt.filter(MyPluginEntity.name == plugin)
            return stmt.first()
        finally:
            session.close()

    def list(self, query: MyPluginEntity, page=1, page_size=20):
        """Return ``(rows, total_pages, total_count)`` for *query*, newest first."""
        session = self.get_raw_session()
        try:
            stmt = self._apply_query_filters(session.query(MyPluginEntity), query)
            # Count the *filtered* result set so pagination is correct.
            all_count = stmt.count()
            stmt = stmt.order_by(MyPluginEntity.id.desc())
            stmt = stmt.offset((page - 1) * page_size).limit(page_size)
            result = stmt.all()
        finally:
            session.close()
        # Ceiling division for the page count.
        total_pages = -(-all_count // page_size)
        return (result, total_pages, all_count)

    def count(self, query: MyPluginEntity):
        """Return the number of rows matching *query*."""
        session = self.get_raw_session()
        try:
            stmt = self._apply_query_filters(
                session.query(func.count(MyPluginEntity.id)), query
            )
            return stmt.scalar()
        finally:
            session.close()

    def raw_delete(self, plugin_id: int):
        """Delete the plugin with primary key *plugin_id*.

        Raises Exception when *plugin_id* is None (checked before a
        session is opened, so no session can leak on the error path).
        """
        if plugin_id is None:
            raise Exception('plugin_id is None')
        session = self.get_raw_session()
        try:
            session.query(MyPluginEntity).filter(
                MyPluginEntity.id == plugin_id
            ).delete()
            session.commit()
        finally:
            session.close()
class OptionSonificationGlobalcontexttracksMappingNoteduration(Options):
    """Property accessors for the note-duration mapping options subtree.

    Each option is exposed as a property: the getter reads the current
    configuration value (default ``None``) via ``_config_get`` and the
    setter writes it through ``_config``.

    NOTE(review): the previous revision defined each getter/setter pair as
    two plain same-named methods, so the setter silently shadowed the
    getter.  Restoring ``@property``/``@<name>.setter`` matches the evident
    intent of the getter/setter pairs.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def value(self):
        return self._config_get(None)

    @value.setter
    def value(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def is_not_secure(secure_version, file_version, appname=None):
    """Return True if *file_version* is older than *secure_version*.

    Versions are compared as dotted integer tuples.  A *secure_version*
    of ``'N/A'`` always counts as insecure (True).  For WikkaWiki the
    ``-p`` patch suffix (e.g. ``1.1.6-p7``) is normalized to a dot first.

    On any parse/type error the traceback is logged and None is returned
    (preserving the original best-effort contract).
    """
    if secure_version == 'N/A':
        return True
    try:
        if not all(isinstance(x, str) for x in (secure_version, file_version)):
            # Bug fix: the %-style placeholders were previously passed as
            # extra TypeError args and never interpolated into the message.
            raise TypeError(
                'is_not_secure: input must be str when comparing. '
                'secure_version %s, file_version %s'
                % (type(secure_version), type(file_version))
            )
        if appname == 'WikkaWiki':
            secure_version = secure_version.replace('-p', '.')
            file_version = file_version.replace('-p', '.')
        secure_parts = [int(x) for x in secure_version.split('.')]
        file_parts = [int(x) for x in file_version.split('.')]
        return secure_parts > file_parts
    except Exception:
        logging.error(traceback.format_exc())
        return None
class OptionPlotoptionsFunnel3dSonificationTracksMappingHighpassFrequency(Options):
    """Property accessors for the highpass-frequency mapping options subtree.

    Each option is exposed as a property: the getter reads the current
    configuration value (default ``None``) via ``_config_get`` and the
    setter writes it through ``_config``.

    NOTE(review): the previous revision defined each getter/setter pair as
    two plain same-named methods, so the setter silently shadowed the
    getter.  Restoring ``@property``/``@<name>.setter`` matches the evident
    intent of the getter/setter pairs.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def publish_hass_discovery(client, device_topic, expire_after_seconds: int, sample: BmsSample, num_cells, temperatures, device_info: DeviceInfo=None):
    """Publish Home Assistant MQTT discovery configs for one BMS device.

    Builds one ``homeassistant/sensor/...`` (and, for switches, ``switch``
    plus ``binary_sensor``) config message per entity visible in *sample*:
    the fields in ``sample_desc``, per-cell voltages, cell-voltage
    statistics (when more than one cell), temperatures, energy/charge
    meters and switch states.  All messages are sent via
    ``mqtt_single_out``.
    """
    discovery_msg = {}
    # Shared HA "device" object; falls back to the topic when no DeviceInfo.
    device_json = {'identifiers': [((device_info and device_info.sn) or device_topic)], 'manufacturer': ((device_info and device_info.mnf) or None), 'name': (f'{device_info.name} ({device_topic})' if (device_info and device_info.name) else device_topic), 'model': ((device_info and device_info.model) or None), 'sw_version': ((device_info and device_info.sw_version) or None), 'hw_version': ((device_info and device_info.hw_version) or None)}
    def _hass_discovery(k, device_class, unit, state_class=None, icon=None, name=None, long_expiry=False):
        # Queue one sensor discovery config for state topic {device_topic}/{k}.
        # long_expiry widens expire_after to at least 2h for rarely-updated
        # values (meters).
        dm = {'unique_id': f"{device_topic}__{k.replace('/', '_')}", 'name': (name or k.replace('/', ' ')), 'device_class': (device_class or None), 'state_class': (state_class or None), 'unit_of_measurement': unit, 'state_topic': f'{device_topic}/{k}', 'expire_after': (max(expire_after_seconds, (3600 * 2)) if long_expiry else expire_after_seconds), 'device': device_json}
        if icon:
            dm['icon'] = ('mdi:' + icon)
        remove_none_values(dm)
        remove_none_values(dm['device'])
        discovery_msg[f"homeassistant/sensor/{device_topic}/_{k.replace('/', '_')}/config"] = dm
    # Scalar sample fields (skip NaN/None values).
    for (k, d) in sample_desc.items():
        if (not is_none_or_nan(getattr(sample, d['field']))):
            _hass_discovery(k, d['device_class'], state_class=d['state_class'], unit=d['unit_of_measurement'], icon=d.get('icon', None), name=d['field'])
    # Per-cell voltages, zero-padded so names sort naturally.
    for i in range(0, num_cells):
        k = ('cell_voltages/%d' % (i + 1))
        n = ('Cell Volt %0*d' % ((1 + int(math.log10(num_cells))), (i + 1)))
        _hass_discovery(k, 'voltage', name=n, unit='V')
    if (num_cells > 1):
        statistic_fields = ['min', 'max', 'average', 'median', 'delta']
        for f in statistic_fields:
            k = ('cell_voltages/%s' % f)
            _hass_discovery(k, name=('Cell Volt %s' % f), device_class='voltage', unit='V')
        for f in ['min_index', 'max_index']:
            k = ('cell_voltages/%s' % f)
            _hass_discovery(k, name=('Cell Index %s' % f[:3]), device_class=None, unit='')
    for i in range(0, len(temperatures)):
        k = ('temperatures/%d' % (i + 1))
        if (not is_none_or_nan(temperatures[i])):
            _hass_discovery(k, 'temperature', unit='C')
    # Long-lived accumulating meters.
    meters = {'total_energy': dict(device_class='energy', unit='kWh', icon='meter-electric'), 'total_energy_charge': dict(device_class='energy', state_class='total_increasing', unit='kWh', icon='meter-electric'), 'total_energy_discharge': dict(device_class='energy', state_class='total_increasing', unit='kWh', icon='meter-electric'), 'total_charge': dict(device_class=None, unit='Ah'), 'total_cycles': dict(device_class=None, unit='N', icon='battery-sync')}
    for (name, m) in meters.items():
        _hass_discovery(('meter/%s' % name), **m, name=(name.replace('_', ' ') + ' meter'), long_expiry=True)
    # Switches get both a controllable switch entity and a read-only
    # binary_sensor mirroring the same state topic.
    switches = (sample.switches and sample.switches.keys())
    if switches:
        for switch_name in switches:
            discovery_msg[f'homeassistant/switch/{device_topic}/{switch_name}/config'] = {'unique_id': f'{device_topic}__switch_{switch_name}', 'name': f'{switch_name}', 'device_class': 'outlet', 'state_topic': f'{device_topic}/switch/{switch_name}', 'expire_after': expire_after_seconds, 'device': device_json, 'command_topic': f'homeassistant/switch/{device_topic}/{switch_name}/set'}
            discovery_msg[f'homeassistant/binary_sensor/{device_topic}/{switch_name}/config'] = {'unique_id': f'{device_topic}__switch_{switch_name}', 'name': f'{switch_name} switch', 'device_class': 'power', 'expire_after': expire_after_seconds, 'device': device_json, 'state_topic': f'{device_topic}/switch/{switch_name}', 'command_topic': f'homeassistant/switch/{device_topic}/{switch_name}/set'}
    for (topic, data) in discovery_msg.items():
        j = json.dumps(data)
        logger.debug('discovery msg %s: %s', topic, j)
        mqtt_single_out(client, topic, j)
class GenericTimer(Thread):
    """Worker thread that waits up to ``interval`` seconds on a stop flag.

    If the flag is set within the interval, the callback is invoked once
    with ``args`` (as a single tuple argument) and the thread exits; if
    the interval elapses first, the thread exits without calling back.

    NOTE(review): the callback fires when the timer is *stopped*, not when
    it expires -- confirm this inverted-looking semantics is intended.
    """

    # Class-level defaults, overwritten per instance in __init__.
    interval = 1
    stop_flag = None
    callback = None

    def __init__(self, _interval, _callback, _args=()):
        super().__init__(name='generic_timer_thread')
        self.interval = _interval
        self.stop_flag = Event()
        self.callback = _callback
        self.args = _args

    def run(self):
        # Event.wait returns True only when the flag was set, so the
        # original while-loop could run its body at most once; this is
        # the equivalent single conditional.
        if self.stop_flag.wait(self.interval) and self.stop_flag.is_set():
            self.callback(self.args)

    def stop(self):
        """Set the stop flag, waking the waiting thread."""
        self.stop_flag.set()
def extractRuxifielluBlogspotCom(item):
    """Parse a release title from 'ruxifiellu.blogspot.com'.

    Returns None for previews/untitled chapters, a release message when a
    known tag matches, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_to_release = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_release.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def annotate_prime_award_recipient_id(field_name, queryset):
    """Annotate *queryset* with the prime award recipient id.

    Delegates to ``_annotate_recipient_id`` with a correlated SQL
    subquery that joins subaward rows to recipient_lookup (by UEI or
    DUNS) and recipient_profile, producing ``hash-level`` ids; level is
    'R' when there is no parent UEI, else 'C'.
    """
    return _annotate_recipient_id(field_name, queryset, "(\n select\n rp.recipient_hash || '-' || rp.recipient_level\n from\n rpt.subaward_search bs\n inner join recipient_lookup rl on (rl.uei = bs.awardee_or_recipient_uei OR rl.duns = bs.awardee_or_recipient_uniqu)\n inner join recipient_profile rp on rp.recipient_hash = rl.recipient_hash\n where\n bs.broker_subaward_id = {outer_table}.broker_subaward_id and\n rp.recipient_level = case\n when bs.ultimate_parent_uei is null or bs.ultimate_parent_uei = '' then 'R'\n else 'C'\n end and\n rp.recipient_name not in {special_cases}\n )")
class Train():
    """Training driver: builds model, datasets and TensorBoard visualizer,
    then runs the whole epoch loop as a side effect of construction."""

    def __init__(self):
        self._opt = TrainOptions().parse()
        self._model = ModelsFactory.get_by_name(self._opt.model, self._opt)
        self._tb_visualizer = TBVisualizer(self._opt)
        data_loader_train = CustomDatasetDataLoader(self._opt, mode='train')
        data_loader_val = CustomDatasetDataLoader(self._opt, mode='val')
        self._dataset_train = data_loader_train.load_data()
        self._dataset_val = data_loader_val.load_data()
        self._dataset_train_size = len(data_loader_train)
        self._dataset_val_size = len(data_loader_val)
        print(('#train images = %d' % self._dataset_train_size))
        print(('#val images = %d' % self._dataset_val_size))
        # Training starts immediately; constructing Train runs everything.
        self._train()

    def _train(self):
        """Run the epoch loop, resuming from self._opt.load_epoch."""
        self._total_steps = (self._opt.load_epoch * self._dataset_train_size)
        # NOTE(review): true division -- _iters_per_epoch may be a float;
        # confirm the visualizer expects that.
        self._iters_per_epoch = (self._dataset_train_size / self._opt.batch_size)
        self._last_display_time = None
        self._last_save_latest_time = None
        self._last_print_time = time.time()
        for i_epoch in range((self._opt.load_epoch + 1), ((self._opt.nepochs_no_decay + self._opt.nepochs_decay) + 1)):
            epoch_start_time = time.time()
            self._model.set_epoch(i_epoch)
            self._train_epoch(i_epoch)
            print(('saving the model at the end of epoch %d, iters %d' % (i_epoch, self._total_steps)))
            # Only the final epoch is checkpointed here; intermediate saves
            # are time-based inside _train_epoch.
            if (i_epoch == (self._opt.nepochs_no_decay + self._opt.nepochs_decay)):
                self._model.save(i_epoch)
            time_epoch = (time.time() - epoch_start_time)
            print(('End of epoch %d / %d \t Time Taken: %d sec (%d min or %d h)' % (i_epoch, (self._opt.nepochs_no_decay + self._opt.nepochs_decay), time_epoch, (time_epoch / 60), (time_epoch / 3600))))
            # Learning-rate decay only after the no-decay phase.
            if (i_epoch > self._opt.nepochs_no_decay):
                self._model.update_learning_rate()

    def _train_epoch(self, i_epoch):
        """Run one epoch of optimization with time-based print/display/save."""
        epoch_iter = 0
        self._model.set_train()
        for (i_train_batch, train_batch) in enumerate(self._dataset_train):
            iter_start_time = time.time()
            # Throttle terminal prints and visualizations by wall-clock time.
            do_visuals = ((self._last_display_time is None) or ((time.time() - self._last_display_time) > self._opt.display_freq_s))
            do_print_terminal = (((time.time() - self._last_print_time) > self._opt.print_freq_s) or do_visuals)
            self._model.set_input(train_batch)
            self._model.optimize_parameters()
            self._total_steps += self._opt.batch_size
            epoch_iter += self._opt.batch_size
            if do_print_terminal:
                self._display_terminal(iter_start_time, i_epoch, i_train_batch, do_visuals)
                self._last_print_time = time.time()
            if do_visuals:
                self._display_visualizer_train(self._total_steps)
                self._display_visualizer_val(i_epoch, self._total_steps)
                self._last_display_time = time.time()
            # Periodic "latest" checkpoint.
            if ((self._last_save_latest_time is None) or ((time.time() - self._last_save_latest_time) > self._opt.save_latest_freq_s)):
                print(('saving the latest model (epoch %d, total_steps %d)' % (i_epoch, self._total_steps)))
                self._model.save(i_epoch)
                self._last_save_latest_time = time.time()

    def _display_terminal(self, iter_start_time, i_epoch, i_train_batch, visuals_flag):
        """Print current training errors and per-sample time to the terminal."""
        errors = self._model.get_current_errors()
        t = ((time.time() - iter_start_time) / self._opt.batch_size)
        self._tb_visualizer.print_current_train_errors(i_epoch, i_train_batch, self._iters_per_epoch, errors, t, visuals_flag)

    def _display_visualizer_train(self, total_steps):
        """Push current training visuals and scalars to TensorBoard."""
        self._tb_visualizer.display_current_results(self._model.get_current_visuals(), total_steps, is_train=True, save_visuals=True)
        self._tb_visualizer.plot_scalars(self._model.get_current_errors(), total_steps, is_train=True)
        self._tb_visualizer.plot_scalars(self._model.get_current_scalars(), total_steps, is_train=True)

    def _display_visualizer_val(self, i_epoch, total_steps):
        """Evaluate up to num_iters_validate val batches and log mean errors."""
        val_start_time = time.time()
        self._model.set_eval()
        val_errors = OrderedDict()
        for (i_val_batch, val_batch) in enumerate(self._dataset_val):
            if (i_val_batch == self._opt.num_iters_validate):
                break
            self._model.set_input(val_batch)
            # Keep visuals only from the first validation batch.
            self._model.forward(keep_data_for_visuals=(i_val_batch == 0))
            errors = self._model.get_current_errors()
            for (k, v) in errors.items():
                if (k in val_errors):
                    val_errors[k] += v
                else:
                    val_errors[k] = v
        for k in val_errors.keys():
            val_errors[k] /= self._opt.num_iters_validate
        t = (time.time() - val_start_time)
        self._tb_visualizer.print_current_validate_errors(i_epoch, val_errors, t)
        self._tb_visualizer.plot_scalars(val_errors, total_steps, is_train=False)
        self._tb_visualizer.display_current_results(self._model.get_current_visuals(), total_steps, is_train=False, save_visuals=True)
        # Restore training mode after evaluation.
        self._model.set_train()
def bulk_delete_netloc(netloc, main=False, history=False):
    """Delete rows for *netloc* from web_pages and/or web_pages_version.

    Works through the id range in fixed-size windows and commits every
    ~commit_interval deleted rows so no single transaction grows too
    large.  Operational/invalid-request errors roll back the current
    window and continue with the next one.

    :param netloc: host whose rows are removed.
    :param main: delete from ``web_pages`` when True.
    :param history: delete from ``web_pages_version`` when True.
    """
    commit_interval = 50000
    step = 10000
    with db.session_context() as sess:
        print('Getting minimum row in need or update..')
        start = sess.execute('SELECT min(id) FROM web_pages WHERE netloc=:netloc', {'netloc': netloc})
        start = list(start)[0][0]
        if (start is None):
            print('No rows to reset!')
            return
        print('Minimum row ID:', start, 'getting maximum row...')
        stop = sess.execute('SELECT max(id) FROM web_pages WHERE netloc=:netloc', {'netloc': netloc})
        stop = list(stop)[0][0]
        print('Maximum row ID: ', stop)
        print(('Need to fix rows from %s to %s' % (start, stop)))
        # Align the window start down to a step boundary.
        start = (start - (start % step))
        changed = 0
        main_changed = 0
        version_changed = 0
        tot_changed = 0
        pbar = tqdm.tqdm(range(start, stop, step), desc='Deleting')
        for idx in pbar:
            try:
                if main:
                    have = sess.execute('DELETE FROM\n\t\t\t\t\t\t\t\t\t\t\t\tweb_pages\n\t\t\t\t\t\t\t\t\t\t\tWHERE\n\t\t\t\t\t\t\t\t\t\t\t\tnetloc = :netloc\n\t\t\t\t\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t\t\t\t\tid > :min_idx\n\t\t\t\t\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t\t\t\t\tid <= :max_idx;', {'netloc': netloc, 'min_idx': idx, 'max_idx': (idx + step)})
                    changed += have.rowcount
                    main_changed += have.rowcount
                    tot_changed += have.rowcount
                if history:
                    have = sess.execute('DELETE FROM\n\t\t\t\t\t\t\t\t\t\t\t\tweb_pages_version\n\t\t\t\t\t\t\t\t\t\t\tWHERE\n\t\t\t\t\t\t\t\t\t\t\t\tnetloc = :netloc\n\t\t\t\t\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t\t\t\t\tid > :min_idx\n\t\t\t\t\t\t\t\t\t\t\tAND\n\t\t\t\t\t\t\t\t\t\t\t\tid <= :max_idx;', {'netloc': netloc, 'min_idx': idx, 'max_idx': (idx + step)})
                    changed += have.rowcount
                    version_changed += have.rowcount
                    tot_changed += have.rowcount
                pbar.set_description(('Deleted %s, %s since commit' % (tot_changed, changed)))
                if (changed > commit_interval):
                    print(('Committing (%s changed rows)....' % changed), end=' ')
                    sess.commit()
                    print('done')
                    changed = 0
            except sqlalchemy.exc.OperationalError:
                # e.g. lock timeout / deadlock: drop this window's work.
                sess.rollback()
            except sqlalchemy.exc.InvalidRequestError:
                sess.rollback()
        # Flush whatever remains below the commit threshold.
        sess.commit()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.