# Dataset-extraction residue (markdown table header "code stringlengths ... |---|");
# not part of the original source code.
def _expand_array_paths_to_preserve(paths: List[DetailedPath]) -> Dict[(str, List[int])]:
    """Collect every array-index prefix of the given paths and group the
    indices to keep by their (joined) parent path.

    Note: trailing non-integer elements are popped off each input path in
    place, so callers' path lists are trimmed as a side effect.
    """
    prefixes: List[DetailedPath] = []
    for path in paths:
        # Trim trailing map keys so the path ends on an array index (mutates input).
        while path and not isinstance(path[-1], int):
            path.pop()
        prefix: DetailedPath = []
        for element in path:
            prefix.append(element)
            # Record each array-index prefix once.
            if isinstance(element, int) and prefix not in prefixes:
                prefixes.append(copy.deepcopy(prefix))
    merge_paths: Dict[(str, List[int])] = defaultdict(list)
    for prefix in prefixes:
        # Key: parent path string; value: the array indices to preserve there.
        merge_paths[join_detailed_path(prefix[:-1])].append(prefix[-1])
    return merge_paths
class DummyAsyncTransport():
    """Minimal async transport double: records every request it receives and
    replays canned (status, body) pairs from `responses` in order."""

    def __init__(self, hosts, responses=None, **_):
        self.hosts = hosts
        self.responses = responses
        self.call_count = 0
        # (method, target) -> list of kwargs seen for that endpoint.
        self.calls = defaultdict(list)

    async def perform_request(self, method, target, **kwargs):
        status, resp = 200, {}
        if self.responses:
            # Replay the canned response for this call index.
            status, resp = self.responses[self.call_count]
        self.call_count += 1
        self.calls[(method, target)].append(kwargs)
        meta = ApiResponseMeta(
            status=status,
            headers=HttpHeaders({'X-elastic-product': 'Elasticsearch'}),
            duration=0.0,
            node=None,
        )
        return meta, resp
class OptionSeriesVariablepieSonificationTracksMappingPitch(Options):
    """Pitch mapping options for sonification tracks of a variable-pie series.

    Fix: the extracted source contained duplicate plain method names (each
    later `def` silently shadowed its getter), which were clearly property
    getter/setter pairs whose decorators were stripped; the `@property` /
    `@<name>.setter` decorators are restored here.
    """

    @property
    def mapFunction(self):
        # No default mapping function.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Defaults to mapping pitch to the 'y' value.
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Default maximum note: c6.
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        # Default minimum note: c2.
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        # Defaults to mapping within the 'yAxis' extremes.
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): this bare parenthesized line and the `_kwargs(...)` / `_with(...)`
# lines inside the class below appear to be decorators whose prefixes were lost
# during extraction (likely `@doc(...)`, `@use_kwargs(...)`, `@marshal_with(...)`);
# restore them from the upstream source before running.
(tags=['financial'], description=docs.REPORTS, params={'entity_type': {'description': 'Committee groupings based on FEC filing form. Choose one of: `presidential`, `pac-party`, `house-senate`, or `ie-only`', 'enum': ['presidential', 'pac-party', 'house-senate', 'ie-only']}})
class ReportsView(views.ApiResource):
    """API resource returning committee financial reports, optionally scoped
    to one entity type (presidential / pac-party / house-senate / ie-only)."""

    _kwargs(args.paging)
    _kwargs(args.reports)
    _kwargs(args.make_multi_sort_args(default=['-coverage_end_date']))
    _with(schemas.CommitteeReportsPageSchema(), apply=False)
    def get(self, entity_type=None, **kwargs):
        """Build the reports query, validate requested sort columns, and
        return one serialized page of results."""
        (query, reports_class, reports_schema) = self.build_query(entity_type=entity_type, **kwargs)
        if kwargs['sort']:
            # Reject sort keys that have no backing index on the model.
            validator = args.IndicesValidator(reports_class)
            validator(kwargs['sort'])
        page = utils.fetch_page(query, kwargs, model=reports_class, multi=True)
        return reports_schema().dump(page)

    def build_query(self, entity_type=None, **kwargs):
        """Return (query, model, schema) for the given entity type.

        NOTE(review): `filter_range_fields` and `filter_match_fields` used below
        are not defined in this block — presumably module-level constants;
        confirm against the full file.
        """
        # Fall back to the default model/schema pair for unknown entity types.
        (reports_class, reports_schema) = reports_schema_map.get(reports_type_map.get(entity_type), default_schemas)
        query = reports_class.query
        filter_multi_fields = [('amendment_indicator', models.CommitteeReports.amendment_indicator), ('report_type', reports_class.report_type), ('committee_id', reports_class.committee_id), ('year', reports_class.report_year), ('cycle', reports_class.cycle), ('beginning_image_number', reports_class.beginning_image_number)]
        filter_overlap_fields = [('candidate_id', models.CommitteeHistory.candidate_ids)]
        # IE-only filings are searched by spender name; all others by filer name.
        if (entity_type == 'ie-only'):
            filter_fulltext_fields = [('q_spender', reports_class.spender_name_text)]
        else:
            filter_fulltext_fields = [('q_filer', reports_class.filer_name_text)]
        if hasattr(reports_class, 'committee'):
            # Eager-load the committee relation so serialization avoids N+1 queries.
            query = reports_class.query.outerjoin(reports_class.committee).options(sa.orm.contains_eager(reports_class.committee))
        if kwargs.get('committee_type'):
            query = query.filter(models.CommitteeHistory.committee_type.in_(kwargs.get('committee_type')))
        query = filters.filter_range(query, kwargs, filter_range_fields)
        query = filters.filter_match(query, kwargs, filter_match_fields)
        query = filters.filter_multi(query, kwargs, filter_multi_fields)
        query = filters.filter_fulltext(query, kwargs, filter_fulltext_fields)
        query = filters.filter_overlap(query, kwargs, filter_overlap_fields)
        return (query, reports_class, reports_schema)
class OptionPlotoptionsDumbbellLowmarkerStatesHover(Options):
    """Hover-state options for the low marker of a dumbbell series.

    Fix: the extracted source contained duplicate plain method names (each
    setter shadowed its getter); these were property getter/setter pairs whose
    decorators were stripped, restored here.
    """

    @property
    def animation(self) -> 'OptionPlotoptionsDumbbellLowmarkerStatesHoverAnimation':
        # Lazily-created sub-options object for hover animation.
        return self._config_sub_data('animation', OptionPlotoptionsDumbbellLowmarkerStatesHoverAnimation)

    @property
    def enabled(self):
        # Hover state is enabled by default.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get(None)

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        # Extra line width added on hover (default 1).
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def radiusPlus(self):
        # Extra radius added on hover (default 2).
        return self._config_get(2)

    @radiusPlus.setter
    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
class Test_CreateObject(ut.TestCase):
    """Tests for comtypes.client.CreateObject using the Scripting library.

    Fix: the bare string statements preceding test_remote/test_server_info in
    the extracted source were `@ut.skip(...)` decorators whose prefixes were
    stripped (leaving the IE-dependent tests to run and fail); restored here.
    """

    def test_progid(self):
        # Creation by ProgID string.
        obj = comtypes.client.CreateObject('Scripting.Dictionary')
        self.assertTrue(isinstance(obj, POINTER(Scripting.IDictionary)))

    def test_clsid(self):
        # Creation by coclass object.
        obj = comtypes.client.CreateObject(Scripting.Dictionary)
        self.assertTrue(isinstance(obj, POINTER(Scripting.IDictionary)))

    def test_clsid_string(self):
        # Creation by CLSID string (both text_type and str forms).
        comtypes.client.CreateObject(text_type(Scripting.Dictionary._reg_clsid_))
        comtypes.client.CreateObject(str(Scripting.Dictionary._reg_clsid_))

    @ut.skip('This test uses IE which is not available on all machines anymore. Find another API to use.')
    def test_remote(self):
        ie = comtypes.client.CreateObject('InternetExplorer.Application', machine='localhost')
        self.assertEqual(ie.Visible, False)
        ie.Visible = 1
        self.assertEqual(ie.Visible, True)
        self.assertEqual(0, ie.Quit())

    @ut.skip('This test uses IE which is not available on all machines anymore. Find another API to use.')
    def test_server_info(self):
        serverinfo = COSERVERINFO()
        serverinfo.pwszName = 'localhost'
        pServerInfo = byref(serverinfo)
        # machine= and pServerInfo= are mutually exclusive.
        self.assertRaises(ValueError, comtypes.client.CreateObject, 'InternetExplorer.Application', machine='localhost', pServerInfo=pServerInfo)
        ie = comtypes.client.CreateObject('InternetExplorer.Application', pServerInfo=pServerInfo)
        self.assertEqual(ie.Visible, False)
        ie.Visible = 1
        self.assertEqual(ie.Visible, True)
        self.assertEqual(0, ie.Quit())
def extractJyudo05XxWordpressCom(item):
    """Parse a 'jyudo05xx.wordpress.com' feed item into a release message.

    Returns None for previews or items with no chapter/volume, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestMrtlibUnknownMrtRecord(unittest.TestCase):
    """Parse/serialize round-trip for an unknown MRT record type.

    The MRT common header is: 4-byte timestamp, 2-byte type, 2-byte subtype,
    4-byte length.  The buffer encodes timestamp=0x11111111, type=0x2222
    (the '""' bytes), subtype=0x3333 ('33'), length=4.  Fix: the literals
    missing from the extracted source (`eq_(, record.timestamp)` and
    `timestamp=,`) are restored from those header bytes (0x11111111 == 286331153).
    """

    def test_parse(self):
        body = b'test'
        buf = (b'\x11\x11\x11\x11""33\x00\x00\x00\x04' + body)
        (record, rest) = mrtlib.MrtRecord.parse(buf)
        eq_(286331153, record.timestamp)  # 0x11111111
        eq_(8738, record.type)            # 0x2222
        eq_(13107, record.subtype)        # 0x3333
        eq_(4, record.length)
        eq_(body, record.message.buf)
        eq_(b'', rest)

    def test_serialize(self):
        body = b'test'
        buf = (b'\x11\x11\x11\x11""33\x00\x00\x00\x04' + body)
        message = mrtlib.UnknownMrtMessage(buf=body)
        record = mrtlib.UnknownMrtRecord(message=message, timestamp=286331153, type_=8738, subtype=13107, length=4)
        output = record.serialize()
        eq_(buf, output)
# NOTE(review): the two bare string lines below were almost certainly
# `@mock.patch(...)` decorators whose prefixes were stripped during extraction —
# the `mock_custom` / `mock_config` parameters only make sense with them
# (decorators apply bottom-up: get_custom -> mock_custom, get_config ->
# mock_config).  Restore from the upstream source.
('flytekit.core.python_customized_container_task.PythonCustomizedContainerTask.get_config')
('flytekit.core.python_customized_container_task.PythonCustomizedContainerTask.get_custom')
def test_serialize_to_model(mock_custom, mock_config):
    """Serializing a customized-container task picks up the (mocked) config and
    custom payloads plus the image and resource settings."""
    mock_custom.return_value = {'a': 'custom'}
    mock_config.return_value = {'a': 'config'}
    ct = PythonCustomizedContainerTask(name='mytest', task_config=None, container_image='someimage', executor_type=Placeholder, requests=Resources(ephemeral_storage='200Mi'), limits=Resources(ephemeral_storage='300Mi'))
    tt = ct.serialize_to_model(serialization_settings)
    assert (tt.container.image == 'someimage')
    assert (len(tt.config) == 1)
    assert (tt.id.name == 'mytest')
    assert (len(tt.custom) == 1)
    assert (tt.container.resources.requests[0].value == '200Mi')
    assert (tt.container.resources.limits[0].value == '300Mi')
def get_max_cat_level_special(save_stats: dict[(str, Any)], cat_id: int) -> int:
    """Return the maximum level cap for a special cat given save progress.

    Caps, in order of limiting condition: 10 before EoC chapter 2 is cleared,
    20 below user rank 1600, 30 without catseyes, then 30/40/50 depending on
    the ancient-curse clear flag and whether the cat is a legend.
    """
    # Gather all progress flags up front (same call order as before).
    legend = is_legend(cat_id)
    ancient_clear = uncanny.is_ancient_curse_clear(save_stats)
    rank = helper.calculate_user_rank(save_stats)
    catseyes = catseyes_unlocked(save_stats)
    eoc_chapter2_cleared = main_story.has_cleared_chapter(save_stats, 1)

    if not eoc_chapter2_cleared:
        return 10
    if rank < 1600:
        return 20
    if not catseyes:
        return 30
    if not ancient_clear:
        # Legends stay capped lower until the ancient curse is cleared.
        return 30 if legend else 40
    return 40 if legend else 50
def get_wordmap_fieldnames():
    """Return the ordered column names used for wordmap CSV entries."""
    fieldnames = [
        'upos', 'lemma', 'homonym', 'new_para', 'kotus_tn', 'kotus_av',
        'plurale_tantum', 'possessive', 'clitics', 'is_proper',
        'proper_noun_class', 'style', 'stub', 'gradestem', 'twolstem',
        'grade_dir', 'harmony', 'is_suffix', 'is_prefix', 'stem_vowel',
        'stem_diphthong', 'sem', 'particle', 'pronunciation', 'boundaries',
        'bracketstub', 'origin', 'extra_i', 'extra_e', 'real_pos', 'symbol',
        'argument', 'pronoun', 'abbr', 'lex', 'numtype', 'prontype',
        'adptype', 'blacklist', 'pos', 'deletion', 'suffix_regex',
    ]
    return fieldnames
class ExposureBoundsLever():
    """Lever that trips the shutter lock when the meter reading is out of bounds."""

    def __init__(self, exposure_control_system=None):
        self.exposure_control_system = exposure_control_system

    def activate(self):
        """Engage the shutter lock if metering is absent or out of range.

        Returns 'Blocked' when the EE lever is locked, a status string when
        the shutter lock is engaged, and None otherwise.
        """
        system = self.exposure_control_system
        if not system:
            return None
        if system.ee_lever.locked():
            return 'Blocked'
        reading = system.meter()
        # None / 'Under' / 'Over' all mean the exposure is out of bounds.
        if reading in (None, 'Under', 'Over'):
            system.shutter_lock_lever.activate()
            return 'Activated shutter lock lever'

    def deactivate(self):
        """Re-engage the shutter lock lever (no-op without a control system)."""
        system = self.exposure_control_system
        if not system:
            return None
        system.shutter_lock_lever.activate()
class RedisCache():
    """Falcon ASGI middleware caching rendered responses in Redis.

    Responses are stored msgpack-encoded under `PREFIX/path`; mutating
    methods (DELETE/POST/PUT) invalidate the cached entry instead.
    """

    PREFIX = 'asgilook:'
    INVALIDATE_ON = frozenset({'DELETE', 'POST', 'PUT'})
    CACHE_HEADER = 'X-ASGILook-Cache'
    TTL = 3600  # seconds

    def __init__(self, config):
        self._config = config
        self._redis = config.redis_from_url(config.redis_host)

    async def _serialize_response(self, resp):
        # Pack (content_type, rendered body) into one msgpack blob.
        body = await resp.render_body()
        return msgpack.packb([resp.content_type, body], use_bin_type=True)

    def _deserialize_response(self, resp, data):
        # Restore content type/body and mark the response as served from cache.
        resp.content_type, resp.data = msgpack.unpackb(data, raw=False)
        resp.complete = True
        resp.context.cached = True

    async def process_startup(self, scope, event):
        # Fail fast at startup if Redis is unreachable.
        await self._redis.ping()

    async def process_shutdown(self, scope, event):
        await self._redis.close()

    async def process_request(self, req, resp):
        resp.context.cached = False
        if req.method in self.INVALIDATE_ON:
            return
        data = await self._redis.get(f'{self.PREFIX}/{req.path}')
        if data is None:
            resp.set_header(self.CACHE_HEADER, 'Miss')
        else:
            self._deserialize_response(resp, data)
            resp.set_header(self.CACHE_HEADER, 'Hit')

    async def process_response(self, req, resp, resource, req_succeeded):
        if not req_succeeded:
            return
        key = f'{self.PREFIX}/{req.path}'
        if req.method in self.INVALIDATE_ON:
            # Mutations drop the cached entry for this path.
            await self._redis.delete(key)
        elif not resp.context.cached:
            # Only store responses that were not themselves cache hits.
            payload = await self._serialize_response(resp)
            await self._redis.set(key, payload, ex=self.TTL)
def _load_project_config(project_path: Path) -> None:
    """Load a project's `brownie-config` file, expand env vars (optionally from
    a dotenv file), and merge the result into the global CONFIG settings."""
    # NOTE(review): no file extension here — presumably `_load_config` resolves
    # the .yaml/.json suffix; confirm against its implementation.
    config_path = project_path.joinpath('brownie-config')
    config_data = _load_config(config_path)
    config_vars = _load_project_envvars(project_path)
    if ('dotenv' in config_data):
        if (not isinstance(config_data['dotenv'], str)):
            raise ValueError(f"Invalid value passed to dotenv: {config_data['dotenv']}")
        env_path = project_path.joinpath(config_data['dotenv'])
        if (not env_path.is_file()):
            raise ValueError(f'Dotenv specified in config but not found at path: {env_path}')
        # Dotenv values both extend the substitution vars and the process env.
        config_vars.update(dotenv_values(dotenv_path=env_path))
        load_dotenv(dotenv_path=env_path)
    # Substitute ${VAR}-style references in the config with the gathered vars.
    config_data = expand_posix_vars(config_data, config_vars)
    if (not config_data):
        return
    if ('network' in config_data):
        # Legacy key: warn and drop it so it never reaches CONFIG.
        warnings.warn(f'The `network` field in `brownie-config.yaml` has been deprecated. Network settings are now handled via `brownie networks` in the CLI. Remove `network` from {config_path} to silence this warning.', DeprecationWarning)
        del config_data['network']
    if (('networks' in config_data) and isinstance(config_data['networks'], dict)):
        # Merge per-network cmd_settings into already-known networks
        # (the 'default' pseudo-network is skipped).
        for (network, values) in config_data['networks'].items():
            if ((network != 'default') and (network in CONFIG.networks.keys()) and ('cmd_settings' in values) and isinstance(values['cmd_settings'], dict)):
                if ('cmd_settings' in CONFIG.networks[network]):
                    _recursive_update(CONFIG.networks[network]['cmd_settings'], values['cmd_settings'])
                else:
                    CONFIG.networks[network]['cmd_settings'] = values['cmd_settings']
    # CONFIG.settings is locked against mutation; unlock, merge, re-lock.
    CONFIG.settings._unlock()
    _recursive_update(CONFIG.settings, config_data)
    # Second pass so vars referenced by pre-existing settings also expand.
    _recursive_update(CONFIG.settings, expand_posix_vars(CONFIG.settings, config_vars))
    CONFIG.settings._lock()
    if ('hypothesis' in config_data):
        _modify_hypothesis_settings(config_data['hypothesis'], 'brownie', 'brownie-base')
def diag_quadrupole3d_10(ax, da, A, bx, db, B, R):
    """Machine-generated 3D diagonal quadrupole integral kernel for the
    (1,0) angular-momentum case over a Gaussian pair.

    Parameters (as used below): `ax`/`bx` are Gaussian exponents, `da`/`db`
    contraction/normalization coefficients, `A`/`B` center coordinates, and
    `R` the evaluation/reference point.  Returns a (3, 3, 1) array of summed
    components.  Do not hand-edit the common-subexpression chain (x0..x20);
    it is the output of symbolic code generation.
    """
    result = numpy.zeros((3, 3, 1), dtype=float)
    # x0: 1/(ax+bx); x1: Gaussian product center coordinate along x.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + R[0])
    x4 = (x2 + A[0])
    x5 = (0.5 * x0)
    # x6: reduced exponent; x7: prefactor with the Gaussian overlap exponential.
    x6 = ((ax * bx) * x0)
    x7 = ((((5. * da) * db) * numpy.sqrt(x0)) * numpy.exp(((- x6) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x8 = (x5 * x7)
    x9 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x10 = (- x9)
    x11 = (x10 + A[1])
    x12 = (x0 * x7)
    x13 = (x12 * ((x3 ** 2) + x5))
    x14 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x15 = (- x14)
    x16 = (x15 + A[2])
    x17 = (x10 + R[1])
    x18 = (x12 * ((x17 ** 2) + x5))
    x19 = (x15 + R[2])
    x20 = (x12 * ((x19 ** 2) + x5))
    result[(0, 0, 0)] = numpy.sum(((- x8) * ((x0 * ((((- 2.0) * x1) + A[0]) + R[0])) + (x3 * (x0 + ((2.0 * x3) * x4))))))
    result[(0, 1, 0)] = numpy.sum(((- x11) * x13))
    result[(0, 2, 0)] = numpy.sum(((- x13) * x16))
    result[(1, 0, 0)] = numpy.sum(((- x18) * x4))
    result[(1, 1, 0)] = numpy.sum(((- x8) * ((x0 * ((((- 2.0) * x9) + A[1]) + R[1])) + (x17 * (x0 + ((2.0 * x11) * x17))))))
    result[(1, 2, 0)] = numpy.sum(((- x16) * x18))
    result[(2, 0, 0)] = numpy.sum(((- x20) * x4))
    result[(2, 1, 0)] = numpy.sum(((- x11) * x20))
    result[(2, 2, 0)] = numpy.sum(((- x8) * ((x0 * ((((- 2.0) * x14) + A[2]) + R[2])) + (x19 * (x0 + ((2.0 * x16) * x19))))))
    return result
def emotion_caculate(text):
    """Count emotion-lexicon hits in `text` and return them as a pandas Series.

    Tokenizes with the module-level `txt_cut`, counts per-word frequencies
    against the module-level lexicon sets (Positive, Negative, Anger, ...),
    and logs every categorized hit to the module-level CSV `writer` as a
    [category, word, freq] row.

    Fix: the original reused one `tlist` row buffer across all categories, so
    a word present in several lexicons wrote ever-growing concatenated rows;
    each hit now writes its own clean three-column row.  (The function name's
    spelling is kept for caller compatibility.)
    """
    wordlist = txt_cut(text)
    counts = {'positive': 0, 'negative': 0, 'anger': 0, 'disgust': 0,
              'fear': 0, 'sad': 0, 'surprise': 0, 'good': 0, 'happy': 0}
    # Categories that are both counted and logged to the CSV writer.
    logged_lexicons = (
        (Anger, 'anger'), (Disgust, 'disgust'), (Fear, 'fear'), (Sad, 'sad'),
        (Surprise, 'surprise'), (Good, 'good'), (Happy, 'happy'),
    )
    for word in set(wordlist):
        freq = wordlist.count(word)
        # Positive/negative polarity is counted but not written to the CSV.
        if word in Positive:
            counts['positive'] += freq
        if word in Negative:
            counts['negative'] += freq
        for lexicon, label in logged_lexicons:
            if word in lexicon:
                counts[label] += freq
                writer.writerow([label, word, freq])
    emotion_info = {
        'length': len(wordlist),
        'positive': counts['positive'],
        'negative': counts['negative'],
        'anger': counts['anger'],
        'disgust': counts['disgust'],
        'fear': counts['fear'],
        'good': counts['good'],
        # Output key is 'sadness' even though the counter is 'sad'.
        'sadness': counts['sad'],
        'surprise': counts['surprise'],
        'happy': counts['happy'],
    }
    indexs = ['length', 'positive', 'negative', 'anger', 'disgust', 'fear',
              'sadness', 'surprise', 'good', 'happy']
    return pd.Series(emotion_info, index=indexs)
def get_jinja_environment():
    """Build a Jinja2 environment searching external templates (if configured)
    before the bundled local templates."""
    template_dirs = []
    if TEMPLATES_PATH:
        external = pathlib.Path(TEMPLATES_PATH).expanduser().resolve()
        # NOTE: assert is stripped under `python -O`; kept for behavior parity.
        assert os.path.isdir(external), 'External template path "{0}" not found'.format(external)
        template_dirs.append(external)
    template_dirs.append(LOCAL_TEMPLATES)
    loader = jinja2.FileSystemLoader([str(path) for path in template_dirs])
    return jinja2.Environment(loader=loader)
@_register_parser
@_set_msg_type(ofproto.OFPT_ECHO_REPLY)
class OFPEchoReply(MsgBase):
    """OpenFlow echo-reply message.

    Fixes: the two bare expressions above the class in the extracted source
    were decorators whose `@` was stripped (restored), and `parser` calls
    `super().parser` as a classmethod so its `@classmethod` decorator is
    restored as well.
    """

    def __init__(self, datapath, data=None):
        super(OFPEchoReply, self).__init__(datapath)
        # Arbitrary echo payload; must be set before serializing.
        self.data = data

    @classmethod
    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse the common header, then keep everything after it as payload."""
        msg = super(OFPEchoReply, cls).parser(datapath, version, msg_type, msg_len, xid, buf)
        msg.data = msg.buf[ofproto.OFP_HEADER_SIZE:]
        return msg

    def _serialize_body(self):
        # Echo replies must carry the payload of the request they answer.
        assert (self.data is not None)
        self.buf += self.data
def PlaylistsTab(playlists, accent_color, text_color, background_color, vertical_gui: bool, show_album_art: bool):
    """Build the PySimpleGUI 'Playlists' tab.

    `playlists` maps playlist name -> tracks; the first playlist (if any) is
    pre-selected.  Colors and the two layout flags only affect presentation.
    """
    playlists_names = list(playlists.keys())
    # Pre-select the first playlist when one exists.
    default_pl_name = (playlists_names[0] if playlists_names else None)
    # Top row: playlist actions (new/export/delete/play/queue/next) + selector combo.
    playlist_selector = [[IconButton(PLUS_ICON, 'new_pl', t('new playlist'), background_color), Sg.Button(image_data=EXPORT_PL, key='export_pl', tooltip=t('export playlist'), button_color=(background_color, background_color)), Sg.Button(image_data=DELETE_ICON, key='delete_pl', tooltip=t('delete playlist'), button_color=(background_color, background_color)), Sg.Button(image_data=PLAY_ICON, key='play_pl', tooltip=t('play playlist'), button_color=(background_color, background_color)), Sg.Button(image_data=QUEUE_ICON, key='queue_pl', tooltip=t('queue playlist'), button_color=(background_color, background_color)), Sg.Button(image_data=PLAY_NEXT_ICON, key='add_next_pl', tooltip=t('add to next up'), button_color=(background_color, background_color)), Sg.Combo(values=playlists_names, size=(PL_COMBO_W, 1), key='playlist_combo', font=FONT_NORMAL, enable_events=True, default_value=default_pl_name, readonly=True)]]
    playlist_name = (playlists_names[0] if playlists_names else '')
    pl_length_txt = [Sg.Text('', font=FONT_NORMAL, key='pl_length')]
    add_tracks_btn = [StyledButton(t('Add files'), accent_color, background_color, key='pl_add_tracks', button_width=13)]
    url_input_btn = [Sg.Input('', key='pl_url_input', size=(15, 1), font=FONT_NORMAL, border_width=1, enable_events=True)]
    add_url_btn = [StyledButton(t('Add URL'), accent_color, background_color, key='pl_add_url', button_width=13)]
    pl_saved_txt = [Sg.Text(t('Playlist saved'), key='pl_saved', font=FONT_NORMAL, visible=False, text_color='green')]
    # Shrink the track listbox when the GUI is vertical or album art is hidden.
    lb_height = (17 - (6 * (vertical_gui or (not show_album_art))))
    pl_name_text = t('Playlist name')
    # Keep the label wide enough for translated strings.
    name_text_w = max(13, len(pl_name_text))
    # Rows: selector / name + save / (left controls, track listbox, right per-track buttons).
    layout = [[Sg.Column(playlist_selector, pad=(5, 20))], [Sg.Text(pl_name_text, font=FONT_NORMAL, size=(name_text_w, 1), justification='center', pad=(4, (5, 10))), Sg.Input(playlist_name, key='pl_name', size=((60 - name_text_w), 1), font=FONT_NORMAL, pad=((22, 5), (5, 10)), border_width=1), Sg.Button(key='pl_save', image_data=SAVE_IMG, tooltip='Ctrl + S', button_color=(background_color, background_color))], [Sg.Column([pl_length_txt, add_tracks_btn, url_input_btn, add_url_btn, pl_saved_txt], vertical_alignment='top'), Sg.Listbox([], size=(45, lb_height), select_mode=Sg.SELECT_MODE_EXTENDED, text_color=text_color, key='pl_tracks', background_color=background_color, font=FONT_NORMAL, bind_return_key=True), Sg.Column([[IconButton(UP_ICON, 'pl_move_up', t('move up'), background_color)], [IconButton(X_ICON, 'pl_rm_items', t('remove'), background_color)], [IconButton(DOWN_ICON, 'pl_move_down', t('move down'), background_color)], [Sg.Button(image_data=PLAY_ICON, key='play_pl_selected', tooltip=t('play selected'), button_color=(background_color, background_color))], [Sg.Button(image_data=QUEUE_ICON, key='queue_pl_selected', tooltip=t('queue selected'), button_color=(background_color, background_color))], [Sg.Button(image_data=PLAY_NEXT_ICON, key='add_next_pl_selected', tooltip=t('add selected to next up'), button_color=(background_color, background_color))], [Sg.Button(image_data=LOCATE_FILE, key='pl_locate_selected', button_color=(background_color, background_color), tooltip=t('locate selected'), size=(2, 1))], [Sg.Button(image_data=COPY_ICON, key='pl_copy_selected', button_color=(background_color, background_color), tooltip=t('copy URIs'), size=(2, 1))]], background_color=background_color)]]
    return Sg.Tab(t('Playlists'), layout, key='tab_playlists')
class AdsInsightsMixin():
    """Constant namespaces used when querying the Ads Insights API.

    These are plain attribute namespaces (not enums) so values can be passed
    straight through as API strings.
    """

    class Increment(object):
        # Reporting time-increment granularities.
        monthly = 'monthly'
        all_days = 'all_days'

    class Operator(object):
        # Filter operators accepted by insights filtering
        # (note: `all`/`any`/`in_`... shadow/avoid Python builtins, hence the
        # trailing underscore on `in_`).
        all = 'ALL'
        any = 'ANY'
        contain = 'CONTAIN'
        equal = 'EQUAL'
        greater_than = 'GREATER_THAN'
        greater_than_or_equal = 'GREATER_THAN_OR_EQUAL'
        in_ = 'IN'
        in_range = 'IN_RANGE'
        less_than = 'LESS_THAN'
        less_than_or_equal = 'LESS_THAN_OR_EQUAL'
        none = 'NONE'
        not_contain = 'NOT_CONTAIN'
        not_equal = 'NOT_EQUAL'
        not_in = 'NOT_IN'
        not_in_range = 'NOT_IN_RANGE'
class PartialRoutes():
    """Test endpoints exercising partially-bound route handlers.

    NOTE(review): the first parameter is named `cls` but there is no
    @classmethod — these look like handlers bound via functools.partial with a
    pre-supplied first argument; confirm against the route registration.
    """

    async def async_endpoint(cls, arg, request):
        # Echo the bound `arg` back as JSON.
        return JSONResponse({'arg': arg})

    async def async_ws_endpoint(cls, websocket: WebSocket):
        # Accept, report the connection URL, then close.
        (await websocket.accept())
        (await websocket.send_json({'url': str(websocket.url)}))
        (await websocket.close())
def parse_args(argv):
    """Parse command-line arguments for `ergo encode` and return the namespace."""
    parser = argparse.ArgumentParser(
        prog='ergo encode',
        description='Encode one or more files to vectors and create or update a csv dataset for training.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    # Positional arguments.
    parser.add_argument('project', help='The path containing the model definition.')
    parser.add_argument('path', help='Path of a single file or of a folder of files.')
    # Optional arguments.
    parser.add_argument('-l', '--label', dest='label', default='auto',
                        help="The class of the file(s) or 'auto' to use the name of the containing folder.")
    parser.add_argument('-o', '--output', dest='output', default='dataset.csv',
                        help='Output CSV file names.')
    parser.add_argument('-f', '--filter', dest='filter', default='*.*',
                        help='If PATH is a folder, this flag determines which files are going to be selected.')
    parser.add_argument('-m', '--multi', dest='multi', default=False, action='store_true',
                        help='If PATH is a single file, this flag will enable line by line reading of multiple inputs from it.')
    parser.add_argument('-w', '--workers', dest='workers', default=0, type=int,
                        help='Number of concurrent workers to use for encoding, or zero to run two workers per available CPU core.')
    parser.add_argument('-d', '--delete', dest='delete', default=False, action='store_true',
                        help='Delete each file after encoding it.')
    return parser.parse_args(argv)
class TestGetDefaultMessagingConfig():
    """API tests for fetching the default messaging config by service type.

    NOTE(review): the bare `(scope='function')` lines below are almost
    certainly `@pytest.fixture(scope='function')` decorators whose prefixes
    were stripped during extraction — without them `url`/`url_uppercase` are
    plain methods, not fixtures; restore from the upstream source.
    """

    (scope='function')
    def url(self, messaging_config: MessagingConfig) -> str:
        # Endpoint URL for the configured service type.
        return (V1_URL_PREFIX + MESSAGING_DEFAULT_BY_TYPE).format(service_type=messaging_config.service_type.value)

    (scope='function')
    def url_uppercase(self, messaging_config: MessagingConfig) -> str:
        # Same endpoint with an upper-cased service type (case-insensitivity check).
        return (V1_URL_PREFIX + MESSAGING_DEFAULT_BY_TYPE).format(service_type=messaging_config.service_type.value.upper())

    def test_get_default_config_not_authenticated(self, url, api_client: TestClient):
        # No auth header -> 401.
        response = api_client.get(url)
        assert (401 == response.status_code)

    def test_get_default_config_wrong_scope(self, url, api_client: TestClient, generate_auth_header):
        # Wrong scope (delete instead of read) -> 403.
        auth_header = generate_auth_header([MESSAGING_DELETE])
        response = api_client.get(url, headers=auth_header)
        assert (403 == response.status_code)

    def test_get_default_config_invalid(self, url, api_client: TestClient, generate_auth_header):
        # Unknown service type -> 422 validation error.
        auth_header = generate_auth_header([MESSAGING_READ])
        response = api_client.get((V1_URL_PREFIX + MESSAGING_DEFAULT_BY_TYPE).format(service_type='invalid'), headers=auth_header)
        assert (422 == response.status_code)

    def test_get_default_config_not_exist(self, api_client: TestClient, generate_auth_header):
        # Valid type but no config stored -> 404.
        auth_header = generate_auth_header([MESSAGING_READ])
        response = api_client.get((V1_URL_PREFIX + MESSAGING_DEFAULT_BY_TYPE).format(service_type=MessagingServiceType.mailgun.value), headers=auth_header)
        assert (404 == response.status_code)

    def test_get_default_config(self, url, api_client: TestClient, generate_auth_header, messaging_config: MessagingConfig):
        auth_header = generate_auth_header([MESSAGING_READ])
        response = api_client.get(url, headers=auth_header)
        assert (response.status_code == 200)
        response_body = response.json()
        assert (response_body == {'key': 'my_mailgun_messaging_config', 'name': messaging_config.name, 'service_type': MessagingServiceType.mailgun.value, 'details': {MessagingServiceDetails.API_VERSION.value: 'v3', MessagingServiceDetails.DOMAIN.value: 'some.domain', MessagingServiceDetails.IS_EU_DOMAIN.value: False}})

    def test_get_default_config_uppered_url(self, url_uppercase, api_client: TestClient, generate_auth_header, messaging_config: MessagingConfig):
        # Service type in the URL is treated case-insensitively.
        auth_header = generate_auth_header([MESSAGING_READ])
        response = api_client.get(url_uppercase.lower(), headers=auth_header)
        assert (response.status_code == 200)
        response_body = response.json()
        assert (response_body == {'key': 'my_mailgun_messaging_config', 'name': messaging_config.name, 'service_type': MessagingServiceType.mailgun.value, 'details': {MessagingServiceDetails.API_VERSION.value: 'v3', MessagingServiceDetails.DOMAIN.value: 'some.domain', MessagingServiceDetails.IS_EU_DOMAIN.value: False}})
def format_timestamp(timestamp_str, datetime_formatter):
    """Parse `timestamp_str` and re-format it with `datetime_formatter`.

    Stray single/double quotes are stripped before parsing.  Returns the
    formatted string, or None (after logging a warning) when parsing fails.
    """
    try:
        if '"' in timestamp_str or "'" in timestamp_str:
            # Remove stray quoting before handing off to the parser.
            timestamp_str = timestamp_str.replace('"', '').replace("'", '')
        parsed = dateutil_parser.parse(timestamp_str)
        return parsed.strftime(datetime_formatter)
    except (TypeError, ValueError) as e:
        LOGGER.warning('Unable to parse/format timestamp: %s\n, datetime_formatter: %s\n%s', timestamp_str, datetime_formatter, e)
        return None
class Config(dict):
def __init__(self, root_path=None, data_path=None, defaults=default_config):
dict.__init__(self, (defaults or {}))
if root_path:
self.root_path = root_path
else:
self.root_path = os.path.dirname(os.path.realpath(__file__))
if data_path:
self.data_path = data_path
else:
self.data_path = gettempdir()
self['ROOT_PATH'] = self.root_path
self['DATA_PATH'] = self.data_path
def normalize_paths(self):
rootpath = self.root_path
datapath = self.data_path
if (not datapath):
return
self['DB_FILE'] = os.path.join(datapath, self['DATABASE'])
self['BLOCKED_GREETING_FILE'] = os.path.join(rootpath, self['BLOCKED_GREETING_FILE'])
self['SCREENED_GREETING_FILE'] = os.path.join(rootpath, self['SCREENED_GREETING_FILE'])
self['PERMITTED_GREETING_FILE'] = os.path.join(rootpath, self['PERMITTED_GREETING_FILE'])
self['VOICE_MAIL_GREETING_FILE'] = os.path.join(rootpath, self['VOICE_MAIL_GREETING_FILE'])
self['VOICE_MAIL_GOODBYE_FILE'] = os.path.join(rootpath, self['VOICE_MAIL_GOODBYE_FILE'])
self['VOICE_MAIL_LEAVE_MESSAGE_FILE'] = os.path.join(rootpath, self['VOICE_MAIL_LEAVE_MESSAGE_FILE'])
self['VOICE_MAIL_INVALID_RESPONSE_FILE'] = os.path.join(rootpath, self['VOICE_MAIL_INVALID_RESPONSE_FILE'])
self['VOICE_MAIL_MENU_FILE'] = os.path.join(rootpath, self['VOICE_MAIL_MENU_FILE'])
self['VOICE_MAIL_MESSAGE_FOLDER'] = os.path.join(datapath, self['VOICE_MAIL_MESSAGE_FOLDER'])
def validate(self):
success = True
if (self['ENV'] not in ('production', 'development')):
print('* ENV is incorrect: {}'.format(self['ENV']))
success = False
if (not isinstance(self['DEBUG'], bool)):
print('* DEBUG should be a bool: {}'.format(type(self['DEBUG'])))
success = False
if (not isinstance(self['TESTING'], bool)):
print('* TESTING should be bool: {}'.format(type(self['TESTING'])))
success = False
if (not isinstance(self['BLOCK_ENABLED'], bool)):
print('* BLOCK_ENABLED should be a bool: {}'.format(type(self['BLOCK_ENABLED'])))
success = False
for mode in self['SCREENING_MODE']:
if (mode not in ('whitelist', 'blacklist')):
print('* SCREENING_MODE option is invalid: {}'.format(mode))
success = False
if (not self._validate_actions('BLOCKED_ACTIONS')):
success = False
if (not self._validate_actions('SCREENED_ACTIONS')):
success = False
if (not self._validate_actions('PERMITTED_ACTIONS')):
success = False
if (not isinstance(self['BLOCKED_RINGS_BEFORE_ANSWER'], int)):
print('* BLOCKED_RINGS_BEFORE_ANSWER should be an integer: {}'.format(type(self['BLOCKED_RINGS_BEFORE_ANSWER'])))
success = False
if (not isinstance(self['SCREENED_RINGS_BEFORE_ANSWER'], int)):
print('* SCREENED_RINGS_BEFORE_ANSWER should be an integer: {}'.format(type(self['SCREENED_RINGS_BEFORE_ANSWER'])))
success = False
if (not isinstance(self['PERMITTED_RINGS_BEFORE_ANSWER'], int)):
print('* PERMITTED_RINGS_BEFORE_ANSWER should be an integer: {}'.format(type(self['PERMITTED_RINGS_BEFORE_ANSWER'])))
success = False
filepath = self['BLOCKED_GREETING_FILE']
if (not os.path.exists(filepath)):
print('* BLOCKED_GREETING_FILE not found: {}'.format(filepath))
success = False
filepath = self['SCREENED_GREETING_FILE']
if (not os.path.exists(filepath)):
print('* SCREENED_GREETING_FILE not found: {}'.format(filepath))
success = False
filepath = self['PERMITTED_GREETING_FILE']
if (not os.path.exists(filepath)):
print('* PERMITTED_GREETING_FILE not found: {}'.format(filepath))
success = False
filepath = self['VOICE_MAIL_GREETING_FILE']
if (not os.path.exists(filepath)):
print('* VOICE_MAIL_GREETING_FILE not found: {}'.format(filepath))
success = False
filepath = self['VOICE_MAIL_GOODBYE_FILE']
if (not os.path.exists(filepath)):
print('* VOICE_MAIL_GOODBYE_FILE not found: {}'.format(filepath))
success = False
filepath = self['VOICE_MAIL_LEAVE_MESSAGE_FILE']
if (not os.path.exists(filepath)):
print('* VOICE_MAIL_LEAVE_MESSAGE_FILE not found: {}'.format(filepath))
success = False
filepath = self['VOICE_MAIL_INVALID_RESPONSE_FILE']
if (not os.path.exists(filepath)):
print('* VOICE_MAIL_INVALID_RESPONSE_FILE not found: {}'.format(filepath))
success = False
filepath = self['VOICE_MAIL_MENU_FILE']
if (not os.path.exists(filepath)):
print('* VOICE_MAIL_MENU_FILE not found: {}'.format(filepath))
success = False
filepath = self['VOICE_MAIL_MESSAGE_FOLDER']
if (not os.path.exists(filepath)):
print('* VOICE_MAIL_MESSAGE_FOLDER not found: {}'.format(filepath))
success = False
if (not (self['PHONE_DISPLAY_SEPARATOR'] in self['PHONE_DISPLAY_FORMAT'])):
print("* WARNING: PHONE_DISPLAY_SEPARATOR not used in PHONE_DISPLAY_FORMAT: '{}'".format(self['PHONE_DISPLAY_SEPARATOR']))
return success
def _validate_actions(self, key):
if (not isinstance(self[key], tuple)):
print('* {} must be a tuple, not {}'.format(key, type(self[key])))
return False
for action in self[key]:
if (action not in ('answer', 'ignore', 'greeting', 'record_message', 'voice_mail')):
print('* {} option is invalid: {}'.format(key, action))
return False
if (not any(((a in self[key]) for a in ('answer', 'ignore')))):
print("* {} must include either 'answer' or 'ignore'".format(key))
return False
if all(((a in self[key]) for a in ('answer', 'ignore'))):
print("* {} cannot include both 'answer' and 'ignore'".format(key))
return False
if all(((a in self[key]) for a in ('record_message', 'voice_mail'))):
print("* {} cannot include both 'record_message' and 'voice_mail'".format(key))
return False
if ('ignore' in self[key]):
if any(((a in self[key]) for a in ('greeting', 'record_message', 'voice_mail'))):
print("* WARNING: {} contains actions in addition to 'ignore'. They not be used.".format(key))
return True
def pretty_print(self):
    """Dump every configuration key/value pair to stdout, sorted by key."""
    print('[Configuration]')
    for name in sorted(self.keys()):
        print(' {} = {}'.format(name, self[name]))
def from_pyfile(self, filename, silent=False):
    """Execute a Python config file and adopt its UPPERCASE names.

    *filename* is resolved relative to ``self.data_path``.  Returns True
    on success.  When the file cannot be read: returns False if *silent*
    is set and the path is missing/not a file, otherwise re-raises the
    OSError with a friendlier ``strerror``.
    """
    path = os.path.join(self.data_path, filename)
    module = types.ModuleType('config')
    module.__file__ = path
    try:
        with open(path, mode='rb') as fh:
            source = fh.read()
        # Compile with the real path so tracebacks point at the config file.
        exec(compile(source, path, 'exec'), module.__dict__)
    except OSError as err:
        ignorable = err.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR)
        if silent and ignorable:
            return False
        err.strerror = 'Unable to load configuration file ({})'.format(err.strerror)
        raise
    self.from_object(module)
    return True
def from_object(self, obj):
    """Copy every UPPERCASE attribute of *obj* into this mapping.

    A string *obj* is first resolved with ``import_string`` to the object
    it names.
    """
    if isinstance(obj, str):
        obj = import_string(obj)
    uppercase_names = (name for name in dir(obj) if name.isupper())
    for name in uppercase_names:
        self[name] = getattr(obj, name)
def get_namespace(self, namespace, lowercase=True, trim_namespace=True):
    """Return a dict of the options whose keys start with *namespace*.

    The prefix is stripped when *trim_namespace* is true, and the
    resulting keys are lowercased when *lowercase* is true.
    """
    selected = {}
    prefix_len = len(namespace)
    for full_key, value in self.items():
        if not full_key.startswith(namespace):
            continue
        key = full_key[prefix_len:] if trim_namespace else full_key
        if lowercase:
            key = key.lower()
        selected[key] = value
    return selected
def __repr__(self):
return '<{} {}>'.format(type(self).__name__, dict.__repr__(self)) |
def test_boundary_from_points_more_data(testpath):
    """Boundary polygon from the larger point set has the expected vertex
    count and leading X coordinates."""
    pts = xtgeo.points_from_file(testpath / POINTSET2)
    poly = xtgeo.Polygons.boundary_from_points(pts, alpha=2000)
    assert (len(poly.dataframe) == 28)
    leading_xs = poly.dataframe[poly.xname].values[0:5].tolist()
    assert (leading_xs == pytest.approx([460761.571, 461325.128, 461325.128, 462452.241, 462452.241]))
class PaymentManager(models.Manager):
    """Query helpers selecting Payment rows through their related bills."""

    def booking_payments_by_location(self, location):
        # Payments whose bill belongs to a booking used at *location*.
        bills = BookingBill.objects.filter(booking__use__location=location)
        return Payment.objects.filter(bill__in=bills)

    def subscription_payments_by_location(self, location):
        # Payments whose bill belongs to a subscription at *location*.
        bills = SubscriptionBill.objects.filter(subscription__location=location)
        return Payment.objects.filter(bill__in=bills)

    def booking_payments_by_resource(self, resource):
        # Payments whose bill belongs to a booking of *resource*.
        bills = BookingBill.objects.filter(booking__use__resource=resource)
        return Payment.objects.filter(bill__in=bills)
def max_ff_f(gen, t, srcs):
    """Emit IR computing max(srcs[0], srcs[1]) for floats.

    Returns the destination temp that holds the larger value.  The
    emission order (compare, branch, moves, labels) is significant and
    mirrors a standard two-way select.
    """
    lhs, rhs = srcs[0], srcs[1]
    take_right = gen.symbols.newLabel()
    end = gen.symbols.newLabel()
    result = gen.newTemp(Float)
    # Branch when the right operand is strictly greater.
    is_right_greater = gen.emit_binop('>', [rhs, lhs], Float)
    gen.emit_cjump(is_right_greater, take_right)
    gen.emit_move(lhs, result)
    gen.emit_jump(end)
    gen.emit_label(take_right)
    gen.emit_move(rhs, result)
    gen.emit_label(end)
    return result
class FaucetUntaggedMeterAddTest(FaucetUntaggedMeterParseTest):
    """Check that a meter/ACL pair can be added to and removed from a live
    config reload, and that the datapath's meter table follows suit."""
    NUM_FAUCET_CONTROLLERS = 1

    def test_untagged(self):
        # Run the parent's baseline checks first.
        super().test_untagged()
        conf = self._get_faucet_conf()
        # Add a second lossy meter and an ACL that references it.
        conf['meters']['lossymeter2'] = {'meter_id': 2, 'entry': {'flags': ['PKTPS'], 'bands': [{'rate': '1000', 'type': 'DROP'}]}}
        conf['acls']['lossyacl2'] = [{'rule': {'actions': {'allow': 1, 'meter': 'lossymeter2'}}}]
        port_conf = conf['dps'][self.DP_NAME]['interfaces'][self.port_map['port_2']]
        port_conf['acls_in'] = ['lossyacl2']
        self.reload_conf(conf, self.faucet_config_path, restart=True, cold_start=True, change_expected=True, hup=True)
        # Meter id 2 should now appear in the datapath's meter dump.
        self.wait_until_matching_lines_from_file(".+\\'meter_id\\'\\: 2+", self.get_matching_meters_on_dpid(self.dpid))
        # Detach the ACL again; the meter entry should disappear.
        port_conf['acls_in'] = []
        self.reload_conf(conf, self.faucet_config_path, restart=True, cold_start=True, change_expected=True)
        self.wait_until_no_matching_lines_from_file(".+\\'meter_id\\'\\: 2+", self.get_matching_meters_on_dpid(self.dpid))
def nextCmd(snmpDispatcher, authData, transportTarget, *varBinds, **options):
    """Issue an SNMP GETNEXT walk via *snmpDispatcher*.

    *options* may carry 'lookupMib' (resolve var-binds through the MIB
    view), 'cbFun' (per-response callback) and 'cbCtx' (opaque context
    passed back to the callback).  Walking continues as long as the
    callback returns a non-empty set of next var-binds.
    """
    def _cbFun(snmpDispatcher, stateHandle, errorIndication, rspPdu, _cbCtx):
        # Without a user callback there is nothing to deliver and nothing
        # to decide about continuing the walk.
        if (not cbFun):
            return
        if errorIndication:
            cbFun(errorIndication, pMod.Integer(0), pMod.Integer(0), None, cbCtx=cbCtx, snmpDispatcher=snmpDispatcher, stateHandle=stateHandle)
            return
        errorStatus = pMod.apiPDU.getErrorStatus(rspPdu)
        errorIndex = pMod.apiPDU.getErrorIndex(rspPdu)
        # Table rows are derived from the request PDU plus the response.
        varBindTable = pMod.apiPDU.getVarBindTable(reqPdu, rspPdu)
        (errorIndication, nextVarBinds) = pMod.apiPDU.getNextVarBinds(varBindTable[(- 1)], errorIndex=errorIndex)
        if options.get('lookupMib'):
            varBindTable = [VB_PROCESSOR.unmakeVarBinds(snmpDispatcher.cache, vbs) for vbs in varBindTable]
        nextStateHandle = pMod.getNextRequestID()
        # The callback may veto continuation (falsy return) or supply the
        # var-binds for the next GETNEXT iteration.
        nextVarBinds = cbFun(errorIndication, errorStatus, errorIndex, varBindTable, cbCtx=cbCtx, snmpDispatcher=snmpDispatcher, stateHandle=stateHandle, nextStateHandle=nextStateHandle, nextVarBinds=nextVarBinds)
        if (not nextVarBinds):
            return
        # Reuse the request PDU for the next step of the walk.
        pMod.apiPDU.setRequestID(reqPdu, nextStateHandle)
        pMod.apiPDU.setVarBinds(reqPdu, nextVarBinds)
        return snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun)
    (lookupMib, cbFun, cbCtx) = [options.get(x) for x in ('lookupMib', 'cbFun', 'cbCtx')]
    if lookupMib:
        varBinds = VB_PROCESSOR.makeVarBinds(snmpDispatcher.cache, varBinds)
    # Pick the protocol module matching the credentials' SNMP version.
    pMod = api.PROTOCOL_MODULES[authData.mpModel]
    reqPdu = pMod.GetNextRequestPDU()
    pMod.apiPDU.setDefaults(reqPdu)
    pMod.apiPDU.setVarBinds(reqPdu, varBinds)
    return snmpDispatcher.sendPdu(authData, transportTarget, reqPdu, cbFun=_cbFun)
def _filter_out_variables_not_in_dataframe(X, variables):
if (variables is None):
return None
if (not isinstance(variables, list)):
variables = [variables]
filtered_variables = [var for var in variables if (var in X.columns)]
if (len(filtered_variables) == 0):
raise ValueError('After filtering no variable remaining. At least 1 is required.')
return filtered_variables |
# NOTE(review): the two tuples below read like the argument lists of
# stripped @unittest.skipIf decorators -- confirm against the original file.
((not has_traitsui()), 'TraitsUI not installed')
((toolkit.toolkit == 'wx'), 'wxPython not supported')
class TestTraitsUIWidgetAction(unittest.TestCase, UnittestTools):
    """GUI tests for TraitsUIWidgetAction: changing the embedded TraitsUI
    editor must propagate to the action's (or its model's) traits."""

    def setUp(self):
        # A real toolkit window is required to host the action's control.
        self.gui = GUI()
        self.parent = Window()
        self.parent.create()
        self.parent.open()
        self.addCleanup(self._destroy_parent)
        self.gui.process_events()

    def _destroy_parent(self):
        # Tear down the host window and flush pending toolkit events.
        self.parent.destroy()
        self.gui.process_events()
        self.parent = None

    def create_model(self):
        """Return a fresh HasTraits model with a single Enum trait."""
        from traitsui.api import View, Item

        class SimpleEnum(HasTraits):
            value = Enum('a', 'b', 'c')
            view = View(Item('value'))
        return SimpleEnum()

    def test_traitsui_widget_action(self):
        # Editing the widget updates the trait on the action itself.
        from traitsui.api import View, Item

        class SimpleEnumAction(TraitsUIWidgetAction):
            value = Enum('a', 'b', 'c')
            view = View(Item('value'))
        action = SimpleEnumAction(name='Simple')
        control = action.create_control(self.parent.control)
        self.gui.process_events()
        editor = control._ui.get_editors('value')[0]
        with self.assertTraitChanges(action, 'value', count=1):
            if (toolkit.toolkit in {'qt', 'qt4'}):
                # Simulate the user picking the second combo entry.
                editor.control.setCurrentIndex(1)
                editor.control.activated.emit(1)
            elif (toolkit.toolkit == 'wx'):
                import wx
                event = wx.CommandEvent(wx.EVT_CHOICE.typeId, editor.control.GetId())
                event.SetString('b')
                wx.PostEvent(editor.control.GetEventHandler(), event)
            else:
                self.skipTest('Unknown toolkit')
            self.gui.process_events()
        self.assertEqual(action.value, 'b')

    def test_traitsui_widget_action_model(self):
        # With a model supplied, the widget edits the model's trait instead.
        from traitsui.api import View, Item

        class SimpleEnumAction(TraitsUIWidgetAction):
            view = View(Item('value'))
        model = self.create_model()
        action = SimpleEnumAction(name='Simple', model=model)
        control = action.create_control(self.parent.control)
        self.gui.process_events()
        editor = control._ui.get_editors('value')[0]
        with self.assertTraitChanges(model, 'value', count=1):
            if (toolkit.toolkit in {'qt', 'qt4'}):
                editor.control.setCurrentIndex(1)
                editor.control.activated.emit(1)
            elif (toolkit.toolkit == 'wx'):
                import wx
                event = wx.CommandEvent(wx.EVT_CHOICE.typeId, editor.control.GetId())
                event.SetString('b')
                wx.PostEvent(editor.control.GetEventHandler(), event)
            else:
                self.skipTest('Unknown toolkit')
            self.gui.process_events()
        self.assertEqual(model.value, 'b')

    def test_traitsui_widget_action_model_view(self):
        # A view can mix model traits and action traits ('action.value');
        # each editor must route changes to its respective object.
        from traitsui.api import HGroup, View, Item

        class ComplexEnumAction(TraitsUIWidgetAction):
            value = Enum('a', 'b', 'c')
            view = View(HGroup(Item('value'), Item('action.value')))
        model = self.create_model()
        action = ComplexEnumAction(name='Simple', model=model)
        control = action.create_control(self.parent.control)
        self.gui.process_events()
        editor = control._ui.get_editors('value')[0]
        with self.assertTraitChanges(model, 'value', count=1):
            if (toolkit.toolkit in {'qt', 'qt4'}):
                editor.control.setCurrentIndex(1)
                editor.control.activated.emit(1)
            elif (toolkit.toolkit == 'wx'):
                import wx
                event = wx.CommandEvent(wx.EVT_CHOICE.typeId, editor.control.GetId())
                event.SetString('b')
                wx.PostEvent(editor.control.GetEventHandler(), event)
            else:
                self.skipTest('Unknown toolkit')
            self.gui.process_events()
        self.assertEqual(model.value, 'b')
        # Second editor targets the action's own trait.
        editor = control._ui.get_editors('value')[1]
        with self.assertTraitChanges(action, 'value', count=1):
            if (toolkit.toolkit in {'qt', 'qt4'}):
                editor.control.setCurrentIndex(2)
                editor.control.activated.emit(2)
            elif (toolkit.toolkit == 'wx'):
                # 'wx' was imported in the branch above within this method.
                event = wx.CommandEvent(wx.EVT_CHOICE.typeId, editor.control.GetId())
                event.SetString('c')
                wx.PostEvent(editor.control.GetEventHandler(), event)
            else:
                self.skipTest('Unknown toolkit')
            self.gui.process_events()
        self.assertEqual(action.value, 'c')
class UrlInput(QLineEdit):
    """Line edit with Enter-to-submit behaviour and a completer that grows
    from previously entered URLs."""

    def __init__(self, cb, parent=None):
        QLineEdit.__init__(self, parent)
        self.cb = cb  # invoked when the user presses Enter/Return
        self.suggestions = []
        self.completer = QCompleter(self.suggestions, self)
        self.completer.setCaseSensitivity(Qt.CaseInsensitive)
        self.setCompleter(self.completer)

    def keyPressEvent(self, e):
        if e.key() in (Qt.Key_Enter, Qt.Key_Return):
            self.cb()
            self._refresh_completer()
            e.accept()
            return
        QLineEdit.keyPressEvent(self, e)

    def _refresh_completer(self):
        # Remember the current URL (display form) and rebuild the completer
        # so new suggestions take effect.
        text = QUrl(self.text()).toDisplayString()
        if text and text not in self.suggestions:
            self.suggestions.append(text)
            self.completer = QCompleter(self.suggestions, self)
            self.completer.setCaseSensitivity(Qt.CaseInsensitive)
            self.setCompleter(self.completer)

    def mousePressEvent(self, e):
        # Clicking the field selects all text for quick replacement.
        self.selectAll()
class Uint(int):
    """Unsigned arbitrary-precision integer.

    Subclass of ``int`` whose arithmetic raises ValueError whenever an
    operation would involve or produce a negative value, and TypeError
    (via NotImplemented) for non-int operands.  All operators return
    instances of the concrete subclass (``self.__class__``).
    """
    __slots__ = ()

    # NOTE(review): from_be_bytes/from_le_bytes take `cls` and read like
    # alternate constructors; the @classmethod decorator appears to have
    # been lost in extraction -- confirm against the original module.
    def from_be_bytes(cls: Type, buffer: 'Bytes') -> 'Uint':
        """Build a value from a big-endian byte string."""
        return cls(int.from_bytes(buffer, 'big'))

    def from_le_bytes(cls: Type, buffer: 'Bytes') -> 'Uint':
        """Build a value from a little-endian byte string."""
        return cls(int.from_bytes(buffer, 'little'))

    def __init__(self, value: int) -> None:
        # Validation only -- int is immutable, so the value itself is
        # established by int.__new__.
        if (not isinstance(value, int)):
            raise TypeError()
        if (value < 0):
            raise ValueError()

    def __radd__(self, left: int) -> 'Uint':
        # Addition commutes, so delegate to __add__.
        return self.__add__(left)

    def __add__(self, right: int) -> 'Uint':
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__add__(self, right))

    def __iadd__(self, right: int) -> 'Uint':
        return self.__add__(right)

    def __sub__(self, right: int) -> 'Uint':
        # Underflow (self < right) is rejected rather than wrapping.
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (self < right)):
            raise ValueError()
        return int.__new__(self.__class__, int.__sub__(self, right))

    def __rsub__(self, left: int) -> 'Uint':
        if (not isinstance(left, int)):
            return NotImplemented
        if ((left < 0) or (self > left)):
            raise ValueError()
        return int.__new__(self.__class__, int.__rsub__(self, left))

    def __isub__(self, right: int) -> 'Uint':
        return self.__sub__(right)

    def __mul__(self, right: int) -> 'Uint':
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__mul__(self, right))

    def __rmul__(self, left: int) -> 'Uint':
        return self.__mul__(left)

    def __imul__(self, right: int) -> 'Uint':
        return self.__mul__(right)

    def __floordiv__(self, right: int) -> 'Uint':
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__floordiv__(self, right))

    def __rfloordiv__(self, left: int) -> 'Uint':
        if (not isinstance(left, int)):
            return NotImplemented
        if (left < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__rfloordiv__(self, left))

    def __ifloordiv__(self, right: int) -> 'Uint':
        return self.__floordiv__(right)

    def __mod__(self, right: int) -> 'Uint':
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__mod__(self, right))

    def __rmod__(self, left: int) -> 'Uint':
        if (not isinstance(left, int)):
            return NotImplemented
        if (left < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__rmod__(self, left))

    def __imod__(self, right: int) -> 'Uint':
        return self.__mod__(right)

    def __divmod__(self, right: int) -> Tuple[('Uint', 'Uint')]:
        # Both quotient and remainder are re-wrapped in the subclass.
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        result = int.__divmod__(self, right)
        return (int.__new__(self.__class__, result[0]), int.__new__(self.__class__, result[1]))

    def __rdivmod__(self, left: int) -> Tuple[('Uint', 'Uint')]:
        if (not isinstance(left, int)):
            return NotImplemented
        if (left < 0):
            raise ValueError()
        result = int.__rdivmod__(self, left)
        return (int.__new__(self.__class__, result[0]), int.__new__(self.__class__, result[1]))

    def __pow__(self, right: int, modulo: Optional[int] = None) -> 'Uint':
        # Optional three-argument pow; a negative modulus is rejected.
        if (modulo is not None):
            if (not isinstance(modulo, int)):
                return NotImplemented
            if (modulo < 0):
                raise ValueError()
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__pow__(self, right, modulo))

    def __rpow__(self, left: int, modulo: Optional[int] = None) -> 'Uint':
        if (modulo is not None):
            if (not isinstance(modulo, int)):
                return NotImplemented
            if (modulo < 0):
                raise ValueError()
        if (not isinstance(left, int)):
            return NotImplemented
        if (left < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__rpow__(self, left, modulo))

    def __ipow__(self, right: int, modulo: Optional[int] = None) -> 'Uint':
        return self.__pow__(right, modulo)

    def __xor__(self, right: int) -> 'Uint':
        if (not isinstance(right, int)):
            return NotImplemented
        if (right < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__xor__(self, right))

    def __rxor__(self, left: int) -> 'Uint':
        if (not isinstance(left, int)):
            return NotImplemented
        if (left < 0):
            raise ValueError()
        return int.__new__(self.__class__, int.__rxor__(self, left))

    def __ixor__(self, right: int) -> 'Uint':
        return self.__xor__(right)

    def to_be_bytes32(self) -> 'Bytes32':
        """Serialize to exactly 32 big-endian bytes (left zero-padded)."""
        return Bytes32(self.to_bytes(32, 'big'))

    def to_be_bytes(self) -> 'Bytes':
        """Serialize to the shortest big-endian byte string.

        Zero serializes to the empty byte string (byte_length == 0).
        """
        bit_length = self.bit_length()
        byte_length = ((bit_length + 7) // 8)
        return self.to_bytes(byte_length, 'big')

    def to_le_bytes(self, number_bytes: Optional[int] = None) -> 'Bytes':
        """Serialize little-endian, padded/sized to *number_bytes* when
        given, otherwise to the shortest representation."""
        if (number_bytes is None):
            bit_length = self.bit_length()
            number_bytes = ((bit_length + 7) // 8)
        return self.to_bytes(number_bytes, 'little')
class EvenniaPasswordValidator():
    """Django password validator enforcing a character-whitelist policy."""

    def __init__(self, regex="^[\\w. +\\-',]+$", policy="Password should contain a mix of letters, spaces, digits and /./+/-/_/'/, only."):
        # regex: anchored pattern a valid password must satisfy.
        # policy: human-readable rule shown to the user on rejection.
        self.regex = regex
        self.policy = policy

    def validate(self, password, user=None):
        # Reject passwords containing characters outside the whitelist.
        matches = re.findall(self.regex, password)
        if not matches:
            raise ValidationError(_(self.policy), code='evennia_password_policy')

    def get_help_text(self):
        help_text = '{policy} From a terminal client, you can also use a phrase of multiple words if you enclose the password in double quotes.'.format(policy=self.policy)
        return _(help_text)
class Grid(object):
    """1-D or 2-D grid of (hex-color, name) cells backed by numpy arrays.

    Cells missing from the input lists are padded with 'FFFFFF' colors
    and generated 'RR-…' names.

    NOTE(review): T/size/shape/values/grid/h_line/v_line are used elsewhere
    without call parentheses (e.g. ``self.size`` in __str__, ``self.h_line.values``
    in grid) -- they appear to be @property methods whose decorators were
    lost in extraction; confirm against the original module.
    """

    def __init__(self, color_grid, name_grid, grid_size):
        # grid_size may be an int, a 1-tuple/list, or a 2-tuple/list of
        # non-negative ints; it is normalized into self._grid_size.
        if (not isinstance(color_grid, (tuple, list))):
            raise ValueError('Color grid is not a list: {}'.format(color_grid))
        if (not isinstance(name_grid, (tuple, list))):
            raise ValueError('Name grid is not a list: {}'.format(name_grid))
        if (isinstance(grid_size, int) and (grid_size >= 0)):
            self._grid_size = (int(grid_size),)
        elif (isinstance(grid_size, (tuple, list)) and (len(grid_size) == 1) and isinstance(grid_size[0], int) and (grid_size[0] >= 0)):
            self._grid_size = (int(grid_size[0]),)
        elif (isinstance(grid_size, (tuple, list)) and (len(grid_size) == 2) and isinstance(grid_size[0], int) and isinstance(grid_size[1], int) and (grid_size[0] >= 0) and (grid_size[1] >= 0)):
            self._grid_size = (int(grid_size[0]), int(grid_size[1]))
        else:
            raise ValueError('Grid size is not a number list: {}'.format(grid_size))
        if (len(self._grid_size) == 1):
            # 1-D: pad/truncate both lists to the requested length.
            color_grid = [('FFFFFF' if (i >= len(color_grid)) else Color.fmt_hec(color_grid[i])) for i in range(self._grid_size[0])]
            name_grid = [('RR-{}'.format((i + 1)) if (i >= len(name_grid)) else str(name_grid[i])) for i in range(self._grid_size[0])]
        else:
            # 2-D: inputs are flat lists laid out row-major.
            color_grid = [[('FFFFFF' if (((i * self._grid_size[1]) + j) >= len(color_grid)) else Color.fmt_hec(color_grid[((i * self._grid_size[1]) + j)])) for j in range(self._grid_size[1])] for i in range(self._grid_size[0])]
            name_grid = [[('RR-{}-{}'.format((i + 1), (j + 1)) if (((i * self._grid_size[1]) + j) >= len(name_grid)) else str(name_grid[((i * self._grid_size[1]) + j)])) for j in range(self._grid_size[1])] for i in range(self._grid_size[0])]
        self._color_grid = np.array(color_grid, dtype=str).reshape(grid_size)
        self._name_grid = np.array(name_grid, dtype=str).reshape(grid_size)

    def __getitem__(self, idx):
        # Scalar indexing yields a single Box; slicing yields a sub-Grid.
        curr_color = self._color_grid[idx]
        curr_name = self._name_grid[idx]
        if (isinstance(curr_color, str) and isinstance(curr_name, str)):
            return Box(curr_color, curr_name)
        elif (isinstance(curr_color, np.ndarray) and isinstance(curr_name, np.ndarray)):
            curr_size = curr_color.shape
            curr_color = curr_color.reshape((- 1)).tolist()
            curr_name = curr_name.reshape((- 1)).tolist()
            return Grid(curr_color, curr_name, curr_size)

    def __str__(self):
        return 'Grid(size {})'.format('x'.join([str(i) for i in self.size]))

    def __repr__(self):
        return str(self)

    def T(self):
        """Transposed copy of this grid."""
        curr_color = self._color_grid.T
        curr_name = self._name_grid.T
        curr_size = curr_color.shape
        curr_color = curr_color.reshape((- 1)).tolist()
        curr_name = curr_name.reshape((- 1)).tolist()
        return Grid(curr_color, curr_name, curr_size)

    def size(self):
        # Normalized dimensions tuple.
        return self._grid_size

    def shape(self):
        # Alias of size.
        return self.size

    def values(self):
        # Flat (colors, names) lists in row-major order.
        return (self._color_grid.reshape((- 1)).tolist(), self._name_grid.reshape((- 1)).tolist())

    def grid(self):
        # A same-shaped copy, rebuilt from the flattened values.
        return Grid(*self.h_line.values, self.size)

    def h_line(self):
        # Row-major flattening as a 1-D grid.
        curr_color = self._color_grid.reshape((- 1)).tolist()
        curr_name = self._name_grid.reshape((- 1)).tolist()
        return Grid(curr_color, curr_name, len(curr_color))

    def v_line(self):
        # Column-major flattening as a 1-D grid.
        curr_color = self._color_grid.T.reshape((- 1)).tolist()
        curr_name = self._name_grid.T.reshape((- 1)).tolist()
        return Grid(curr_color, curr_name, len(curr_color))
def extractRaisingmytailBlogspotCom(item):
    """Map a feed item from raisingmytail.blogspot.com to a release message.

    Returns None for previews or titles without a chapter/volume number,
    a release message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_number = chp or vol
    if not has_number or 'preview' in item['title'].lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def is_using_pyqt5():
    """Return True when the running IDA kernel is >= 6.9 (PyQt5-based)."""
    if not hasattr(idaapi, 'get_kernel_version'):
        # Very old IDA without a queryable kernel version: assume PyQt4.
        return False
    major, minor = (int(part) for part in idaapi.get_kernel_version().split('.'))
    return major > 6 or (major == 6 and minor >= 9)
class AbstractJaxStructure(Structure, JaxObject):
    """Structure whose geometry and/or medium can carry jax tracers.

    ``_differentiable_fields`` names which of the two components
    participate in autodiff; the rest are passed through unchanged.

    NOTE(review): the bare tuple before _check_2d_geometry and the
    cls-style signatures (jax_fields/exclude_fields read like properties,
    from_structure like a classmethod) suggest decorators were stripped
    during extraction -- confirm against the original module.
    """
    geometry: Union[(JaxGeometryType, GeometryType)]
    medium: Union[(JaxMediumType, MediumType)]
    # Subclasses override with e.g. ('geometry',) or ('medium',).
    _differentiable_fields = ()

    ('medium', always=True)
    def _check_2d_geometry(cls, val, values):
        # Overrides the parent validator to accept any geometry.
        return val

    def jax_fields(self):
        # The two components that may hold jax-traced types.
        return dict(geometry=self.geometry, medium=self.medium)

    def exclude_fields(self):
        # Fields excluded from plain dict() serialization: 'type' plus the
        # jax-capable components (handled specially in to_structure).
        return set((['type'] + list(self.jax_fields.keys())))

    def to_structure(self) -> Structure:
        """Convert to a plain tidy3d Structure, stripping jax wrappers."""
        self_dict = self.dict(exclude=self.exclude_fields)
        for (key, component) in self.jax_fields.items():
            if (key in self._differentiable_fields):
                self_dict[key] = component.to_tidy3d()
            else:
                self_dict[key] = component
        return Structure.parse_obj(self_dict)

    def from_structure(cls, structure: Structure) -> JaxStructure:
        """Build a jax-capable copy of a plain tidy3d Structure."""
        struct_dict = structure.dict(exclude={'type'})
        jax_fields = dict(geometry=structure.geometry, medium=structure.medium)
        for (key, component) in jax_fields.items():
            if (key in cls._differentiable_fields):
                # Look up the matching Jax* wrapper type for this component.
                type_map = GEO_MED_MAPPINGS[key]
                jax_type = type_map[type(component)]
                struct_dict[key] = jax_type.from_tidy3d(component)
            else:
                struct_dict[key] = component
        return cls.parse_obj(struct_dict)

    def make_grad_monitors(self, freqs: List[float], name: str) -> FieldMonitor:
        """Field monitor covering this structure for gradient computation."""
        if ('geometry' not in self._differentiable_fields):
            # Geometry itself is not differentiated: monitor its bounding box.
            (rmin, rmax) = self.geometry.bounds
            geometry = JaxBox.from_bounds(rmin=rmin, rmax=rmax)
        else:
            geometry = self.geometry
        return geometry.make_grad_monitors(freqs=freqs, name=name)

    def _get_medium_params(self, grad_data_eps: PermittivityData) -> Dict[(str, float)]:
        """Material wavelength and permittivity evaluated at the highest
        monitored frequency."""
        freq_max = max(grad_data_eps.eps_xx.f)
        eps_in = self.medium.eps_model(frequency=freq_max)
        ref_ind = np.sqrt(np.max(np.real(eps_in)))
        wvl_free_space = (C_0 / freq_max)
        wvl_mat = (wvl_free_space / ref_ind)
        return dict(wvl_mat=wvl_mat, eps_in=eps_in)

    def geometry_vjp(self, grad_data_fwd: FieldData, grad_data_adj: FieldData, grad_data_eps: PermittivityData, sim_bounds: Bound, eps_out: complex, num_proc: int=1) -> JaxGeometryType:
        """VJP contribution of the geometry component."""
        medium_params = self._get_medium_params(grad_data_eps=grad_data_eps)
        return self.geometry.store_vjp(grad_data_fwd=grad_data_fwd, grad_data_adj=grad_data_adj, grad_data_eps=grad_data_eps, sim_bounds=sim_bounds, wvl_mat=medium_params['wvl_mat'], eps_out=eps_out, eps_in=medium_params['eps_in'], num_proc=num_proc)

    def medium_vjp(self, grad_data_fwd: FieldData, grad_data_adj: FieldData, grad_data_eps: PermittivityData, sim_bounds: Bound) -> JaxMediumType:
        """VJP contribution of the medium component."""
        medium_params = self._get_medium_params(grad_data_eps=grad_data_eps)
        return self.medium.store_vjp(grad_data_fwd=grad_data_fwd, grad_data_adj=grad_data_adj, sim_bounds=sim_bounds, wvl_mat=medium_params['wvl_mat'], inside_fn=self.geometry.inside)

    def store_vjp(self, grad_data_fwd: FieldData, grad_data_adj: FieldData, grad_data_eps: PermittivityData, sim_bounds: Bound, eps_out: complex, num_proc: int=1) -> JaxStructure:
        """Return a copy carrying the VJPs of all differentiable fields."""
        if (not self._differentiable_fields):
            return self
        vjp_dict = {}
        if ('geometry' in self._differentiable_fields):
            vjp_dict['geometry'] = self.geometry_vjp(grad_data_fwd=grad_data_fwd, grad_data_adj=grad_data_adj, grad_data_eps=grad_data_eps, sim_bounds=sim_bounds, eps_out=eps_out, num_proc=num_proc)
        if ('medium' in self._differentiable_fields):
            vjp_dict['medium'] = self.medium_vjp(grad_data_fwd=grad_data_fwd, grad_data_adj=grad_data_adj, grad_data_eps=grad_data_eps, sim_bounds=sim_bounds)
        return self.updated_copy(**vjp_dict)
def test_stride_with_config(golden):
    """Stride assertions may reference config fields; compare the printed
    procedures against the golden file.

    NOTE(review): bar/foo look like exo `@proc` kernels whose decorators
    were lost in extraction -- confirm against the original test module.
    """
    ConfigLoad = new_config_ld()

    def bar(n: size, src: [i8][n]):
        assert (stride(src, 0) == ConfigLoad.src_stride)
        pass

    def foo(n: size, src: [i8][n]):
        assert (stride(src, 0) == ConfigLoad.src_stride)
        bar(n, src)
    assert (f'''{bar}
{foo}''' == golden)
class Debug():
    """Connectivity check for the Elementary db/schema via a dbt operation."""

    def __init__(self, config: Config):
        self.config = config

    def run(self) -> bool:
        """Try a no-op dbt operation; report and return the outcome."""
        runner = DbtRunner(dbt_project_utils.PATH, self.config.profiles_dir, self.config.profile_target)
        try:
            runner.run_operation('elementary_cli.test_conn', quiet=True)
        except DbtCommandError as err:
            logs = '\n'.join(str(log) for log in err.logs) if err.logs else 'No logs available'
            click.echo(f'''Could not connect to the Elementary db and schema. See details below
{logs}''')
            return False
        click.echo('Connected to the Elementary db and schema successfully')
        return True
class OptionSeriesDumbbellSonificationDefaultspeechoptionsMapping(Options):
    """Mapping options for dumbbell-series sonification speech defaults.

    NOTE(review): `text` appears twice (getter then setter); these read
    like @property/@text.setter pairs whose decorators were lost in
    extraction -- confirm against the generated original.
    """

    def pitch(self) -> 'OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingPitch':
        # Sub-options controlling speech pitch mapping.
        return self._config_sub_data('pitch', OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingPitch)

    def playDelay(self) -> 'OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingPlaydelay':
        # Sub-options controlling the delay before speech plays.
        return self._config_sub_data('playDelay', OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingPlaydelay)

    def rate(self) -> 'OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingRate':
        # Sub-options controlling speech rate mapping.
        return self._config_sub_data('rate', OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingRate)

    def text(self):
        # Getter: configured speech text (no default).
        return self._config_get(None)

    def text(self, text: str):
        # Setter: store the speech text verbatim (not as a JS expression).
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingTime':
        # Sub-options controlling timing of the speech.
        return self._config_sub_data('time', OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingTime)

    def volume(self) -> 'OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingVolume':
        # Sub-options controlling speech volume mapping.
        return self._config_sub_data('volume', OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingVolume)
class USLiteSATAPHYCRG(Module):
    """Clock/reset generation for an UltraScale SATA PHY (migen).

    Creates the sata_tx/sata_rx clock domains from the GTH transceiver's
    output clocks and holds both domains in reset until the CPLL locks.
    """

    def __init__(self, refclk, pads, gth, gen):
        self.tx_reset = Signal()
        self.rx_reset = Signal()
        self.clock_domains.cd_sata_tx = ClockDomain()
        self.clock_domains.cd_sata_rx = ClockDomain()
        # Reference clock: either supplied directly as a signal, or derived
        # from the differential pads through an IBUFDS_GTE3 input buffer.
        if isinstance(refclk, (Signal, ClockSignal)):
            self.refclk = refclk
        else:
            self.refclk = Signal()
            self.specials += Instance('IBUFDS_GTE3', i_CEB=0, i_I=pads.clk_p, i_IB=pads.clk_n, o_O=self.refclk)
        self.comb += gth.refclk.eq(self.refclk)
        # TX domain: buffer the transceiver's txoutclk and feed it back as
        # both user clocks.
        self.specials += Instance('BUFG_GT', i_I=gth.txoutclk, o_O=self.cd_sata_tx.clk)
        self.comb += gth.txusrclk.eq(self.cd_sata_tx.clk)
        self.comb += gth.txusrclk2.eq(self.cd_sata_tx.clk)
        # RX domain: same arrangement for rxoutclk.
        self.specials += Instance('BUFG_GT', i_I=gth.rxoutclk, o_O=self.cd_sata_rx.clk)
        self.comb += gth.rxusrclk.eq(self.cd_sata_rx.clk)
        self.comb += gth.rxusrclk2.eq(self.cd_sata_rx.clk)
        # Keep each domain in reset while the CPLL is unlocked or the
        # corresponding explicit reset is asserted.
        self.specials += [AsyncResetSynchronizer(self.cd_sata_tx, ((~ gth.cplllock) | self.tx_reset)), AsyncResetSynchronizer(self.cd_sata_rx, ((~ gth.cplllock) | self.rx_reset))]
def test_ge_checkpoint_params():
    """GreatExpectationsTask accepts checkpoint_params and runs on a CSV."""
    task = GreatExpectationsTask(
        name='test8',
        datasource_name='data',
        inputs=kwtypes(dataset=str),
        expectation_suite_name='test.demo',
        data_connector_name='data_example_data_connector',
        checkpoint_params={'site_names': ['local_site']},
    )
    task(dataset='yellow_tripdata_sample_2019-01.csv')
# NOTE(review): the leading `.param_file(...)` line reads like the tail of a
# stripped decorator (e.g. @pytest.mark.param_file) -- confirm upstream.
.param_file((FIXTURE_PATH / 'docutil_syntax_extensions.txt'))
def test_syntax_extensions(file_params):
    """For each fixture case: build docutils settings from the description
    line, parse the content, and compare the pretty-printed doctree."""
    settings = settings_from_cmdline(file_params.description)
    # Capture parser warnings instead of letting them hit stderr.
    report_stream = StringIO()
    settings['warning_stream'] = report_stream
    doctree = publish_doctree(file_params.content, parser=Parser(), settings_overrides=settings)
    file_params.assert_expected(doctree.pformat(), rstrip_lines=True)
class RRLSeriesPageFilter(WebMirror.OutputFilters.FilterBase.FilterBase):
    """Output filter for RoyalRoadL series pages: scrapes series metadata
    and chapter releases and emits them as AMQP packets."""
    wanted_mimetypes = ['text/html']
    want_priority = 55
    loggerPath = 'Main.Filter.RoyalRoad.Page'
    # TODO(review): this pattern is garbled/truncated in this copy (the
    # string literal is unterminated); restore the original fiction-page
    # URL regex (its group(1) must capture the numeric series id, per
    # extractSeriesReleases below).
    match_re = re.compile('^ flags=re.IGNORECASE)

    # NOTE(review): takes `cls` -- looks like a @classmethod whose
    # decorator was lost in extraction; confirm upstream.
    def wantsUrl(cls, url):
        if cls.match_re.search(url):
            print(("RRLSeriesPageFilter Wants url: '%s'" % url))
            return True
        return False

    def __init__(self, **kwargs):
        # The crawler hands over the fetched page and its metadata.
        self.kwargs = kwargs
        self.pageUrl = kwargs['pageUrl']
        self.content = kwargs['pgContent']
        self.type = kwargs['type']
        self.log.info('Processing RoyalRoadL Item')
        super().__init__(**kwargs)

    def extractSeriesReleases(self, seriesPageUrl, soup):
        """Parse a series page into release packets plus one metadata packet.

        Returns [] when the page is removed, below the rating threshold,
        malformed, or has fewer than three chapters.
        """
        match = self.match_re.search(seriesPageUrl)
        series_id = match.group(1)
        header = soup.find('div', class_='fic-title')
        if (not header):
            self.log.warning('Series page %s contains no releases. Is this series removed?', seriesPageUrl)
            return []
        titletg = header.find('h1')
        authortg = header.find('h4')
        # Drop the "by" label span so only the author name remains.
        authortg.find('span').decompose()
        # Normalize the OpenGraph rating onto a 5-star scale.
        rating_val = soup.find('meta', property='books:rating:value')
        rating_scale = soup.find('meta', property='books:rating:scale')
        if ((not rating_val) or (not rating_scale)):
            return []
        rval_f = float(rating_val.get('content', '0'))
        rscale_f = float(rating_scale.get('content', '999999'))
        rating = (5 * (rval_f / rscale_f))
        if (rating < SeriesPageCommon.MIN_RATING_STARS):
            self.log.error('Item rating below upload threshold: %s', rating)
            return []
        if (not titletg):
            self.log.error('Could not find title tag!')
            return []
        if (not authortg):
            self.log.error('Could not find author tag!')
            return []
        title = titletg.get_text().strip()
        author = authortg.get_text().strip()
        # Strip any embedded markup from user-controlled strings.
        title = bleach.clean(title, tags=[], attributes=[], styles=[], strip=True, strip_comments=True)
        author = bleach.clean(author, tags=[], attributes=[], styles=[], strip=True, strip_comments=True)
        descDiv = soup.find('div', class_='description')
        if ((not descDiv) or (not descDiv.div)):
            self.log.error('Incomplete or broken description?')
            return []
        # Collect description text node-by-node, then wrap lines in <p>.
        desc = []
        for segment in descDiv.div:
            if isinstance(segment, bs4.NavigableString):
                desc.append(str(segment).strip())
            elif segment.get_text().strip():
                desc.append(segment.get_text().strip())
        desc = ['<p>{}</p>'.format(line) for line in desc if line.strip()]
        # Gather genre tags plus any content-warning tags.
        tags = []
        tagdiv = soup.find('span', class_='tags')
        for tag in tagdiv.find_all('span', class_='label'):
            tagtxt = tag.get_text().strip().lower().replace(' ', '-')
            tagtxt = SeriesPageCommon.fix_tag(tagtxt)
            tags.append(tagtxt)
        info_div = soup.find('div', class_='fiction-info')
        warning_div = info_div.find('div', class_='font-red-sunglo')
        if warning_div:
            for warning_tag in warning_div.find_all('li'):
                tagtxt = warning_tag.get_text().strip().lower().replace(' ', '-')
                tagtxt = SeriesPageCommon.fix_tag(tagtxt)
                tags.append(tagtxt)
        # Series-level metadata packet.
        seriesmeta = {}
        seriesmeta['title'] = msgpackers.fix_string(title)
        seriesmeta['author'] = msgpackers.fix_string(author)
        seriesmeta['tags'] = tags
        seriesmeta['homepage'] = seriesPageUrl
        seriesmeta['desc'] = '\r\n'.join(desc)
        seriesmeta['tl_type'] = 'oel'
        seriesmeta['sourcesite'] = 'RoyalRoadL'
        seriesmeta['create_tags'] = True
        meta_pkt = msgpackers.createSeriesInfoPacket(seriesmeta, matchAuthor=True)
        extra = {}
        extra['tags'] = tags
        extra['homepage'] = seriesPageUrl
        extra['sourcesite'] = 'RoyalRoadL'
        # Each chapter row carries its link in data-url and a timestamp.
        chapters = soup.find_all('tr', attrs={'data-url': True})
        raw_retval = []
        for chapter in chapters:
            if (len(chapter.find_all('td')) != 2):
                self.log.warning('Row with invalid number of entries?')
                continue
            (cname, cdate) = chapter.find_all('td')
            if (not cdate.time):
                self.log.error('No time entry?')
                continue
            timestr = cdate.time.get('title').strip()
            (itemDate, status) = parsedatetime.Calendar().parse(timestr)
            if (status < 1):
                self.log.warning('Failure processing date: %s', timestr)
                continue
            reldate = time.mktime(itemDate)
            relurl = common.util.urlFuncs.rebaseUrl(cname.a['href'], seriesPageUrl)
            chp_title = cname.get_text().strip()
            # Title parsing sees "<chapter title> <series title>" so series
            # naming conventions can aid volume/chapter extraction.
            (vol, chp, frag, _) = titleParsers.extractTitle(((chp_title + ' ') + title))
            raw_item = {}
            raw_item['srcname'] = 'RoyalRoadL'
            raw_item['published'] = float(reldate)
            raw_item['linkUrl'] = relurl
            raw_msg = msgpackers._buildReleaseMessage(raw_item, title, vol, chp, frag, author=author, postfix=chp_title, tl_type='oel', extraData=extra, matchAuthor=True)
            raw_retval.append(raw_msg)
        raw_retval = SeriesPageCommon.check_fix_numbering(self.log, raw_retval, series_id, rrl=True)
        # Require a minimum body of chapters before emitting anything.
        if (len(raw_retval) < 3):
            self.log.info('Less then three chapters!')
            return []
        if (not raw_retval):
            self.log.info('Retval empty?!')
            return []
        retval = ([msgpackers.createReleasePacket(raw_msg) for raw_msg in raw_retval] + [meta_pkt])
        self.put_measurement(measurement_name='chapter_releases', measurement=len(retval), fields={}, extra_tags={'site': 'RoyalRoadL'})
        self.log.info('Found %s chapter releases on series page for %s (with rating %s)!', len(retval), msgpackers.fix_string(title), rating)
        return retval

    def sendReleases(self, releases):
        """Push the assembled packets onto the local AMQP queue."""
        self.log.info('Total releases found on page: %s. Emitting messages into AMQP local queue.', len(releases))
        self.amqp_put_many(releases)

    def processPage(self, url, content):
        # Removed series render a 404-style title; nothing to do then.
        if ('<title>Not Found | RoyalRoadL</title>' in content):
            return
        soup = WebRequest.as_soup(self.content)
        releases = self.extractSeriesReleases(self.pageUrl, soup)
        if releases:
            self.sendReleases(releases)

    def extractContent(self):
        # Entry point invoked by the filter framework.
        self.processPage(self.pageUrl, self.content)
class NodeExecutionGetDataResponse(_CommonDataResponse):
    """Node-execution data response extended with optional dynamic-workflow
    metadata.

    NOTE(review): dynamic_workflow reads like a @property and
    from_flyte_idl like a @classmethod; their decorators appear to have
    been lost in extraction -- confirm against the original module.
    """

    def __init__(self, *args, dynamic_workflow: typing.Optional[DynamicWorkflowNodeMetadata]=None, **kwargs):
        super().__init__(*args, **kwargs)
        self._dynamic_workflow = dynamic_workflow

    def dynamic_workflow(self) -> typing.Optional[DynamicWorkflowNodeMetadata]:
        # Metadata of the dynamically spawned workflow, when present.
        return self._dynamic_workflow

    def from_flyte_idl(cls, pb2_object):
        """Build this model from its protobuf representation."""
        return cls(inputs=_common_models.UrlBlob.from_flyte_idl(pb2_object.inputs), outputs=_common_models.UrlBlob.from_flyte_idl(pb2_object.outputs), full_inputs=_literals_models.LiteralMap.from_flyte_idl(pb2_object.full_inputs), full_outputs=_literals_models.LiteralMap.from_flyte_idl(pb2_object.full_outputs), dynamic_workflow=(DynamicWorkflowNodeMetadata.from_flyte_idl(pb2_object.dynamic_workflow) if pb2_object.HasField('dynamic_workflow') else None))

    def to_flyte_idl(self):
        """Serialize this model to its protobuf representation."""
        return _node_execution_pb2.NodeExecutionGetDataResponse(inputs=self.inputs.to_flyte_idl(), outputs=self.outputs.to_flyte_idl(), full_inputs=self.full_inputs.to_flyte_idl(), full_outputs=self.full_outputs.to_flyte_idl(), dynamic_workflow=(self.dynamic_workflow.to_flyte_idl() if self.dynamic_workflow else None))
@pytest.mark.parametrize('gas_limit', (True, False, None, 'auto'))
@pytest.mark.parametrize('gas_buffer', (1, 1.25))
def test_gas_limit_automatic(tester, accounts, config, gas_limit, gas_buffer):
    """With an automatic gas limit setting, the configured buffer is applied
    to the gas estimate: gas_limit == int(gas_used * gas_buffer).

    Fix: the two ``.parametrize(...)`` lines were bare attribute accesses
    (a SyntaxError at module level) — the ``@pytest.mark.`` decorator prefix
    had been lost and is restored here.
    """
    config.active_network['settings']['gas_limit'] = gas_limit
    config.active_network['settings']['gas_buffer'] = gas_buffer
    tx = tester.doNothing({'from': accounts[0]})
    assert (int((tx.gas_used * gas_buffer)) == tx.gas_limit)
class OptionsChartistBar(OptionsChartistLine):
    """Chartist bar-chart options (extends the line-chart options).

    Fix: each option was defined twice as a plain method (getter then
    setter) with no decorators, so the second ``def`` silently shadowed the
    getter. Restored the ``@property`` / ``@<name>.setter`` pairs, matching
    the getter/setter pattern used by the other option classes in this file.
    """

    @property
    def seriesBarDistance(self):
        """``seriesBarDistance`` option, read from the component config."""
        return self._config_get()

    @seriesBarDistance.setter
    def seriesBarDistance(self, num: int):
        self._config(num)

    @property
    def reverseData(self):
        """``reverseData`` option, read from the component config."""
        return self._config_get()

    @reverseData.setter
    def reverseData(self, flag: bool):
        self._config(flag)

    @property
    def horizontalBars(self):
        """``horizontalBars`` option, read from the component config."""
        return self._config_get()

    @horizontalBars.setter
    def horizontalBars(self, flag: bool):
        self._config(flag)
def test():
    """Exercise checker: verify the Doc extensions and the solution code."""
    # Both custom attributes must be registered with a None default.
    for field in ('autor', 'buch'):
        assert Doc.has_extension(field), f"Hast du die Doc-Erweiterung '{field}' registriert?"
        ext = Doc.get_extension(field)
        assert all(((v is None) for v in ext)), f"Hast du den default-Wert der Erweiterung '{field}' angegeben?"
    # The solution must stream the data with context and copy it onto the Doc.
    assert ('nlp.pipe(DATA, as_tuples=True)' in __solution__), 'Verwendest du nlp.pipe mit as_tuples=True?'
    assert ('doc._.buch = context["buch"]' in __solution__), "Hast du das Attribut doc._.buch mit dem Kontext-Wert von 'buch' uberschrieben?"
    assert ('doc._.autor = context["autor"]' in __solution__), "Hast du das Attribut doc._.autor mit dem Kontext-Wert von 'autor' uberschrieben?"
    __msg__.good('Toll gemacht! Dieses Verfahren kann fur eine Reihe von Aufgaben genutzt werden. Du konntest zum Beispiel auch Seitenzahlen oder Absatznummern hinzufugen, um so das verarbeitete Doc wieder einem groeren Dokument zuordnen zu konnen. Oder du konntest andere strukturierte Daten wie IDs aus einer Wissensdatenbank hinzufugen.')
def extractStattuceBlogspotCom(item):
    """Feed-item parser for stattuce.blogspot.com.

    Returns a release message for items carrying a known series tag,
    None for previews or items without chapter/volume info, and False
    when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # Known series tags -> (canonical series name, translation type).
    known_series = {
        '() with you for thirteen years (eng': ('With you for thirteen years', 'translated'),
        '() love in disguise (eng)': ('love in disguise', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series_name, tl_type) in known_series.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_info_from_internet(user_chat_text, settings):
    """Gather web-search excerpts relevant to the user's message.

    Depending on ``settings``, searches explicit sources, additional
    queries, and/or the message itself, then returns the top matching
    excerpts — optionally ranked together with the stored search history.
    """
    # Read the configuration up front, exactly once.
    answer_with_search = settings['answer_with_search']
    additional_searches = settings['additional_searches']
    specify_sources = settings['specify_sources'].split(', ')
    consult_search_history = settings['consult_search_history']
    num_of_excerpts = settings['num_of_excerpts']
    history = st.session_state['google_history']

    # Accumulate freshly fetched content from every enabled search mode.
    fresh_content = pd.DataFrame()
    if specify_sources != ['']:
        fresh_content = search_new_links(user_chat_text, specify_sources, history, fresh_content)
    if additional_searches != []:
        fresh_content = search_new_queries(additional_searches, history, fresh_content)
    if answer_with_search:
        fresh_content = search_new_queries([user_chat_text], history, fresh_content)

    if consult_search_history:
        # Rank new results together with everything searched before.
        combined = pd.concat([st.session_state['google_history'], fresh_content])
        return find_top_similar_results(combined, user_chat_text, num_of_excerpts)
    if fresh_content.empty:
        return pd.DataFrame()
    return find_top_similar_results(fresh_content, user_chat_text, num_of_excerpts)
def extractBellonertBlogspotCom(item):
    """Feed-item parser for bellonert.blogspot.com.

    Returns a release message when a known tag is present, None for
    previews or untagged chapter info, False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    for tag_name, series, release_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tag_name in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
class PagesTestCase(TestCase):
def setUp(self):
    """Create a superuser for admin access and register the two page templates."""
    u = User(username='test', is_active=True, is_staff=True, is_superuser=True)
    u.set_password('test')
    u.save()
    self.site_1 = Site.objects.all()[0]
    # NOTE: 'base' points at 'feincms_base.html', which apparently does not
    # exist in the test template dirs -- is_published() below relies on the
    # resulting TemplateDoesNotExist to detect a successfully published page.
    Page.register_templates({'key': 'base', 'title': 'Standard template', 'path': 'feincms_base.html', 'regions': (('main', 'Main content area'), ('sidebar', 'Sidebar', 'inherited'))}, {'key': 'theother', 'title': 'This actually exists', 'path': 'base.html', 'regions': (('main', 'Main content area'), ('sidebar', 'Sidebar', 'inherited'))})
def login(self):
    """Authenticate the test client as the superuser created in setUp()."""
    logged_in = self.client.login(username='test', password='test')
    self.assertTrue(logged_in)
def create_page_through_admin(self, title='Test page', parent='', **kwargs):
    """POST the admin add-page form and return the response.

    Builds the full formset payload (all inline content types with zero
    forms) so the admin accepts the submission; any field can be overridden
    via ``kwargs`` (e.g. ``_continue=1``, ``_addanother=1``, ``slug=...``).
    """
    dic = {'title': title, 'slug': kwargs.get('slug', slugify(title)), 'parent': parent, 'template_key': 'base', 'publication_date_0': '2009-01-01', 'publication_date_1': '00:00:00', 'initial-publication_date_0': '2009-01-01', 'initial-publication_date_1': '00:00:00', 'language': 'en', 'navigation_group': 'default', 'site': self.site_1.id, 'rawcontent_set-TOTAL_FORMS': 0, 'rawcontent_set-INITIAL_FORMS': 0, 'rawcontent_set-MAX_NUM_FORMS': 10, 'mediafilecontent_set-TOTAL_FORMS': 0, 'mediafilecontent_set-INITIAL_FORMS': 0, 'mediafilecontent_set-MAX_NUM_FORMS': 10, 'imagecontent_set-TOTAL_FORMS': 0, 'imagecontent_set-INITIAL_FORMS': 0, 'imagecontent_set-MAX_NUM_FORMS': 10, 'contactformcontent_set-TOTAL_FORMS': 0, 'contactformcontent_set-INITIAL_FORMS': 0, 'contactformcontent_set-MAX_NUM_FORMS': 10, 'filecontent_set-TOTAL_FORMS': 0, 'filecontent_set-INITIAL_FORMS': 0, 'filecontent_set-MAX_NUM_FORMS': 10, 'templatecontent_set-TOTAL_FORMS': 0, 'templatecontent_set-INITIAL_FORMS': 0, 'templatecontent_set-MAX_NUM_FORMS': 10, 'applicationcontent_set-TOTAL_FORMS': 0, 'applicationcontent_set-INITIAL_FORMS': 0, 'applicationcontent_set-MAX_NUM_FORMS': 10}
    dic.update(kwargs)
    return self.client.post('/admin/page/page/add/', dic)
def create_default_page_set_through_admin(self):
    """Create the standard two-page tree via the admin (root pk=1, child pk=2); returns the child's response."""
    self.login()
    self.create_page_through_admin()
    # parent=1 refers to the page just created above.
    return self.create_page_through_admin('Test child page', 1)
def create_page(self, title='Test page', parent=None, **kwargs):
    """Create a Page directly via the ORM (inactive and out of navigation by default)."""
    options = {'template_key': 'base', 'site': self.site_1, 'in_navigation': False, 'active': False, 'navigation_group': 'default', 'publication_date': (timezone.now() - timedelta(days=1))}
    options.update(kwargs)
    slug = kwargs.get('slug', slugify(title))
    return Page.objects.create(title=title, slug=slug, parent=parent, **options)
def create_default_page_set(self):
    """Create the standard ORM two-page tree: root (pk=1) with one child (pk=2)."""
    root = self.create_page()
    self.create_page('Test child page', parent=root)
def is_published(self, url, should_be=True):
    """Assert publication state of ``url`` via which template rendering fails on.

    A published page renders 'feincms_base.html' (which is missing, see
    setUp) and therefore raises TemplateDoesNotExist('feincms_base.html');
    an unpublished page 404s and raises TemplateDoesNotExist('404.html').
    Any other exception args indicate an unexpected state and are re-raised.
    """
    try:
        self.client.get(url)
    except TemplateDoesNotExist as e:
        if should_be:
            # Expected published: only the missing base template is OK.
            if (e.args != ('feincms_base.html',)):
                raise
        # Expected unpublished: only the missing 404 template is OK.
        elif (e.args != ('404.html',)):
            raise
def test_01_tree_editor(self):
    """The tree editor listing loads; unknown query parameters redirect with ?e=1."""
    self.login()
    listing = self.client.get('/admin/page/page/')
    self.assertEqual(listing.status_code, 200)
    filtered = self.client.get('/admin/page/page/?anything=anything')
    self.assertRedirects(filtered, '/admin/page/page/?e=1')
def test_02_add_page(self):
    """A page with an overlong title can be added and appears truncated in the listing."""
    self.login()
    response = self.create_page_through_admin(title=('Test page ' * 10), slug='test-page')
    self.assertRedirects(response, '/admin/page/page/')
    self.assertEqual(Page.objects.count(), 1)
    listing = self.client.get('/admin/page/page/')
    self.assertContains(listing, '...')
def test_03_item_editor(self):
    """'Save and continue' lands on the change form, which loads; a bogus pk 404s."""
    self.login()
    response = self.create_page_through_admin(_continue=1)
    change_url = reverse('admin:page_page_change', args=(1,))
    self.assertRedirects(response, change_url)
    self.assertEqual(self.client.get(change_url).status_code, 200)
    self.is_published(reverse('admin:page_page_change', args=(42,)), should_be=False)
def test_03_add_another(self):
    """'Save and add another' returns to the add form."""
    self.login()
    response = self.create_page_through_admin(_addanother=1)
    self.assertRedirects(response, '/admin/page/page/add/')
def test_04_add_child(self):
    """Child pages get nested URLs, and the tree editor reflects their flags."""
    response = self.create_default_page_set_through_admin()
    self.assertRedirects(response, '/admin/page/page/')
    self.assertEqual(Page.objects.count(), 2)
    page = Page.objects.get(pk=2)
    self.assertEqual(page.get_absolute_url(), '/test-page/test-child-page/')
    page.active = True
    page.in_navigation = True
    page.save()
    self.assertContains(self.client.get('/admin/page/page/'), 'inherited')
    page1 = Page.objects.get(pk=1)
    page1.active = True
    page1.save()
    content = self.client.get('/admin/page/page/').content.decode('utf-8')
    # Three checked boxes across the listing => the string splits into 4 parts.
    self.assertEqual(len(content.split('checked="checked"')), 4)
def test_05_override_url(self):
    """Overriding a parent's URL cascades into its children's absolute URLs."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    page.override_url = '/something/'
    page.save()
    page2 = Page.objects.get(pk=2)
    self.assertEqual(page2.get_absolute_url(), '/something/test-child-page/')
    # Overriding to the site root drops the parent's segment entirely.
    page.override_url = '/'
    page.save()
    page2 = Page.objects.get(pk=2)
    self.assertEqual(page2.get_absolute_url(), '/test-child-page/')
    self.is_published('/', False)
    page.active = True
    # 'theother' template really exists, so the page renders without raising.
    page.template_key = 'theother'
    page.save()
    self.is_published('/', True)
def test_06_tree_editor_save(self):
    """Moving a node via the tree editor's AJAX move_node updates descendant URLs."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    page3 = Page.objects.create(title='page3', slug='page3', parent=page2)
    page4 = Page.objects.create(title='page4', slug='page4', parent=page1)
    page5 = Page.objects.create(title='page5', slug='page5', parent=None)
    self.assertEqual(page3.get_absolute_url(), '/test-page/test-child-page/page3/')
    self.assertEqual(page4.get_absolute_url(), '/test-page/page4/')
    self.assertEqual(page5.get_absolute_url(), '/page5/')
    self.login()
    # Cut page 1 and paste it as the last child of page 5.
    self.client.post('/admin/page/page/', {'__cmd': 'move_node', 'position': 'last-child', 'cut_item': '1', 'pasted_on': '5'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    self.assertEqual(Page.objects.get(pk=1).get_absolute_url(), '/page5/test-page/')
    self.assertEqual(Page.objects.get(pk=5).get_absolute_url(), '/page5/')
    self.assertEqual(Page.objects.get(pk=3).get_absolute_url(), '/page5/test-page/test-child-page/page3/')
def test_07_tree_editor_toggle_boolean(self):
    """The toggle_boolean AJAX command flips a flag; unknown attrs yield a 400."""
    self.create_default_page_set()
    self.assertEqual(Page.objects.get(pk=1).in_navigation, False)
    self.login()
    # First toggle: the returned row HTML shows a (JSON-escaped) checked box.
    self.assertContains(self.client.post('/admin/page/page/', {'__cmd': 'toggle_boolean', 'item_id': 1, 'attr': 'in_navigation'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest'), 'checked=\\"checked\\"')
    self.assertEqual(Page.objects.get(pk=1).in_navigation, True)
    # Second toggle flips it back off.
    self.assertNotContains(self.client.post('/admin/page/page/', {'__cmd': 'toggle_boolean', 'item_id': 1, 'attr': 'in_navigation'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest'), 'checked="checked"')
    self.assertEqual(Page.objects.get(pk=1).in_navigation, False)
    # Toggling a non-existent attribute is rejected.
    self.assertTrue(isinstance(self.client.post('/admin/page/page/', {'__cmd': 'toggle_boolean', 'item_id': 1, 'attr': 'notexists'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest'), HttpResponseBadRequest))
def test_07_tree_editor_invalid_ajax(self):
    """An unknown AJAX command is answered with HTTP 400 and an error message."""
    self.login()
    response = self.client.post('/admin/page/page/', {'__cmd': 'notexists'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    self.assertContains(response, 'Oops. AJAX request not understood.', status_code=400)
def test_08_publishing(self):
    """Pages are only served between publication_date and publication_end_date."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    # Inactive pages are never published.
    self.is_published(page.get_absolute_url(), should_be=False)
    self.is_published(page2.get_absolute_url(), should_be=False)
    page.active = True
    page.save()
    page2.active = True
    page2.save()
    self.is_published(page.get_absolute_url(), should_be=True)
    self.is_published(page2.get_absolute_url(), should_be=True)
    # A future publication date hides the page and its descendants.
    old_publication = page.publication_date
    page.publication_date = (timezone.now() + timedelta(days=1))
    page.save()
    self.is_published(page.get_absolute_url(), should_be=False)
    self.is_published(page2.get_absolute_url(), should_be=False)
    # A past end date also hides it.
    page.publication_date = old_publication
    page.publication_end_date = (timezone.now() - timedelta(days=1))
    page.save()
    self.is_published(page.get_absolute_url(), should_be=False)
    self.is_published(page2.get_absolute_url(), should_be=False)
    # A future end date makes it visible again.
    page.publication_end_date = (timezone.now() + timedelta(days=1))
    page.save()
    self.is_published(page.get_absolute_url(), should_be=True)
    self.is_published(page2.get_absolute_url(), should_be=True)
def create_page_through_admincontent(self, page, **kwargs):
    """POST the admin change form for ``page`` with one inline RawContent block.

    The payload mirrors the page's current fields plus one 'main' region
    RawContent form; extra admin buttons/fields come in via ``kwargs``.
    """
    data = {'title': page.title, 'slug': page.slug, 'template_key': page.template_key, 'publication_date_0': '2009-01-01', 'publication_date_1': '00:00:00', 'initial-publication_date_0': '2009-01-01', 'initial-publication_date_1': '00:00:00', 'language': 'en', 'navigation_group': 'default', 'site': self.site_1.id, 'rawcontent_set-TOTAL_FORMS': 1, 'rawcontent_set-INITIAL_FORMS': 0, 'rawcontent_set-MAX_NUM_FORMS': 10, 'rawcontent_set-0-parent': 1, 'rawcontent_set-0-region': 'main', 'rawcontent_set-0-ordering': 0, 'rawcontent_set-0-text': 'This is some example content', 'mediafilecontent_set-TOTAL_FORMS': 1, 'mediafilecontent_set-INITIAL_FORMS': 0, 'mediafilecontent_set-MAX_NUM_FORMS': 10, 'mediafilecontent_set-0-parent': 1, 'mediafilecontent_set-0-type': 'default', 'templatecontent_set-TOTAL_FORMS': 1, 'templatecontent_set-INITIAL_FORMS': 0, 'templatecontent_set-MAX_NUM_FORMS': 10, 'applicationcontent_set-TOTAL_FORMS': 1, 'applicationcontent_set-INITIAL_FORMS': 0, 'applicationcontent_set-MAX_NUM_FORMS': 10}
    data.update(kwargs)
    return self.client.post(reverse('admin:page_page_change', args=(page.pk,)), data)
def test_09_pagecontent(self):
    """Content blocks are created through the admin and visible via symlinked pages."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    self.login()
    response = self.create_page_through_admincontent(page)
    self.assertRedirects(response, '/admin/page/page/')
    self.assertEqual(page.content.main[0].__class__.__name__, 'RawContent')
    # A symlinked page exposes the target page's content.
    page2 = Page.objects.get(pk=2)
    page2.symlinked_page = page
    self.assertEqual(len(page2.content.all_of_type(RawContent)), 1)
    self.assertEqual(page2.content.main[0].__class__.__name__, 'RawContent')
    self.assertEqual(force_str(page2.content.main[0]), 'RawContent<pk=1, parent=Page<pk=1, Test page>, region=main, ordering=0>')
    self.assertEqual(len(page2.content.main), 1)
    self.assertEqual(len(page2.content.sidebar), 0)
    # Unknown regions resolve to an empty list instead of raising.
    self.assertEqual(len(page2.content.nonexistant_region), 0)
    self.assertTrue(isinstance(page2.content.media, forms.Media))
    self.assertEqual(len(page2.content.all_of_type(RawContent)), 1)
def test_10_mediafile_and_imagecontent(self):
    """Media files: categories, translations, admin rendering, and storage reconfiguration."""
    self.create_default_page_set()
    self.login()
    page = Page.objects.get(pk=1)
    self.create_page_through_admincontent(page)
    category = Category.objects.create(title='Category', parent=None)
    category2 = Category.objects.create(title='Something', parent=category)
    self.assertEqual(force_str(category2), 'Category - Something')
    self.assertEqual(force_str(category), 'Category')
    mediafile = MediaFile.objects.create(file='somefile.jpg')
    mediafile.categories.set([category])
    page.mediafilecontent_set.create(mediafile=mediafile, region='main', type='default', ordering=1)
    self.assertEqual(force_str(mediafile), 'somefile.jpg')
    # Add a translation in a made-up variant of the current language.
    mediafile.translations.create(caption='something', language_code=('%s-ha' % short_language_code()))
    mediafile.purge_translation_cache()
    self.assertTrue(('something' in force_str(mediafile)))
    mf = page.content.main[1].mediafile
    self.assertEqual(mf.translation.caption, 'something')
    self.assertEqual(mf.translation.short_language_code(), short_language_code())
    self.assertNotEqual(mf.get_absolute_url(), '')
    self.assertEqual(force_str(mf), 'something')
    self.assertTrue((mf.type == 'image'))
    self.assertEqual(MediaFile.objects.only_language('de').count(), 0)
    self.assertEqual(MediaFile.objects.only_language('en').count(), 0)
    # only_language also accepts a callable returning the language code.
    self.assertEqual(MediaFile.objects.only_language((lambda : ('%s-ha' % short_language_code()))).count(), 1)
    self.assertTrue((('%s-ha' % short_language_code()) in mf.available_translations))
    self.client.get(reverse('admin:page_page_change', args=(1,)))
    # Point the content at a non-existent mediafile pk and load the admin anyway.
    page.mediafilecontent_set.update(mediafile=3)
    self.client.get('/admin/page/page/1/')
    page.mediafilecontent_set.update(mediafile=mf.id)
    # Swap out the storage backend, check the URL changes, then restore it.
    field = MediaFile._meta.get_field('file')
    old = (field.upload_to, field.storage, field.generate_filename)
    from django.core.files.storage import FileSystemStorage
    MediaFile.reconfigure(upload_to=(lambda : 'anywhere'), storage=FileSystemStorage(location='/wha/', base_url='/whe/'))
    mediafile = MediaFile.objects.get(pk=1)
    self.assertEqual(mediafile.file.url, '/whe/somefile.jpg')
    (field.upload_to, field.storage, field.generate_filename) = old
    mediafile = MediaFile.objects.get(pk=1)
    self.assertEqual(mediafile.file.url, '/media/somefile.jpg')
def test_11_translations(self):
    """Linking a translation makes it available from both pages, sharing one original."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    self.assertEqual(len(page1.available_translations()), 0)
    page1 = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    page1.active = True
    page1.save()
    page2.active = True
    page2.language = 'de'
    page2.save()
    # Different language alone is not enough -- the link must be explicit.
    self.assertEqual(len(page2.available_translations()), 0)
    page2.translation_of = page1
    page2.save()
    self.assertEqual(len(page2.available_translations()), 1)
    self.assertEqual(len(page1.available_translations()), 1)
    # Both sides resolve to the same original translation.
    self.assertEqual(page1, page1.original_translation)
    self.assertEqual(page1, page2.original_translation)
def test_12_titles(self):
    """page_title/content_title fall back to title; multi-line content titles split into title + subtitle."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    self.assertEqual(page.page_title, page.title)
    self.assertEqual(page.content_title, page.title)
    page._content_title = 'Something\nawful'
    page._page_title = 'Hello world'
    page.save()
    self.assertEqual(page.page_title, 'Hello world')
    # First line becomes the title, the second the subtitle.
    self.assertEqual(page.content_title, 'Something')
    self.assertEqual(page.content_subtitle, 'awful')
    page._content_title = 'Only one line'
    self.assertEqual(page.content_title, 'Only one line')
    self.assertEqual(page.content_subtitle, '')
    # Empty content title falls back to the page title.
    page._content_title = ''
    self.assertEqual(page.content_title, page.title)
    self.assertEqual(page.content_subtitle, '')
def test_13_inheritance_and_ct_tracker(self):
    """Region inheritance works, and the content-type inventory cache cuts query counts."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    page.rawcontent_set.create(region='sidebar', ordering=0, text='Something')
    page.rawcontent_set.create(region='main', ordering=0, text='Anything')
    page2 = Page.objects.get(pk=2)
    page2.rawcontent_set.create(region='main', ordering=0, text='Something else')
    page2.rawcontent_set.create(region='main', ordering=1, text='Whatever')
    # Plain ContentProxy: no inventory caching yet.
    page2.content_proxy_class = ContentProxy
    if hasattr(self, 'assertNumQueries'):
        self.assertNumQueries(4, (lambda : [page2.content.main, page2.content.sidebar]))
        self.assertNumQueries(0, (lambda : page2.content.sidebar[0].render()))
    self.assertEqual(''.join((c.render() for c in page2.content.main)), 'Something elseWhatever')
    # 'sidebar' is declared 'inherited', so the parent's content shows through.
    self.assertEqual(page2.content.sidebar[0].render(), 'Something')
    page2 = Page.objects.get(pk=2)
    self.assertEqual(page2._ct_inventory, {})
    # Pre-warm the ContentType cache so the counts below are stable.
    for ct in Page._feincms_content_types:
        ContentType.objects.get_for_model(ct)
    if hasattr(self, 'assertNumQueries'):
        self.assertNumQueries(5, (lambda : [page2.content.main, page2.content.sidebar]))
        self.assertNumQueries(0, (lambda : page2.content.sidebar[0].render()))
    self.assertEqual(page2.content.sidebar[0].render(), 'Something')
    # Second load: the persisted _ct_inventory drops the count to one query.
    page2 = Page.objects.get(pk=2)
    if hasattr(self, 'assertNumQueries'):
        self.assertNumQueries(1, (lambda : [page2.content.main, page2.content.sidebar]))
    self.assertNotEqual(page2._ct_inventory, {})
def test_17_page_template_tags(self):
    """Exercise the feincms_page_tags template tags: parent links, language
    links, navigation at various levels/depths, breadcrumbs, parent-of
    filters and translated-page lookup."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    page2.language = 'de'
    page2.translation_of = page1
    page2.active = True
    page2.in_navigation = True
    page2.save()
    page3 = Page.objects.create(parent=page2, title='page3', slug='page3', language='en', active=True, in_navigation=True, publication_date=datetime(2001, 1, 1))
    page1 = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    context = template.Context({'feincms_page': page2, 'page3': page3})
    t = template.Template('{% load feincms_page_tags %}{% feincms_parentlink of feincms_page level=1 %}')
    self.assertEqual(t.render(context), '/test-page/')
    # Language links for a page with a translation.
    t = template.Template('{% load feincms_page_tags %}{% feincms_languagelinks for feincms_page as links %}{% for key, name, link in links %}{{ key }}:{{ link }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), 'en:/test-page/,de:/test-page/test-child-page/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_languagelinks for page3 as links %}{% for key, name, link in links %}{{ key }}:{{ link }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), 'en:/test-page/test-child-page/page3/,de:None')
    # 'existing' filters out languages without a translation.
    t = template.Template('{% load feincms_page_tags %}{% feincms_languagelinks for page3 as links existing %}{% for key, name, link in links %}{{ key }}:{{ link }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), 'en:/test-page/test-child-page/page3/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_languagelinks for feincms_page as links excludecurrent=1 %}{% for key, name, link in links %}{{ key }}:{{ link }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), 'en:/test-page/')
    # Navigation is empty until the root page enters navigation.
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=1 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '')
    page1.active = True
    page1.in_navigation = True
    page1.save()
    self.assertEqual(t.render(context), '/test-page/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '/test-page/test-child-page/')
    # feincms_nav also accepts a request object instead of a page.
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav request level=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    from django.http import HttpRequest
    request = HttpRequest()
    request.path = '/test-page/'
    self.assertEqual(t.render(template.Context({'request': request})), '/test-page/test-child-page/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=99 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '')
    # Breadcrumbs link ancestors but not the current page.
    t = template.Template('{% load feincms_page_tags %}{% feincms_breadcrumbs feincms_page %}')
    rendered = t.render(context)
    self.assertTrue(('Test child page' in rendered))
    self.assertTrue(('href="/test-page/">Test page</a>' in rendered), msg='The parent page should be a breadcrumb link')
    self.assertTrue(('href="/test-page/test-child-page/"' not in rendered), msg='The current page should not be a link in the breadcrumbs')
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=2 depth=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '/test-page/test-child-page/,/test-page/test-child-page/page3/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=1 depth=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '/test-page/,/test-page/test-child-page/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=1 depth=3 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '/test-page/,/test-page/test-child-page/,/test-page/test-child-page/page3/')
    t = template.Template('{% load feincms_page_tags %}{% feincms_nav feincms_page level=3 depth=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}')
    self.assertEqual(t.render(context), '/test-page/test-child-page/page3/')
    t = template.Template('{% load feincms_page_tags %}{% if feincms_page|is_parent_of:page3 %}yes{% endif %}|{% if page3|is_parent_of:feincms_page %}yes{% endif %}')
    self.assertEqual(t.render(context), 'yes|')
    t = template.Template('{% load feincms_page_tags %}{% if feincms_page|is_equal_or_parent_of:page3 %}yes{% endif %}|{% if page3|is_equal_or_parent_of:feincms_page %}yes{% endif %}')
    self.assertEqual(t.render(context), 'yes|')
    # Translated-page lookup: explicit language vs. default.
    t = template.Template('{% load feincms_page_tags %}{% feincms_translatedpage for feincms_page as t1 language=de %}{% feincms_translatedpage for feincms_page as t2 %}{{ t1.id }}|{{ t2.id }}')
    self.assertEqual(t.render(context), '2|1')
def test_17_feincms_nav(self):
    """feincms_nav over a larger tree: pages excluded from navigation are
    skipped (with their subtrees), a passthrough navigation extension
    changes nothing, and deactivating/reactivating an intermediate page
    removes/restores its subtree."""
    self.login()
    self.create_page_through_admin('Page 1')
    self.create_page_through_admin('Page 1.1', 1)
    self.create_page_through_admin('Page 1.2', 1)
    self.create_page_through_admin('Page 1.2.1', 3)
    self.create_page_through_admin('Page 1.2.2', 3)
    self.create_page_through_admin('Page 1.2.3', 3)
    self.create_page_through_admin('Page 1.3', 1)
    self.create_page_through_admin('Page 2')
    self.create_page_through_admin('Page 2.1', 8)
    self.create_page_through_admin('Page 2.2', 8)
    self.create_page_through_admin('Page 2.3', 8)
    self.create_page_through_admin('Page 3')
    self.create_page_through_admin('Page 3.1', 12)
    self.create_page_through_admin('Page 3.2', 12)
    self.create_page_through_admin('Page 3.3', 12)
    self.create_page_through_admin('Page 3.3.1', 15)
    self.create_page_through_admin('Page 3.3.1.1', 16)
    self.create_page_through_admin('Page 3.3.2', 15)
    self.create_page_through_admin('Page 4')
    self.create_page_through_admin('Page 4.1', 19)
    self.create_page_through_admin('Page 4.2', 19)
    Page.objects.all().update(active=True, in_navigation=True)
    # Drop pages 1.2.2, 2.1 and 4 from navigation.
    Page.objects.filter(id__in=(5, 9, 19)).update(in_navigation=False)
    tests = [({'feincms_page': Page.objects.get(pk=1)}, '{% load feincms_page_tags %}{% feincms_nav feincms_page level=1 depth=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}', '/page-1/,/page-1/page-11/,/page-1/page-12/,/page-1/page-13/,/page-2/,/page-2/page-22/,/page-2/page-23/,/page-3/,/page-3/page-31/,/page-3/page-32/,/page-3/page-33/'), ({'feincms_page': Page.objects.get(pk=14)}, '{% load feincms_page_tags %}{% feincms_nav feincms_page level=2 depth=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}', '/page-3/page-31/,/page-3/page-32/,/page-3/page-33/,/page-3/page-33/page-331/,/page-3/page-33/page-332/'), ({'feincms_page': Page.objects.get(pk=14)}, '{% load feincms_page_tags %}{% feincms_nav feincms_page level=2 depth=3 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}', '/page-3/page-31/,/page-3/page-32/,/page-3/page-33/,/page-3/page-33/page-331/,/page-3/page-33/page-331/page-3311/,/page-3/page-33/page-332/'), ({'feincms_page': Page.objects.get(pk=19)}, '{% load feincms_page_tags %}{% feincms_nav feincms_page level=1 depth=2 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}', '/page-1/,/page-1/page-11/,/page-1/page-12/,/page-1/page-13/,/page-2/,/page-2/page-22/,/page-2/page-23/,/page-3/,/page-3/page-31/,/page-3/page-32/,/page-3/page-33/'), ({'feincms_page': Page.objects.get(pk=1)}, '{% load feincms_page_tags %}{% feincms_nav feincms_page level=3 depth=1 as nav %}{% for p in nav %}{{ p.get_absolute_url }}{% if not forloop.last %},{% endif %}{% endfor %}', '')]
    for (c, t, r) in tests:
        self.assertEqual(template.Template(t).render(template.Context(c)), r)
    # A passthrough navigation extension must not alter the results.
    page = Page.objects.get(pk=8)
    page.navigation_extension = 'testapp.navigation_extensions.PassthroughExtension'
    page.save()
    for (c, t, r) in tests:
        self.assertEqual(template.Template(t).render(template.Context(c)), r)
    p = Page.objects.get(pk=15)
    tmpl = '{% load feincms_page_tags %}\n{% feincms_nav feincms_page level=1 depth=3 as nav %}\n{% for p in nav %}{{ p.pk }}{% if not forloop.last %},{% endif %}{% endfor %}\n'
    data = (template.Template(tmpl).render(template.Context({'feincms_page': p})).strip(),)
    self.assertEqual(data, ('1,2,3,4,6,7,8,10,11,12,13,14,15,16,18',), 'Original navigation')
    # Deactivating page 15 removes it and its descendants from navigation.
    p.active = False
    p.save()
    data = (template.Template(tmpl).render(template.Context({'feincms_page': p})).strip(),)
    self.assertEqual(data, ('1,2,3,4,6,7,8,10,11,12,13,14',), 'Navigation after disabling intermediate page')
    tmpl = '{% load feincms_page_tags %}\n{% feincms_nav feincms_page level=1 depth=3 as nav %}\n{% for p in nav %}{{ p.pk }}{% if not forloop.last %},{% endif %}{% endfor %}\n'
    data = (template.Template(tmpl).render(template.Context({'feincms_page': p})).strip(),)
    self.assertEqual(data, ('1,2,3,4,6,7,8,10,11,12,13,14',), 'Navigation after disabling intermediate page')
    # Reactivating restores the original navigation.
    p.active = True
    p.save()
    data = (template.Template(tmpl).render(template.Context({'feincms_page': p})).strip(),)
    self.assertEqual(data, ('1,2,3,4,6,7,8,10,11,12,13,14,15,16,18',), 'Original navigation')
def test_18_default_render_method(self):
    """A content type's default render() dispatches to render_<region>()."""
    class Something(models.Model):
        class Meta():
            abstract = True
        def render_main(self):
            return 'Hello'
    # Snapshot and restore the registered content types so this throwaway
    # type does not leak into other tests.
    tmp = Page._feincms_content_types[:]
    type = Page.create_content_type(Something, regions=('notexists',))
    Page._feincms_content_types = tmp
    # Even though the type was registered for another region, an instance
    # placed in 'main' renders via render_main().
    s = type(region='main', ordering='1')
    self.assertEqual(s.render(), 'Hello')
def test_19_page_manager(self):
    """Page manager lookups: page_for_path, best_match_for_path, for_request,
    raise404 behaviour, extra-path handling and per-request caching."""
    self.create_default_page_set()
    page = Page.objects.get(pk=2)
    page.active = True
    page.save()
    # The parent is still inactive, so the child cannot be resolved yet.
    self.assertRaises(Page.DoesNotExist, (lambda : Page.objects.page_for_path(page.get_absolute_url())))
    self.assertRaises(Page.DoesNotExist, (lambda : Page.objects.best_match_for_path((page.get_absolute_url() + 'something/hello/'))))
    self.assertRaises(Http404, (lambda : Page.objects.best_match_for_path('/blabla/blabla/', raise404=True)))
    self.assertRaises(Http404, (lambda : Page.objects.page_for_path('/asdf/', raise404=True)))
    self.assertRaises(Page.DoesNotExist, (lambda : Page.objects.best_match_for_path('/blabla/blabla/')))
    self.assertRaises(Page.DoesNotExist, (lambda : Page.objects.page_for_path('/asdf/')))
    # Build a minimal fake request object for for_request().
    request = Empty()
    request.path = request.path_info = page.get_absolute_url()
    request.method = 'GET'
    request.get_full_path = (lambda : '/xyz/')
    request.GET = {}
    request.META = {}
    request.user = AnonymousUser()
    from django.utils import translation
    translation.activate(page.language)
    page.active = False
    page.save()
    self.assertRaises(Http404, (lambda : Page.objects.for_request(request, raise404=True)))
    page.active = True
    page.save()
    # Still 404: the parent remains inactive.
    self.assertRaises(Http404, (lambda : Page.objects.for_request(request, raise404=True)))
    page.parent.active = True
    page.parent.save()
    self.assertEqual(page, Page.objects.for_request(request))
    self.assertEqual(page, Page.objects.page_for_path(page.get_absolute_url()))
    self.assertEqual(page, Page.objects.best_match_for_path((page.get_absolute_url() + 'something/hello/')))
    # FEINCMS_ALLOW_EXTRA_PATH controls whether trailing path segments 404.
    old = feincms_settings.FEINCMS_ALLOW_EXTRA_PATH
    request.path += 'hello/'
    feincms_settings.FEINCMS_ALLOW_EXTRA_PATH = False
    self.assertEqual(self.client.get(request.path).status_code, 404)
    feincms_settings.FEINCMS_ALLOW_EXTRA_PATH = True
    self.assertEqual(self.client.get(request.path).status_code, 200)
    self.assertEqual(page, Page.objects.for_request(request, best_match=True))
    feincms_settings.FEINCMS_ALLOW_EXTRA_PATH = old
    # for_request() caches the resolved page on the request object.
    page_id = id(request._feincms_page)
    p = Page.objects.for_request(request)
    self.assertEqual(id(p), page_id)
def test_20_redirects(self):
    """A page with redirect_to set issues a redirect to the target page."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    page2.active = True
    page2.publication_date = (timezone.now() - timedelta(days=1))
    page2.override_url = '/blablabla/'
    page2.redirect_to = page1.get_absolute_url()
    page2.save()
    page1.active = True
    page1.save()
    page2 = Page.objects.get(pk=2)
    try:
        self.assertRedirects(self.client.get('/blablabla/'), page1.get_absolute_url())
    except TemplateDoesNotExist as e:
        # The redirect target renders the missing base template (see setUp);
        # that failure is expected and tolerated here.
        if (e.args != ('feincms_base.html',)):
            raise
def test_21_copy_content(self):
    """``copy_content_from`` duplicates another page's content blocks."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    self.login()
    # Adds one content block to ``page`` via the admin interface.
    self.create_page_through_admincontent(page)
    page2 = Page.objects.get(pk=2)
    page2.copy_content_from(page)
    # The single 'main' region block must have been copied over.
    self.assertEqual(len(page2.content.main), 1)
def test_23_navigation_extension(self):
    """``extended_navigation`` honours the configured navigation extension.

    Covers three cases: a passthrough extension, a dotted path that does not
    resolve (falls back to default children), and a pretender extension that
    injects synthetic entries.
    """
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    # No children are active/in navigation yet.
    self.assertEqual(len(page.extended_navigation()), 0)
    page.navigation_extension = 'testapp.navigation_extensions.PassthroughExtension'
    page2 = Page.objects.get(pk=2)
    page2.active = True
    page2.in_navigation = True
    page2.save()
    self.assertEqual(list(page.extended_navigation()), [page2])
    # A non-existent extension path falls back to the plain child list.
    page.navigation_extension = 'testapp.navigation_extensions.ThisExtensionDoesNotExist'
    self.assertEqual(len(page.extended_navigation()), 1)
    page.navigation_extension = 'testapp.navigation_extensions.PretenderExtension'
    self.assertEqual(page.extended_navigation()[0].get_absolute_url(), '/asdsa/')
def test_24_admin_redirects(self):
    """Admin save buttons redirect correctly (_continue, _addanother, save)."""
    self.create_default_page_set()
    self.login()
    page = Page.objects.get(pk=1)
    # "Save and continue editing" returns to the change form.
    response = self.create_page_through_admincontent(page, _continue=1)
    self.assertRedirects(response, reverse('admin:page_page_change', args=(1,)))
    # "Save and add another" returns to the add form.
    response = self.create_page_through_admincontent(page, _addanother=1)
    self.assertRedirects(response, '/admin/page/page/add/')
    # Plain save returns to the changelist.
    response = self.create_page_through_admincontent(page)
    self.assertRedirects(response, '/admin/page/page/')
def test_25_applicationcontent(self):
    """End-to-end checks for ApplicationContent: URL resolution inside a
    page region, ``app_reverse`` lookups (including multiple content copies
    with different urlconfs), AJAX vs. full responses, and the admin form
    for application parameters."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page1.active = True
    page1.save()
    page = Page.objects.get(pk=2)
    page.active = True
    page.template_key = 'theother'
    page.save()
    # Without an application attached, extra path segments 404.
    self.is_published((page1.get_absolute_url() + 'anything/'), False)
    page.applicationcontent_set.create(region='main', ordering=0, urlconf_path='testapp.applicationcontent_urls')
    self.assertContains(self.client.get(page.get_absolute_url()), 'module_root')
    # Positional and keyword URL arguments are forwarded to the app views.
    self.assertContains(self.client.get((page.get_absolute_url() + 'args_test/abc/def/')), 'abc-def')
    self.assertContains(self.client.get((page.get_absolute_url() + 'kwargs_test/abc/def/')), 'def-abc')
    response = self.client.get((page.get_absolute_url() + 'full_reverse_test/'))
    self.assertContains(response, 'home:/test-page/test-child-page/')
    self.assertContains(response, 'args:/test-page/test-child-page/args_test/xy/zzy/')
    self.assertContains(response, 'base:/test/')
    self.assertContains(response, 'homeas:/test-page/test-child-page/')
    self.assertEqual(app_reverse('ac_module_root', 'testapp.applicationcontent_urls'), '/test-page/test-child-page/')
    # app_reverse results are cached; no queries once warm.
    if hasattr(self, 'assertNumQueries'):
        self.assertNumQueries(0, (lambda : app_reverse('ac_module_root', 'testapp.applicationcontent_urls')))
    self.assertEqual(self.client.get((page.get_absolute_url() + 'notexists/')).status_code, 404)
    self.assertContains(self.client.get((page.get_absolute_url() + 'fragment/')), '<span id="something">some things</span>')
    # NOTE(review): the expected-URL literal below looks scrub-damaged
    # (likely once an absolute 'http://testserver' prefix) — confirm
    # against upstream before touching.
    self.assertRedirects(self.client.get((page.get_absolute_url() + 'redirect/')), (' + page.get_absolute_url()))
    self.assertEqual(app_reverse('ac_module_root', 'testapp.applicationcontent_urls'), page.get_absolute_url())
    response = self.client.get((page.get_absolute_url() + 'response/'))
    self.assertContains(response, 'Anything')
    self.assertContains(response, '<h2>Main content</h2>')
    # AJAX requests return the bare app response, same as the decorated view.
    self.assertEqual(self.client.get((page.get_absolute_url() + 'response/'), HTTP_X_REQUESTED_WITH='XMLHttpRequest').content, self.client.get((page.get_absolute_url() + 'response_decorated/')).content)
    # A second application with a different urlconf on another page.
    page1.applicationcontent_set.create(region='main', ordering=0, urlconf_path='whatever')
    response = self.client.get((page.get_absolute_url() + 'alias_reverse_test/'))
    self.assertContains(response, 'home:/test-page/')
    self.assertContains(response, 'args:/test-page/args_test/xy/zzy/')
    self.assertContains(response, 'base:/test/')
    self.assertEqual(app_reverse('ac_module_root', 'testapp.applicationcontent_urls'), '/test-page/test-child-page/')
    self.assertEqual(app_reverse('ac_module_root', 'whatever'), '/test-page/')
    # Removing one application must not break reversal of the other.
    page.applicationcontent_set.get(urlconf_path='testapp.applicationcontent_urls').delete()
    self.assertEqual(app_reverse('ac_module_root', 'whatever'), '/test-page/')
    self.login()
    response = self.client.get(reverse('admin:page_page_change', args=(page1.id,)))
    self.assertContains(response, 'exclusive_subpages')
    self.assertContains(response, 'custom_field')
    # Stored JSON parameters round-trip through the admin form.
    app_ct = page1.applicationcontent_set.all()[0]
    app_ct.parameters = '{"custom_field":"val42", "exclusive_subpages": false}'
    app_ct.save()
    response = self.client.get(reverse('admin:page_page_change', args=(page1.id,)))
    self.assertContains(response, 'val42')
def test_26_page_form_initial(self):
    """The admin add form accepts translation_of/lang/parent GET params."""
    self.create_default_page_set()
    self.login()
    self.assertEqual(self.client.get('/admin/page/page/add/?translation_of=1&lang=de').status_code, 200)
    self.assertEqual(self.client.get('/admin/page/page/add/?parent=1').status_code, 200)
    self.assertEqual(self.client.get('/admin/page/page/add/?parent=2').status_code, 200)
def test_27_cached_url_clash(self):
    """Creating a second page with an already-used override URL is rejected."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page2 = Page.objects.get(pk=2)
    page1.override_url = '/'
    page1.active = True
    page1.save()
    self.login()
    # The admin form must report the '/' URL as taken by page1.
    self.assertContains(self.create_page_through_admincontent(page2, active=True, override_url='/'), 'already taken by')
def test_28_applicationcontent_reverse(self):
    """``app_reverse`` picks the application content matching the language
    of the most recently requested page when the same urlconf is attached
    to pages in several languages."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page1.active = True
    page1.save()
    page = Page.objects.get(pk=2)
    page.active = True
    page.template_key = 'theother'
    page.save()
    page.applicationcontent_set.create(region='main', ordering=0, urlconf_path='testapp.applicationcontent_urls')
    self.assertEqual(app_reverse('ac_module_root', 'testapp.applicationcontent_urls'), page.get_absolute_url())
    self.login()
    # Build a German page tree carrying the same application.
    self.create_page_through_admin(title='Home DE', language='de', active=True)
    page_de = Page.objects.get(title='Home DE')
    self.create_page_through_admin(title='Child 1 DE', language='de', parent=page_de.id, active=True)
    page_de_1 = Page.objects.get(title='Child 1 DE')
    page_de_1.applicationcontent_set.create(region='main', ordering=0, urlconf_path='testapp.applicationcontent_urls')
    page.active = False
    page.save()
    # NOTE(review): mutates settings.TEMPLATE_DIRS without restoring it
    # afterwards — leaks into subsequent tests; confirm intent.
    settings.TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), 'templates'),)
    # Visiting the German page makes its app content the reversal target.
    self.client.get(page_de_1.get_absolute_url())
    self.assertEqual(app_reverse('ac_module_root', 'testapp.applicationcontent_urls'), page_de_1.get_absolute_url())
    page.active = True
    page.save()
    # Visiting the English tree switches reversal back.
    self.client.get(page1.get_absolute_url())
    self.assertEqual(app_reverse('ac_module_root', 'testapp.applicationcontent_urls'), page.get_absolute_url())
def test_29_medialibrary_admin(self):
    """Media library admin: listing, ZIP bulk upload, single-file upload
    with thumbnailing, and per-type statistics."""
    self.create_default_page_set()
    self.login()
    page = Page.objects.get(pk=1)
    mediafile = MediaFile.objects.create(file='somefile.jpg')
    page.mediafilecontent_set.create(mediafile=mediafile, region='main', type='default', ordering=1)
    self.assertContains(self.client.get('/admin/medialibrary/mediafile/'), 'somefile.jpg')
    # Build a ZIP with ten small text members for the bulk-upload view.
    import zipfile
    zf = zipfile.ZipFile('test.zip', 'w')
    for i in range(10):
        zf.writestr(('test%d.txt' % i), ('test%d' % i))
    zf.close()
    with open('test.zip', 'rb') as handle:
        response = self.client.post('/admin/medialibrary/mediafile/mediafile-bulk-upload/', {'data': handle})
    self.assertRedirects(response, '/admin/medialibrary/mediafile/')
    # 1 pre-existing file + 10 extracted from the archive.
    self.assertEqual(MediaFile.objects.count(), 11, 'Upload of media files with ZIP does not work')
    # Upload a real PNG from the docs tree; the list view should show its
    # generated thumbnail size.
    dn = os.path.dirname
    path = os.path.join(dn(dn(dn(dn(__file__)))), 'docs', 'images', 'tree_editor.png')
    with open(path, 'rb') as handle:
        response = self.client.post('/admin/medialibrary/mediafile/add/', {'file': handle, 'translations-TOTAL_FORMS': 0, 'translations-INITIAL_FORMS': 0, 'translations-MAX_NUM_FORMS': 10})
    self.assertRedirects(response, '/admin/medialibrary/mediafile/')
    self.assertContains(self.client.get('/admin/medialibrary/mediafile/'), '100x100')
    stats = list(MediaFile.objects.values_list('type', flat=True))
    self.assertEqual(len(stats), 12)
    self.assertEqual(stats.count('image'), 2)
    self.assertEqual(stats.count('txt'), 10)
def test_30_context_processors(self):
    """``add_page_if_missing`` resolves and injects ``feincms_page`` for a
    request whose path only best-matches an existing page."""
    self.create_default_page_set()
    Page.objects.update(active=True, in_navigation=True)
    # Minimal fake request object: only the attributes the context
    # processor reads are provided.
    request = Empty()
    request.GET = {}
    request.META = {}
    request.method = 'GET'
    request.path = request.path_info = '/test-page/test-child-page/abcdef/'
    request.get_full_path = (lambda : '/test-page/test-child-page/abcdef/')
    ctx = add_page_if_missing(request)
    self.assertEqual(ctx['feincms_page'], request._feincms_page)
def test_31_sites_framework_associating_with_single_site(self):
    """Pages are bound to a site; ``active()`` only returns the current
    site's pages even though both exist in the database."""
    self.login()
    site_2 = Site.objects.create(name='site 2', domain='2.example.com')
    self.create_page_through_admin('site 1 homepage', override_url='/', active=True)
    self.create_page_through_admin('site 2 homepage', override_url='/', site=site_2.id, active=True)
    self.assertEqual(Page.objects.count(), 2)
    self.assertEqual(Page.objects.active().count(), 1)
def test_32_applicationcontent_inheritance20(self):
    """Template inheritance 2.0 inside application content: app views can
    override page regions, and response headers set by the app (here
    ``Cache-Control``) survive the page wrapper."""
    self.create_default_page_set()
    page1 = Page.objects.get(pk=1)
    page1.active = True
    page1.save()
    page = Page.objects.get(pk=2)
    page.active = True
    page.template_key = 'theother'
    page.save()
    self.is_published((page1.get_absolute_url() + 'anything/'), False)
    page.applicationcontent_set.create(region='main', ordering=0, urlconf_path='testapp.applicationcontent_urls')
    page.rawcontent_set.create(region='main', ordering=1, text='some_main_region_text')
    page.rawcontent_set.create(region='sidebar', ordering=0, text='some_sidebar_region_text')
    self.assertContains(self.client.get(page.get_absolute_url()), 'module_root')
    response = self.client.get((page.get_absolute_url() + 'inheritance20/'))
    # The app template replaces the 'main' region but keeps the sidebar.
    self.assertContains(response, 'a content 42')
    self.assertContains(response, 'b content')
    self.assertNotContains(response, 'some_main_region_text')
    self.assertContains(response, 'some_sidebar_region_text')
    self.assertNotContains(response, 'some content outside')
    response = self.client.get((page.get_absolute_url() + 'inheritance20_unpack/'))
    self.assertContains(response, 'a content 43')
    # App-set response headers must be preserved by the unpacking path.
    self.assertIn('yabba dabba', response['cache-control'])
def test_33_preview(self):
    """An inactive page 404s normally but renders via the _preview/ URL
    for a logged-in staff user."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    page.template_key = 'theother'
    page.save()
    page.rawcontent_set.create(region='main', ordering=0, text='Example content')
    self.login()
    # Page was never activated, so the public URL is not reachable.
    self.assertEqual(self.client.get(page.get_absolute_url()).status_code, 404)
    self.assertContains(self.client.get(f'{page.get_absolute_url()}_preview/{page.pk}/'), 'Example content')
def test_34_access(self):
    """Root redirect page forwards to the target; target serves 200;
    unknown paths 404."""
    self.create_default_page_set()
    page = Page.objects.get(pk=1)
    page.override_url = '/something/'
    page.save()
    Page.objects.update(active=True)
    self.login()
    self.create_page_through_admin(title='redirect page', override_url='/', redirect_to=page.get_absolute_url(), active=True)
    r = self.client.get('/')
    self.assertRedirects(r, page.get_absolute_url())
    r = self.client.get(page.override_url)
    self.assertEqual(r.status_code, 200)
    r = self.client.get('/foo/')
    self.assertEqual(r.status_code, 404)
def test_35_access_with_extra_path(self):
    """Redirect pages behave the same with FEINCMS_ALLOW_EXTRA_PATH on/off.

    The module-level setting is monkeypatched for the second half of the
    test; restoration happens in a ``finally`` block so a failing
    assertion cannot leak the changed value into other tests (the
    original restored it only on the success path).
    """
    self.login()
    self.create_page(title='redirect again', override_url='/', redirect_to='/somewhere/', active=True)
    self.create_page(title='somewhere', active=True)
    # Default setting: redirect from '/', unknown extra path 404s.
    r = self.client.get('/')
    self.assertRedirects(r, '/somewhere/')
    r = self.client.get('/dingdong/')
    self.assertEqual(r.status_code, 404)
    old = feincms_settings.FEINCMS_ALLOW_EXTRA_PATH
    feincms_settings.FEINCMS_ALLOW_EXTRA_PATH = True
    try:
        # Allowing extra path segments must not change either outcome.
        r = self.client.get('/')
        self.assertRedirects(r, '/somewhere/')
        r = self.client.get('/dingdong/')
        self.assertEqual(r.status_code, 404)
    finally:
        feincms_settings.FEINCMS_ALLOW_EXTRA_PATH = old
def test_36_sitemaps(self):
    """sitemap.xml lists only active, in-navigation pages."""
    # Empty site: valid sitemap envelope, no entries required.
    response = self.client.get('/sitemap.xml')
    self.assertContains(response, '<urlset', status_code=200)
    self.login()
    response = self.create_page()
    # Freshly created page is inactive, so still no <url> entries.
    response = self.client.get('/sitemap.xml')
    self.assertNotContains(response, '<url>', status_code=200)
    page = Page.objects.get()
    page.active = True
    page.in_navigation = True
    page.save()
    response = self.client.get('/sitemap.xml')
    self.assertContains(response, '<url>', status_code=200)
def test_37_invalid_parent(self):
    """Saving a page whose parent is itself or one of its descendants
    raises ``InvalidMove`` (MPTT integrity)."""
    self.create_default_page_set()
    (page1, page2) = list(Page.objects.order_by('id'))
    # A page cannot be its own parent.
    page1.parent = page1
    self.assertRaises(InvalidMove, page1.save)
    self.create_page('Page 3', parent=page2)
    (page1, page2, page3) = list(Page.objects.order_by('id'))
    # A page cannot be moved below its own descendant.
    page1.parent = page3
    self.assertRaises(InvalidMove, page1.save)
def test_38_invalid_template(self):
    """An unknown template_key falls back to the default 'base' template."""
    page = Page()
    page.template_key = 'test'
    self.assertEqual(page.template.key, 'base')
def test_39_navigationgroups(self):
    """``{% feincms_nav %}`` filters pages by ``navigation_group``.

    Renders the tag once for the 'default' group and once for 'footer'
    and checks that each rendering contains exactly the page assigned to
    that group.  The rendered-output variable was renamed from ``str``
    (which shadowed the builtin) to ``rendered``.
    """
    self.create_default_page_set()
    (page1, page2) = list(Page.objects.order_by('id'))
    page1.active = True
    page1.in_navigation = True
    page1.save()
    page2.active = True
    page2.in_navigation = True
    page2.navigation_group = 'footer'
    page2.save()
    # Default group: only page1.
    t = template.Template("\n{% load feincms_page_tags %}\n{% feincms_nav feincms_page level=1 depth=10 group='default' as nav %}\n{% for p in nav %}{{ p.get_absolute_url }},{% endfor %}\n ")
    rendered = t.render(template.Context({'feincms_page': page1}))
    self.assertEqual(rendered.strip(), '/test-page/,')
    # Footer group: only page2.
    t = template.Template("\n{% load feincms_page_tags %}\n{% feincms_nav feincms_page level=1 depth=10 group='footer' as nav %}\n{% for p in nav %}{{ p.get_absolute_url }},{% endfor %}\n ")
    rendered = t.render(template.Context({'feincms_page': page1}))
    self.assertEqual(rendered.strip(), '/test-page/test-child-page/,')
def test_40_page_is_active(self):
    """``page_is_active`` works for real pages (context-based) and for
    ``PagePretender`` objects matched by URL prefix."""
    self.create_default_page_set()
    (page1, page2) = list(Page.objects.order_by('id'))
    # page1 is an ancestor of page2, so it is active in page2's context.
    self.assertTrue(feincms_page_tags.page_is_active({'feincms_page': page1}, page1))
    self.assertTrue(feincms_page_tags.page_is_active({'feincms_page': page2}, page1))
    self.assertFalse(feincms_page_tags.page_is_active({'feincms_page': page1}, page2))
    # Pretender pages are matched against an explicit request path.
    p = PagePretender(title='bla', slug='bla', url='/test-page/whatsup/')
    self.assertTrue(feincms_page_tags.page_is_active({}, p, path='/test-page/whatsup/test/'))
    self.assertFalse(feincms_page_tags.page_is_active({}, p, path='/test-page/'))
    self.assertTrue(feincms_page_tags.page_is_active({'feincms_page': page1}, p, path='/test-page/whatsup/test/'))
    self.assertFalse(feincms_page_tags.page_is_active({'feincms_page': page2}, p, path='/test-page/'))
def test_41_templatecontent(self):
    """TemplateContent renders its selected template and appears in the
    admin template chooser."""
    page = self.create_page(active=True)
    page.templatecontent_set.create(region='main', ordering=10, template='templatecontent_1.html')
    self.login()
    # The admin form offers the template with its human-readable label.
    self.assertContains(self.client.get(reverse('admin:page_page_change', args=(page.id,))), '<option value="templatecontent_1.html">template 1</option>')
    response = self.client.get(page.get_absolute_url())
    self.assertContains(response, 'TemplateContent_1')
    self.assertContains(response, '#42#')
class OptionPlotoptionsVariwideDragdropDraghandle(Options):
    """Generated binding for Highcharts ``plotOptions.variwide.dragDrop.dragHandle``.

    NOTE(review): each option appears as a getter/setter pair of methods with
    the same name; the second ``def`` shadows the first at class-creation
    time.  This matches the generator's convention throughout this file
    (presumably post-processed or intended as property pairs) — confirm
    against the generator before changing.
    """

    def className(self):
        # Default CSS class name of the drag handles.
        return self._config_get('highcharts-drag-handle')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Default fill color of the drag handles.
        return self._config_get('#fff')

    def color(self, text: str):
        self._config(text, js_type=False)

    def cursor(self):
        return self._config_get(None)

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        # Default stroke color of the drag handles.
        return self._config_get('rgba(0, 0, 0, 0.6)')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def pathFormatter(self):
        return self._config_get(None)

    def pathFormatter(self, value: Any):
        self._config(value, js_type=False)

    def zIndex(self):
        return self._config_get(901)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the decorator below appears scrub-damaged — it is likely
# missing a '@pytest.mark' prefix; confirm against upstream.
.skipif(('pandas' not in sys.modules), reason='Pandas is not installed.')
def calculate_cache_key_multiple_times(x, n=1000):
    """Compute the cache key for the same input ``n`` times and return the
    value counts — a single unique key proves the hash is deterministic.
    """
    import pandas as pd
    series = pd.Series([_calculate_cache_key(task_name='task_name', cache_version='cache_version', input_literal_map=LiteralMap(literals={'d': TypeEngine.to_literal(ctx=FlyteContextManager.current_context(), expected=TypeEngine.to_literal_type(Dict), python_type=Dict, python_val=x)})) for _ in range(n)]).value_counts()
    return series
class LocalIOSchemaWriter(SchemaWriter[T]):
    """Schema writer that persists dataframes to a local filesystem path.

    Format-specific subclasses are expected to override ``_write``; this
    base implementation is a no-op.
    """

    def __init__(self, to_local_path: str, cols: typing.Optional[typing.Dict[(str, type)]], fmt: SchemaFormat):
        super().__init__(to_local_path, cols, fmt)

    def _write(self, df: T, path: os.PathLike, **kwargs):
        # Intentionally empty: subclasses implement the actual serialization.
        pass

    def write(self, *dfs, **kwargs):
        # Each dataframe is written to the next generated file name under
        # the local path (``_file_name_gen`` comes from the base class).
        for df in dfs:
            self._write(df, next(self._file_name_gen), **kwargs)
def output_orthologs_row(out, annotation, ncbi):
    """Write the per-target ortholog rows for one query annotation to *out*.

    ``annotation`` is the 10-field tuple produced upstream; only the query
    name, the best (seed) hit and the orthology dicts are used here.
    ``ncbi`` must provide ``get_taxid_translator(taxids) -> {taxid: name}``.

    Each emitted row is tab-separated: query name, target kind,
    ``taxname(taxid)``, comma-joined ortholog gene names.  Orthologs that
    carry annotations are prefixed with ``*``; the seed ortholog is emitted
    exactly once as its own ``seed`` row.

    Changes vs. the original: ``== False`` / explicit-length checks replaced
    with truthiness tests, ``set([...])`` with a set comprehension, and the
    redundant trailing ``return`` dropped.  Behavior is unchanged, including
    the in-place mutation of ``all_orthologies`` noted below.
    """
    (query_name, best_hit_name, _best_hit_evalue, _best_hit_score, _annotations,
     (_og_name, _og_cat, _og_desc), _max_annot_lvl, _match_nog_names,
     all_orthologies, annot_orthologs) = annotation
    # Seed gene name with the taxid prefix stripped ("taxid.gene" format);
    # used below to single out the seed row.
    best_hit_name_id = best_hit_name.split('.')[1]
    # NOTE: mutates the caller's dict, exactly as the original did.
    all_orthologies['annot_orthologs'] = annot_orthologs
    seed_shown = False
    for target, query_target_orths in all_orthologies.items():
        # 'all' and the bookkeeping key just added are not real target kinds.
        if target in ('all', 'annot_orthologs'):
            continue
        if not query_target_orths:
            continue
        orthologs_taxids = {int(x.split('.')[0]) for x in query_target_orths}
        # Sort taxa by scientific name for stable, readable output.
        orthologs_taxnames = sorted(ncbi.get_taxid_translator(orthologs_taxids).items(), key=(lambda x: x[1]))
        for (taxid, taxname) in orthologs_taxnames:
            orth_names = []
            for orth in (x for x in query_target_orths if int(x.split('.')[0]) == taxid):
                orth_name = orth.split('.')[1]
                if orth in annot_orthologs:
                    # Star marks orthologs that carried the annotation.
                    orth_name = f'*{orth_name}'
                if orth_name in {best_hit_name_id, f'*{best_hit_name_id}'}:
                    # The seed hit gets its own dedicated row, printed once.
                    if not seed_shown:
                        row = [query_name, 'seed', f'{taxname}({taxid})', orth_name]
                        print('\t'.join(row), file=out)
                        seed_shown = True
                else:
                    orth_names.append(orth_name)
            if orth_names:
                row = [query_name, target, f'{taxname}({taxid})', ','.join(sorted(orth_names))]
                print('\t'.join(row), file=out)
class SubgridError(proteus.SubgridError.SGE_base):
    """No-op subgrid error estimator: disables stabilization entirely.

    All hooks are intentionally empty; the base class is initialized with
    ``lag=False``.
    """

    def __init__(self, coefficients, nd):
        proteus.SubgridError.SGE_base.__init__(self, coefficients, nd, lag=False)

    def initializeElementQuadrature(self, mesh, t, cq):
        # Nothing to precompute for the no-op estimator.
        pass

    def updateSubgridErrorHistory(self, initializationPhase=False):
        # No history is kept.
        pass

    def calculateSubgridError(self, q):
        # Subgrid error is identically zero; leave q untouched.
        pass
def get_auth_service_mock() -> MagicMock:
    """Build a MagicMock auth-service stub with canned OAuth2 metadata.

    NOTE(review): the ``issuer=``/``jwks_uri=`` literals below look
    scrub-damaged (the URL values appear to have been stripped, fusing
    neighbouring keyword arguments into one string) — restore from
    upstream before relying on this fixture.
    """
    auth_stub_mock = MagicMock()
    # Public client config used by the PKCE flow.
    auth_stub_mock.GetPublicClientConfig.return_value = PublicClientAuthConfigResponse(client_id=CLIENT_ID, redirect_uri=REDIRECT_URI, scopes=['offline', 'all'], authorization_metadata_key='flyte-authorization')
    auth_stub_mock.GetOAuth2Metadata.return_value = OAuth2MetadataResponse(issuer=' authorization_endpoint=OAUTH_AUTHORIZE, token_endpoint=TOKEN_ENDPOINT, response_types_supported=['code', 'token', 'code token'], scopes_supported=['all'], token_endpoint_auth_methods_supported=['client_secret_basic'], jwks_uri=' code_challenge_methods_supported=['S256'], grant_types_supported=['client_credentials', 'refresh_token', 'authorization_code'])
    return auth_stub_mock
# NOTE(review): the route decorator below appears scrub-damaged (likely a
# missing '@plugin.route' prefix), and the urlopen call seems to have lost
# its URL literal — restore both from upstream.
('/mark/<fid>/<mark>')
def mark(fid, mark):
    """Toggle the 'mark' flag of item ``fid`` via the remote API, then
    refresh the Kodi container on success or show a notification on error.
    """
    data = parse.urlencode(encode_obj({'fid': fid, 'is_mark': mark}))
    try:
        data = xl.urlopen(' data=data)
        # Strip newlines, then parse from the first '{' to skip any
        # non-JSON preamble in the response body.
        data = data.replace('\n', '').replace('\r', '')
        data = json.loads(data[data.index('{'):])
        if data['state']:
            xbmc.executebuiltin('Container.Refresh()')
        else:
            notify(msg=(',:' + comm.ensure_text(data['error'])))
        return
    # NOTE(review): bare except silently swallows all errors (including
    # KeyboardInterrupt) and reports a generic notification — consider
    # narrowing.
    except:
        notify(msg='')
        return
def test_vaex_workflow_subset(self=None):
    pass
class OptionSeriesSolidgauge(Options):
    """Generated binding for the Highcharts ``series.solidgauge`` options.

    Options appear as getter/setter method pairs with the same name,
    matching the generator's convention throughout this file; sub-option
    groups return dedicated Option* wrapper objects.

    Fix vs. the original: ``linecap`` passed the *builtin* ``round``
    function as its default instead of the string ``'round'`` (the
    Highcharts default) — an unquoted-default artifact of the generator.
    """

    def accessibility(self) -> 'OptionSeriesSolidgaugeAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesSolidgaugeAccessibility)

    def allowPointSelect(self):
        return self._config_get(False)

    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self):
        return self._config_get(True)

    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def clip(self):
        return self._config_get(True)

    def clip(self, flag: bool):
        self._config(flag, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorAxis(self):
        return self._config_get(0)

    def colorAxis(self, num: float):
        self._config(num, js_type=False)

    def colorByPoint(self):
        return self._config_get(True)

    def colorByPoint(self, flag: bool):
        self._config(flag, js_type=False)

    def colorIndex(self):
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def colorKey(self):
        return self._config_get('y')

    def colorKey(self, text: str):
        self._config(text, js_type=False)

    def crisp(self):
        return self._config_get(True)

    def crisp(self, flag: bool):
        self._config(flag, js_type=False)

    def cursor(self):
        return self._config_get(None)

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def custom(self):
        return self._config_get(None)

    def custom(self, value: Any):
        self._config(value, js_type=False)

    def data(self) -> 'OptionSeriesSolidgaugeData':
        return self._config_sub_data('data', OptionSeriesSolidgaugeData)

    def dataLabels(self) -> 'OptionSeriesSolidgaugeDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesSolidgaugeDatalabels)

    def description(self):
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def dragDrop(self) -> 'OptionSeriesSolidgaugeDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesSolidgaugeDragdrop)

    def enableMouseTracking(self):
        return self._config_get(True)

    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)

    def events(self) -> 'OptionSeriesSolidgaugeEvents':
        return self._config_sub_data('events', OptionSeriesSolidgaugeEvents)

    def id(self):
        return self._config_get(None)

    def id(self, text: str):
        self._config(text, js_type=False)

    def inactiveOtherPoints(self):
        return self._config_get(False)

    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)

    def includeInDataExport(self):
        return self._config_get(None)

    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)

    def index(self):
        return self._config_get(None)

    def index(self, num: float):
        self._config(num, js_type=False)

    def innerRadius(self):
        return self._config_get('"60%"')

    def innerRadius(self, text: str):
        self._config(text, js_type=False)

    def keys(self):
        return self._config_get(None)

    def keys(self, value: Any):
        self._config(value, js_type=False)

    def label(self) -> 'OptionSeriesSolidgaugeLabel':
        return self._config_sub_data('label', OptionSeriesSolidgaugeLabel)

    def legendIndex(self):
        return self._config_get(None)

    def legendIndex(self, num: float):
        self._config(num, js_type=False)

    def legendSymbol(self):
        return self._config_get('rectangle')

    def legendSymbol(self, text: str):
        self._config(text, js_type=False)

    def linecap(self):
        # FIX: default is the string 'round' (Highcharts default), not the
        # builtin round() function the generator emitted.
        return self._config_get('round')

    def linecap(self, value: Any):
        self._config(value, js_type=False)

    def lineWidth(self):
        return self._config_get(2)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def linkedTo(self):
        return self._config_get(None)

    def linkedTo(self, text: str):
        self._config(text, js_type=False)

    def name(self):
        return self._config_get(None)

    def name(self, text: str):
        self._config(text, js_type=False)

    def onPoint(self) -> 'OptionSeriesSolidgaugeOnpoint':
        return self._config_sub_data('onPoint', OptionSeriesSolidgaugeOnpoint)

    def opacity(self):
        return self._config_get(1)

    def opacity(self, num: float):
        self._config(num, js_type=False)

    def overshoot(self):
        return self._config_get(0)

    def overshoot(self, num: float):
        self._config(num, js_type=False)

    def point(self) -> 'OptionSeriesSolidgaugePoint':
        return self._config_sub_data('point', OptionSeriesSolidgaugePoint)

    def pointDescriptionFormat(self):
        return self._config_get(None)

    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)

    def pointDescriptionFormatter(self):
        return self._config_get(None)

    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def pointInterval(self):
        return self._config_get(1)

    def pointInterval(self, num: float):
        self._config(num, js_type=False)

    def pointIntervalUnit(self):
        return self._config_get(None)

    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)

    def pointStart(self):
        return self._config_get(0)

    def pointStart(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get('"100%"')

    def radius(self, text: str):
        self._config(text, js_type=False)

    def relativeXValue(self):
        return self._config_get(False)

    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)

    def rounded(self):
        return self._config_get(False)

    def rounded(self, flag: bool):
        self._config(flag, js_type=False)

    def selected(self):
        return self._config_get(False)

    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    def showCheckbox(self):
        return self._config_get(False)

    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)

    def showInLegend(self):
        return self._config_get(False)

    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)

    def skipKeyboardNavigation(self):
        return self._config_get(None)

    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)

    def sonification(self) -> 'OptionSeriesSolidgaugeSonification':
        return self._config_sub_data('sonification', OptionSeriesSolidgaugeSonification)

    def stickyTracking(self):
        return self._config_get(True)

    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)

    def tooltip(self) -> 'OptionSeriesSolidgaugeTooltip':
        return self._config_sub_data('tooltip', OptionSeriesSolidgaugeTooltip)

    def type(self):
        return self._config_get(None)

    def type(self, text: str):
        self._config(text, js_type=False)

    def visible(self):
        return self._config_get(True)

    def visible(self, flag: bool):
        self._config(flag, js_type=False)

    def xAxis(self):
        return self._config_get(0)

    def xAxis(self, num: float):
        self._config(num, js_type=False)

    def yAxis(self):
        return self._config_get(0)

    def yAxis(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(None)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsVariwideSonificationContexttracksMapping(Options):
    """Generated binding for Highcharts
    ``plotOptions.variwide.sonification.contextTracks.mapping``.

    Every audio parameter is exposed as a sub-option wrapper; ``text`` is
    the only scalar option (with the generator's duplicate getter/setter
    ``def`` convention used throughout this file).
    """

    def frequency(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsVariwideSonificationContexttracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsVariwideSonificationContexttracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsVariwideSonificationContexttracksMappingHighpass)

    def lowpass(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsVariwideSonificationContexttracksMappingLowpass)

    def noteDuration(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsVariwideSonificationContexttracksMappingNoteduration)

    def pan(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsVariwideSonificationContexttracksMappingPan)

    def pitch(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsVariwideSonificationContexttracksMappingPitch)

    def playDelay(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsVariwideSonificationContexttracksMappingPlaydelay)

    def rate(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsVariwideSonificationContexttracksMappingRate)

    def text(self):
        return self._config_get(None)

    def text(self, text: str):
        self._config(text, js_type=False)

    def time(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsVariwideSonificationContexttracksMappingTime)

    def tremolo(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsVariwideSonificationContexttracksMappingTremolo)

    def volume(self) -> 'OptionPlotoptionsVariwideSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsVariwideSonificationContexttracksMappingVolume)
class InstanceNetworkInterface(object):
    """Value object describing one GCE instance network interface.

    Built from the raw API resource dict (``**kwargs``); the original dict
    is retained as pretty-printed JSON for ``as_json``.

    Fix vs. the original: ``__eq__`` tested ``isinstance(self, ...)`` —
    always true — so comparing against a foreign type (e.g. a string)
    raised ``AttributeError`` instead of returning ``False``.
    """

    def __init__(self, **kwargs):
        self.full_name = kwargs.get('full_name')
        self.kind = kwargs.get('kind')
        self.network = kwargs.get('network')
        self.subnetwork = kwargs.get('subnetwork')
        self.network_ip = kwargs.get('networkIP')
        self.name = kwargs.get('name')
        self.access_configs = kwargs.get('accessConfigs')
        self.alias_ip_ranges = kwargs.get('aliasIpRanges')
        # Canonical serialized form, frozen at construction time.
        self._json = json.dumps(kwargs, sort_keys=True, indent=2)

    def __repr__(self):
        # NOTE(review): '%saccess_configs' is missing a separating space in
        # the original; kept byte-identical because __hash__ derives from
        # this string.
        return ('kind: %s Network: %s subnetwork: %s network_ip %s name %saccess_configs %s alias_ip_ranges %s' % (self.kind, self.network, self.subnetwork, self.network_ip, self.name, self.access_configs, self.alias_ip_ranges))

    def __hash__(self):
        # Hash of the repr string; assumes the attributes are not mutated
        # after construction (otherwise the hash would drift).
        return hash(self.__repr__())

    def __ne__(self, other):
        return (not self.__eq__(other))

    def __eq__(self, other):
        # BUG FIX: check the *other* operand's type, not self's.
        if isinstance(other, InstanceNetworkInterface):
            return ((self.kind == other.kind) and (self.network == other.network) and (self.subnetwork == other.subnetwork) and (self.network_ip == other.network_ip) and (self.name == other.name) and (self.access_configs == other.access_configs) and (self.alias_ip_ranges == other.alias_ip_ranges))
        return False

    def as_json(self):
        """Return the original resource dict as pretty-printed JSON."""
        return self._json
def test_adding_deprecated_env_from(self=None):
    pass
def which(program):
    """Locate ``program`` like the Unix ``which`` utility.

    If ``program`` contains a path component, it is accepted only if it
    points to an executable regular file.  Otherwise each entry of the
    ``PATH`` environment variable is searched in order.

    Returns the resolved path, or ``None`` if nothing executable is found.

    Fix vs. the original: ``os.environ['PATH']`` raised ``KeyError`` when
    PATH was unset (e.g. stripped environments); ``.get`` with an empty
    default now yields ``None`` instead.
    """
    def is_exe(fpath):
        # Must exist as a regular file and be executable by the current user.
        return (os.path.isfile(fpath) and os.access(fpath, os.X_OK))

    (fpath, _fname) = os.path.split(program)
    if fpath:
        # Explicit path given: accept it only if it is executable.
        if is_exe(program):
            return program
    else:
        # Bare command name: scan each PATH entry in order.
        for path in os.environ.get('PATH', '').split(os.pathsep):
            # Some platforms quote PATH entries; strip the quotes.
            path = path.strip('"')
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                return exe_file
    return None
def find_origin_coordinate(sites):
    """Return the smallest (x, y) coordinate found in the site names.

    Coordinates are extracted from each site's ``name`` field via
    ``SITE_COORDINATE_PATTERN`` (groups 2 and 3).  An empty site list
    yields the origin ``(0, 0)``.
    """
    if not sites:
        return (0, 0)
    xs = []
    ys = []
    for entry in sites:
        match = SITE_COORDINATE_PATTERN.match(entry['name'])
        # Every site name is expected to encode a coordinate.
        assert match is not None, entry
        xs.append(int(match.group(2)))
        ys.append(int(match.group(3)))
    return (min(xs), min(ys))
class TestSkillExceptionPolicyConfigVariable(BaseConfigTestVariable):
    """Parametrized config-variable test for the ``skill_exception_policy``
    option, driven entirely by the class constants below."""

    # Name of the CLI/config option under test.
    OPTION_NAME = 'skill_exception_policy'
    # Attribute on the configuration object holding the value.
    CONFIG_ATTR_NAME = 'skill_exception_policy'
    # Every member of the enum is a valid value.
    GOOD_VALUES = ExceptionPolicyEnum
    # Values that must be rejected by validation.
    INCORRECT_VALUES = [None, 'sTrING?', (- 1)]
    REQUIRED = False
    # Private attribute on the AEA instance that mirrors this option.
    AEA_ATTR_NAME = '_skills_exception_policy'
    AEA_DEFAULT_VALUE = ExceptionPolicyEnum.propagate
# NOTE(review): the decorator below appears scrub-damaged — it is likely
# missing a '@pytest.mark' prefix; confirm against upstream.
.parametrize('elasticapm_client', [{'environment': 'production'}], indirect=True)
def test_service_info(elasticapm_client):
    """``get_service_info`` reflects the configured service name/environment
    and reports the Python agent metadata."""
    service_info = elasticapm_client.get_service_info()
    assert (service_info['name'] == elasticapm_client.config.service_name)
    assert (service_info['environment'] == elasticapm_client.config.environment == 'production')
    assert (service_info['language'] == {'name': 'python', 'version': platform.python_version()})
    assert (service_info['agent']['name'] == 'python')
    assert (service_info['agent']['activation_method'] == 'unknown')
.parametrize('params', (['t', 'u'], ['u', 't']))
.parametrize('levels', ([500, 850], [850, 500]))
.parametrize('source_name', ['indexed-directory'])
def test_indexing_order_by_with_request(params, levels, source_name):
    """Dataset honours request ordering regardless of param/level order.

    NOTE(review): the three leading ``.parametrize`` lines look like stripped
    ``@pytest.mark`` decorators -- confirm against the original file.
    """
    request = dict(level=levels, variable=params, time='1200')
    (ds, _, total, n) = get_fixtures(source_name, request)
    for i in ds:
        print(i)
    # 2 params x 2 levels at one time step.
    assert (len(ds) == 4), len(ds)
    check_sel_and_order(ds, params, levels)
class ClefAccount(_PrivateKeyAccount):
    """Account whose transactions are signed externally by a Clef signer.

    Signing requests go over the given HTTP or IPC provider via the
    ``account_signTransaction`` JSON-RPC method.
    """

    def __init__(self, address: str, provider: Union[(HTTPProvider, IPCProvider)]) -> None:
        self._provider = provider
        super().__init__(address)

    def _transact(self, tx: Dict, allow_revert: bool) -> None:
        # NOTE(review): annotated ``-> None`` but the final statement returns
        # the result of send_raw_transaction -- confirm the intended return type.
        if (allow_revert is None):
            # Default: only allow revert-prone txs on development networks.
            allow_revert = bool((CONFIG.network_type == 'development'))
        if (not allow_revert):
            self._check_for_revert(tx)
        # Clef expects quantity fields hex-encoded and addresses formatted.
        formatters = {'nonce': web3.toHex, 'value': web3.toHex, 'chainId': web3.toHex, 'data': web3.toHex, 'from': to_address}
        if ('to' in tx):
            formatters['to'] = to_address
        tx['chainId'] = web3.chain_id
        tx = apply_formatters_to_dict(formatters, tx)
        response = self._provider.make_request('account_signTransaction', [tx])
        if ('error' in response):
            raise ValueError(response['error']['message'])
        return web3.eth.send_raw_transaction(response['result']['raw'])
class OptionSeriesVennSonificationTracksMappingFrequency(Options):
    """Highcharts ``series.venn.sonification.tracks.mapping.frequency`` options.

    NOTE(review): each getter/setter pair shares a name; these were
    presumably ``@property`` / ``.setter`` pairs whose decorators were
    stripped during extraction -- as written each later ``def`` shadows the
    earlier one.  Confirm against the original file.
    """

    def mapFunction(self):
        # No mapping function by default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_dfsr_subtype0(tmpdir, merge_lis_prs):
    """A subtype-0 DFSR parses with the expected spec fields and no curves."""
    fpath = os.path.join(str(tmpdir), 'dfsr-subtype0.lis')
    parts = headers + ['data/lis/records/curves/dfsr-subtype0.lis.part'] + trailers
    merge_lis_prs(fpath, parts)
    with lis.load(fpath) as (f,):
        dfs = f.data_format_specs()[0]
        ch1 = dfs.specs[0]
        expected = {
            'mnemonic': 'CH01',
            'service_id': 'testCH',
            'service_order_nr': ' 1234 ',
            'units': 'INCH',
            'api_log_type': 2,
            'api_curve_type': 179,
            'api_curve_class': 96,
            'api_modifier': 59,
            'filenr': 1,
            'reserved_size': 4,
            'process_level': 255,
            'samples': 1,
            'reprc': 73,
        }
        for attr, value in expected.items():
            assert getattr(ch1, attr) == value
        # Subtype-0 records here carry no curve data.
        assert len(lis.curves(f, dfs)) == 0
def process_description(description_fce):
    """Return a wrapper that strips markup and URLs from a package
    description and re-wraps the remaining text to 80 columns.

    NOTE(review): the innermost ``re.sub`` call below is garbled (unbalanced
    quotes/parentheses); the original pattern was lost and this line is not
    valid Python as written -- restore it from the upstream source.
    """
    def inner(description):
        clear_description = re.sub('\\s+', ' ', re.sub('\\w+:\\/{2}[\\d\\w-]+(\\.[\\d\\w-]+)*(?:(?:\\/[^\\s/]*))*', '', re.sub('(#|=|---|~|`)*', '', re.sub('((\r?\n)|^).{0,8}((\r?\n)|$)', '', re.sub('((\r*.. image::|:target:) '', description_fce(description))))))
        return ' '.join(textwrap.wrap(clear_description, 80))
    return inner
def chunked(arg_name: str, arg_pos: int, chunk_size: int, process: Callable, reverse: str=None, reverse_pos: int=None, chain: str=None, chain_pos: int=None) -> Callable:
    """Decorator factory that splits one argument into *chunk_size* chunks,
    calls the wrapped method once per chunk, and combines results via *process*.

    ``arg_name``/``arg_pos`` identify the argument to split (1-based position).
    ``reverse``/``reverse_pos`` optionally name a flag that reverses chunk
    order; ``chain``/``chain_pos`` optionally name an argument that receives
    the previous chunk's return value (cursor-style pagination).  Async
    targets are detected via ``self.is_async``; chunking is skipped when
    ``self.chunked_on`` is false or the target argument is None.
    """
    def decorator(function: Callable) -> Callable:
        nonlocal arg_pos, reverse_pos, chain_pos
        # Convert the 1-based, caller-facing positions to 0-based indices.
        arg_pos -= 1
        if (reverse_pos is not None):
            reverse_pos -= 1
        if (chain_pos is not None):
            chain_pos -= 1
        def replace(arg_val, chain_val, args, kwargs):
            # Substitute the current chunk (and chained value) into the call.
            (args, kwargs) = _replace_arg(arg_pos, arg_name, arg_val, args, kwargs)
            if (chain is not None):
                (args, kwargs) = _replace_arg(chain_pos, chain, chain_val, args, kwargs)
            return (args, kwargs)
        async def async_wrapper(self, chunks, chain_val, args, kwargs):
            # Async variant: await each chunked call sequentially, chaining results.
            responses = []
            for chunk in chunks:
                (args, kwargs) = replace(chunk, chain_val, args, kwargs)
                chain_val = (await function(self, *args, **kwargs))
                responses.append(chain_val)
            return process(responses)
        # NOTE(review): the bare ``(function)`` line below looks like a
        # stripped ``@functools.wraps(function)`` decorator -- confirm upstream.
        (function)
        def wrapper(self, *args, **kwargs):
            arg_val = _get_arg(arg_pos, arg_name, args, kwargs)
            if ((not self.chunked_on) or (arg_val is None)):
                # Chunking disabled or nothing to split: call through unchanged.
                return function(self, *args, **kwargs)
            if (chain is not None):
                chain_val = _get_arg(chain_pos, chain, args, kwargs)
            else:
                chain_val = None
            if (reverse is not None):
                reverse_val = _get_arg(reverse_pos, reverse, args, kwargs)
                # Presence of the reverse argument toggles reversed chunk order.
                reverse_bool = (reverse_val is not None)
            else:
                reverse_bool = False
            chunks = _chunks(arg_val, chunk_size, reverse_bool)
            if self.is_async:
                # Returns a coroutine; the caller is expected to await it.
                return async_wrapper(self, chunks, chain_val, args, kwargs)
            responses = []
            for chunk in chunks:
                (args, kwargs) = replace(chunk, chain_val, args, kwargs)
                chain_val = function(self, *args, **kwargs)
                responses.append(chain_val)
            return process(responses)
        return wrapper
    return decorator
def smspecs(draw, sum_keys, start_date):
    """Hypothesis strategy body building an ``Smspec`` summary specification.

    NOTE(review): the ``draw`` parameter suggests this was decorated with
    ``@st.composite`` upstream -- confirm.  The leading TIME/DAYS/dummy-well
    entries are the mandatory first columns; the remaining ``n - 1`` entries
    are drawn to match the requested summary keys.
    """
    sum_keys = draw(sum_keys)
    # n counts the implicit TIME column plus one column per summary key.
    n = (len(sum_keys) + 1)
    nx = draw(small_ints)
    ny = draw(small_ints)
    nz = draw(small_ints)
    keywords = (['TIME '] + sum_keys)
    units = (['DAYS '] + draw(st.lists(unit_names, min_size=(n - 1), max_size=(n - 1))))
    well_names = ([':+:+:+:+'] + draw(st.lists(names, min_size=(n - 1), max_size=(n - 1))))
    # Sentinel region number for the TIME column, then one small int per key.
    region_numbers = ([(- 32676)] + draw(st.lists(from_dtype(np.dtype(np.int32), min_value=0, max_value=10), min_size=len(sum_keys), max_size=len(sum_keys))))
    return draw(st.builds(Smspec, nx=st.just(nx), ny=st.just(ny), nz=st.just(nz), restarted_from_step=st.just(0), num_keywords=st.just(n), restart=names, keywords=st.just(keywords), well_names=st.just(well_names), region_numbers=st.just(region_numbers), units=st.just(units), start_date=start_date))
class TestTrainingTeiParser():
    """Tests for parsing training TEI XML into tag results / layout tokens."""

    def test_should_parse_labelled_tokens_as_tag_result(self):
        """Labelled head/p tokens become B- prefixed tags in one document."""
        tei_root = _get_training_tei_with_text([E('head', TOKEN_1, E('lb')), '\n', E('p', TOKEN_2, E('lb')), '\n'])
        tag_result = SampleTrainingTeiParser().parse_training_tei_to_tag_result(tei_root)
        assert (tag_result == [[(TOKEN_1, 'B-<head>'), (TOKEN_2, 'B-<paragraph>')]])

    def test_should_parse_same_line_labelled_token_with_same_line_meta(self):
        """Tokens separated only by a space share the same line metadata."""
        tei_root = _get_training_tei_with_text([E('head', TOKEN_1), ' ', E('p', TOKEN_2, E('lb')), '\n'])
        labeled_layout_tokens_list = SampleTrainingTeiParser().parse_training_tei_to_labeled_layout_tokens_list(tei_root)
        assert (len(labeled_layout_tokens_list) == 1)
        labeled_layout_tokens = labeled_layout_tokens_list[0]
        assert (len(labeled_layout_tokens) == 2)
        assert isinstance(labeled_layout_tokens[0], LabeledLayoutToken)
        assert (labeled_layout_tokens[0].layout_token.text == TOKEN_1)
        assert (labeled_layout_tokens[1].layout_token.text == TOKEN_2)
        assert (labeled_layout_tokens[0].layout_token.line_meta == labeled_layout_tokens[1].layout_token.line_meta)

    def test_should_parse_multi_line_labelled_token_with_diff_line_meta(self):
        """A line break (<lb/>) between tokens yields distinct line metadata."""
        tei_root = _get_training_tei_with_text([E('head', TOKEN_1, E('lb')), '\n', E('p', TOKEN_2, E('lb')), '\n'])
        labeled_layout_tokens_list = SampleTrainingTeiParser().parse_training_tei_to_labeled_layout_tokens_list(tei_root)
        assert (len(labeled_layout_tokens_list) == 1)
        labeled_layout_tokens = labeled_layout_tokens_list[0]
        assert (len(labeled_layout_tokens) == 2)
        assert isinstance(labeled_layout_tokens[0], LabeledLayoutToken)
        assert (labeled_layout_tokens[0].layout_token.text == TOKEN_1)
        assert (labeled_layout_tokens[1].layout_token.text == TOKEN_2)
        assert (labeled_layout_tokens[0].layout_token.line_meta != labeled_layout_tokens[1].layout_token.line_meta)
(no_gui_test_assistant, 'No GuiTestAssistant')
class TestConfirmationDialog(unittest.TestCase, GuiTestAssistant):
    """GUI tests for ConfirmationDialog: creation, button labels and results.

    NOTE(review): the bare tuple lines throughout -- e.g.
    ``(no_gui_test_assistant, 'No GuiTestAssistant')`` and the
    ``(is_pyqt5, ...)`` / ``(no_modal_dialog_tester, ...)`` lines -- look
    like stripped ``@unittest.skipIf`` decorators; confirm upstream.
    """

    def setUp(self):
        GuiTestAssistant.setUp(self)
        self.dialog = ConfirmationDialog()

    def tearDown(self):
        # Destroy the dialog's toolkit control if a test created one.
        if (self.dialog.control is not None):
            with self.delete_widget(self.dialog.control):
                self.dialog.destroy()
        self.dialog = None
        GuiTestAssistant.tearDown(self)

    def test_create(self):
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_destroy(self):
        # Destroying a never-created dialog must not raise.
        with self.event_loop():
            self.dialog.destroy()

    def test_size(self):
        self.dialog.size = (100, 100)
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_position(self):
        self.dialog.position = (100, 100)
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_parent(self):
        with self.event_loop():
            parent = Window()
            parent.create(parent.control)
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()
        with self.event_loop():
            parent.destroy()

    def test_create_yes_renamed(self):
        self.dialog.yes_label = 'Sure'
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_no_renamed(self):
        self.dialog.no_label = 'No Way'
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_yes_default(self):
        self.dialog.default = YES
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_cancel(self):
        self.dialog.cancel = True
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_cancel_renamed(self):
        self.dialog.cancel = True
        self.dialog.cancel_label = 'Back'
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_cancel_default(self):
        self.dialog.cancel = True
        self.dialog.default = CANCEL
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    def test_create_image(self):
        self.dialog.image = ImageResource('core')
        with self.event_loop():
            self.dialog.create()
        with self.event_loop():
            self.dialog.destroy()

    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_close(self):
        # Closing without a button press defaults to NO.
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_run(when_opened=(lambda x: self.dialog.close()))
        self.assertEqual(tester.result, NO)
        self.assertEqual(self.dialog.return_code, NO)

    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_close_with_cancel(self):
        # With a cancel button, closing defaults to CANCEL instead.
        self.dialog.cancel = True
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_run(when_opened=(lambda x: self.dialog.close()))
        self.assertEqual(tester.result, CANCEL)
        self.assertEqual(self.dialog.return_code, CANCEL)

    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_yes(self):
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_wait(when_opened=(lambda x: x.click_button(YES)))
        self.assertEqual(tester.result, YES)
        self.assertEqual(self.dialog.return_code, YES)

    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_renamed_yes(self):
        self.dialog.yes_label = 'Sure'
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_wait(when_opened=(lambda x: x.click_widget('Sure')))
        self.assertEqual(tester.result, YES)
        self.assertEqual(self.dialog.return_code, YES)

    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_no(self):
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_wait(when_opened=(lambda x: x.click_button(NO)))
        self.assertEqual(tester.result, NO)
        self.assertEqual(self.dialog.return_code, NO)

    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_renamed_no(self):
        self.dialog.no_label = 'No way'
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_wait(when_opened=(lambda x: x.click_widget('No way')))
        self.assertEqual(tester.result, NO)
        self.assertEqual(self.dialog.return_code, NO)

    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_cancel(self):
        self.dialog.cancel = True
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_wait(when_opened=(lambda x: x.click_button(CANCEL)))
        self.assertEqual(tester.result, CANCEL)
        self.assertEqual(self.dialog.return_code, CANCEL)

    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_cancel_renamed(self):
        self.dialog.cancel = True
        self.dialog.cancel_label = 'Back'
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_wait(when_opened=(lambda x: x.click_widget('Back')))
        self.assertEqual(tester.result, CANCEL)
        self.assertEqual(self.dialog.return_code, CANCEL)

    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_parent(self):
        parent = Window()
        self.dialog.parent = parent.control
        with self.event_loop():
            parent.open()
        tester = ModalDialogTester(self.dialog.open)
        tester.open_and_run(when_opened=(lambda x: x.close(accept=True)))
        with self.event_loop():
            parent.close()
        self.assertEqual(tester.result, OK)
        self.assertEqual(self.dialog.return_code, OK)
class TestSettings(unittest.TestCase):
    """Tests for loading notification-server configuration from YAML."""

    def setUp(self) -> None:
        # Sample server YAML written to a temp dir by the individual tests.
        self.config_str = "\nserver_port: 50052\n# uri of database backend for notification server\ndb_uri: sqlite:///ns.db\n# High availability is disabled by default\nenable_ha: false\n# TTL of the heartbeat of a server, i.e., if the server hasn't send heartbeat for the TTL time, it is down.\nha_ttl_ms: 10000\n# Hostname and port the server will advertise to clients when HA enabled. If not set, it will use the local ip and configured port.\nadvertised_uri: 127.0.0.1:50052\n"

    def test_get_configuration(self):
        """Config values parsed from YAML land on the expected attributes."""
        with TemporaryDirectory(prefix='test_config') as tmp_dir:
            temp_config_file = os.path.join(tmp_dir, 'notification_server.yaml')
            with open(temp_config_file, 'w') as f:
                f.write(self.config_str)
            # Point the module's home at the temp dir holding the YAML file.
            notification_service.settings.NOTIFICATION_HOME = tmp_dir
            ns_config = get_configuration()
            self.assertEqual(50052, ns_config.port)
            self.assertEqual('127.0.0.1:50052', ns_config.advertised_uri)
            self.assertEqual('sqlite:///ns.db', ns_config.db_uri)
            self.assertFalse(ns_config.enable_ha)
            self.assertEqual(10000, ns_config.ha_ttl_ms)

    def test_get_notification_home(self):
        """NOTIFICATION_HOME env var overrides the $HOME-derived default."""
        prev_home = os.environ['HOME']
        try:
            os.environ['HOME'] = '/home'
            self.assertEqual(os.path.join('/home', 'notification_service'), get_notification_home())
            os.environ['NOTIFICATION_HOME'] = '/notification_home'
            self.assertEqual(os.path.join('/notification_home'), get_notification_home())
        finally:
            # Restore the environment regardless of assertion outcome.
            os.environ['HOME'] = prev_home
            if ('NOTIFICATION_HOME' in os.environ):
                del os.environ['NOTIFICATION_HOME']

    def test_get_configuration_path(self):
        """Missing dir raises; a dir containing the YAML resolves to it."""
        with self.assertRaises(FileNotFoundError):
            get_configuration_file_path('/non_exist_dir')
        with TemporaryDirectory(prefix='test_config') as tmp_dir:
            temp_config_file = os.path.join(tmp_dir, 'notification_server.yaml')
            with open(temp_config_file, 'w') as f:
                f.write(self.config_str)
            self.assertEqual(temp_config_file, get_configuration_file_path(tmp_dir))
class SparkDataFrameSchemaReader(SchemaReader[pyspark.sql.DataFrame]):
    """Schema reader that loads a Spark DataFrame from a path.

    Only the Parquet schema format is supported, and only whole-frame reads:
    chunked iteration is not meaningful for a distributed DataFrame.
    """

    def __init__(self, from_path: str, cols: typing.Optional[typing.Dict[(str, type)]], fmt: SchemaFormat):
        super().__init__(from_path, cols, fmt)

    def iter(self, **kwargs) -> typing.Generator[(T, None, None)]:
        # Spark DataFrames cannot be consumed chunk-by-chunk; fail loudly.
        raise NotImplementedError('Spark DataFrame reader cannot iterate over individual chunks in spark dataframe')

    def all(self, **kwargs) -> pyspark.sql.DataFrame:
        """Read the whole Parquet dataset via the Flyte-provided Spark session."""
        if (self._fmt == SchemaFormat.PARQUET):
            ctx = FlyteContext.current_context().user_space_params
            return ctx.spark_session.read.parquet(self.from_path)
        raise AssertionError('Only Parquet type files are supported for spark dataframe currently')
class OptionSeriesVennSonificationTracksMappingTime(Options):
    """Highcharts ``series.venn.sonification.tracks.mapping.time`` options.

    NOTE(review): each getter/setter pair shares a name; these were
    presumably ``@property`` / ``.setter`` pairs whose decorators were
    stripped during extraction -- as written each later ``def`` shadows the
    earlier one.  Confirm against the original file.
    """

    def mapFunction(self):
        # No mapping function by default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestParsingItems(unittest.TestCase):
    """Tests for parsing the 'items' observation expression."""

    def test_items(self):
        """'items' expands to trait | dict | list | set items, all optional."""
        actual = parse('items')
        expected = (((trait('items', optional=True) | dict_items(optional=True)) | list_items(optional=True)) | set_items(optional=True))
        self.assertEqual(actual, expected)

    def test_items_not_notify(self):
        """'items:attr' disables notification on items, then observes 'attr'."""
        actual = parse('items:attr')
        expected = (((trait('items', notify=False, optional=True) | dict_items(notify=False, optional=True)) | list_items(notify=False, optional=True)) | set_items(notify=False, optional=True)).trait('attr')
        self.assertEqual(actual, expected)
class Chi(Distribution):
    """Chi distribution parameterised by its degrees of freedom.

    Wraps ``scipy.stats.chi`` and exposes pdf/cdf/icdf/sampling helpers.
    """

    def __init__(self, dofs):
        """Initialise with *dofs* degrees of freedom (defaults to 1).

        Raises:
            ValueError: if dofs is not positive.
        """
        if dofs is None:
            self.dofs = 1
        else:
            self.dofs = dofs
        # Bug fix: the original tested ``< 0`` and so accepted 0 even though
        # the message (and the distribution) requires a positive value.
        if self.dofs <= 0:
            raise ValueError('Invalid parameter in chi distribution: dofs must be positive.')
        if self.dofs == 1:
            # Avoid the pdf singularity at 0 when dofs == 1.
            self.bounds = np.array([1e-15, np.inf])
        else:
            self.bounds = np.array([0.0, np.inf])
        # Bug fix: use self.dofs (post-default) -- the original passed the raw
        # ``dofs`` argument, which broke the dofs=None default case.
        mean, var, skew, kurt = chi.stats(self.dofs, moments='mvsk')
        self.mean = mean
        self.variance = var
        self.skewness = skew
        self.kurtosis = kurt
        self.x_range_for_pdf = np.linspace(0.0, 10.0 * self.mean, RECURRENCE_PDF_SAMPLES)
        self.parent = chi(self.dofs)

    def get_description(self):
        """Return a short prose description of this distribution."""
        # Bug fix: added the missing space before the value ("...is1." -> "...is 1.").
        text = ('is a chi distribution which is characterised by its degrees of freedom, which here is ' + str(self.dofs)) + '.'
        return text

    def get_pdf(self, points=None):
        """Evaluate the probability density function at *points*."""
        if points is not None:
            return self.parent.pdf(points)
        else:
            raise ValueError('Please digit an input for get_pdf method')

    def get_cdf(self, points=None):
        """Evaluate the cumulative distribution function at *points*."""
        if points is not None:
            return self.parent.cdf(points)
        else:
            raise ValueError('Please digit an input for get_cdf method')

    def get_icdf(self, xx):
        """Evaluate the inverse CDF (percent point function) at *xx*."""
        return self.parent.ppf(xx)

    def get_samples(self, m=None):
        """Draw *m* random samples (default 500000)."""
        if m is not None:
            number = m
        else:
            number = 500000
        return self.parent.rvs(size=number)
class Command(BaseCommand):
    """Management command that generates the configured API schema."""
    help = 'Generates configured API schema for project.'

    def get_mode(self):
        # CoreAPI mode only when the (deprecated) coreapi integration is on.
        return (COREAPI_MODE if coreapi.is_enabled() else OPENAPI_MODE)

    def add_arguments(self, parser):
        """Register CLI options; the format choices depend on the mode."""
        parser.add_argument('--title', dest='title', default='', type=str)
        parser.add_argument('--url', dest='url', default=None, type=str)
        parser.add_argument('--description', dest='description', default=None, type=str)
        if (self.get_mode() == COREAPI_MODE):
            parser.add_argument('--format', dest='format', choices=['openapi', 'openapi-json', 'corejson'], default='openapi', type=str)
        else:
            parser.add_argument('--format', dest='format', choices=['openapi', 'openapi-json'], default='openapi', type=str)
        parser.add_argument('--urlconf', dest='urlconf', default=None, type=str)
        parser.add_argument('--generator_class', dest='generator_class', default=None, type=str)
        parser.add_argument('--file', dest='file', default=None, type=str)
        parser.add_argument('--api_version', dest='api_version', default='', type=str)

    def handle(self, *args, **options):
        """Build the schema and write it to --file or stdout."""
        if options['generator_class']:
            # Allow a dotted-path override of the schema generator.
            generator_class = import_string(options['generator_class'])
        else:
            generator_class = self.get_generator_class()
        generator = generator_class(url=options['url'], title=options['title'], description=options['description'], urlconf=options['urlconf'], version=options['api_version'])
        schema = generator.get_schema(request=None, public=True)
        renderer = self.get_renderer(options['format'])
        output = renderer.render(schema, renderer_context={})
        if options['file']:
            # Renderers produce bytes; write in binary mode.
            with open(options['file'], 'wb') as f:
                f.write(output)
        else:
            self.stdout.write(output.decode())

    def get_renderer(self, format):
        """Map the --format choice onto the matching renderer instance."""
        if (self.get_mode() == COREAPI_MODE):
            renderer_cls = {'corejson': renderers.CoreJSONRenderer, 'openapi': renderers.CoreAPIOpenAPIRenderer, 'openapi-json': renderers.CoreAPIJSONOpenAPIRenderer}[format]
            return renderer_cls()
        renderer_cls = {'openapi': renderers.OpenAPIRenderer, 'openapi-json': renderers.JSONOpenAPIRenderer}[format]
        return renderer_cls()

    def get_generator_class(self):
        """Return the default generator for the active mode."""
        if (self.get_mode() == COREAPI_MODE):
            return coreapi.SchemaGenerator
        return SchemaGenerator
_exceptions
def weekly_upcoming_events():
    """Email each subscribed user the coming week's events at their location.

    NOTE(review): the bare ``_exceptions`` line above looks like a stripped
    decorator (e.g. a task/exception-logging wrapper) -- confirm upstream.
    """
    logger.info('Running task: weekly_upcoming_events')
    locations = Location.objects.all()
    for location in locations:
        events_this_week_at_location = published_events_this_week_local(location)
        if (len(events_this_week_at_location) == 0):
            # Nothing to announce for this location.
            logger.debug(('no events this week at %s; skipping email notification' % location.name))
            continue
        # Users who opted into weekly notifications for this location.
        weekly_notifications_on = EventNotifications.objects.filter(location_weekly=location)
        remindees_for_location = [notify.user for notify in weekly_notifications_on]
        for user in remindees_for_location:
            weekly_reminder_email(user, events_this_week_at_location, location)
def disassemble_data(reader, address, data_begin, data_end, code_begin, code_end, data_pointers, code_pointers):
    """Read an 8-byte little-endian value from *reader* and classify it.

    A value inside [data_begin, data_end) is recorded in *data_pointers*,
    one inside [code_begin, code_end) in *code_pointers* (keyed by
    *address*).  When the value is neither, the reader is rewound 4 bytes
    so the caller can rescan with a 4-byte stride.  Short reads abort
    silently (with a debug log for odd lengths).
    """
    first_half = reader.read(4)
    if len(first_half) != 4:
        LOGGER.debug('Failed to read 4 data bytes at address 0x%x.', address)
        return
    raw = first_half + reader.read(4)
    if len(raw) == 4:
        # Exactly four bytes were left in the stream: stop quietly.
        return
    if len(raw) != 8:
        LOGGER.debug('Failed to read 8 data bytes at address 0x%x.', address)
        return
    (value,) = struct.unpack('<Q', raw)
    if data_begin <= value < data_end:
        data_pointers[address] = value
    elif code_begin <= value < code_end:
        code_pointers[address] = value
    else:
        # Not a recognised pointer: back up so the next scan overlaps.
        reader.seek(-4, os.SEEK_CUR)
_tag
def generate_p_n(article: Articles):
    """Build previous/next navigation links for *article* within its category.

    Returns mark_safe HTML containing an anchor for the previous and next
    article; a no-op ``javascript:void`` link is used at either end.

    NOTE(review): the bare ``_tag`` line above looks like a stripped
    template-tag decorator (e.g. ``@register.simple_tag``) -- confirm upstream.
    """
    article_list = list(Articles.objects.filter(category=article.category))
    # Compute the position once; the original re-ran the O(n) .index()
    # lookup in both branches below.
    now_index = article_list.index(article)
    max_index = len(article_list) - 1
    if now_index == 0:
        prev_link = '<a href="javascript:void (0);"></a>'
    else:
        prev_article = article_list[now_index - 1]
        prev_link = f'<a href="/article/{prev_article.nid}/">:{prev_article.title}</a>'
    if now_index == max_index:
        # Renamed from ``next`` to avoid shadowing the builtin.
        next_link = '<a href="javascript:void (0);"></a>'
    else:
        next_article = article_list[now_index + 1]
        next_link = f'<a href="/article/{next_article.nid}/">:{next_article.title}</a>'
    return mark_safe(prev_link + next_link)
class _EditedMessage(_Message):
    """Filter matching message-edit events, optionally limited to one id."""

    def __init__(self, message_id: Optional[int]):
        self.message_id = message_id

    def filter(self, event: EventCommon):
        """True when *event* is a MessageEdited event (with matching id, if set)."""
        if not super().filter(event):
            return False
        if not isinstance(event, MessageEdited.Event):
            return False
        if self.message_id is None:
            # No specific id requested: any edit event matches.
            return True
        edited = cast(MessageEdited.Event, event).message
        return edited.id == self.message_id

    def __repr__(self):
        return f'EditedMessage({self.message_id})'
()
('input_name')
('output_name')
('begin', type=int)
('end', type=int)
def apply(input_name, output_name, begin, end):
    """Copy the ``[begin, end)`` slice of one saved numpy array into another.

    NOTE(review): the bare lines above look like stripped click decorators
    (``@cli.command()`` / ``@click.argument(...)``) -- confirm upstream.
    ``workspace`` and ``logger`` are module-level globals.
    """
    input = np.load((workspace / f'{input_name}.npy'))
    output = np.load((workspace / f'{output_name}.npy'))
    # Overwrite only the requested slice, then persist the result in place.
    output[begin:end] = input[begin:end]
    np.save((workspace / f'{output_name}.npy'), output)
    logger.info(f'Applied {input_name} to {output_name}')
def tex_Step(head, args, **kwargs):
    """Render a LaTeX ``a, a+1, \\ldots, b`` enumeration for a Step expression.

    *args* is ``(expr, forexpr)`` where *forexpr* supplies the loop variable
    ``n`` and its bounds ``a``, ``b``; the expression is instantiated at
    n=a, n=a+1 and n=b.  ``head`` is unused here (dispatch-table signature).
    """
    (expr, forexpr) = args
    (n, a, b) = forexpr.args()
    na = expr.replace({n: a})
    if a.is_integer():
        # Fold an integer lower bound so a+1 renders as a literal.
        na1 = expr.replace({n: Expr((int(a) + 1))})
    else:
        na1 = expr.replace({n: (a + 1)})
    nb = expr.replace({n: b})
    na = na.latex(**kwargs)
    na1 = na1.latex(**kwargs)
    nb = nb.latex(**kwargs)
    return ('%s, %s, \\ldots, %s' % (na, na1, nb))
class SupportedRegion(str, _enum.Enum):
    """Google Cloud regions supported by this service (string-valued enum)."""
    ASIA_NORTHEAST1 = 'asia-northeast1'
    ASIA_EAST1 = 'asia-east1'
    ASIA_NORTHEAST2 = 'asia-northeast2'
    EUROPE_NORTH1 = 'europe-north1'
    EUROPE_WEST1 = 'europe-west1'
    EUROPE_WEST4 = 'europe-west4'
    US_CENTRAL1 = 'us-central1'
    US_EAST1 = 'us-east1'
    US_EAST4 = 'us-east4'
    US_WEST1 = 'us-west1'
    ASIA_EAST2 = 'asia-east2'
    ASIA_NORTHEAST3 = 'asia-northeast3'
    ASIA_SOUTHEAST1 = 'asia-southeast1'
    ASIA_SOUTHEAST2 = 'asia-southeast2'
    ASIA_SOUTH1 = 'asia-south1'
    AUSTRALIA_SOUTHEAST1 = 'australia-southeast1'
    EUROPE_CENTRAL2 = 'europe-central2'
    EUROPE_WEST2 = 'europe-west2'
    EUROPE_WEST3 = 'europe-west3'
    EUROPE_WEST6 = 'europe-west6'
    NORTHAMERICA_NORTHEAST1 = 'northamerica-northeast1'
    SOUTHAMERICA_EAST1 = 'southamerica-east1'
    US_WEST2 = 'us-west2'
    US_WEST3 = 'us-west3'
    US_WEST4 = 'us-west4'
class StringRevealer(object):
    """Recovers runtime-built strings from binaries by concolic emulation.

    Functions are emulated with miasm's DSE engine (driven through an IDA
    rpyc bridge); ASCII and UTF-16LE string fragments are then extracted
    from the memory the emulation wrote.

    NOTE(review): ``_exec_callback``, ``_wipe_dse_errors`` and
    ``_get_top_level_strings`` are defined without ``self`` yet called as
    methods -- they were presumably ``@staticmethod``s whose decorators were
    stripped during extraction; confirm upstream.  Likewise the
    ``attrib == 64`` branch raising before the x86_64 sandbox assignment
    looks garbled.
    """
    # Printable-ASCII character class used to recognise string bytes.
    ASCII_BYTE = b'!\\"#\\$%&\\\'\\(\\)\\*\\+,-\\./:;<=>\\?\\[\\]\\^_`abcdefghijklmnopqrstuvwxyz\\{\\|\\}\\\\\\~\\t '
    # Runs of 3+ printable bytes, plain and NUL-interleaved (UTF-16LE).
    ASCII_RE = re.compile((b'([%s]{%d,})' % (ASCII_BYTE, 3)))
    UNICODE_RE = re.compile((b'((?:[%s]\x00){%d,})' % (ASCII_BYTE, 3)))

    def __init__(self, attrib):
        self.sandbox = Sandbox_Win_x86_32
        if (attrib == 64):
            raise Exception('Not supported')
        self.sandbox = Sandbox_Win_x86_64
        parser = self.sandbox.parser()
        self.options = parser.parse_args()
        self.options.use_windows_structs = True
        self.options.usesegm = True
        self.options.mimic_env = True
        self.options.jitter = 'llvm'
        self.sb = None

    def _exec_callback(dse, func, occurances, jitter, strings, get_strings_from_dse):
        # Abort paths that revisit the same pc too often (loop guard).
        occurances[jitter.pc] = (occurances.get(jitter.pc, 0) + 1)
        if (occurances[jitter.pc] > 500):
            return False
        dse.callback(jitter)
        return True

    def process_all(self, ip='localhost', port=4455, conn=None):
        """Emulate every function in the IDB; returns {func_addr: strings}."""
        close_conn = False
        if (not conn):
            close_conn = True
            conn = rpyc.classic.connect(ip, port)
        strings = {}
        file_path = conn.modules.idaapi.get_input_file_path()
        # Route file opens through the remote IDA host's filesystem.
        with mock.patch('builtins.open', conn.builtins.open):
            self.sb = self.sandbox(file_path, self.options, globals())
        sp = self.sb.jitter.arch.getsp(self.sb.jitter.attrib)
        setattr(self.sb.jitter.cpu, sp.name, ((self.sb.jitter.stack_base + self.sb.jitter.stack_size) - (8 * 80)))
        for func_addr in conn.modules.idautils.Functions():
            func = ExtendedAsmCFG(file_path)
            func.disassemble(func_addr, conn)
            strings[func_addr] = self._process_func(func)
        if close_conn:
            conn.close()
        return strings

    def process_funcs(self, func_addresses, ip='localhost', port=4455, conn=None):
        """Emulate only the given function addresses; returns {addr: strings}."""
        close_conn = False
        if (not conn):
            close_conn = True
            conn = rpyc.classic.connect(ip, port)
        strings = {}
        file_path = conn.modules.idaapi.get_input_file_path()
        with mock.patch('builtins.open', conn.builtins.open):
            self.sb = self.sandbox(file_path, self.options, globals())
        sp = self.sb.jitter.arch.getsp(self.sb.jitter.attrib)
        setattr(self.sb.jitter.cpu, sp.name, ((self.sb.jitter.stack_base + self.sb.jitter.stack_size) - (8 * 80)))
        for func_address in func_addresses:
            with mock.patch('builtins.open', conn.builtins.open):
                func = ExtendedAsmCFG(file_path)
                func.disassemble(func_address, conn)
            strings[func_address] = self._process_func(func)
        if close_conn:
            conn.close()
        return strings

    def _wipe_dse_errors(dse):
        # Clear symbolic/VM/CPU error state so emulation can continue.
        dse.symb.reset_modified()
        dse.jitter.vm.set_exception(0)
        dse.jitter.cpu.set_exception(0)
        dse.jitter.bs._atomic_mode = False

    def _process_func(self, func):
        """Run the DSE over one function CFG and collect written strings."""
        dse = DSEEngine(self.sb.machine)
        dse.attach(self.sb.jitter)
        bak_snap = dse.take_snapshot()
        dse.add_lib_handler(self.sb.libs, globals())
        occurances = {}
        addr = func.loc_db.get_location_offset(LocKey(0))
        asmb = func.loc_key_to_block(LocKey(0))
        strings = set()
        self.sb.jitter.exec_cb = (lambda x: self._exec_callback(dse, func, occurances, x, strings, self._get_strings_from_dse))
        self.sb.jitter.init_run(addr)
        try:
            self.sb.jitter.run_until(asmb.lines[(- 1)].offset)
        except:
            # Best-effort emulation: any failure just ends this pass.
            pass
        dse.update_state_from_concrete()
        initial_snap = dse.take_snapshot()
        strings.update(self._get_strings_from_dse(dse))
        dse.restore_snapshot(initial_snap)
        # Re-emulate from every reachable basic block to widen coverage.
        for loc_key in func.walk_breadth_first_forward(LocKey(0)):
            addr = func.loc_db.get_location_offset(loc_key)
            if (not addr):
                continue
            occurances.clear()
            self._emul_address(dse, addr)
            dse.update_state_from_concrete()
            strings.update(self._get_strings_from_dse(dse))
            dse.restore_snapshot(initial_snap)
        dse.restore_snapshot(bak_snap)
        strings = self._get_top_level_strings(strings)
        return strings

    def _emul_address(self, dse, addr):
        """Emulate from *addr*, skipping over unresolvable instructions once."""
        self.sb.jitter.init_run(addr)
        crashed = set()
        while 1:
            self._wipe_dse_errors(dse)
            try:
                self.sb.jitter.continue_run()
            except Exception as e:
                if (isinstance(e, RuntimeError) and e.args and (e.args[0] == 'Cannot find address') and (self.sb.jitter.pc not in crashed)):
                    # Skip past the faulting instruction and retry once.
                    instr = self.sb.jitter.jit.mdis.dis_instr(self.sb.jitter.pc)
                    crashed.add(self.sb.jitter.pc)
                    if instr:
                        next_addr = (self.sb.jitter.pc + instr.l)
                        self.sb.jitter.init_run(next_addr)
                        continue
                break

    def _get_top_level_strings(strings):
        # Drop strings that are substrings of another recovered string.
        new_strings = set()
        while strings:
            string = strings.pop()
            for tmp_string in (strings | new_strings):
                if (string in tmp_string):
                    break
            else:
                new_strings.add(string)
        return new_strings

    def _get_strings_from_dse(self, dse):
        """Extract strings from the memory writes recorded by the DSE."""
        modified_mem = SortedList(key=(lambda x: int(x[0])))
        for (key, val) in dse.symb.modified(ids=False, mems=True):
            try:
                val = dse.eval_expr(key)
                key = dse.eval_expr(key.ptr)
            except RuntimeError:
                continue
            if ((not key.is_int()) or (not val.is_int())):
                continue
            modified_mem.add((key, val))
        # Stitch adjacent writes into contiguous byte sequences.
        following_address = None
        current_sequence = b''
        strings = set()
        for (address, value) in modified_mem:
            if (following_address == address):
                current_sequence += int(value).to_bytes((value.size // 8), 'little')
            else:
                self._update_strings_from_sequence(current_sequence, strings)
                current_sequence = int(value).to_bytes((value.size // 8), 'little')
            following_address = expr_simp((address + ExprInt((value.size // 8), address.size)))
        self._update_strings_from_sequence(current_sequence, strings)
        return strings

    def _update_strings_from_sequence(self, sequence, strings):
        # Harvest both plain-ASCII and UTF-16LE fragments from the bytes.
        strings.update([i.decode() for i in self.ASCII_RE.findall(sequence)])
        strings.update([i.decode('utf-16le') for i in self.UNICODE_RE.findall(sequence)])
class Player():
    """Read-only view of a game player entity via the external memory
    reader ``mem`` and the netvar offset table ``nv``.

    All getters read process memory at ``self.address + offset``; no state
    is cached on the instance beyond the base address.
    """

    def __init__(self, address):
        # Base address of the player entity in the target process.
        self.address = address

    def get_team_num(self):
        return mem.read_i32((self.address + nv.m_iTeamNum))

    def get_health(self):
        return mem.read_i32((self.address + nv.m_iHealth))

    def get_life_state(self):
        # 0 presumably means alive -- see is_valid(); confirm against the SDK.
        return mem.read_i32((self.address + nv.m_lifeState))

    def get_tick_count(self):
        return mem.read_i32((self.address + nv.m_nTickBase))

    def get_shots_fired(self):
        return mem.read_i32((self.address + nv.m_iShotsFired))

    def get_cross_index(self):
        return mem.read_i32((self.address + nv.m_iCrossHairID))

    def get_weapon(self):
        # Resolve the active-weapon handle through the entity list.
        a0 = mem.read_i32((self.address + nv.m_hActiveWeapon))
        return mem.read_i32((nv.dwEntityList + (((a0 & 4095) - 1) * 16)))

    def get_weapon_id(self):
        return mem.read_i32((self.get_weapon() + nv.m_iItemDefinitionIndex))

    def get_origin(self):
        return mem.read_vec3((self.address + nv.m_vecOrigin))

    def get_vec_view(self):
        return mem.read_vec3((self.address + nv.m_vecViewOffset))

    def get_eye_pos(self):
        # Eye position = feet origin + view offset, component-wise.
        v = self.get_vec_view()
        o = self.get_origin()
        return Vector3((v.x + o.x), (v.y + o.y), (v.z + o.z))

    def get_vec_punch(self):
        return mem.read_vec3((self.address + nv.m_vecPunch))

    def get_bone_pos(self, index):
        # Bone matrices are 48-byte rows; floats at +12/+28/+44 are x/y/z.
        a0 = (48 * index)
        a1 = mem.read_i32((self.address + nv.m_dwBoneMatrix))
        return Vector3(mem.read_float(((a1 + a0) + 12)), mem.read_float(((a1 + a0) + 28)), mem.read_float(((a1 + a0) + 44)))

    def is_valid(self):
        # Alive, non-null entity with a sane health value.
        health = self.get_health()
        return ((self.address != 0) and (self.get_life_state() == 0) and (0 < health < 1338))
def test_from_file_missing_parameter(simple_roff_parameter_contents):
    """from_file raises when the requested parameter name is absent."""
    buff = io.BytesIO()
    # NOTE(review): the bare subscript below has no effect -- it looks like a
    # mangled statement (perhaps ``del simple_roff_parameter_contents[0][1]``
    # to remove the parameter); confirm against the original test.
    simple_roff_parameter_contents[0][1]
    roffio.write(buff, simple_roff_parameter_contents)
    buff.seek(0)
    with pytest.raises(ValueError, match='Did not find parameter'):
        RoffParameter.from_file(buff, 'c')
class ThreadLocalContext(BaseContext):
    """Execution-context storage backed by ``threading.local``.

    Each thread sees its own current transaction and its own stack (tuple)
    of active spans; ``getattr`` defaults cover threads that have not
    touched the storage yet.
    """

    thread_local = threading.local()
    thread_local.transaction = None
    thread_local.spans = ()

    def get_transaction(self, clear: bool=False) -> 'elasticapm.traces.Transaction':
        """Return this thread's current transaction, optionally clearing it."""
        current = getattr(self.thread_local, 'transaction', None)
        if clear:
            self.thread_local.transaction = None
        return current

    def set_transaction(self, transaction: 'elasticapm.traces.Transaction') -> None:
        """Install *transaction* as this thread's current transaction."""
        self.thread_local.transaction = transaction

    def get_span(self) -> 'elasticapm.traces.Span':
        """Return the innermost active span, or None if none are active."""
        stack = getattr(self.thread_local, 'spans', ())
        return stack[-1] if stack else None

    def set_span(self, span: 'elasticapm.traces.Span') -> None:
        """Push *span* onto this thread's span stack."""
        self.thread_local.spans = getattr(self.thread_local, 'spans', ()) + (span,)

    def unset_span(self, clear_all: bool=False) -> 'elasticapm.traces.Span':
        """Pop and return the innermost span; clear_all drops the whole stack."""
        stack = getattr(self.thread_local, 'spans', ())
        top = stack[-1] if stack else None
        self.thread_local.spans = () if clear_all else stack[:-1]
        return top
class FetchBlockBehaviour(TickerBehaviour):
    """Ticker behaviour that periodically requests the latest block from the
    ledger API."""

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)

    def _get_block(self) -> None:
        """Build a GET_STATE request for the latest block and enqueue it."""
        dialogues = cast(LedgerApiDialogues, self.context.ledger_api_dialogues)
        request, _ = dialogues.create(
            counterparty=str(LEDGER_API_ADDRESS),
            performative=LedgerApiMessage.Performative.GET_STATE,
            ledger_id=self.context.default_ledger_id,
            callable='blocks',
            args=('latest',),
            kwargs=Kwargs({}),
        )
        self.context.outbox.put_message(message=request)

    def setup(self) -> None:
        """Log behaviour start-up."""
        self.context.logger.info('setting up FetchBlockBehaviour')

    def act(self) -> None:
        """On each tick, fetch the latest block."""
        self.context.logger.info('Fetching latest block...')
        self._get_block()

    def teardown(self) -> None:
        """Log behaviour tear-down."""
        self.context.logger.info('tearing down FetchBlockBehaviour')
class TestConsumption(unittest.TestCase):
    """Unit tests for the TotalConsumption event type."""

    def test_create_consumption(self):
        # A well-formed event round-trips all constructor arguments.
        when = datetime(2023, 1, 1, tzinfo=timezone.utc)
        event = TotalConsumption(zoneKey=ZoneKey('DE'), datetime=when, consumption=1, source='trust.me')
        self.assertEqual(event.zoneKey, ZoneKey('DE'))
        self.assertEqual(event.datetime, when)
        self.assertEqual(event.consumption, 1)
        self.assertEqual(event.source, 'trust.me')

    def test_raises_if_invalid_consumption(self):
        aware = datetime(2023, 1, 1, tzinfo=timezone.utc)
        # Missing consumption value.
        with self.assertRaises(ValueError):
            TotalConsumption(zoneKey=ZoneKey('AT'), datetime=aware, consumption=None, source='trust.me')
        # Unknown zone key.
        with self.assertRaises(ValueError):
            TotalConsumption(zoneKey=ZoneKey('ATT'), datetime=aware, consumption=1, source='trust.me')
        # Naive (non-tz-aware) datetime.
        with self.assertRaises(ValueError):
            TotalConsumption(zoneKey=ZoneKey('AT'), datetime=datetime(2023, 1, 1), consumption=1, source='trust.me')
        # Negative consumption.
        with self.assertRaises(ValueError):
            TotalConsumption(zoneKey=ZoneKey('AT'), datetime=aware, consumption=-1, source='trust.me')

    def test_static_create_logs_error(self):
        # The factory swallows invalid input but must report it via the logger.
        logger = logging.Logger('test')
        with patch.object(logger, 'error') as mock_error:
            TotalConsumption.create(logger=logger, zoneKey=ZoneKey('DE'), datetime=datetime(2023, 1, 1, tzinfo=timezone.utc), consumption=-1, source='trust.me')
            mock_error.assert_called_once()
# NOTE(review): the bare tuples below look like `@click.command(...)` /
# `@click.option(...)` decorators whose prefixes were stripped (and `_context`
# for `@click.pass_context`) — confirm against the original CLI module.
(context_settings=get_width())
('--shrink_node', default='DETERMINISTIC', type=str, help='Named node, or DETERMINISTIC', show_default=True)
('--node_filters', help='JSON version of node_filters (see documentation)', callback=json_to_dict)
('--number_of_shards', default=1, type=int, help='Shrink to this many shards per index')
('--number_of_replicas', default=1, type=int, help='Number of replicas for the target index', show_default=True)
('--shrink_prefix', type=str, help='Prefix for the target index name')
('--shrink_suffix', default='-shrink', type=str, help='Suffix for the target index name', show_default=True)
('--copy_aliases', is_flag=True, help='Copy each source index aliases to target index')
('--delete_after/--no-delete_after', default=True, help='Delete source index after shrink', show_default=True)
('--post_allocation', help='JSON version of post_allocation (see documentation)', callback=json_to_dict)
('--extra_settings', help='JSON version of extra_settings (see documentation)', callback=json_to_dict)
('--wait_for_active_shards', default=1, type=int, help='Wait for number of active shards before continuing')
('--wait_for_rebalance/--no-wait_for_rebalance', default=True, help='Wait for rebalance to complete')
('--wait_for_completion/--no-wait_for_completion', default=True, help='Wait for the shrink to complete')
('--wait_interval', default=9, type=int, help='Seconds to wait between completion checks.')
('--max_wait', default=(- 1), type=int, help='Maximum number of seconds to wait_for_completion')
('--ignore_empty_list', is_flag=True, help='Do not raise exception if there are no actionable indices')
('--allow_ilm_indices/--no-allow_ilm_indices', help='Allow Curator to operate on Index Lifecycle Management monitored indices.', default=False, show_default=True)
('--filter_list', callback=validate_filter_json, help='JSON array of filters selecting indices to act on.', required=True)
_context
def shrink(ctx, shrink_node, node_filters, number_of_shards, number_of_replicas, shrink_prefix, shrink_suffix, copy_aliases, delete_after, post_allocation, extra_settings, wait_for_active_shards, wait_for_rebalance, wait_for_completion, wait_interval, max_wait, ignore_empty_list, allow_ilm_indices, filter_list):
    """Shrink the indices selected by ``filter_list`` to ``number_of_shards``
    on the chosen node, then run the action (honoring the CLI dry-run flag)."""
    # Bundle the CLI flags into the action's option dict; filter_list and
    # ignore_empty_list are passed to CLIAction separately, not as options.
    manual_options = {'shrink_node': shrink_node, 'node_filters': node_filters, 'number_of_shards': number_of_shards, 'number_of_replicas': number_of_replicas, 'shrink_prefix': shrink_prefix, 'shrink_suffix': shrink_suffix, 'copy_aliases': copy_aliases, 'delete_after': delete_after, 'post_allocation': post_allocation, 'extra_settings': extra_settings, 'wait_for_active_shards': wait_for_active_shards, 'wait_for_rebalance': wait_for_rebalance, 'wait_for_completion': wait_for_completion, 'wait_interval': wait_interval, 'max_wait': max_wait, 'allow_ilm_indices': allow_ilm_indices}
    action = CLIAction(ctx.info_name, ctx.obj['config'], manual_options, filter_list, ignore_empty_list)
    action.do_singleton_action(dry_run=ctx.obj['dry_run'])
# NOTE(review): the bare tuple below looks like a stripped
# `@unittest.skipUnless(...)` decorator — confirm against the original module.
(('conf' in cfgdiff.supported_formats), 'requires ConfigObj')
class ConfigDiffTestCase(CfgDiffTestCase):
    """Diff tests for the ConfigObj (.ini) backend of cfgdiff."""
    def test_conf_same(self):
        # Two semantically identical .ini files must compare equal.
        self._test_same(cfgdiff.ConfigDiff, './tests/test_same_1-a.ini', './tests/test_same_1-b.ini')
    def test_conf_different(self):
        # Two differing .ini files must compare unequal.
        self._test_different(cfgdiff.ConfigDiff, './tests/test_different_1-a.ini', './tests/test_different_1-b.ini')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.