code stringlengths 281 23.7M |
|---|
class FontStretch(BaseCFloat):
    """Trait for font stretch: a float in [50, 200], a named stretch from
    STRETCHES, or a percentage string such as '125%'."""

    #: Fallback when the caller supplies no default.
    default_value = 100.0

    def __init__(self, default_value=NoDefaultSpecified, **metadata):
        # Validate an explicit default eagerly so a bad default fails at
        # trait-definition time rather than on first assignment.
        if (default_value != NoDefaultSpecified):
            default_value = self.validate(None, None, default_value)
        super().__init__(default_value, **metadata)

    def validate(self, object, name, value):
        """Coerce *value* to a float in [50, 200] or signal an error."""
        # Accept percentage strings such as '150%'.
        if (isinstance(value, str) and value.endswith('%')):
            value = value[:(- 1)]
        # Map named stretches onto their numeric values.
        value = STRETCHES.get(value, value)
        value = super().validate(object, name, value)
        if (not (50 <= value <= 200)):
            self.error(object, name, value)
        return value

    def info(self):
        """Describe the accepted values (used in error messages)."""
        info = 'a float from 50 to 200, a value that can convert to a float from 50 to 200, '
        # BUG FIX: previously listed SIZES, but validate() accepts the keys
        # of STRETCHES — keep the message consistent with validation.
        info += ', '.join((repr(key) for key in STRETCHES))
        info += " or a string with a float value from 50 to 200 followed by '%'"
        return info
class mac_lte_tags(IntEnum):
    """Tag values for the optional fields that precede a MAC PDU in the
    MAC-LTE framed capture format (values mirror an external protocol
    definition — do not renumber)."""
    MAC_LTE_PAYLOAD_TAG = 1
    MAC_LTE_RNTI_TAG = 2
    MAC_LTE_UEID_TAG = 3
    MAC_LTE_FRAME_SUBFRAME_TAG = 4
    MAC_LTE_PREDEFINED_DATA_TAG = 5
    MAC_LTE_RETX_TAG = 6
    MAC_LTE_CRC_STATUS_TAG = 7
    MAC_LTE_EXT_BSR_SIZES_TAG = 8
    MAC_LTE_SEND_PREAMBLE_TAG = 9
    MAC_LTE_CARRIER_ID_TAG = 10
    MAC_LTE_PHY_TAG = 11
    MAC_LTE_SIMULT_PUCCH_PUSCH_PCELL_TAG = 12
    MAC_LTE_SIMULT_PUCCH_PUSCH_PSCELL_TAG = 13
    MAC_LTE_CE_MODE_TAG = 14
    MAC_LTE_NB_MODE_TAG = 15
    MAC_LTE_N_UL_RB_TAG = 16
    MAC_LTE_SR_TAG = 17
class TestESP8266FlashHeader(BaseTestCase):
    """Check the image header produced for an ESP8266 2MB/dio v2 build."""

    def test_2mb(self):
        elf_name = 'esp8266-nonossdkv20-at-v2.elf'
        bin_name = 'esp8266-nonossdkv20-at-v2-0x01000.bin'
        try:
            flash_args = ['--flash_size', '2MB', '--flash_mode', 'dio']
            self.run_elf2image('esp8266', elf_name, version=2, extra_args=flash_args)
            with open(bin_name, 'rb') as image:
                header = image.read(4)
                print(f'header {header}')
                # byte 0: image magic; byte 2: flash mode; byte 3: size/freq.
                self.assertEqualHex(234, header[0])
                self.assertEqualHex(2, header[2])
                self.assertEqualHex(48, header[3])
        finally:
            try_delete(bin_name)
def browsesingle(type, heading, shares='', mask='', useThumbs=False, treatAsFolder=False, defaultt=None):
    """Show Kodi's single-item browse dialog and return the selection as unicode.

    Falls back to the add-on name when no heading is supplied.
    """
    from xbmcgui import Dialog
    if not heading:
        heading = ADDON.getAddonInfo('name')
    selection = Dialog().browseSingle(
        type=type,
        heading=heading,
        shares=shares,
        mask=mask,
        useThumbs=useThumbs,
        treatAsFolder=treatAsFolder,
        defaultt=defaultt,
    )
    return to_unicode(selection)
def extractKneeslappingtlWordpressCom(item):
    """Map a feed item to a release message.

    Returns None for previews / non-chapter posts, a release message when a
    known tag or title prefix matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Tag-based mapping: (tag, series name, translation type).
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Title-prefix fallbacks: (prefix, series name, translation type).
    chp_prefixes = [
        ('Youve Got The Wrong House, Villain ', "You've Got The Wrong House, Villain", 'translated'),
        ('Manowa', 'Manowa Mamono Taosu Nouryoku Ubau Watashi Tsuyokunaru', 'translated'),
        ('Cat ', 'Me and My Beloved Cat (Girlfriend)', 'translated'),
    ]
    for prefix, series, tl_type in chp_prefixes:
        if item['title'].lower().startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Chunk(Op):
    """Buffer incoming values and emit them in lists of a fixed size."""
    __slots__ = ('_size', '_list')

    def __init__(self, size, source=None):
        Op.__init__(self, source)
        self._size = size
        self._list = []

    def on_source(self, *args):
        # Normalise the incoming payload: single value, tuple, or NO_VALUE.
        if len(args) == 1:
            value = args[0]
        elif args:
            value = args
        else:
            value = NO_VALUE
        self._list.append(value)
        if len(self._list) == self._size:
            self.emit(self._list)
            self._list = []

    def on_source_done(self, source):
        # Flush any partial chunk before propagating the done signal.
        if self._list:
            self.emit(self._list)
        Op.on_source_done(self, self._source)
class TimeChecker():
    """Simple stopwatch that tracks elapsed time and timestamped notes."""

    def __init__(self):
        self.start = 0.0  # wall-clock time of the last begin()/restart
        self.data = []    # recorded [elapsed, message] entries
        self.begin()

    def begin(self):
        """Restart the clock and clear recorded data in place."""
        self.start = time.time()
        del self.data[:]

    def print_time(self, restart=True):
        print(f'Time elapsed: {self.get_time(restart)}')

    def get_time(self, restart=True):
        """Return seconds since the last begin(); restart by default."""
        elapsed = time.time() - self.start
        if restart:
            self.begin()
        return elapsed

    def print_data(self):
        print(self.data)

    def get_data(self):
        return self.data

    def save(self, msg=' '):
        """Record the current elapsed time with *msg*.

        Note: get_time() restarts the clock, which also clears self.data
        before the new entry is appended.
        """
        self.data.append([self.get_time(), msg])
def test_sync_streaming_response(test_client_factory):
    """A sync generator body must stream correctly through StreamingResponse."""

    async def app(scope, receive, send):
        def numbers(minimum, maximum):
            # Yield "1, 2, ..., maximum" as separate chunks.
            for value in range(minimum, maximum + 1):
                yield str(value)
                if value != maximum:
                    yield ', '

        response = StreamingResponse(numbers(1, 5), media_type='text/plain')
        await response(scope, receive, send)

    client = test_client_factory(app)
    result = client.get('/')
    assert result.text == '1, 2, 3, 4, 5'
class CriticModel(nn.Module):
    """Convolutional critic mapping image batches to one scalar value each."""

    def __init__(self, inputs_shape):
        super().__init__()
        # Attribute name kept as-is ('inut_shape') for compatibility.
        self.inut_shape = inputs_shape
        self.features = nn.Sequential(
            nn.Conv2d(inputs_shape[1], 32, kernel_size=8, stride=4),
            nn.ReLU(),
            nn.Conv2d(32, 64, kernel_size=4, stride=2),
            nn.ReLU(),
            nn.Conv2d(64, 64, kernel_size=3, stride=1),
            nn.ReLU(),
        )
        self.fc = nn.Sequential(
            nn.Linear(self.features_size(), 512),
            nn.ReLU(),
            nn.Linear(512, 1),
        )

    def forward(self, x):
        """Return a (batch, 1) value estimate for input batch *x*."""
        feature_maps = self.features(x)
        flat = feature_maps.view(feature_maps.size(0), -1)
        return self.fc(flat)

    def features_size(self):
        """Flattened feature count produced by the conv stack for one sample."""
        probe = self.features(torch.zeros(1, *self.inut_shape[1:]))
        return probe.view(1, -1).size(1)
class DebugListener(FdListener):
    """FdListener that records where (stack) and in which greenlet it was
    created, for debugging stuck or leaked listeners."""

    def __init__(self, evtype, fileno, cb, tb, mark_as_closed):
        # Capture creation context before delegating to the base class.
        self.where_called = traceback.format_stack()
        self.greenlet = greenlet.getcurrent()
        super().__init__(evtype, fileno, cb, tb, mark_as_closed)

    def __repr__(self):
        details = (
            self.evtype,
            self.fileno,
            self.cb,
            self.tb,
            self.mark_as_closed,
            self.greenlet,
            ''.join(self.where_called),
        )
        return 'DebugListener(%r, %r, %r, %r, %r, %r)\n%sEndDebugFdListener' % details

    __str__ = __repr__
class SampleSheetPredictor():
    """Predict the project/sample/barcode layout implied by an Illumina
    sample sheet for a bcl2fastq-style processing run."""

    def __init__(self, sample_sheet=None, sample_sheet_file=None):
        """Build projects and samples from *sample_sheet*, or load the
        sheet from *sample_sheet_file* when no instance is supplied."""
        self.projects = []
        # Prediction settings (see set()); defaults mimic bcl2fastq2.
        self._predict_for_package = 'bcl2fastq2'
        self._predict_paired_end = False
        self._predict_no_lane_splitting = False
        self._predict_for_lanes = None
        self._predict_for_reads = None
        self._include_index_reads = False
        self._force_sample_dir = False
        if (sample_sheet is None):
            sample_sheet = SampleSheet(sample_sheet_file)
        if sample_sheet.has_lanes:
            # Order lines by lane; blank lanes sort last (99999 sentinel).
            # NOTE(review): assumes sample_sheet.data.sort accepts a key
            # function positionally (TabFile-style API, not list.sort) —
            # confirm against the SampleSheet implementation.
            sample_sheet.data.sort((lambda line: (line['Lane'] if (line['Lane'] != '') else 99999)))
        s_index = 0
        for line in sample_sheet:
            project_name = str(line[sample_sheet.sample_project_column])
            sample_id = str(line[sample_sheet.sample_id_column])
            try:
                # Fall back to the sample ID when there is no name column.
                sample_name = str(line[sample_sheet.sample_name_column])
            except TypeError:
                sample_name = sample_id
            # Normalise empty strings to None.
            if (not sample_id):
                sample_id = None
            if (not sample_name):
                sample_name = None
            project = self.add_project(project_name)
            sample = project.add_sample(sample_id, sample_name=sample_name)
            index_seq = samplesheet_index_sequence(line)
            if (index_seq is None):
                index_seq = 'NoIndex'
            if sample_sheet.has_lanes:
                lane = line['Lane']
            else:
                lane = None
            sample.add_barcode(index_seq, lane=lane)
            # Assign sequential S-indices in sheet order, once per sample.
            if (sample.s_index is None):
                s_index += 1
                sample.s_index = s_index

    def nprojects(self):
        """Number of distinct projects found in the sample sheet."""
        return len(self.projects)

    def project_names(self):
        """Sorted list of project names."""
        return sorted([str(p) for p in self.projects])

    def get_project(self, project_name):
        """Return the SampleSheetProject with *project_name*.

        Raises:
            KeyError: if no project with that name exists.
        """
        for project in self.projects:
            if (project.name == project_name):
                return project
        raise KeyError(('%s: project not found' % project_name))

    def add_project(self, project_name):
        """Return the existing project with *project_name*, creating it
        (and registering it) if necessary."""
        try:
            return self.get_project(project_name)
        except KeyError:
            project = SampleSheetProject(project_name)
            self.projects.append(project)
            return project

    def set(self, package=None, paired_end=None, no_lane_splitting=None, lanes=None, reads=None, include_index_reads=None, force_sample_dir=None):
        """Update prediction settings and propagate them to all projects.

        Parameters left as None are (mostly) unchanged; see note on lanes.
        """
        if (package is not None):
            self._predict_for_package = package
        if (paired_end is not None):
            self._predict_paired_end = paired_end
        if (no_lane_splitting is not None):
            self._predict_no_lane_splitting = no_lane_splitting
        # NOTE(review): unlike the other settings, lanes is assigned
        # unconditionally, so calling set() without lanes resets it to
        # None — confirm this is intentional (None may mean "all lanes").
        self._predict_for_lanes = lanes
        if (reads is not None):
            self._predict_for_reads = reads
        if (include_index_reads is not None):
            self._include_index_reads = include_index_reads
        if (force_sample_dir is not None):
            self._force_sample_dir = force_sample_dir
        for project in self.projects:
            project.set(package=package, paired_end=paired_end, no_lane_splitting=no_lane_splitting, lanes=lanes, reads=reads, include_index_reads=include_index_reads, force_sample_dir=force_sample_dir)
def extractImemotranslationsCom(item):
    """Map a feed item to a release message, None for previews/non-chapters,
    or False when nothing matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series name, translation type) mappings.
    tagmap = [
        ('Hone to Issho', 'Hone to Issho no Isekai Seikatsu', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def collect_topics(account):
    """Gather the help topics visible to *account*.

    Returns (cmd_help_topics, db_help_topics, file_help_topics): command
    help keyed by display key, plus database and file help entries keyed
    by lower-cased topic key.
    """
    cmd_help_topics = []
    if str(account) != 'AnonymousUser':
        puppets = account.characters.all() + [account]
        for puppet in puppets:
            for cmdset in puppet.cmdset.get():
                cmdset.make_unique(puppet)
                for cmd in cmdset:
                    # Only commands the puppet can run and read help for.
                    if not cmd.access(puppet, 'cmd'):
                        continue
                    if not can_read_topic(cmd, puppet):
                        continue
                    # Skip duplicates: same key/category/docstring already seen.
                    duplicate = any(
                        (other.key and cmd.key
                         and (other.help_category == cmd.help_category)
                         and (other.__doc__ == cmd.__doc__))
                        for other in cmd_help_topics
                    )
                    if duplicate:
                        continue
                    cmd_help_topics.append(cmd)
    file_help_topics = {
        topic.key.lower().strip(): topic
        for topic in FILE_HELP_ENTRIES.all()
        if can_read_topic(topic, account)
    }
    db_help_topics = {
        topic.key.lower().strip(): topic
        for topic in HelpEntry.objects.all()
        if can_read_topic(topic, account)
    }
    cmd_help_topics = {
        (cmd.auto_help_display_key if hasattr(cmd, 'auto_help_display_key') else cmd.key): cmd
        for cmd in cmd_help_topics
    }
    return (cmd_help_topics, db_help_topics, file_help_topics)
class CompoundVolumeVolumeToken(CompoundToken):
    """Compound token whose two parts are both volumes; the methods below
    are thin, readable aliases over CompoundToken's generic part accessors."""
    def valid_volume_1(self, parse_ascii):
        # Validity of the first volume part.
        return self.is_valid_1(parse_ascii)
    def valid_volume_2(self, parse_ascii):
        # Validity of the second volume part.
        return self.is_valid_2(parse_ascii)
    def get_volume_1(self, parse_ascii):
        # Numeric value of the first volume part.
        return self.to_number_1(parse_ascii)
    def get_volume_2(self, parse_ascii):
        # Numeric value of the second volume part.
        return self.to_number_2(parse_ascii)
# NOTE(review): the next line looks like a decorator whose '@' prefix (and
# possibly part of its name, e.g. `run_in_both`) was lost — as written it is
# a bare module-level call on Foo2. Confirm against version control.
_in_both(Foo2)
def test_get_event_handlers():
    # No docstring on purpose: in this test style the printed output is the
    # asserted behaviour, so only comments are added here.
    foo = Foo2()
    def bar(*events):
        pass
    bar = foo.reaction('!x', bar)
    print([r.get_name() for r in foo.get_event_handlers('x')])
    def zz1(*events):
        pass
    def zz2(*events):
        pass
    zz1 = foo.reaction('!x', zz1)
    zz2 = foo.reaction('!x:a', zz2)
    print([r.get_name() for r in foo.get_event_handlers('x')])
    print([r.get_name() for r in foo.get_event_handlers('y')])
    # Labelled connection strings ('x:a') are not valid lookup names here.
    try:
        foo.get_event_handlers('x:a')
    except ValueError:
        print('fail ValueError')
class HealthServicer(object):
    """gRPC health-checking service stubs; every RPC reports UNIMPLEMENTED."""

    def _unimplemented(self, context):
        # Shared stub behaviour: mark the RPC unimplemented and raise.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Check(self, request, context):
        self._unimplemented(context)

    def Watch(self, request, context):
        self._unimplemented(context)
class View(flx.Widget):
    """Widget with two editable name fields and labels bound to the root
    model's first/last name properties."""

    def init(self):
        # Layout: a column containing a row of edit fields, a row of bound
        # labels, a composite MyPersonLabel, and flexible spacers.
        with flx.VBox():
            with flx.HBox():
                self.first_edit = flx.LineEdit(placeholder_text='first name', text='Jane')
                self.last_edit = flx.LineEdit(placeholder_text='last name', text='Doe')
                flx.Widget(flex=1)
            with flx.HBox():
                # Lambdas bind the label text to the root's name properties.
                flx.Label(text=(lambda : self.root.first_name), style='border:1px solid red')
                flx.Label(text=(lambda : self.root.last_name), style='border:1px solid red')
                flx.Widget(flex=1)
            MyPersonLabel(style='border:1px solid blue')
            flx.Widget(flex=1)

    def _update_name(self):
        # Push the edit-field text into the root model.
        # NOTE(review): presumably wired up via an @flx.reaction decorator
        # not visible in this chunk — confirm.
        self.root.set_first_name(self.first_edit.text)
        self.root.set_last_name(self.last_edit.text)
class OLD_SequenceFace(Face):
    """Face that renders a molecular sequence as colored boxes with one
    letter per residue.

    Args:
        seq: sequence string.
        seqtype: 'nt' for nucleotides, anything else uses amino-acid colors.
        fsize: font point size (also the per-letter horizontal step).
        aafg/aabg/ntfg/ntbg: optional fg/bg color dicts keyed by letter;
            default to the module-level color tables.
    """

    def __init__(self, seq, seqtype, fsize=10, aafg=None, aabg=None, ntfg=None, ntbg=None):
        Face.__init__(self)
        self.seq = seq
        # (The original assigned fsize twice; the duplicate was removed.)
        self.fsize = fsize
        self.style = seqtype
        # Fall back to the module-level default color tables.
        if (not aafg):
            aafg = _aafgcolors
        if (not aabg):
            aabg = _aabgcolors
        if (not ntfg):
            ntfg = _ntfgcolors
        if (not ntbg):
            ntbg = _ntbgcolors
        self.aafg = aafg
        self.aabg = aabg
        self.ntfg = ntfg
        self.ntbg = ntbg

    def update_pixmap(self):
        """(Re)draw the sequence into self.pixmap."""
        font = QFont('Courier', self.fsize)
        fm = QFontMetrics(font)
        height = ((fm.leading() + fm.overlinePos()) + fm.underlinePos())
        width = (self.fsize * len(self.seq))
        self.pixmap = QPixmap(width, height)
        self.pixmap.fill()
        p = QPainter(self.pixmap)
        x = 0
        y = (height - (fm.underlinePos() * 2))
        p.setFont(font)
        for letter in self.seq:
            letter = letter.upper()
            if (self.style == 'nt'):
                letter_brush = QBrush(QColor(self.ntbg.get(letter, 'white')))
                letter_pen = QPen(QColor(self.ntfg.get(letter, 'black')))
            else:
                letter_brush = QBrush(QColor(self.aabg.get(letter, 'white')))
                letter_pen = QPen(QColor(self.aafg.get(letter, 'black')))
            p.setPen(letter_pen)
            # BUG FIX: the brush was passed as a (fifth) argument to QRectF,
            # which is a TypeError — it belongs to fillRect's second argument.
            # NOTE(review): the rect spans the full pixmap width each pass,
            # as in the original expression; confirm whether a per-letter
            # width was intended.
            p.fillRect(QRectF(x, 0, width, height), letter_brush)
            p.drawText(QPointF(x, y), letter)
            x += (float(width) / len(self.seq))
        p.end()
# BUG FIX / NOTE(review): the source showed a bare `.parametrize(...)` line;
# restored the stripped `@pytest.mark.` prefix — confirm against VCS.
@pytest.mark.parametrize('events, expected', [(((0, left_click),), 'no action'), (((0.4, left_click),), 'leftclick'), (((0.4, left_click), (0.4, left_click)), 'leftclick'), (((0.2, left_click), (0, left_click)), 'doubleleftclick'), (((0.2, left_click), (0, left_click), (0.3, left_click)), 'leftclick'), (((0.2, left_click), (0, right_click)), 'leftclick'), (((0.4, right_click),), 'no action'), (((0.2, right_click), (0, right_click)), 'doublerightclick'), (((0, scroll_down), (0, scroll_down)), 'downscroll'), (((0.4, scroll_up),), 'upscroll'), (((0, scroll_up), (0, scroll_up)), 'doubleupscroll')])
def test_clicks(events, expected):
    """Single/double click and scroll gestures must dispatch the right handler."""
    class TestClicks(IntervalModule):
        def set_action(self, action):
            self._action = action
        on_leftclick = [set_action, 'leftclick']
        on_doubleleftclick = [set_action, 'doubleleftclick']
        on_doublerightclick = [set_action, 'doublerightclick']
        on_upscroll = [set_action, 'upscroll']
        on_doubleupscroll = [set_action, 'doubleupscroll']
        on_downscroll = [set_action, 'downscroll']
        _action = 'no action'
    # Shrink the double-click window so the test runs quickly; event sleep
    # times below are scaled by the same factor.
    TestClicks.multi_click_timeout /= 10
    m = TestClicks()
    for (sl, ev) in events:
        m.on_click(ev)
        time.sleep((sl / 10))
    assert (m._action == expected)
class ParseCrawler(object):
    """Builds pipeline items from crawled responses and generates follow-up
    requests for a scrapy spider."""
    helper = None
    log = None

    def __init__(self, helper):
        self.helper = helper
        self.log = logging.getLogger(__name__)

    def pass_to_pipeline_if_article(self, response, source_domain, original_url, rss_title=None):
        """Forward the response to the pipeline if heuristics say it is an article."""
        if self.helper.heuristics.is_article(response, original_url):
            # BUG FIX: the rss_title argument was previously discarded
            # (rss_title=None was passed through unconditionally).
            return self.pass_to_pipeline(response, source_domain, rss_title=rss_title)

    def pass_to_pipeline(self, response, source_domain, rss_title=None):
        """Create and populate a crawler item for *response* and return it."""
        timestamp = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(time.time()))
        relative_local_path = self.helper.savepath_parser.get_savepath(response.url)
        article = self.helper.crawler_item_class()
        article['local_path'] = self.helper.savepath_parser.get_formatted_relative_path(relative_local_path)
        article['filename'] = self.helper.savepath_parser.get_filename(article['local_path'])
        article['abs_local_path'] = self.helper.savepath_parser.get_abs_path(relative_local_path)
        article['modified_date'] = timestamp
        article['download_date'] = timestamp
        article['source_domain'] = source_domain.encode('utf-8')
        article['url'] = response.url
        article['html_title'] = response.selector.xpath('//title/text()').extract_first().encode('utf-8')
        if (rss_title is None):
            article['rss_title'] = 'NULL'
        else:
            article['rss_title'] = rss_title.encode('utf-8')
        article['spider_response'] = response
        # Downstream pipeline stages fill these in; 'NULL' marks "not yet extracted".
        article['article_title'] = 'NULL'
        article['article_description'] = 'NULL'
        article['article_text'] = 'NULL'
        article['article_image'] = 'NULL'
        article['article_author'] = 'NULL'
        article['article_publish_date'] = 'NULL'
        article['article_language'] = 'NULL'
        return article

    # BUG FIX: this method was defined without `self` but not decorated, so
    # calling it on an instance would bind the instance to `response`.
    # @staticmethod keeps class-level calls working and fixes instance calls.
    @staticmethod
    def recursive_requests(response, spider, ignore_regex='', ignore_file_extensions='pdf'):
        """Return follow-up Requests for all links on the page, skipping
        URLs with ignored extensions or matched by *ignore_regex*."""
        return [
            scrapy.Request(response.urljoin(href), callback=spider.parse)
            for href in response.css("a::attr('href')").extract()
            if ((re.match((('.*\\.' + ignore_file_extensions) + '$'), response.urljoin(href), re.IGNORECASE) is None)
                and (len(re.match(ignore_regex, response.urljoin(href)).group(0)) == 0))
        ]

    def content_type(self, response):
        """Return True if the response is text/html; log and return False otherwise."""
        if (not re_html.match(response.headers.get('Content-Type').decode('utf-8'))):
            # logger.warn is a deprecated alias of logger.warning.
            self.log.warning("Dropped: %s's content is not of type text/html but %s", response.url, response.headers.get('Content-Type'))
            return False
        else:
            return True
class AdAssetFeedSpecAssetLabel(AbstractCrudObject):
    """Generated SDK object for an ad asset-feed-spec asset label."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isAdAssetFeedSpecAssetLabel = True
        super(AdAssetFeedSpecAssetLabel, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        id = 'id'
        name = 'name'

    _field_types = {'id': 'string', 'name': 'string'}

    # BUG FIX: restored the @classmethod decorator — the method takes `cls`
    # and is invoked on the class in the generated SDK conventions.
    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class BenchController():
    """Remote-control helpers for a LiteX SoC reachable over a bus bridge."""

    def __init__(self, bus):
        self.bus = bus

    def reboot(self):
        """Reset the SoC via its control register."""
        self.bus.regs.ctrl_reset.write(1)

    def load_rom(self, filename, delay=0):
        """Write a little-endian ROM image into the SoC ROM word by word.

        Args:
            filename: image file readable by litex's get_mem_data.
            delay: optional pause (seconds) between word writes.
        """
        from litex.soc.integration.common import get_mem_data
        rom_data = get_mem_data(filename, endianness='little')
        # PERF FIX: the original computed len(rom_data * 4) every iteration,
        # materialising a 4x copy of the list each time; the value is just
        # the total byte count, hoisted here.
        total_bytes = len(rom_data) * 4
        for (i, data) in enumerate(rom_data):
            self.bus.write(self.bus.mems.rom.base + (4 * i), data)
            print(f'{(i + 1) * 4}/{total_bytes} bytes\n', end='')
            time.sleep(delay)
        print('')
def test_partitioned_analyses_update_raises_error_if_shapes_are_inconsistent():
    """update() must reject trace/data arrays with mismatched or changed shapes."""
    dist = DumbPartDistinguisher()
    # Mismatched number of traces (10) vs data rows (50).
    traces = np.random.randint(0, 255, (10, 200), dtype='uint8')
    data = np.random.randint(0, 8, (50, 64), dtype='uint8')
    with pytest.raises(ValueError):
        dist.update(traces=traces, data=data)
    # A consistent update establishes the expected shapes.
    traces = np.random.randint(0, 255, (50, 200), dtype='uint8')
    data = np.random.randint(0, 8, (50, 64), dtype='uint8')
    dist.update(traces=traces, data=data)
    # Subsequent updates must keep the established trace length ...
    with pytest.raises(ValueError):
        dist.update(
            traces=np.random.randint(0, 255, (10, 20), dtype='uint8'),
            data=np.random.randint(0, 255, (10, 64), dtype='uint8'),
        )
    # ... and the established data width.
    with pytest.raises(ValueError):
        dist.update(
            traces=np.random.randint(0, 255, (10, 200), dtype='uint8'),
            data=np.random.randint(0, 255, (10, 62), dtype='uint8'),
        )
def _calculate_strategy(state: ShortDeckPokerState, I: str, strategy: DefaultDict[(str, DefaultDict[(str, float)])], count=None, total_count=None) -> tuple:
    """Sample an action for info set *I* from its (normalised) stored strategy.

    Falls back to a uniform distribution over state.legal_actions when no
    strategy has been stored for *I*. Returns (action, count, total_count);
    both counters are incremented only on the fallback path.
    (Return annotation corrected from `str` — a 3-tuple is returned.)
    """
    # Default strategy: uniform over three actions (1/3 each).
    sigma = collections.defaultdict((lambda : collections.defaultdict((lambda : (1 / 3)))))
    try:
        sigma[I] = strategy[I].copy()
        if (sigma[I] == {}):
            # Treat an empty stored strategy like a missing one.
            raise KeyError
        # Normalise probabilities so they sum to 1 before sampling.
        norm = sum(sigma[I].values())
        for a in sigma[I].keys():
            sigma[I][a] /= norm
        a = np.random.choice(list(sigma[I].keys()), 1, p=list(sigma[I].values()))[0]
    except KeyError:
        if (count is not None):
            count += 1
        # Uniform fallback over the currently legal actions.
        p = (1 / len(state.legal_actions))
        probabilities = np.full(len(state.legal_actions), p)
        a = np.random.choice(state.legal_actions, p=probabilities)
        sigma[I] = {action: p for action in state.legal_actions}
        if (total_count is not None):
            total_count += 1
    return (a, count, total_count)
class VegaChart(JsPackage):
    """Python-side wrapper for the Vega charting JavaScript package."""

    lib_alias = {'js': 'vega', 'css': 'vega'}

    def parse(self, data):
        """Wrap the converted chart spec in a `vega.parse(...)` call."""
        spec = JsUtils.jsConvertData(data, None)
        return JsUtils.jsWrap('vega.parse(%s)' % spec)

    def toSVG(self, scale_factor):
        """Not implemented: export the chart as SVG."""
        pass

    def toImageURL(self, kind, scale_factor):
        """Not implemented: export the chart as an image data URL."""
        pass

    def toCanvas(self, scale_factor, options=None):
        """Not implemented: render the chart to a canvas."""
        pass

    def events(self, source, type, filter):
        """Not implemented: register a Vega event listener."""
        pass
class GlossaryViewSet(APIView):
    """Return a paginated list of glossary Definition records.

    NOTE(review): the bare `_response()` call below looks like the remnant
    of a stripped decorator (its name appears truncated) — confirm against
    version control before relying on any caching/formatting behaviour.
    """
    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/references/glossary.md'
    _response()
    def get(self, request: Request) -> Response:
        """Validate pagination params, page the queryset, and return
        serialized results plus page metadata."""
        # TinyShield schema: page >= 1 (default 1); limit in [1, 500] (default 500).
        models = [{'name': 'page', 'key': 'page', 'type': 'integer', 'default': 1, 'min': 1}, {'name': 'limit', 'key': 'limit', 'type': 'integer', 'default': 500, 'min': 1, 'max': 500}]
        request_dict = request.query_params
        validated_request_data = TinyShield(models).block(request_dict)
        limit = validated_request_data['limit']
        page = validated_request_data['page']
        queryset = Definition.objects.all()
        (queryset, pagination) = get_pagination(queryset, int(limit), int(page))
        serializer = DefinitionSerializer(queryset, many=True)
        response = {'page_metadata': pagination, 'results': serializer.data}
        return Response(response)
def extractWwwThekingdomsofevilCom(item):
    """Map a feed item to a release message; None for reviews/previews/
    non-chapters, False when no tag matches."""
    badwords = ['Book Review']
    if any(bad in item['tags'] for bad in badwords):
        return None
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series name, translation type) mappings.
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def rollback_task_definition(deployment, old, new, timeout=600, sleep_time=1):
    """Re-deploy the *old* task definition after a failed deploy of *new*."""
    click.secho('Rolling back to task definition: %s\n' % old.family_revision, fg='yellow')
    deploy_task_definition(
        deployment=deployment,
        task_definition=old,
        title='Deploying previous task definition',
        success_message='Rollback successful',
        failure_message='Rollback failed. Please check ECS Console',
        timeout=timeout,
        deregister=True,
        previous_task_definition=new,
        ignore_warnings=False,
        sleep_time=sleep_time,
    )
    # Emitted to stderr so callers notice the overall deployment failed.
    click.secho(
        'Deployment failed, but service has been rolled back to previous task definition: %s\n' % old.family_revision,
        fg='yellow',
        err=True,
    )
# NOTE(review): this block was garbled in the source — the `@pytest.mark.`
# decorator prefixes were stripped and a fixture name was truncated to
# `waiting_` (reconstructed below as `waiting_httpserver`, the conventional
# pytest-localserver fixture); the traceparent span id was also truncated.
# Confirm all three against version control.
@pytest.mark.parametrize('is_sampled', [pytest.param(True, id='is_sampled-True'), pytest.param(False, id='is_sampled-False')])
@pytest.mark.parametrize('instance_headers', [pytest.param(True, id='instance-headers-set'), pytest.param(False, id='instance-headers-not-set')])
@pytest.mark.parametrize('header_arg,header_kwarg', [pytest.param(True, False, id='args-set'), pytest.param(False, True, id='kwargs-set'), pytest.param(False, False, id='both-not-set')])
def test_instance_headers_are_respected(instrument, elasticapm_client, waiting_httpserver, is_sampled, instance_headers, header_arg, header_kwarg):
    """Headers set on the pool instance, via positional args, or via kwargs
    must all reach the server, and a traceparent header is always injected."""
    traceparent = TraceParent.from_string('00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-03', 'foo=bar,baz=bazzinga')
    waiting_httpserver.serve_content('')
    url = waiting_httpserver.url + '/hello_world'
    parsed_url = urllib.parse.urlparse(url)
    transaction_object = elasticapm_client.begin_transaction('transaction', trace_parent=traceparent)
    transaction_object.is_sampled = is_sampled
    pool = urllib3.HTTPConnectionPool(parsed_url.hostname, parsed_url.port, maxsize=1, block=True, headers=({'instance': 'true'} if instance_headers else None))
    if header_arg:
        # urllib3 2.x added a positional parameter before headers.
        if urllib3_version.startswith('2'):
            args = ('GET', url, None, None, {'args': 'true'})
        else:
            args = ('GET', url, None, {'args': 'true'})
    else:
        args = ('GET', url)
    kwargs = {'headers': {'kwargs': 'true'}} if header_kwarg else {}
    r = pool.request(*args, **kwargs)
    request_headers = waiting_httpserver.requests[0].headers
    assert 'traceparent' in request_headers, (instance_headers, header_arg, header_kwarg)
    if header_arg:
        assert 'args' in request_headers
    if header_kwarg:
        assert 'kwargs' in request_headers
    if instance_headers and not (header_arg or header_kwarg):
        assert 'instance' in request_headers
# NOTE(review): the parenthesized (Output(...), [Input(...)], [State(...)])
# tuple below looks like the argument list of a stripped `@app.callback(...)`
# decorator — as written it is a no-op expression and the callback is never
# registered. Confirm against version control.
(Output('radar-chart', 'figure'), [Input('music-intensity-selector', 'value'), Input('music-time-selector', 'value'), Input('music-sport-selector', 'value')], [State('music-intensity-selector', 'value'), State('music-time-selector', 'value'), State('music-sport-selector', 'value')])
def update_radar_chart(*args):
    """Rebuild the radar chart from the three music selector values."""
    ctx = dash.callback_context
    # Read the current selector values from the callback context's states.
    pop_time_period = ctx.states['music-time-selector.value']
    workout_intensity = ctx.states['music-intensity-selector.value']
    sport = ctx.states['music-sport-selector.value']
    figure = get_radar_chart(workout_intensity=workout_intensity, sport=sport, pop_time_period=pop_time_period)
    return figure
def test_stream_get_admin(db, client, admin_jwt):
    """Admins can fetch a video stream directly and through related resources."""
    room, stream, session = get_room_session_stream(db, name='Test Stream')

    def get_json(path):
        # Issue a JSON:API GET as admin; return (status code, parsed body).
        response = client.get(path, content_type='application/vnd.api+json', headers=admin_jwt)
        return response.status_code, json.loads(response.data)

    status, body = get_json(f'/v1/video-streams/{stream.id}')
    assert status == 200
    assert body['data']['id'] == str(stream.id)
    assert body['data']['attributes']['name'] == 'Test Stream'

    status, body = get_json(f'/v1/microlocations/{room.id}/video-stream')
    assert status == 200
    assert body['data']['attributes']['name'] == 'Test Stream'

    status, body = get_json(f'/v1/microlocations/{room.id}?include=video-stream')
    assert status == 200
    assert body['included'][0]['attributes']['name'] == 'Test Stream'

    status, body = get_json(f'/v1/sessions/{session.id}?include=microlocation.video-stream')
    assert status == 200
    assert body['included'][1]['attributes']['name'] == 'Test Stream'
def test_transition_list_or_operator():
    """Combining transition lists with `|` should concatenate them in order."""
    s1 = State('s1', initial=True)
    s2 = State('s2')
    s3 = State('s3')
    s4 = State('s4', final=True)
    t12 = s1.to(s2)
    t23 = s2.to(s3)
    t34 = s3.to(s4)
    cycle = t12 | t23 | t34

    def pairs(transitions):
        # (source, target) name tuples for each transition in the list.
        return [(t.source.name, t.target.name) for t in transitions]

    assert pairs(t12) == [('s1', 's2')]
    assert pairs(t23) == [('s2', 's3')]
    assert pairs(t34) == [('s3', 's4')]
    assert pairs(cycle) == [('s1', 's2'), ('s2', 's3'), ('s3', 's4')]
class ModelDockerHubFetcher(ErsiliaBase):
    """Fetch Ersilia models distributed as DockerHub images and register
    them locally."""

    def __init__(self, overwrite=None, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)
        self.simple_docker = SimpleDocker()
        self.overwrite = overwrite

    def is_docker_installed(self):
        """Return True if a usable Docker installation is present."""
        return DockerRequirement().is_installed()

    def is_available(self, model_id):
        """Return True if the model image exists locally or on DockerHub."""
        mp = ModelPuller(model_id=model_id, overwrite=self.overwrite, config_json=self.config_json)
        return mp.is_available_locally() or mp.is_available_in_dockerhub()

    def write_apis(self, model_id):
        """Serve the pulled image once so it writes its API definitions."""
        self.logger.debug('Writing APIs')
        di = PulledDockerImageService(model_id=model_id, config_json=self.config_json, preferred_port=None)
        di.serve()
        di.close()

    def _copy_from_image(self, model_id, filename):
        # Shared helper: copy one file from the image's dest dir into the
        # local EOS dest dir (deduplicates the three copy_* methods).
        fr_file = '/root/eos/dest/{0}/{1}'.format(model_id, filename)
        to_file = '{0}/dest/{1}/{2}'.format(EOS, model_id, filename)
        self.simple_docker.cp_from_image(img_path=fr_file, local_path=to_file, org=DOCKERHUB_ORG, img=model_id, tag=DOCKERHUB_LATEST_TAG)

    def copy_information(self, model_id):
        """Copy the model's information.json from the image."""
        self._copy_from_image(model_id, 'information.json')

    def copy_metadata(self, model_id):
        """Copy the model's api_schema.json from the image."""
        self._copy_from_image(model_id, 'api_schema.json')

    def copy_status(self, model_id):
        """Copy the model's status file from the image."""
        self._copy_from_image(model_id, STATUS_FILE)

    def fetch(self, model_id):
        """Pull, register, and initialize a model from DockerHub."""
        mp = ModelPuller(model_id=model_id, config_json=self.config_json)
        mp.pull()
        mr = ModelRegisterer(model_id=model_id, config_json=self.config_json)
        mr.register(is_from_dockerhub=True)
        self.write_apis(model_id)
        self.copy_information(model_id)
        self.copy_metadata(model_id)
        self.copy_status(model_id)
class ClientList():
    def __init__(self, prodj):
        """Track the set of known players (clients) on the ProDJ network."""
        self.clients = []
        # Callbacks: keepalive/change take a player number; media change
        # takes (self, player_number, slot).
        self.client_keepalive_callback = None
        self.client_change_callback = None
        self.media_change_callback = None
        # Feature flags; presumably consulted elsewhere in this class/file
        # (log_played_tracks looks related to logPlayedTrackCallback's
        # tracks.log writes — TODO confirm).
        self.log_played_tracks = True
        self.auto_request_beatgrid = True
        self.auto_track_download = False
        self.prodj = prodj
def __len__():
return len(self.clients)
def getClient(self, player_number):
return next((p for p in self.clients if (p.player_number == player_number)), None)
def clientsByLoadedTrack(self, loaded_player_number, loaded_slot, track_id):
for p in self.clients:
if ((p.loaded_player_number == loaded_player_number) and (p.loaded_slot == loaded_slot) and (p.track_id == track_id)):
(yield p)
def clientsByLoadedTrackArtwork(self, loaded_player_number, loaded_slot, artwork_id):
for p in self.clients:
if ((p.loaded_player_number == loaded_player_number) and (p.loaded_slot == loaded_slot) and (p.metadata is not None) and (p.metadata['artwork_id'] == artwork_id)):
(yield p)
def storeMetadataByLoadedTrack(self, loaded_player_number, loaded_slot, track_id, metadata):
for p in self.clients:
if ((p.loaded_player_number == loaded_player_number) and (p.loaded_slot == loaded_slot) and (p.track_id == track_id)):
p.metadata = metadata
def mediaChanged(self, player_number, slot):
logging.debug('Media %s in player %d changed', slot, player_number)
self.prodj.data.cleanup_stores_from_changed_media(player_number, slot)
if (self.media_change_callback is not None):
self.media_change_callback(self, player_number, slot)
    def updatePositionByBeat(self, player_number, new_beat_count, new_play_state):
        """Estimate a player's playback position from its beat count using
        the cached beatgrid of the loaded track."""
        c = self.getClient(player_number)
        identifier = (c.loaded_player_number, c.loaded_slot, c.track_id)
        if (identifier in self.prodj.data.beatgrid_store):
            if (new_beat_count > 0):
                # Skip transitional state changes where the beat count would
                # be briefly misleading.
                if (((c.play_state == 'cued') and (new_play_state == 'cueing')) or ((c.play_state == 'playing') and (new_play_state == 'paused')) or ((c.play_state == 'paused') and (new_play_state == 'playing'))):
                    return
                if (new_play_state != 'cued'):
                    # Beat counts appear to be 1-based except when cued —
                    # TODO confirm against the protocol.
                    new_beat_count -= 1
                beatgrid = self.prodj.data.beatgrid_store[identifier]
                if ((beatgrid is not None) and (len(beatgrid) > new_beat_count)):
                    # Beatgrid times are stored in milliseconds; position in seconds.
                    c.position = (beatgrid[new_beat_count]['time'] / 1000)
                else:
                    c.position = 0
            else:
                c.position = None
        # Record when the position estimate was taken.
        c.position_timestamp = time.time()
def logPlayedTrackCallback(self, request, source_player_number, slot, item_id, reply):
if ((request != 'metadata') or (reply is None) or (len(reply) == 0)):
return
with open('tracks.log', 'a') as f:
f.write('{}: {} - {} ({})\n'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'), reply['artist'], reply['title'], reply['album']))
    def eatKeepalive(self, keepalive_packet):
        """Process a keepalive packet: register new players, track player
        number changes, and refresh the client's TTL."""
        # Match an existing client by IP address.
        c = next((x for x in self.clients if (x.ip_addr == keepalive_packet.content.ip_addr)), None)
        if (c is None):
            # Refuse a new client whose player number collides with an
            # already-known client at a different IP.
            conflicting_client = next((x for x in self.clients if (x.player_number == keepalive_packet.content.player_number)), None)
            if (conflicting_client is not None):
                logging.warning('New Player %d (%s), but already used by %s, ignoring keepalive', keepalive_packet.content.player_number, keepalive_packet.content.ip_addr, conflicting_client.ip_addr)
                return
            c = Client()
            c.model = keepalive_packet.model
            c.ip_addr = keepalive_packet.content.ip_addr
            c.mac_addr = keepalive_packet.content.mac_addr
            c.player_number = keepalive_packet.content.player_number
            self.clients += [c]
            logging.info('New Player %d: %s, %s, %s', c.player_number, c.model, c.ip_addr, c.mac_addr)
            if self.client_keepalive_callback:
                self.client_keepalive_callback(c.player_number)
        elif (keepalive_packet.type != 'type_change'):
            # Existing client: detect a player-number reassignment.
            n = keepalive_packet.content.player_number
            if (c.player_number != n):
                logging.info('Player {} changed player number from {} to {}'.format(c.ip_addr, c.player_number, n))
                old_player_number = c.player_number
                c.player_number = n
                # Notify callbacks for both the old and the new number.
                for pn in [old_player_number, c.player_number]:
                    if self.client_keepalive_callback:
                        self.client_keepalive_callback(pn)
                    if self.client_change_callback:
                        self.client_change_callback(pn)
        c.updateTtl()
    def eatBeat(self, beat_packet):
        """Process a beat packet: update on-air flags from mixer packets, or
        pitch/bpm/beat from player beat packets."""
        c = self.getClient(beat_packet.player_number)
        if (c is None):
            return
        c.updateTtl()
        client_changed = False
        if (beat_packet.type == 'type_mixer'):
            # Mixer packets carry the on-air state of channels 1-4.
            for x in range(1, 5):
                player = self.getClient(x)
                if (player is not None):
                    on_air = (beat_packet.content.ch_on_air[(x - 1)] == 1)
                    if (player.on_air != on_air):
                        player.on_air = on_air
                        client_changed = True
        elif ((beat_packet.type == 'type_beat') and ((not c.status_packet_received) or (c.model == 'CDJ-2000'))):
            # Fall back to beat packets for pitch/bpm/beat only when status
            # packets are unavailable (e.g. CDJ-2000 models).
            new_actual_pitch = beat_packet.content.pitch
            if (c.actual_pitch != new_actual_pitch):
                c.actual_pitch = new_actual_pitch
                client_changed = True
            new_bpm = beat_packet.content.bpm
            if (c.bpm != new_bpm):
                c.bpm = new_bpm
                client_changed = True
            new_beat = beat_packet.content.beat
            if (c.beat != new_beat):
                c.beat = new_beat
                client_changed = True
        if (self.client_change_callback and client_changed):
            self.client_change_callback(c.player_number)
def eatStatus(self, status_packet):
    """Digest a cdj/djm/link_reply status packet and update the matching client.

    Unknown packet types and packets from players we have not registered are
    ignored.  link_reply packets only update media (usb/sd) info and return
    early; cdj/djm packets update playback state, with the cdj-only section
    additionally tracking position, firmware, media slots and loaded track.
    Fires client_change_callback once at the end if anything changed.
    """
    if (status_packet.type not in ['cdj', 'djm', 'link_reply']):
        logging.info('Received %s status packet from player %d, ignoring', status_packet.type, status_packet.player_number)
        return
    c = self.getClient(status_packet.player_number)
    if (c is None):
        return
    client_changed = False
    c.status_packet_received = True
    if (status_packet.type == 'link_reply'):
        # Media info reply: store it per slot and bail out early.
        link_info = {key: status_packet.content[key] for key in ['name', 'track_count', 'playlist_count', 'bytes_total', 'bytes_free', 'date']}
        if (status_packet.content.slot == 'usb'):
            c.usb_info = link_info
        elif (status_packet.content.slot == 'sd'):
            c.sd_info = link_info
        else:
            logging.warning('Received link info for %s not implemented', status_packet.content.slot)
        logging.info('Player %d Link Info: %s "%s", %d tracks, %d playlists, %d/%dMB free', c.player_number, status_packet.content.slot, link_info['name'], link_info['track_count'], link_info['playlist_count'], ((link_info['bytes_free'] // 1024) // 1024), ((link_info['bytes_total'] // 1024) // 1024))
        self.mediaChanged(c.player_number, status_packet.content.slot)
        return
    c.type = status_packet.type
    # 655.35 (i.e. 0xffff scaled by 100) is the "no bpm available" sentinel.
    new_bpm = (status_packet.content.bpm if (status_packet.content.bpm != 655.35) else '-')
    if (c.bpm != new_bpm):
        c.bpm = new_bpm
        client_changed = True
    new_pitch = status_packet.content.physical_pitch
    if (c.pitch != new_pitch):
        c.pitch = new_pitch
        client_changed = True
    # FIX: the comparison value was missing here (syntax error in the
    # original line).  0xffff marks "no beat information" in the protocol --
    # TODO confirm against a packet capture.
    new_beat = (status_packet.content.beat if (status_packet.content.beat != 0xffff) else 0)
    if ((c.beat != new_beat) and (new_beat != 0)):
        c.beat = new_beat
        client_changed = True
    new_state = [x for x in ['on_air', 'sync', 'master', 'play'] if (status_packet.content.state[x] == True)]
    if (c.state != new_state):
        c.state = new_state
        client_changed = True
    if (c.type == 'cdj'):
        # FIX: the comparison value was missing here as well; 0xffffffff is
        # the "no beat count" sentinel -- TODO confirm against a capture.
        new_beat_count = (status_packet.content.beat_count if (status_packet.content.beat_count != 0xffffffff) else 0)
        new_play_state = status_packet.content.play_state
        if ((new_beat_count != c.beat_count) or (new_play_state != c.play_state)):
            self.updatePositionByBeat(c.player_number, new_beat_count, new_play_state)
        else:
            c.updatePositionByPitch()
        if (c.beat_count != new_beat_count):
            c.beat_count = new_beat_count
            client_changed = True
        if (c.play_state != new_play_state):
            c.play_state = new_play_state
            client_changed = True
        c.fw = status_packet.content.firmware
        new_actual_pitch = status_packet.content.actual_pitch
        if (c.actual_pitch != new_actual_pitch):
            c.actual_pitch = new_actual_pitch
            client_changed = True
        # 511 is the "no cue in range" sentinel; displayed as '-'.
        new_cue_distance = (status_packet.content.cue_distance if (status_packet.content.cue_distance != 511) else '-')
        if (c.cue_distance != new_cue_distance):
            c.cue_distance = new_cue_distance
            client_changed = True
        new_usb_state = status_packet.content.usb_state
        if (c.usb_state != new_usb_state):
            c.usb_state = new_usb_state
            if (new_usb_state != 'loaded'):
                c.usb_info = {}
            else:
                # Newly loaded media: ask the player for its link info.
                self.prodj.vcdj.query_link_info(c.player_number, 'usb')
            self.mediaChanged(c.player_number, 'usb')
        new_sd_state = status_packet.content.sd_state
        if (c.sd_state != new_sd_state):
            c.sd_state = new_sd_state
            if (new_sd_state != 'loaded'):
                c.sd_info = {}
            else:
                self.prodj.vcdj.query_link_info(c.player_number, 'sd')
            self.mediaChanged(c.player_number, 'sd')
        c.track_number = status_packet.content.track_number
        c.loaded_player_number = status_packet.content.loaded_player_number
        c.loaded_slot = status_packet.content.loaded_slot
        c.track_analyze_type = status_packet.content.track_analyze_type
        new_track_id = status_packet.content.track_id
        if (c.track_id != new_track_id):
            c.track_id = new_track_id
            client_changed = True
            # Invalidate cached per-track data on track change.
            c.metadata = None
            c.position = None
            if ((c.loaded_slot in ['usb', 'sd']) and (c.track_analyze_type == 'rekordbox')):
                if self.log_played_tracks:
                    self.prodj.data.get_metadata(c.loaded_player_number, c.loaded_slot, c.track_id, self.logPlayedTrackCallback)
                if (self.auto_request_beatgrid and (c.track_id != 0)):
                    self.prodj.data.get_beatgrid(c.loaded_player_number, c.loaded_slot, c.track_id)
                if self.auto_track_download:
                    logging.info('Automatic download of track in player %d', c.player_number)
                    self.prodj.data.get_mount_info(c.loaded_player_number, c.loaded_slot, c.track_id, self.prodj.nfs.enqueue_download_from_mount_info)
    c.updateTtl()
    if (self.client_change_callback and client_changed):
        self.client_change_callback(c.player_number)
def gc(self):
    """Drop every client whose keepalive TTL expired, notifying listeners.

    self.clients is reset first and rebuilt in place so a callback invoked
    during the sweep observes only the survivors seen so far (same order of
    operations as before).
    """
    previous = self.clients
    self.clients = []
    for client in previous:
        if client.ttlExpired():
            logging.info('Player {} dropped due to timeout'.format(client.player_number))
            if self.client_change_callback:
                self.client_change_callback(client.player_number)
        else:
            self.clients.append(client)
def getClientIps(self):
    """Collect the IP address of every currently known client, in order."""
    addresses = []
    for client in self.clients:
        addresses.append(client.ip_addr)
    return addresses
class TestECSHelpers(unittest.TestCase):
    """Unit tests for the ecs_helpers utility module.

    Locals previously named ``dict`` have been renamed: shadowing the builtin
    is an easy source of bugs in later edits.  No behavior change.
    """

    def test_is_intermediate_field(self):
        # is_intermediate() is True only when explicitly flagged.
        pseudo_field = {'field_details': {}}
        self.assertEqual(ecs_helpers.is_intermediate(pseudo_field), False)
        pseudo_field['field_details']['intermediate'] = False
        self.assertEqual(ecs_helpers.is_intermediate(pseudo_field), False)
        pseudo_field['field_details']['intermediate'] = True
        self.assertEqual(ecs_helpers.is_intermediate(pseudo_field), True)

    def test_dict_copy_existing_keys(self):
        # Keys missing from the source are simply skipped.
        source = {'key1': 'value1'}
        destination = {}
        ecs_helpers.dict_copy_existing_keys(source, destination, ['key1', 'missingkey'])
        self.assertEqual(destination, {'key1': 'value1'})

    def test_dict_copy_existing_keys_overwrites(self):
        # Existing destination values are overwritten; untouched keys survive.
        source = {'key1': 'new_value'}
        destination = {'key1': 'overwritten', 'untouched': 'untouched'}
        ecs_helpers.dict_copy_existing_keys(source, destination, ['key1', 'untouched'])
        self.assertEqual(destination, {'key1': 'new_value', 'untouched': 'untouched'})

    def test_sorted_by_one_key(self):
        # Accepts either a single key or a list containing one key.
        field_index = {'message': {'name': 'message'}, 'labels': {'name': 'labels'}, '': {'name': ''}, 'tags': {'name': 'tags'}}
        expected = [{'name': ''}, {'name': 'labels'}, {'name': 'message'}, {'name': 'tags'}]
        result = ecs_helpers.dict_sorted_by_keys(field_index, 'name')
        self.assertEqual(result, expected)
        result = ecs_helpers.dict_sorted_by_keys(field_index, ['name'])
        self.assertEqual(result, expected)

    def test_sorted_by_multiple_keys(self):
        # Sorts by the keys in order: group first, then name.
        field_index = {'cloud': {'group': 2, 'name': 'cloud'}, 'agent': {'group': 2, 'name': 'agent'}, 'base': {'group': 1, 'name': 'base'}}
        expected = [{'group': 1, 'name': 'base'}, {'group': 2, 'name': 'agent'}, {'group': 2, 'name': 'cloud'}]
        result = ecs_helpers.dict_sorted_by_keys(field_index, ['group', 'name'])
        self.assertEqual(result, expected)

    def test_merge_dicts(self):
        a = {'cloud': {'group': 2, 'name': 'cloud'}, 'agent': {'group': 2, 'name': 'agent'}}
        b = {'base': {'group': 1, 'name': 'base'}}
        result = ecs_helpers.safe_merge_dicts(a, b)
        self.assertEqual(result, {'cloud': {'group': 2, 'name': 'cloud'}, 'agent': {'group': 2, 'name': 'agent'}, 'base': {'group': 1, 'name': 'base'}})

    def test_merge_dicts_raises_if_duplicate_key_added(self):
        a = {'cloud': {'group': 2, 'name': 'cloud'}}
        b = {'cloud': {'group': 9, 'name': 'bazbar'}}
        with self.assertRaises(ValueError):
            ecs_helpers.safe_merge_dicts(a, b)

    def test_clean_string_values(self):
        # Strips whitespace from string values in place; non-strings untouched.
        values = {'dirty': ' space, the final frontier ', 'clean': 'val', 'int': 1}
        ecs_helpers.dict_clean_string_values(values)
        self.assertEqual(values, {'dirty': 'space, the final frontier', 'clean': 'val', 'int': 1})

    def test_list_subtract(self):
        # Items in the second list absent from the first are ignored.
        self.assertEqual(ecs_helpers.list_subtract(['a', 'b'], ['a']), ['b'])
        self.assertEqual(ecs_helpers.list_subtract(['a', 'b'], ['a', 'c']), ['b'])

    def test_get_tree_by_ref(self):
        ref = 'v1.5.0'
        tree = ecs_helpers.get_tree_by_ref(ref)
        # NOTE(review): this sha is 30 hex chars; git hexsha is 40 -- the
        # literal looks truncated and the assertion likely fails. Verify
        # against the actual tag.
        self.assertEqual(tree.hexsha, '4449df245f6930d59bcd537aa9476b')

    def test_path_exists_in_git_tree(self):
        ref = 'v1.6.0'
        tree = ecs_helpers.get_tree_by_ref(ref)
        self.assertFalse(ecs_helpers.path_exists_in_git_tree(tree, 'nonexistant'))
        self.assertTrue(ecs_helpers.path_exists_in_git_tree(tree, 'schemas'))

    def test_is_yaml(self):
        # Only .yml/.yaml file paths count as YAML; directories do not.
        self.assertTrue(ecs_helpers.is_yaml('./schemas/base.yml'))
        self.assertTrue(ecs_helpers.is_yaml('./build/docs/conf.yaml'))
        self.assertFalse(ecs_helpers.is_yaml('./README.md'))
        self.assertFalse(ecs_helpers.is_yaml('./schemas/'))
        self.assertFalse(ecs_helpers.is_yaml('./build'))

    def test_glob_yaml_files(self):
        # Accepts a path, a directory, a glob, or a list of any of those.
        self.assertEqual(ecs_helpers.glob_yaml_files('non_existent_file'), [])
        self.assertEqual(ecs_helpers.glob_yaml_files('non_existent_directory/'), [])
        self.assertEqual(ecs_helpers.glob_yaml_files('non_existent_wildcard.*'), [])
        self.assertEqual(ecs_helpers.glob_yaml_files('schemas/base.yml'), ['schemas/base.yml'])
        self.assertEqual(ecs_helpers.glob_yaml_files(['schemas/base.yml']), ['schemas/base.yml'])
        self.assertEqual(set(ecs_helpers.glob_yaml_files(['schemas/base.yml', 'schemas/log.yml'])), {'schemas/base.yml', 'schemas/log.yml'})
        self.assertTrue((set(ecs_helpers.glob_yaml_files('schemas/b*.yml')).intersection({'schemas/base.yml'}) != set()))
        self.assertTrue((set(ecs_helpers.glob_yaml_files('schemas/[bl]*.yml')).intersection({'schemas/base.yml', 'schemas/log.yml'}) != set()))
        # Lower bound, not exact count, so new schema files don't break this.
        min_schema_count = 46
        self.assertTrue((len(ecs_helpers.glob_yaml_files(ecs_helpers.glob_yaml_files('schemas'))) >= min_schema_count))
        self.assertTrue((len(ecs_helpers.glob_yaml_files(ecs_helpers.glob_yaml_files('schemas/'))) >= min_schema_count))
        self.assertTrue((len(ecs_helpers.glob_yaml_files(ecs_helpers.glob_yaml_files('schemas/*.yml'))) >= min_schema_count))
        self.assertEqual(len(ecs_helpers.glob_yaml_files(ecs_helpers.glob_yaml_files('schemas/*.yaml'))), 0)

    def test_remove_top_level_false_field_sets(self):
        # Schemas flagged reusable.top_level == False are dropped entirely.
        nested_schema_original = {'as': {'group': 2, 'name': 'as', 'reusable': {'top_level': False}}, 'agent': {'group': 2, 'name': 'agent'}}
        nested_schema_expected = {'agent': {'group': 2, 'name': 'agent'}}
        self.assertEqual(ecs_helpers.remove_top_level_reusable_false(nested_schema_original), nested_schema_expected)
def _replace_writes(ir, fwd, c, sym, repl, only_replace_attrs=True):
    # Rewrite every write statement to `sym` (both `sym = _` and `sym += _`)
    # inside container `c`.  For each matched statement, `repl(stmt)` yields
    # the replacement; a falsy result skips that statement.  Returns the
    # (possibly updated) ir and a forwarding function composed from all
    # rewrites plus the incoming `fwd`, so callers can map pre-rewrite node
    # references into the rewritten tree.
    # NOTE(review): `ir` is only reassigned when at least one replacement
    # happens; otherwise the argument is returned unchanged -- presumably
    # intentional, confirm with callers.
    cur_fwd = (lambda x: x)  # identity until the first rewrite in this pass
    c = fwd(c)  # bring `c` up to date with rewrites from earlier passes
    matches = (match_pattern(c, f'{repr(sym)} = _', use_sym_id=True) + match_pattern(c, f'{repr(sym)} += _', use_sym_id=True))
    for block in matches:
        assert (len(block) == 1)  # a write pattern matches exactly one statement
        s = cur_fwd(block[0])  # map the match through rewrites made within this loop
        if (not (c_repl := repl(s))):
            continue
        (ir, fwd_s) = _replace_helper(s, c_repl, only_replace_attrs)
        cur_fwd = _compose(fwd_s, cur_fwd)
    return (ir, _compose(cur_fwd, fwd))
class Plugin(plugin.PluginProto):
    """RFID reader task for the NXP PN532 over I2C.

    Polls the reader from a daemon thread and reports the decoded tag UID as
    a decimal string through value 1.
    """
    PLUGIN_ID = 17
    PLUGIN_NAME = 'RFID - PN532'
    PLUGIN_VALUENAME1 = 'Tag'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_TEXT
        self.valuecount = 1
        self.senddataoption = True
        self.timeroption = False
        self.timeroptional = False
        self.formulaoption = False
        self.pn = None           # pn532.PN532_I2C driver instance
        self.ver = ''            # firmware version string "major.minor"
        self.lastread = 0        # timestamp of the last accepted read (debounce)
        self.processing = False  # guards against re-entrant tag handling
        self.i2cport = (- 1)
        self.preset = None       # optional GPIO pin wired to the PN532 reset line

    def plugin_init(self, enableplugin=None):
        """(Re)initialize the PN532 and start the background reader thread."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.decimals[0] = (- 1)
        self.initialized = False
        time.sleep(1)  # give the device time to settle after power-up/reset
        if self.enabled:
            try:
                i2cl = self.i2c
            except:
                i2cl = (- 1)
            try:
                i2cport = gpios.HWPorts.geti2clist()
                if (i2cl == (- 1)):
                    i2cl = int(i2cport[0])  # fall back to the first available bus
            except:
                i2cport = []
            if ((len(i2cport) > 0) and (i2cl > (- 1))):
                self.preset = str(self.taskdevicepin[0]).strip()
                if ((self.preset == '') or (self.preset == '-1')):
                    self.preset = None
                else:
                    try:
                        # FIX: was int(reset) -- `reset` is undefined, so the
                        # NameError was swallowed and preset silently became
                        # None, disabling the configured reset pin.
                        self.preset = int(self.preset)
                    except:
                        self.preset = None
                self.ver = ''
                try:
                    self.pn = pn532.PN532_I2C(reset=self.preset, i2c_c=self.i2c)
                    (ic, ver, rev, supp) = self.pn.get_firmware_version()
                    self.ver = ((str(ver) + '.') + str(rev))
                    self.pn.SAM_configuration()
                    misc.addLog(rpieGlobals.LOG_LEVEL_INFO, (('PN532 v' + str(self.ver)) + ' initialized'))
                    self.initialized = True
                except Exception as e:
                    self.initialized = False
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('PN532 init error:' + str(e)))
            self.processing = False
            if self.initialized:
                bgt = threading.Thread(target=self.bgreader)
                bgt.daemon = True
                bgt.start()

    def bgreader(self):
        """Poll loop: block up to 1s for a tag and hand its UID to the callback."""
        while self.enabled:
            if self.initialized:
                try:
                    uid = self.pn.read_passive_target(timeout=1)
                    if (uid is not None):
                        self.callbackfunc(uid)
                except Exception as e:
                    pass  # transient read errors are expected; keep polling
            else:
                time.sleep(0.1)

    def callbackfunc(self, rfid):
        """Convert a tag UID (bytes) to decimal and publish it, debounced to 1s."""
        if (self.processing == False):
            self.processing = True
            tn = time.time()
            if ((tn - self.lastread) > 1):
                self.lastread = tn
                try:
                    sval = str(int.from_bytes(rfid, byteorder='big', signed=False))
                except:
                    sval = ''
                if (sval != ''):
                    self.set_value(1, sval, True)
            self.processing = False

    def webform_load(self):
        """Render the task configuration form."""
        webserver.addFormNote("I2C address is fixed 0x24! You can check it at <a href='i2cscanner'>i2cscan</a> page.")
        webserver.addFormPinSelect('Reset pin (optional)', 'taskdevicepin3', self.taskdevicepin[0])
        webserver.addFormNote('Set to an Output pin connected to PN532 reset pin or None!')
        return True

    def webform_save(self, params):
        """Persist the reset pin choice from the submitted form."""
        par = webserver.arg('taskdevicepin3', params)
        if (par == ''):
            par = (- 1)
        self.taskdevicepin[0] = int(par)
        return True

    def __del__(self):
        # Stops the bgreader loop on the next poll iteration.
        self.initialized = False

    def plugin_exit(self):
        self.__del__()
        return True
class OptionSeriesAreasplinerangeLabelStyle(Options):
    """Text-style options for area-spline-range series labels.

    NOTE(review): each option is written as a getter/setter pair sharing one
    name with no @property / @<name>.setter decorators, so as written the
    second def simply shadows the first.  This looks like generated code
    whose decorators were stripped -- confirm against the generator output
    before relying on the getters.
    """

    def fontSize(self):
        # Default font size is '0.8em'.
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        self._config(num, js_type=False)

    def fontWeight(self):
        # Default font weight is 'bold'.
        return self._config_get('bold')

    def fontWeight(self, text: str):
        self._config(text, js_type=False)
class OsherFuncCoef(ObjectiveFunction_base):
    """Objective function for the Osher (similarity-solution) construction.

    Evaluates f(s) - xi*m(s) (or its negation) at a single point, where
    xi = x/t is the similarity variable and f, m come from the supplied
    coefficients object via single-point quadrature dictionaries.
    """

    def __init__(self, LHS_s, RHS_s, fFunc, t, x, useShallowCopy=True):
        ObjectiveFunction_base.__init__(self, LHS_s, RHS_s)
        # Similarity variable xi = x/t (old_div preserves py2-style division).
        self.xi = old_div(x, t)
        self.t = t
        self.x = x
        # Single-point coefficient storage dictionary in the layout expected
        # by fFunc.evaluate (keys follow the (name, component...) convention).
        self.c = {('u', 0): numpy.zeros((1,), 'd'), ('m', 0): numpy.zeros((1,), 'd'), ('dm', 0, 0): numpy.zeros((1,), 'd'), ('f', 0): numpy.zeros((1, 1), 'd'), ('df', 0, 0): numpy.zeros((1, 1), 'd'), ('a', 0, 0): numpy.zeros((1, 1, 1), 'd'), ('da', 0, 0, 0): numpy.zeros((1, 1, 1), 'd'), ('phi', 0): numpy.zeros((1,), 'd'), ('dphi', 0, 0): numpy.zeros((1,), 'd'), ('r', 0): numpy.zeros((1,), 'd'), ('dr', 0, 0): numpy.zeros((1,), 'd'), ('H', 0): numpy.zeros((1,), 'd'), ('dH', 0, 0): numpy.zeros((1, 1), 'd')}
        if useShallowCopy:
            self.fFunc = fFunc
        else:
            # Deep copy so repeated evaluations cannot mutate the caller's
            # coefficients object.
            import copy
            self.fFunc = copy.deepcopy(fFunc)
        self.fFunc.initializeElementQuadrature(self.t, self.c)
        # Orientation of the optimization depends on the ordering of the
        # left/right states.
        if (LHS_s < RHS_s):
            self.getResidual = self.Argmin
        else:
            self.getResidual = self.Argmax

    def Argmin(self, s):
        """Return f(s) - xi*m(s), evaluated at the single state s."""
        self.c[('u', 0)][0] = s
        self.fFunc.evaluate(self.t, self.c)
        m = self.c[('m', 0)][0]
        f = self.c[('f', 0)][(0, 0)]
        return (f - (self.xi * m))

    def Argmax(self, s):
        """Return xi*m(s) - f(s), the negation of Argmin."""
        self.c[('u', 0)][0] = s
        self.fFunc.evaluate(self.t, self.c)
        m = self.c[('m', 0)][0]
        f = self.c[('f', 0)][(0, 0)]
        return ((self.xi * m) - f)
def get_referenced_missing_keys(taxonomy: Taxonomy) -> Set[FidesKey]:
    """Return every fides_key referenced within the taxonomy that does not
    resolve to a resource in the taxonomy itself.
    """
    referenced_keys: List[Set[FidesKey]] = [
        find_referenced_fides_keys(resource)
        for resource_type in taxonomy.__fields_set__
        for resource in getattr(taxonomy, resource_type)
    ]
    # set().union(*...) also handles an empty taxonomy, where the previous
    # functools.reduce call without an initializer raised TypeError.
    key_set: Set[FidesKey] = set().union(*referenced_keys)
    return {fides_key for fides_key in key_set if (get_resource_by_fides_key(taxonomy, fides_key) is None)}
class OptionSeriesFunnelSonificationContexttracksMappingRate(Options):
    """Rate-mapping options for funnel-series sonification context tracks.

    NOTE(review): each option is a getter/setter pair sharing a name with no
    @property / @<name>.setter decorators, so the second def shadows the
    first as written.  Looks like generated code with stripped decorators --
    confirm against the generator output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class GithubCodeView(generics.GenericAPIView):
    """Exchange a GitHub OAuth authorization ``code`` for an access token.

    POST body must contain ``code``; the client id/secret come from the
    GITHUB_CLIENT_ID / GITHUB_CLIENT_SECRET environment variables.  GitHub's
    JSON response body is passed straight through to the caller.
    """
    serializer_class = GithubCodeSerializer
    permission_classes = [permissions.AllowAny]

    def post(self, request):
        # FIX: the URL literal was truncated in the original source (bare
        # opening quote).  This is GitHub's documented token-exchange
        # endpoint -- confirm it matches the app configuration.
        ACCESS_URL = 'https://github.com/login/oauth/access_token'
        payload = {'code': request.data['code'], 'client_id': os.environ['GITHUB_CLIENT_ID'], 'client_secret': os.environ['GITHUB_CLIENT_SECRET']}
        headers = {'Accept': 'application/json'}
        res = requests.post(ACCESS_URL, payload, headers=headers)
        # NOTE(review): the original printed res.text here, which writes the
        # access token to server logs; removed as a credential leak.
        return Response(res.text, status=200)
class init_cond(object):
    """Scaled signed-distance-like initial condition for a circle.

    uOfXT is positive inside the circle of radius 0.15 centered at
    (0.5, 0.75), zero on it, and negative outside, scaled by `scaling`.
    """

    def __init__(self, L, scaling=0.75):
        # `L` (domain size) is accepted for interface compatibility with the
        # other initial conditions but unused here -- presumably intentional,
        # confirm against sibling classes.
        self.radius = 0.15
        self.xc = 0.5
        self.yc = 0.75
        self.scaling = scaling

    def uOfXT(self, x, t):
        """Return scaling * (radius - |(x0, x1) - center|); t is unused."""
        # Removed an unused local `import numpy as np`; math.sqrt is what is
        # actually used.
        return self.scaling * (self.radius - math.sqrt((x[0] - self.xc) ** 2 + (x[1] - self.yc) ** 2))
class TestCompareLatestGeneratorOutputWithTestProtocolWithNoCustomTypes():
    """Regression test: regenerate the no-custom-types test protocol and
    compare every generated file against the checked-in reference copy."""

    def setup_class(cls):
        # Run generation inside a throwaway temp dir; restored in teardown.
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        os.chdir(cls.t)

    def test_compare_latest_generator_output_with_test_protocol(self):
        """Generated protocol files must match the committed reference files."""
        path_to_generated_protocol = self.t
        dotted_path_to_package_for_imports = 'tests.data.generator.'
        try:
            protocol_generator = ProtocolGenerator(path_to_protocol_specification=PATH_TO_T_PROTOCOL_NO_CT_SPECIFICATION, output_path=path_to_generated_protocol, dotted_path_to_protocol_package=dotted_path_to_package_for_imports)
            protocol_generator.generate()
        except Exception as e:
            # Generation failure is an environment problem, not a mismatch.
            pytest.skip(('Something went wrong when generating the protocol. The exception:' + str(e)))
        init_file_generated = Path(self.t, T_PROTOCOL_NO_CT_NAME, '__init__.py')
        init_file_original = Path(PATH_TO_T_PROTOCOL_NO_CT, '__init__.py')
        (is_matched, diff) = match_files(init_file_generated, init_file_original)
        # NOTE(review): the magic 194 presumably allows one known acceptable
        # diff (e.g. a header/hash line) in __init__.py -- confirm and
        # document where it comes from.
        assert (is_matched or (len(diff) == 194)), f'''Difference Found between __init__.py files:
{diff}'''
        message_file_generated = Path(self.t, T_PROTOCOL_NO_CT_NAME, 'message.py')
        message_file_original = Path(PATH_TO_T_PROTOCOL_NO_CT, 'message.py')
        (is_matched, diff) = match_files(message_file_generated, message_file_original)
        assert is_matched, f'''Difference Found between message.py files:
{diff}'''
        serialization_file_generated = Path(self.t, T_PROTOCOL_NO_CT_NAME, 'serialization.py')
        serialization_file_original = Path(PATH_TO_T_PROTOCOL_NO_CT, 'serialization.py')
        (is_matched, diff) = match_files(serialization_file_generated, serialization_file_original)
        assert is_matched, f'''Difference Found between serialization.py files:
{diff}'''
        dialogue_file_generated = Path(self.t, T_PROTOCOL_NO_CT_NAME, 'dialogues.py')
        dialogue_file_original = Path(PATH_TO_T_PROTOCOL_NO_CT, 'dialogues.py')
        (is_matched, diff) = match_files(dialogue_file_generated, dialogue_file_original)
        assert is_matched, f'''Difference Found between dialogues.py files:
{diff}'''
        proto_file_generated = Path(self.t, T_PROTOCOL_NO_CT_NAME, '{}.proto'.format(T_PROTOCOL_NO_CT_NAME))
        proto_file_original = Path(PATH_TO_T_PROTOCOL_NO_CT, '{}.proto'.format(T_PROTOCOL_NO_CT_NAME))
        (is_matched, diff) = match_files(proto_file_generated, proto_file_original)
        assert is_matched, f'''Difference Found between .proto files:
{diff}'''

    def teardown_class(cls):
        # Restore cwd before deleting the temp dir we are standing in.
        os.chdir(cls.cwd)
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
class Instance(object):
    """One source/reference pair in a simultaneous-translation evaluation.

    Tracks incremental predictions, per-step delays and elapsed times, and
    can serialize itself (summarize) or be rebuilt from a serialized line
    (from_json).  Subclasses implement the modality-specific hooks that
    raise NotImplementedError here.

    NOTE(review): `finish` is defined twice below; the setter-shaped second
    def shadows the first, and `summarize` references `self.prediction` et
    al. as values -- both look like @property decorators were stripped from
    this source.  Confirm against the upstream file before restoring them.
    """

    def __init__(self, index: int, dataloader: Optional[Union[(SpeechToTextDataloader, TextToTextDataloader)]], args: Optional[Namespace]):
        self.index = index
        self.finish_prediction = False
        self.dataloader = dataloader
        # A None dataloader/args combination is used by from_json, which
        # fills the fields from serialized data instead.
        if (self.dataloader is not None):
            self.source = self.dataloader[self.index]['source']
            self.reference = self.dataloader[self.index]['target']
            self.tgt_lang = self.dataloader[self.index]['tgt_lang']
            self.reset()
        if (args is not None):
            self.args = args
            self.latency_unit = args.eval_latency_unit
        self.target_spm_model = None

    def set_target_spm_model(self, spm_model):
        """Attach the sentencepiece model used when latency_unit == 'spm'."""
        self.target_spm_model = spm_model

    def reset(self):
        """Clear all per-run prediction state."""
        self.step = 0
        self.elapsed = []
        self.prediction_list = []
        self.delays = []
        self.start_time = None
        self.metrics = {}

    def step_to_elapsed(self, *args):
        raise NotImplementedError

    def step_to_delay(self, step):
        raise NotImplementedError

    def finish(self):
        return self.finish_prediction

    def finish(self, status: bool):
        self.finish_prediction = status

    def preprocess_target(self, target: str) -> str:
        """Hook for subclasses; identity by default."""
        return target

    def preprocess_source(self, source: str):
        raise NotImplementedError

    def receive_prediction(self, prediction: str):
        raise NotImplementedError

    def send_source(self, *args):
        raise NotImplementedError

    def source_length(self):
        raise NotImplementedError

    def prediction_length(self):
        return len(self.prediction_list)

    def target_length_latency(self):
        raise NotImplementedError

    def prediction(self):
        raise NotImplementedError

    def source_info(self):
        return self.source

    def reference_length(self) -> int:
        """Length of the reference in the configured latency unit."""
        if (self.latency_unit == 'word'):
            return len(self.reference.split(' '))
        elif (self.latency_unit == 'char'):
            return len(self.reference.strip())
        elif (self.latency_unit == 'spm'):
            assert (self.target_spm_model is not None)
            return len(self.target_spm_model.encode(self.reference, out_type=str))
        else:
            raise NotImplementedError

    def summarize(self):
        """Return a JSON-serializable summary of this instance's run."""
        return_dict = {'index': self.index, 'prediction': self.prediction, 'delays': self.delays, 'elapsed': self.elapsed, 'prediction_length': self.prediction_length, 'reference': self.reference, 'source': self.source_info, 'source_length': self.source_length}
        if (self.latency_unit == 'spm'):
            return_dict['prediction_spm'] = self.prediction_list
        return return_dict

    @classmethod
    def from_json(cls, json_string):
        """Rebuild a finished instance from a summarize() JSON line.

        FIX: decorated with @classmethod -- the body constructs cls(...),
        and the original undecorated def would receive the JSON string as
        `cls` when called on the class, then crash.
        """
        info = json.loads(json_string)
        instance = cls(info['index'], None, None)
        instance.prediction_list = info['prediction'].split()
        instance.delays = info['delays']
        instance.elapsed = info['elapsed']
        instance.reference = info['reference']
        instance.metrics = info['metric']
        instance.finish_prediction = True
        return instance
class TestNoCmdLineKwargRaise():
    """Passing `configs` as a keyword with no_cmd_line=True must raise."""

    def test_cmd_line_kwarg_raise(self, monkeypatch):
        # The TypeError is expected from the ConfigArgBuilder constructor;
        # the return is never reached when it raises.
        with monkeypatch.context() as m:
            with pytest.raises(TypeError):
                config = ConfigArgBuilder(*all_configs, no_cmd_line=True, configs='./tests/conf/yaml/test.yaml')
                return config.generate()
def _measure_for_children_in_entity(request, measure, parent_entity_code, parent_entity_type):
    """Render one measure across all child organisations of a parent entity.

    `parent_entity_type` is one of pcn/ccg/stp/regional_team; the child type
    is derived from the fixed hierarchy below (pcn/ccg -> practice,
    stp/regional_team -> ccg).  404s if the measure does not exist.
    """
    parent = _get_entity(parent_entity_type, parent_entity_code)
    # Fixed parent-type -> child-type mapping; KeyError for anything else.
    child_entity_type = {'pcn': 'practice', 'ccg': 'practice', 'stp': 'ccg', 'regional_team': 'ccg'}[parent_entity_type]
    measure = get_object_or_404(Measure, pk=measure)
    # Options consumed by the client-side measures charting code.
    measure_options = {'chartTitleUrlTemplate': _url_template(('measures_for_one_' + child_entity_type)), 'globalMeasuresUrl': _build_global_measures_url(measure_id=measure.id), 'measure': measure, 'oneEntityUrlTemplate': _url_template(('measure_for_one_' + child_entity_type)), 'orgId': parent.code, 'orgLocationUrl': _build_org_location_url(parent), 'orgName': parent.name, 'orgType': child_entity_type, 'orgTypeHuman': _entity_type_human(child_entity_type), 'parentOrgType': _org_type_for_entity(parent), 'parentOrgTypeHuman': _entity_type_human(_org_type_for_entity(parent)), 'panelMeasuresUrl': _build_panel_measures_url(child_entity_type, entity_code=parent.code, parent_org_type=_org_type_for_entity(parent), measure_id=measure.id), 'rollUpBy': 'org_id', 'tagsFocusUrlTemplate': _url_template(('measures_for_one_' + child_entity_type))}
    _add_measure_details(measure_options, measure)
    _add_measure_for_children_in_entity_url(measure_options, child_entity_type)
    # Template context for the server-rendered page chrome.
    context = {'parent_entity_type': parent_entity_type, 'parent_entity_type_human': _entity_type_human(parent_entity_type), 'child_entity_type_human': _entity_type_human(child_entity_type), 'parent': parent, 'page_id': parent_entity_code, 'parent_entity_measure_url': reverse(('measure_for_one_' + parent_entity_type), kwargs={'measure': measure.id, 'entity_code': parent_entity_code}), 'all_measures_url': reverse(('measures_for_one_' + parent_entity_type), kwargs={(parent_entity_type + '_code'): parent_entity_code}), 'measure': measure, 'measure_options': measure_options, 'measure_tags': _get_tags_with_names(measure.tags)}
    # Bots don't get the CSV link (presumably to avoid crawler-triggered
    # expensive downloads -- confirm).
    if (not _user_is_bot(request)):
        context['csv_download_url'] = measure_options['panelMeasuresUrl'].replace('format=json', 'format=csv')
    return render(request, 'measure_for_children_in_entity.html', context)
class InfoWindow(QtWidgets.QDialog):
    """Tool-style dialog showing read-only, selectable text near the cursor.

    Geometry is persisted via Config under INFOWIN_GEOMETRY so the window
    reopens where the user last left it.
    """

    def __init__(self, parent):
        # Qt.Tool keeps the window lightweight (no taskbar entry).
        QtWidgets.QDialog.__init__(self, parent, QtCore.Qt.Tool)
        self.setContentsMargins(0, 0, 0, 0)
        self._cfg = Config.get()
        self.layout = QtWidgets.QVBoxLayout(self)
        self._textedit = QtWidgets.QTextEdit()
        self._textedit.setCursorWidth(0)  # hide the caret in read-only view
        self._textedit.setMinimumSize(300, 325)
        self._textedit.setReadOnly(True)
        # Allow selecting/copying text with both mouse and keyboard.
        self._textedit.setTextInteractionFlags((QtCore.Qt.TextSelectableByMouse | QtCore.Qt.TextSelectableByKeyboard))
        self.layout.addWidget(self._textedit)
        self._load_settings()

    def closeEvent(self, ev):
        # Persist geometry, accept the close, but only hide so the instance
        # can be re-shown later.
        self._save_settings()
        ev.accept()
        self.hide()

    def _load_settings(self):
        saved_geometry = self._cfg.getSettings(Config.INFOWIN_GEOMETRY)
        if (saved_geometry is not None):
            self.restoreGeometry(saved_geometry)

    def _save_settings(self):
        self._cfg.setSettings(Config.INFOWIN_GEOMETRY, self.saveGeometry())

    def showText(self, text):
        """Set the dialog text and show it centered on the mouse cursor."""
        self._load_settings()
        self._textedit.setText(text)
        pos = QtGui.QCursor.pos()
        win_size = self.size()
        # Offset by half the window size so the cursor ends up at the center.
        x_off = int((win_size.width() / 2))
        y_off = int((win_size.height() / 2))
        point = QtCore.QPoint((pos.x() - x_off), (pos.y() - y_off))
        self.move(point.x(), point.y())
        self.show()
class IsSameUser(IsAuthed):
    """Requirement fulfilled only by the authenticated author of the topic
    or post under consideration.

    The relevant topic/post may be passed explicitly; otherwise it is pulled
    from the current request context (post first, then topic).
    """

    def __init__(self, topic_or_post=None):
        self._topic_or_post = topic_or_post

    def fulfill(self, user):
        # Short-circuits: ownership is only checked once authentication holds.
        return (super(IsSameUser, self).fulfill(user) and (user.id == self._determine_user()))

    def _determine_user(self):
        # An explicitly supplied topic/post takes precedence over context.
        if self._topic_or_post is None:
            return self._get_user_id_from_post()
        return self._topic_or_post.user_id

    def _get_user_id_from_post(self):
        if current_post:
            return current_post.user_id
        if current_topic:
            return current_topic.user_id
        raise FlaskBBError('Could not determine user')
class All2All_Scatter_Wait(Function):
    """autograd.Function that completes a previously issued scatter and, on
    backward, gathers the output gradients back to their source ranks.

    NOTE(review): forward/backward are written without @staticmethod, which
    torch.autograd.Function normally requires -- decorators were likely
    stripped from this source; confirm against the original file.
    """

    def forward(ctx, myreq, *output):
        # Stash the all-to-all info for backward, then block on the pending
        # request and release its references.
        ctx.a2ai = myreq.a2ai
        ctx.myreq = myreq
        myreq.req.wait()
        myreq.req = None
        myreq.tensor = None
        return output

    def backward(ctx, *grad_output):
        import torch.distributed as dist
        myreq = ctx.myreq
        my_size = myreq.bench.comm_size
        my_rank = myreq.bench.my_rank
        # One gradient tensor per rank is expected.
        assert (len(grad_output) == my_size)
        scatter_list = [t.contiguous() for t in grad_output]
        a2ai = ctx.a2ai
        # Per-rank minibatch split sizes: explicit gNS if set, else uniform lN.
        mb_split_lengths = (a2ai.gNS if a2ai.gNS else a2ai.lN)
        grad_input = grad_output[0].new_empty([a2ai.N, (a2ai.E * a2ai.lS)])
        gather_list = list(grad_input.split(mb_split_lengths, dim=0))
        req_list = []
        # Async gather to every rank; only the destination passes gather_list.
        for i in range(my_size):
            req = dist.gather(scatter_list[i], (gather_list if (i == my_rank) else []), dst=i, async_op=True)
            req_list.append(req)
        # Completion is deferred: the paired Begin/Wait op waits on req_list.
        myreq.req = req_list
        myreq.tensor = grad_input
        # NOTE(review): returns the grad_output tuple as the second element
        # (not unpacked); verify against the paired Begin function's forward
        # signature.
        return (None, grad_output)
class AEAProject():
    """Context manager that scaffolds a temporary, empty AEA project and
    chdirs into it; on exit the original cwd is restored and the temp tree
    removed."""
    # Filled in by __enter__; annotated here for readers/type checkers.
    old_cwd: str
    temp_dir: str

    def __init__(self, name: str='my_aea', parent_dir: Optional[str]=None):
        self.name = name
        self.parent_dir = parent_dir

    def __enter__(self) -> None:
        # Remember where we were, create the project in a temp dir, and end
        # up inside the project directory itself.
        self.old_cwd = os.getcwd()
        self.temp_dir = tempfile.mkdtemp(dir=self.parent_dir)
        os.chdir(self.temp_dir)
        run_aea('create', '--local', '--empty', self.name, '--author', 'fetchai')
        os.chdir(self.name)

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        # Always restore cwd first; rmtree would fail on the dir we stand in.
        os.chdir(self.old_cwd)
        shutil.rmtree(self.temp_dir)
class KfpPipelineParameter(PipelineParameter):
    """Kubeflow Pipelines flavor of a pipeline parameter.

    Declares the UI schema (property_attributes) for KFP parameters and
    validates that names are legal Python identifiers, since they become
    variables in the generated pipeline code.
    """
    property_id = 'KFP_PIPELINE_PARAMETERS'
    # UI/metadata schema for each list item: name, description, typed
    # default_value and value, and a required flag.
    property_attributes = [ListItemPropertyAttribute(attribute_id='name', description='The name of the parameter. This must be a valid Python identifier and not a keyword.', display_name='Parameter Name', allowed_input_types=[PropertyInputType(base_type='str', placeholder='param_1')], hidden=False, required=True, use_in_key=True, pattern='^[a-zA-Z][a-zA-Z0-9_]*$'), ListItemPropertyAttribute(attribute_id='description', description='A description for this parameter.', display_name='Description', allowed_input_types=[PropertyInputType(base_type='str')], hidden=False, required=False, use_in_key=False), ListItemPropertyAttribute(attribute_id='default_value', description='A default value for the parameter.', display_name='Default Value', allowed_input_types=[KfpPropertyInputType(base_type='String', placeholder='default_val'), KfpPropertyInputType(base_type='Integer'), KfpPropertyInputType(base_type='Float'), KfpPropertyInputType(base_type='Bool')], hidden=False, required=False, use_in_key=False), ListItemPropertyAttribute(attribute_id='value', display_name='Value', allowed_input_types=[KfpPropertyInputType(base_type='String'), KfpPropertyInputType(base_type='Integer'), KfpPropertyInputType(base_type='Float'), KfpPropertyInputType(base_type='Bool')], hidden=True, required=False, use_in_key=False), ListItemPropertyAttribute(attribute_id='required', description='Whether a value is required for this parameter during pipeline submit or export.', display_name='Required', allowed_input_types=[PropertyInputType(base_type='bool', placeholder=' ')], hidden=False, required=False, use_in_key=False)]
    # Fallback KFP type when the user has not selected one.
    default_type = 'String'

    def __init__(self, name, description, value, default_value, required, **kwargs):
        super().__init__(name=name, description=description, value=value, default_value=default_value, required=required)
        self.input_type = KfpPropertyInputType(base_type=(self.selected_type or self.default_type))

    def get_all_validation_errors(self) -> List[str]:
        """Return human-readable validation errors (empty list when valid)."""
        validation_errors = []
        # Names become Python variables in generated code, hence the
        # identifier/keyword checks.
        if (not self.name):
            validation_errors.append('Required parameter name was not specified.')
        elif (not self.name.isidentifier()):
            validation_errors.append(f"'{self.name}' is not a valid parameter name: name must be a Python variable name.")
        elif iskeyword(self.name):
            validation_errors.append(f"'{self.name}' is not a valid parameter name: name cannot be a Python keyword.")
        if (self.required and ((self.value is None) or (self.value == ''))):
            validation_errors.append('Parameter is marked as required but no value has been assigned.')
        return validation_errors
class TestUpdateGetBugKarma(BasePyTestCase):
    """Tests for Update.get_bug_karma(), which tallies (bad, good) feedback
    counts for one bug on an update."""

    def test_feedback_wrong_bug(self):
        """Karma attached to a different bug must not count for this bug."""
        update = model.Update.query.first()
        bk = model.BugKarma(karma=1, comment=update.comments[0], bug=update.bugs[0])
        self.db.add(bk)
        bug = model.Bug(bug_id=12345, title='some title')
        update.bugs.append(bug)
        (bad, good) = update.get_bug_karma(bug)
        assert (bad == 0)
        assert (good == 0)

    def test_mixed_feedback(self):
        """Mixed karma tallies separately; a newer build resets the counts."""
        update = model.Update.query.first()
        # One negative and two positive feedback entries on the same bug.
        for (i, karma) in enumerate([(- 1), 1, 1]):
            user = model.User(name='user_{}'.format(i))
            comment = model.Comment(text='Test comment', karma=karma, user=user)
            self.db.add(comment)
            update.comments.append(comment)
            bug_karma = model.BugKarma(karma=karma, comment=comment, bug=update.bugs[0])
            self.db.add(bug_karma)
        (bad, good) = update.get_bug_karma(update.bugs[0])
        assert (bad == (- 1))
        assert (good == 2)
        # A karma-0 "New build" comment from bodhi resets the tallies
        # (feedback predating the new build no longer counts).
        user = model.User(name='bodhi')
        comment = model.Comment(text='New build', karma=0, user=user)
        self.db.add(comment)
        update.comments.append(comment)
        (bad, good) = update.get_bug_karma(update.bugs[0])
        assert (bad == 0)
        assert (good == 0)
class Ad(AbstractCrudObject, HasAdLabels):
    """Graph API node for a Facebook ad (generated-SDK style CRUD object).

    Wraps the `/adaccounts/.../ads` node: field name constants, enum value
    holders, and one method per supported Graph API edge. Each edge method
    builds a ``FacebookRequest`` and either queues it on a batch, returns it
    pending, or executes it immediately.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        # Marker consumed elsewhere in the SDK to recognize Ad instances.
        self._isAd = True
        super(Ad, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        """Graph API field names for the Ad node."""
        account_id = 'account_id'
        ad_active_time = 'ad_active_time'
        ad_review_feedback = 'ad_review_feedback'
        ad_schedule_end_time = 'ad_schedule_end_time'
        ad_schedule_start_time = 'ad_schedule_start_time'
        adlabels = 'adlabels'
        adset = 'adset'
        adset_id = 'adset_id'
        bid_amount = 'bid_amount'
        bid_info = 'bid_info'
        bid_type = 'bid_type'
        campaign = 'campaign'
        campaign_id = 'campaign_id'
        configured_status = 'configured_status'
        conversion_domain = 'conversion_domain'
        conversion_specs = 'conversion_specs'
        created_time = 'created_time'
        creative = 'creative'
        demolink_hash = 'demolink_hash'
        display_sequence = 'display_sequence'
        effective_status = 'effective_status'
        engagement_audience = 'engagement_audience'
        failed_delivery_checks = 'failed_delivery_checks'
        id = 'id'
        issues_info = 'issues_info'
        last_updated_by_app_id = 'last_updated_by_app_id'
        name = 'name'
        preview_shareable_link = 'preview_shareable_link'
        priority = 'priority'
        recommendations = 'recommendations'
        source_ad = 'source_ad'
        source_ad_id = 'source_ad_id'
        status = 'status'
        targeting = 'targeting'
        tracking_and_conversion_with_defaults = 'tracking_and_conversion_with_defaults'
        tracking_specs = 'tracking_specs'
        updated_time = 'updated_time'
        adset_spec = 'adset_spec'
        audience_id = 'audience_id'
        date_format = 'date_format'
        draft_adgroup_id = 'draft_adgroup_id'
        execution_options = 'execution_options'
        include_demolink_hashes = 'include_demolink_hashes'
        filename = 'filename'

    class BidType():
        """Allowed values for the ``bid_type`` field."""
        absolute_ocpm = 'ABSOLUTE_OCPM'
        cpa = 'CPA'
        cpc = 'CPC'
        cpm = 'CPM'
        multi_premium = 'MULTI_PREMIUM'

    class ConfiguredStatus():
        """Allowed values for the ``configured_status`` field."""
        active = 'ACTIVE'
        archived = 'ARCHIVED'
        deleted = 'DELETED'
        paused = 'PAUSED'

    class EffectiveStatus():
        """Allowed values for the ``effective_status`` field."""
        active = 'ACTIVE'
        adset_paused = 'ADSET_PAUSED'
        archived = 'ARCHIVED'
        campaign_paused = 'CAMPAIGN_PAUSED'
        deleted = 'DELETED'
        disapproved = 'DISAPPROVED'
        in_process = 'IN_PROCESS'
        paused = 'PAUSED'
        pending_billing_info = 'PENDING_BILLING_INFO'
        pending_review = 'PENDING_REVIEW'
        preapproved = 'PREAPPROVED'
        with_issues = 'WITH_ISSUES'

    class Status():
        """Allowed values for the ``status`` field."""
        active = 'ACTIVE'
        archived = 'ARCHIVED'
        deleted = 'DELETED'
        paused = 'PAUSED'

    class DatePreset():
        """Allowed values for the ``date_preset`` request parameter."""
        data_maximum = 'data_maximum'
        last_14d = 'last_14d'
        last_28d = 'last_28d'
        last_30d = 'last_30d'
        last_3d = 'last_3d'
        last_7d = 'last_7d'
        last_90d = 'last_90d'
        last_month = 'last_month'
        last_quarter = 'last_quarter'
        last_week_mon_sun = 'last_week_mon_sun'
        last_week_sun_sat = 'last_week_sun_sat'
        last_year = 'last_year'
        maximum = 'maximum'
        this_month = 'this_month'
        this_quarter = 'this_quarter'
        this_week_mon_today = 'this_week_mon_today'
        this_week_sun_today = 'this_week_sun_today'
        this_year = 'this_year'
        today = 'today'
        yesterday = 'yesterday'

    class ExecutionOptions():
        """Allowed values for the ``execution_options`` request parameter."""
        include_recommendations = 'include_recommendations'
        synchronous_ad_review = 'synchronous_ad_review'
        validate_only = 'validate_only'

    class Operator():
        """Allowed values for filter operators."""
        all = 'ALL'
        any = 'ANY'

    class StatusOption():
        """Allowed values for the ``status_option`` parameter of /copies."""
        active = 'ACTIVE'
        inherited_from_source = 'INHERITED_FROM_SOURCE'
        paused = 'PAUSED'

    # BUG FIX: the method takes `cls` but was missing @classmethod, so
    # `Ad.get_endpoint()` raised TypeError; instance calls keep working.
    @classmethod
    def get_endpoint(cls):
        """Return the Graph API edge name used to create/list ads."""
        return 'ads'

    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Create this ad under the given ad account (POST .../ads)."""
        from facebook_business.adobjects.adaccount import AdAccount
        return AdAccount(api=self._api, fbid=parent_id).create_ad(fields, params, batch, success, failure, pending)

    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Delete this ad (DELETE /<id>); returns the request when batched/pending."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Read this ad's fields (GET /<id>)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'am_call_tags': 'map', 'date_preset': 'date_preset_enum', 'from_adtable': 'bool', 'review_feedback_breakdown': 'bool', 'time_range': 'map'}
        enums = {'date_preset_enum': ['data_maximum', 'last_14d', 'last_28d', 'last_30d', 'last_3d', 'last_7d', 'last_90d', 'last_month', 'last_quarter', 'last_week_mon_sun', 'last_week_sun_sat', 'last_year', 'maximum', 'this_month', 'this_quarter', 'this_week_mon_today', 'this_week_sun_today', 'this_year', 'today', 'yesterday']}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Ad, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Update this ad's fields (POST /<id>)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'ad_schedule_end_time': 'datetime', 'ad_schedule_start_time': 'datetime', 'adlabels': 'list<Object>', 'adset_spec': 'AdSet', 'audience_id': 'string', 'bid_amount': 'int', 'conversion_domain': 'string', 'creative': 'AdCreative', 'display_sequence': 'unsigned int', 'draft_adgroup_id': 'string', 'engagement_audience': 'bool', 'execution_options': 'list<execution_options_enum>', 'include_demolink_hashes': 'bool', 'name': 'string', 'priority': 'unsigned int', 'status': 'status_enum', 'tracking_specs': 'Object'}
        enums = {'execution_options_enum': Ad.ExecutionOptions.__dict__.values(), 'status_enum': Ad.Status.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Ad, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_ad_creatives(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch creatives attached to this ad (GET /<id>/adcreatives)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.adcreative import AdCreative
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adcreatives', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdCreative, api_type='EDGE', response_parser=ObjectParser(target_class=AdCreative, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def create_ad_label(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Attach ad labels to this ad (POST /<id>/adlabels)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'adlabels': 'list<Object>', 'execution_options': 'list<execution_options_enum>'}
        enums = {'execution_options_enum': Ad.ExecutionOptions.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/adlabels', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Ad, api_type='EDGE', response_parser=ObjectParser(target_class=Ad, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_ad_rules_governed(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch ad rules governing this ad (GET /<id>/adrules_governed)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.adrule import AdRule
        param_types = {'pass_evaluation': 'bool'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/adrules_governed', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdRule, api_type='EDGE', response_parser=ObjectParser(target_class=AdRule, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_copies(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch copies of this ad (GET /<id>/copies)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'date_preset': 'date_preset_enum', 'effective_status': 'list<string>', 'time_range': 'map', 'updated_since': 'int'}
        enums = {'date_preset_enum': Ad.DatePreset.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/copies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Ad, api_type='EDGE', response_parser=ObjectParser(target_class=Ad, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def create_copy(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Duplicate this ad (POST /<id>/copies)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'adset_id': 'string', 'rename_options': 'Object', 'status_option': 'status_option_enum'}
        enums = {'status_option_enum': Ad.StatusOption.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/copies', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Ad, api_type='EDGE', response_parser=ObjectParser(target_class=Ad, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_insights(self, fields=None, params=None, is_async=False, batch=None, success=None, failure=None, pending=False):
        """Fetch insights for this ad (GET /<id>/insights).

        When ``is_async`` is true, delegates to :meth:`get_insights_async`,
        which starts an asynchronous report run instead.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.adsinsights import AdsInsights
        if is_async:
            return self.get_insights_async(fields, params, batch, success, failure, pending)
        param_types = {'action_attribution_windows': 'list<action_attribution_windows_enum>', 'action_breakdowns': 'list<action_breakdowns_enum>', 'action_report_time': 'action_report_time_enum', 'breakdowns': 'list<breakdowns_enum>', 'date_preset': 'date_preset_enum', 'default_summary': 'bool', 'export_columns': 'list<string>', 'export_format': 'string', 'export_name': 'string', 'fields': 'list<string>', 'filtering': 'list<Object>', 'level': 'level_enum', 'product_id_limit': 'int', 'sort': 'list<string>', 'summary': 'list<string>', 'summary_action_breakdowns': 'list<summary_action_breakdowns_enum>', 'time_increment': 'string', 'time_range': 'map', 'time_ranges': 'list<map>', 'use_account_attribution_setting': 'bool', 'use_unified_attribution_setting': 'bool'}
        enums = {'action_attribution_windows_enum': AdsInsights.ActionAttributionWindows.__dict__.values(), 'action_breakdowns_enum': AdsInsights.ActionBreakdowns.__dict__.values(), 'action_report_time_enum': AdsInsights.ActionReportTime.__dict__.values(), 'breakdowns_enum': AdsInsights.Breakdowns.__dict__.values(), 'date_preset_enum': AdsInsights.DatePreset.__dict__.values(), 'level_enum': AdsInsights.Level.__dict__.values(), 'summary_action_breakdowns_enum': AdsInsights.SummaryActionBreakdowns.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/insights', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdsInsights, api_type='EDGE', response_parser=ObjectParser(target_class=AdsInsights, api=self._api), include_summary=False)
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_insights_async(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Start an async insights report run (POST /<id>/insights).

        ``fields`` are merged into ``params['fields']`` because the async
        endpoint takes them as a parameter rather than a field selection.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.adreportrun import AdReportRun
        from facebook_business.adobjects.adsinsights import AdsInsights
        param_types = {'action_attribution_windows': 'list<action_attribution_windows_enum>', 'action_breakdowns': 'list<action_breakdowns_enum>', 'action_report_time': 'action_report_time_enum', 'breakdowns': 'list<breakdowns_enum>', 'date_preset': 'date_preset_enum', 'default_summary': 'bool', 'export_columns': 'list<string>', 'export_format': 'string', 'export_name': 'string', 'fields': 'list<string>', 'filtering': 'list<Object>', 'level': 'level_enum', 'product_id_limit': 'int', 'sort': 'list<string>', 'summary': 'list<string>', 'summary_action_breakdowns': 'list<summary_action_breakdowns_enum>', 'time_increment': 'string', 'time_range': 'map', 'time_ranges': 'list<map>', 'use_account_attribution_setting': 'bool', 'use_unified_attribution_setting': 'bool'}
        enums = {'action_attribution_windows_enum': AdsInsights.ActionAttributionWindows.__dict__.values(), 'action_breakdowns_enum': AdsInsights.ActionBreakdowns.__dict__.values(), 'action_report_time_enum': AdsInsights.ActionReportTime.__dict__.values(), 'breakdowns_enum': AdsInsights.Breakdowns.__dict__.values(), 'date_preset_enum': AdsInsights.DatePreset.__dict__.values(), 'level_enum': AdsInsights.Level.__dict__.values(), 'summary_action_breakdowns_enum': AdsInsights.SummaryActionBreakdowns.__dict__.values()}
        if (fields is not None):
            params['fields'] = (params.get('fields') if (params.get('fields') is not None) else list())
            params['fields'].extend((field for field in fields if (field not in params['fields'])))
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/insights', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdReportRun, api_type='EDGE', response_parser=ObjectParser(target_class=AdReportRun, api=self._api), include_summary=False)
        request.add_params(params)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_leads(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch leads generated by this ad (GET /<id>/leads)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.lead import Lead
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/leads', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Lead, api_type='EDGE', response_parser=ObjectParser(target_class=Lead, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_previews(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch rendered previews of this ad (GET /<id>/previews)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.adpreview import AdPreview
        param_types = {'ad_format': 'ad_format_enum', 'creative_feature': 'creative_feature_enum', 'dynamic_asset_label': 'string', 'dynamic_creative_spec': 'Object', 'dynamic_customization': 'Object', 'end_date': 'datetime', 'height': 'unsigned int', 'locale': 'string', 'place_page_id': 'int', 'post': 'Object', 'product_item_ids': 'list<string>', 'render_type': 'render_type_enum', 'start_date': 'datetime', 'width': 'unsigned int'}
        enums = {'ad_format_enum': AdPreview.AdFormat.__dict__.values(), 'creative_feature_enum': AdPreview.CreativeFeature.__dict__.values(), 'render_type_enum': AdPreview.RenderType.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/previews', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AdPreview, api_type='EDGE', response_parser=ObjectParser(target_class=AdPreview, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_targeting_sentence_lines(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Fetch human-readable targeting descriptions (GET /<id>/targetingsentencelines)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.targetingsentenceline import TargetingSentenceLine
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/targetingsentencelines', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=TargetingSentenceLine, api_type='EDGE', response_parser=ObjectParser(target_class=TargetingSentenceLine, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Declared types for each field, used by the SDK's type checker.
    _field_types = {'account_id': 'string', 'ad_active_time': 'string', 'ad_review_feedback': 'AdgroupReviewFeedback', 'ad_schedule_end_time': 'datetime', 'ad_schedule_start_time': 'datetime', 'adlabels': 'list<AdLabel>', 'adset': 'AdSet', 'adset_id': 'string', 'bid_amount': 'int', 'bid_info': 'map<string, unsigned int>', 'bid_type': 'BidType', 'campaign': 'Campaign', 'campaign_id': 'string', 'configured_status': 'ConfiguredStatus', 'conversion_domain': 'string', 'conversion_specs': 'list<ConversionActionQuery>', 'created_time': 'datetime', 'creative': 'AdCreative', 'demolink_hash': 'string', 'display_sequence': 'int', 'effective_status': 'EffectiveStatus', 'engagement_audience': 'bool', 'failed_delivery_checks': 'list<DeliveryCheck>', 'id': 'string', 'issues_info': 'list<AdgroupIssuesInfo>', 'last_updated_by_app_id': 'string', 'name': 'string', 'preview_shareable_link': 'string', 'priority': 'unsigned int', 'recommendations': 'list<AdRecommendation>', 'source_ad': 'Ad', 'source_ad_id': 'string', 'status': 'Status', 'targeting': 'Targeting', 'tracking_and_conversion_with_defaults': 'TrackingAndConversionWithDefaults', 'tracking_specs': 'list<ConversionActionQuery>', 'updated_time': 'datetime', 'adset_spec': 'AdSet', 'audience_id': 'string', 'date_format': 'string', 'draft_adgroup_id': 'string', 'execution_options': 'list<ExecutionOptions>', 'include_demolink_hashes': 'bool', 'filename': 'file'}

    # BUG FIX: takes `cls` but was missing @classmethod (same issue as
    # get_endpoint above).
    @classmethod
    def _get_field_enum_info(cls):
        """Map enum field names to their sets of allowed values."""
        field_enum_info = {}
        field_enum_info['BidType'] = Ad.BidType.__dict__.values()
        field_enum_info['ConfiguredStatus'] = Ad.ConfiguredStatus.__dict__.values()
        field_enum_info['EffectiveStatus'] = Ad.EffectiveStatus.__dict__.values()
        field_enum_info['Status'] = Ad.Status.__dict__.values()
        field_enum_info['DatePreset'] = Ad.DatePreset.__dict__.values()
        field_enum_info['ExecutionOptions'] = Ad.ExecutionOptions.__dict__.values()
        field_enum_info['Operator'] = Ad.Operator.__dict__.values()
        field_enum_info['StatusOption'] = Ad.StatusOption.__dict__.values()
        return field_enum_info
class DockerPack(BasePack):
    """Pack a model with BentoML by building and running a Docker image.

    The packing script runs inside a container; the resulting BentoML
    bundle is copied back to the host, saved locally, and the packer
    images are cleaned up afterwards.
    """

    def __init__(self, model_id, config_json):
        BasePack.__init__(self, model_id, config_json)
        self.docker = SimpleDocker()
        self.docker_org = self.cfg.EXT.DOCKERHUB_ORG
        self.docker_tag = self.cfg.ENV.DOCKER.REPO_TAG
        self.logger.debug('Initializing docker packer')

    def _load_model_from_tmp(self, path):
        """Load the newest BentoML bundle found under *path*, or None."""
        bundle_root = os.path.join(path, self.model_id)
        if not os.path.exists(bundle_root):
            return None
        tags = sorted(os.listdir(bundle_root))
        if not tags:
            return None
        # Lexicographically-last entry is treated as the latest tag.
        latest = tags[-1]
        return bentoml.load(os.path.join(bundle_root, latest))

    def _setup(self):
        """Record the Docker image name in the local environment database."""
        image = self.docker._image_name(self.docker_org, self.model_id, self.docker_tag)
        self.logger.debug('Storing Docker image {0} in the local environment database'.format(image))
        db = EnvironmentDb(config_json=self.config_json)
        db.table = 'docker'
        db.insert(model_id=self.model_id, env=image)
        self.logger.debug('Done with the Docker setup')

    def _delete_packer_container(self):
        """Remove the images created for this model during packing."""
        manager = DockerManager(config_json=self.config_json)
        manager.delete_images(self.model_id)

    def _run(self):
        """Build the image, pack inside a container, then import the bundle."""
        self._setup()
        self.logger.debug('Building docker image')
        self.docker.build('.', self.docker_org, self.model_id, self.docker_tag)
        self.logger.debug('Running docker')
        container = self.docker.run(self.docker_org, self.model_id, self.docker_tag, name=None)
        self.logger.debug('Executing container {0}'.format(container))
        self.docker.exec_container(container, ('python %s' % self.cfg.HUB.PACK_SCRIPT))
        self.logger.debug('Copying bundle from docker image to host')
        staging_dir = tempfile.mkdtemp(prefix='ersilia-')
        self.logger.debug('Using this temporary directory: {0}'.format(staging_dir))
        self.docker.cp_from_container(container, ('/root/bentoml/repository/%s' % self.model_id), staging_dir)
        self.logger.debug('Loading bentoml')
        bundle = self._load_model_from_tmp(staging_dir)
        bundle.save()
        self._symlinks()
        self._delete_packer_container()

    def run(self):
        """Entry point: write install commands, then pack with Docker."""
        self.logger.debug('Packing model with Docker')
        self._write_model_install_commands()
        self._run()
class HeatmapDrawer(TracksDrawer):
    """Draw all tracks on top of each other in a "heatmap" style.

    Each polyline is stroked several times with (transparency, width)
    pairs so that frequently-traveled segments stack up and appear
    brighter/denser than one-off segments.
    """

    def __init__(self, the_poster: Poster):
        super().__init__(the_poster)
        # Optional forced map center / minimum visible radius (km), set via CLI.
        self._center = None
        self._radius = None
        # Bounding-box diagonal lengths (km) between which the line
        # (transparency, width) tuples are interpolated.
        self._heatmap_line_width_low: float = 10.0
        self._heatmap_line_width_upp: float = 1000.0
        # Stroke passes for small maps (diagonal <= low) ...
        self._heatmap_line_width_lower: List[Tuple[(float, float)]] = [(0.1, 5.0), (0.2, 2.0), (1.0, 0.3)]
        # ... and for very large maps (diagonal >= upp).
        self._heatmap_line_width_upper: List[Tuple[(float, float)]] = [(0.02, 0.5), (0.05, 0.2), (1.0, 0.05)]
        # None means "compute automatically from the bounding box size".
        self._heatmap_line_width: Optional[List[Tuple[(float, float)]]] = self._heatmap_line_width_lower

    def create_args(self, args_parser: argparse.ArgumentParser) -> None:
        """Register the heatmap-specific command-line options."""
        group = args_parser.add_argument_group('Heatmap Type Options')
        group.add_argument('--heatmap-center', dest='heatmap_center', metavar='LAT,LNG', type=str, help='Center of the heatmap (default: automatic).')
        group.add_argument('--heatmap-radius', dest='heatmap_radius', metavar='RADIUS_KM', type=float, help='Scale the heatmap such that at least a circle with radius=RADIUS_KM is visible (default: automatic).')
        group.add_argument('--heatmap-line-transparency-width', dest='heatmap_line_width', metavar='TRANSP_1,WIDTH_1, TRANSP_2,WIDTH_2, TRANSP_3,WIDTH_3', type=str, help='Define three transparency and width tuples for the heatmap lines or set it to `automatic` for automatic calculation (default: 0.1,5.0, 0.2,2.0, 1.0,0.3).')

    def fetch_args(self, args: argparse.Namespace) -> None:
        """Validate and store the parsed heatmap options.

        Raises:
            ParameterError: on malformed center, radius, or
                transparency/width values.
        """
        self._center = None
        if args.heatmap_center:
            latlng_str = args.heatmap_center.split(',')
            if (len(latlng_str) != 2):
                raise ParameterError(f'Not a valid LAT,LNG pair: {args.heatmap_center}')
            try:
                lat = float(latlng_str[0].strip())
                lng = float(latlng_str[1].strip())
            except ValueError as e:
                raise ParameterError(f'Not a valid LAT,LNG pair: {args.heatmap_center}') from e
            if ((not ((- 90) <= lat <= 90)) or (not ((- 180) <= lng <= 180))):
                raise ParameterError(f'Not a valid LAT,LNG pair: {args.heatmap_center}')
            self._center = s2sphere.LatLng.from_degrees(lat, lng)
        if args.heatmap_radius:
            if (args.heatmap_radius <= 0):
                raise ParameterError(f'Not a valid radius: {args.heatmap_radius} (must be > 0)')
            if (not args.heatmap_center):
                raise ParameterError('--heatmap-radius needs --heatmap-center')
            self._radius = args.heatmap_radius
        if args.heatmap_line_width:
            if (args.heatmap_line_width.lower() == 'automatic'):
                # None triggers automatic interpolation in
                # _get_line_transparencies_and_widths.
                self._heatmap_line_width = None
            else:
                trans_width_str = args.heatmap_line_width.split(',')
                if (len(trans_width_str) != 6):
                    raise ParameterError(f'Not three valid TRANSPARENCY,WIDTH pairs: {args.heatmap_line_width}')
                try:
                    self._heatmap_line_width = []
                    # Values come in flat pairs: indices (0,1), (2,3), (4,5).
                    for value in range(0, 5, 2):
                        transparency = float(trans_width_str[value].strip())
                        width = float(trans_width_str[(value + 1)].strip())
                        if ((transparency < 0) or (transparency > 1)):
                            raise ParameterError(f'Not a valid TRANSPARENCY value (0 < value < 1): {transparency} in {args.heatmap_line_width}')
                        self._heatmap_line_width.append((transparency, width))
                except ValueError as e:
                    raise ParameterError(f'Not three valid TRANSPARENCY,WIDTH pairs: {args.heatmap_line_width}') from e

    def _get_line_transparencies_and_widths(self, bbox: s2sphere.sphere.LatLngRect) -> List[Tuple[(float, float)]]:
        """Return the three stroke passes, interpolated by bbox diagonal length."""
        if self._heatmap_line_width:
            # Explicitly configured (or default) values win.
            return self._heatmap_line_width
        low = self._heatmap_line_width_low
        upp = self._heatmap_line_width_upp
        lower = self._heatmap_line_width_lower
        upper = self._heatmap_line_width_upper
        # Diagonal of the bounding box in km.
        d = distance((bbox.lo().lat().degrees, bbox.lo().lng().degrees), (bbox.hi().lat().degrees, bbox.hi().lng().degrees)).km
        log.info('Length of diagonal of boundary box %s', str(d))
        if (d > upp):
            return upper
        if (d < low):
            return lower
        # Linear interpolation between the lower and upper tuple sets.
        # NOTE(review): the factor is d/(upp-low), not (d-low)/(upp-low),
        # so it is slightly above 0 at d==low and slightly above 1 at
        # d==upp - confirm this asymmetry is intended.
        return [((lower[0][0] + ((d / (upp - low)) * (upper[0][0] - lower[0][0]))), (lower[0][1] + ((d / (upp - low)) * (upper[0][1] - lower[0][1])))), ((lower[1][0] + ((d / (upp - low)) * (upper[1][0] - lower[1][0]))), (lower[1][1] + ((d / (upp - low)) * (upper[1][1] - lower[1][1])))), ((lower[2][0] + ((d / (upp - low)) * (upper[2][0] - lower[2][0]))), (lower[2][1] + ((d / (upp - low)) * (upper[2][1] - lower[2][1]))))]

    def _determine_bbox(self) -> s2sphere.LatLngRect:
        """Compute the lat/lng rectangle to draw: forced center or track union."""
        if self._center:
            log.info('Forcing heatmap center to %s', str(self._center))
            (dlat, dlng) = (0, 0)
            if self._radius:
                # Convert the radius (km) into degree offsets; the longitude
                # offset is scaled by 1/cos(lat) to account for convergence
                # of meridians.
                er = 6378.1
                quarter = ((er * math.pi) / 2)
                dlat = ((90 * self._radius) / quarter)
                scale = (1 / math.cos(self._center.lat().radians))
                dlng = (((scale * 90) * self._radius) / quarter)
            else:
                # No radius given: grow the box until every track point fits.
                for tr in self.poster.tracks:
                    for line in tr.polylines:
                        for latlng in line:
                            d = abs((self._center.lat().degrees - latlng.lat().degrees))
                            dlat = max(dlat, d)
                            d = abs((self._center.lng().degrees - latlng.lng().degrees))
                            # Normalize the longitude difference onto [0, 180]
                            # (shortest way around the globe).
                            while (d > 360):
                                d -= 360
                            if (d > 180):
                                d = (360 - d)
                            dlng = max(dlng, d)
            return s2sphere.LatLngRect.from_center_size(self._center, s2sphere.LatLng.from_degrees((2 * dlat), (2 * dlng)))
        # No forced center: union of all track bounding boxes.
        tracks_bbox = s2sphere.LatLngRect()
        for tr in self.poster.tracks:
            tracks_bbox = tracks_bbox.union(tr.bbox())
        return tracks_bbox

    def draw(self, dr: svgwrite.Drawing, g: svgwrite.container.Group, size: XY, offset: XY) -> None:
        """Render all tracks into *g*, grouped by start year, as layered strokes."""
        bbox = self._determine_bbox()
        line_transparencies_and_widths = self._get_line_transparencies_and_widths(bbox)
        # One SVG sub-group per calendar year, created lazily.
        year_groups: Dict[(int, svgwrite.container.Group)] = {}
        for tr in self.poster.tracks:
            year = tr.start_time().year
            if (year not in year_groups):
                g_year = dr.g(id=f'year{year}')
                g.add(g_year)
                year_groups[year] = g_year
            else:
                g_year = year_groups[year]
            color = self.color(self.poster.length_range, tr.length(), tr.special)
            for line in utils.project(bbox, size, offset, tr.polylines):
                # Stroke each projected polyline once per (opacity, width) pass.
                for (opacity, width) in line_transparencies_and_widths:
                    g_year.add(dr.polyline(points=line, stroke=color, stroke_opacity=opacity, fill='none', stroke_width=width, stroke_linejoin='round', stroke_linecap='round'))
def _swap_abilities(caller, raw_string, **kwargs):
    """Menu goto-callable that swaps two ability scores on the temp character.

    Args:
        caller: The entity doing chargen; receives error feedback via `.msg`.
        raw_string (str): User input, expected as "<ability1> <ability2>".
        **kwargs: Menu state; must contain 'tmp_character'.

    Returns:
        tuple or None: (next_node_name_or_None, kwargs), or None when no
        input was given (matching the original implicit fall-through).
    """
    if not raw_string:
        # No input: the original fell off the end and returned None.
        return None
    abi1, *rest = raw_string.split(' ', 1)
    if not rest:
        caller.msg("That doesn't look right.")
        return (None, kwargs)
    abi2 = rest[0]
    abi1, abi2 = abi1.upper().strip(), abi2.upper().strip()
    if abi1 not in _ABILITIES or abi2 not in _ABILITIES:
        # BUG FIX: corrected typo in user-facing message ('abilites').
        caller.msg('Not a familiar set of abilities.')
        return (None, kwargs)
    tmp_character = kwargs['tmp_character']
    attr1 = _ABILITIES[abi1]
    attr2 = _ABILITIES[abi2]
    val1 = getattr(tmp_character, attr1)
    val2 = getattr(tmp_character, attr2)
    # Swap the two ability values and count the change.
    setattr(tmp_character, attr1, val2)
    setattr(tmp_character, attr2, val1)
    tmp_character.ability_changes += 1
    return ('node_chargen', kwargs)
class NodeMatcher(Matcher):
    """Matcher that also carries drawing directives for a matched node."""

    def __init__(self, structure: dict[str, Any], group: dict[str, Any]) -> None:
        super().__init__(structure, group)

        def shapes_of(key: str) -> Optional[IconDescription]:
            # Parse a shape-specification entry if present, otherwise None.
            return get_shape_specifications(structure[key]) if key in structure else None

        self.draw: bool = structure.get('draw', True)
        self.shapes: Optional[IconDescription] = shapes_of('shapes')
        self.over_icon: Optional[IconDescription] = shapes_of('over_icon')
        self.add_shapes: Optional[IconDescription] = shapes_of('add_shapes')
        self.set_main_color: Optional[str] = structure.get('set_main_color')
        self.set_opacity: Optional[float] = structure.get('set_opacity')
        self.under_icon: Optional[IconDescription] = shapes_of('under_icon')
        self.with_icon: Optional[IconDescription] = shapes_of('with_icon')

    def get_clean_shapes(self) -> Optional[list[str]]:
        """Return the bare shape identifiers, or None when no shapes are set."""
        return [spec['shape'] for spec in self.shapes] if self.shapes else None
# BUG FIX: the route/doc/marshal_with calls were bare no-op expressions;
# they are flask-restx decorators and must be applied with '@', otherwise
# the endpoint is never registered and the response is never marshalled.
@_builds_ns.route('/<int:build_id>')
class GetBuild(Resource):
    """REST resource exposing a single build by its id."""

    @_builds_ns.doc(params=get_build_docs)
    @_builds_ns.marshal_with(build_model)
    def get(self, build_id):
        """Return the build with the given id, serialized for the API."""
        build = ComplexLogic.get_build(build_id)
        result = to_dict(build)
        # Also expose the primary key under the documented 'build_id' name.
        result['build_id'] = result['id']
        return result
class SchemaspaceExport(SchemaspaceBase):
    """CLI subcommand: export metadata instances of a schemaspace to JSON files."""

    schema_name_option = CliOption('--schema_name', name='schema_name', description='The schema name of the metadata instances to export', required=False)
    include_invalid_flag = Flag('--include-invalid', name='include-invalid', description='Export valid and invalid instances. By default only valid instances are exported.', default_value=False)
    clean_flag = Flag('--clean', name='clean', description='Clear out contents of the export directory', default_value=False)
    directory_option = CliOption('--directory', name='directory', description='The local file system path where the exported metadata will be stored', required=True)
    options: List[Option] = [schema_name_option, include_invalid_flag, clean_flag, directory_option]

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.metadata_manager = MetadataManager(schemaspace=self.schemaspace)

    def start(self):
        """Validate options, gather instances, and write one JSON file each."""
        super().start()
        schema_name = self.schema_name_option.value
        if schema_name:
            schema_list = sorted(list(self.schemas.keys()))
            if (schema_name not in schema_list):
                print(f"Schema name '{schema_name}' is invalid. For the '{self.schemaspace}' schemaspace, the schema name must be one of {schema_list}")
                self.exit(1)
        include_invalid = self.include_invalid_flag.value
        directory = self.directory_option.value
        clean = self.clean_flag.value
        try:
            # BUG FIX: the original tested `self.schema_name_option is not None`,
            # which is always true (it is a class attribute), leaving the
            # unfiltered branch dead. Test the resolved value instead.
            if schema_name:
                metadata_instances = self.metadata_manager.get_all(include_invalid=include_invalid, of_schema=schema_name)
            else:
                metadata_instances = self.metadata_manager.get_all(include_invalid=include_invalid)
        except MetadataNotFoundError:
            metadata_instances = None
        if (not metadata_instances):
            print((f"No metadata instances found for schemaspace '{self.schemaspace}'" + (f" and schema '{schema_name}'" if schema_name else '')))
            print(f"Nothing exported to '{directory}'")
            return
        dest_directory = os.path.join(directory, self.schemaspace)
        if (not os.path.exists(dest_directory)):
            try:
                print(f"Creating directory structure for '{dest_directory}'")
                os.makedirs(dest_directory)
            except OSError as e:
                print(f"Error creating directory structure for '{dest_directory}': {e.strerror}: '{e.filename}'")
                self.exit(1)
        elif clean:
            # --clean: remove every regular file already in the destination.
            files = [os.path.join(dest_directory, f) for f in os.listdir(dest_directory)]
            if (len(files) > 0):
                print(f"Cleaning out all files in '{dest_directory}'")
                for f in files:
                    if os.path.isfile(f):
                        os.remove(f)
        print((((f"Exporting metadata instances for schemaspace '{self.schemaspace}'" + (f" and schema '{schema_name}'" if schema_name else '')) + (' (includes invalid)' if include_invalid else ' (valid only)')) + f" to '{dest_directory}'"))
        num_valid_exported = 0
        num_invalid_exported = 0
        for instance in metadata_instances:
            dict_metadata = instance.to_dict()
            output_file = os.path.join(dest_directory, f"{dict_metadata['name']}.json")
            # A non-empty 'reason' marks the instance as invalid.
            if (('reason' in dict_metadata) and (len(dict_metadata['reason']) > 0)):
                num_invalid_exported += 1
            else:
                num_valid_exported += 1
            # FIX: do not shadow the output_file path with the file handle.
            with open(output_file, mode='w') as fp:
                json.dump(dict_metadata, fp, indent=4)
        total_exported = (num_valid_exported + num_invalid_exported)
        print(((((f'Exported {total_exported} ' + ('instances' if (total_exported > 1) else 'instance')) + f' ({num_invalid_exported} of which ') + ('is' if (num_invalid_exported == 1) else 'are')) + ' invalid)'))
# BUG FIX: the parametrize calls were orphaned attribute expressions
# (syntax errors); they are pytest decorators and need '@pytest.mark.'.
@pytest.mark.parametrize('params', (['t', 'u'], ['u', 't']))
@pytest.mark.parametrize('levels', ([500, 850], [850, 500]))
@pytest.mark.parametrize('source_name', ['indexed-directory'])
def test_indexing_pickle(params, levels, source_name):
    """Selection and ordering must survive (de)serialisation of the source."""
    # NOTE(review): the original date value was lost ('date=,' was a syntax
    # error); 20180101 is a placeholder - confirm against the fixture data.
    request = dict(level=levels, variable=params, date=20180101, time='1200')
    (ds, __tmp, total, n) = get_fixtures(source_name, {})
    assert (len(ds) == total), len(ds)
    # Narrow to the requested fields, then impose an explicit order.
    ds = ds.sel(**request)
    ds = ds.order_by(level=levels, variable=params)
    check_sel_and_order(ds, params, levels)
    assert (len(ds) == n), (len(ds), ds, SERIALISATION)
    # Round-trip through the serialised state; results must be unchanged.
    state = serialise_state(ds)
    ds = deserialise_state(state)
    assert (len(ds) == n), (len(ds), ds, SERIALISATION)
    check_sel_and_order(ds, params, levels)
def variate(oid, tag, value, **context):
    """snmpsim variation-module hook: originate an SNMP TRAP/INFORM whenever
    the simulated OID is hit by a matching request type.

    ``value`` carries a comma-separated ``key=value`` option string which is
    parsed once per record and cached in ``recordContext['settings']``.
    Returns the usual snmpsim ``(oid, tag, value)`` triple on success, or an
    error-status triple when the request should not be answered.
    """
    # A managed snmpEngine is required so its transport dispatcher can be
    # reused for sending notifications.
    if ('snmpEngine' in context) and context['snmpEngine']:
        snmpEngine = context['snmpEngine']
        if snmpEngine not in moduleContext:
            moduleContext[snmpEngine] = {}
        if context['transportDomain'] not in moduleContext[snmpEngine]:
            # First use of this transport domain: register client-side
            # endpoints for both UDP/IPv4 and UDP/IPv6.
            snmpEngine.registerTransportDispatcher(snmpEngine.transportDispatcher, (UdpTransportTarget.transportDomain + context['transportDomain']))
            snmpEngine.registerTransportDispatcher(snmpEngine.transportDispatcher, (Udp6TransportTarget.transportDomain + context['transportDomain']))
            moduleContext[snmpEngine][context['transportDomain']] = 1
    else:
        raise error.SnmpsimError('Variation module is not given snmpEngine. Make sure you are not running in --v2c-arch mode')
    if (not context['nextFlag']) and (not context['exactMatch']):
        return (context['origOid'], tag, context['errorStatus'])
    if 'settings' not in recordContext:
        # Parse the option string once and cache it on the record.
        recordContext['settings'] = dict([split(x, '=') for x in split(value, ',')])
        for (k, v) in MODULE_OPTIONS:
            recordContext['settings'].setdefault(k, v)
        if 'hexvalue' in recordContext['settings']:
            # Decode a hex string into a list of byte values.
            recordContext['settings']['value'] = [int(recordContext['settings']['hexvalue'][x:(x + 2)], 16) for x in range(0, len(recordContext['settings']['hexvalue']), 2)]
        if 'vlist' in recordContext['settings']:
            # Parse colon-separated (condition, value) pairs into a dict of
            # SET-value filters: {'eq': set(...), 'lt': v, 'gt': v}.
            vlist = {}
            recordContext['settings']['vlist'] = split(recordContext['settings']['vlist'], ':')
            while recordContext['settings']['vlist']:
                (o, v) = recordContext['settings']['vlist'][:2]
                # BUG FIX: consume the pair directly off the cached list.
                # The original assigned the remainder to the local ``vlist``,
                # clobbering the dict being built here and crashing on the
                # ``vlist[o]`` accesses below.
                recordContext['settings']['vlist'] = recordContext['settings']['vlist'][2:]
                (typeTag, _) = SnmprecRecord.unpack_tag(tag)
                v = SnmprecGrammar.TAG_MAP[typeTag](v)
                if o not in vlist:
                    vlist[o] = set()
                if o == 'eq':
                    vlist[o].add(v)
                elif o in ('lt', 'gt'):
                    vlist[o] = v
                else:
                    log.info(('notification: bad vlist syntax: %s' % recordContext['settings']['vlist']))
            recordContext['settings']['vlist'] = vlist
    args = recordContext['settings']
    if context['setFlag'] and ('vlist' in args):
        # For SET requests, only proceed when the incoming value matches one
        # of the configured eq/lt/gt conditions.
        if ('eq' in args['vlist']) and (context['origValue'] in args['vlist']['eq']):
            pass
        elif ('lt' in args['vlist']) and (context['origValue'] < args['vlist']['lt']):
            pass
        elif ('gt' in args['vlist']) and (context['origValue'] > args['vlist']['gt']):
            pass
        else:
            return (oid, tag, context['origValue'])
    if args['op'] not in ('get', 'set', 'any', '*'):
        log.info(('notification: unknown SNMP request type configured: %s' % args['op']))
        return (context['origOid'], tag, context['errorStatus'])
    # Fire the notification only when the request type matches the
    # configured 'op' option.
    if (((args['op'] == 'get') and (not context['setFlag'])) or ((args['op'] == 'set') and context['setFlag']) or (args['op'] in ('any', '*'))):
        if args['version'] in ('1', '2c'):
            authData = CommunityData(args['community'], mpModel=(((args['version'] == '2c') and 1) or 0))
        elif args['version'] == '3':
            if args['authproto'] == 'md5':
                authProtocol = usmHMACMD5AuthProtocol
            elif args['authproto'] == 'sha':
                authProtocol = usmHMACSHAAuthProtocol
            elif args['authproto'] == 'none':
                authProtocol = usmNoAuthProtocol
            else:
                log.info(('notification: unknown auth proto %s' % args['authproto']))
                return (context['origOid'], tag, context['errorStatus'])
            if args['privproto'] == 'des':
                privProtocol = usmDESPrivProtocol
            elif args['privproto'] == 'aes':
                privProtocol = usmAesCfb128Protocol
            elif args['privproto'] == 'none':
                privProtocol = usmNoPrivProtocol
            else:
                log.info(('notification: unknown privacy proto %s' % args['privproto']))
                return (context['origOid'], tag, context['errorStatus'])
            authData = UsmUserData(args['user'], args['authkey'], args['privkey'], authProtocol=authProtocol, privProtocol=privProtocol)
        else:
            log.info(('notification: unknown SNMP version %s' % args['version']))
            return (context['origOid'], tag, context['errorStatus'])
        if 'host' not in args:
            log.info(('notification: target hostname not configured for OID %s' % (oid,)))
            return (context['origOid'], tag, context['errorStatus'])
        if args['proto'] == 'udp':
            target = UdpTransportTarget((args['host'], int(args['port'])))
        elif args['proto'] == 'udp6':
            target = Udp6TransportTarget((args['host'], int(args['port'])))
        else:
            log.info(('notification: unknown transport %s' % args['proto']))
            return (context['origOid'], tag, context['errorStatus'])
        localAddress = None
        if 'bindaddr' in args:
            localAddress = args['bindaddr']
        else:
            # Default to the local address the request arrived on, when the
            # transport types are compatible.
            transportDomain = context['transportDomain'][:len(target.transportDomain)]
            if transportDomain == target.transportDomain:
                localAddress = snmpEngine.transportDispatcher.getTransport(context['transportDomain']).getLocalAddress()[0]
            else:
                log.info('notification: incompatible network transport types used by CommandResponder vs NotificationOriginator')
                if 'bindaddr' in args:
                    localAddress = args['bindaddr']
        if localAddress:
            log.info(('notification: binding to local address %s' % localAddress))
            target.setLocalAddress((localAddress, 0))
        # Route the notification through the same transport the request came
        # in on.
        target.transportDomain = (target.transportDomain + context['transportDomain'])
        varBinds = []
        if 'uptime' in args:
            varBinds.append((ObjectIdentifier('1.3.6.1.2.1.1.3.0'), TimeTicks(args['uptime'])))
        if args['version'] == '1':
            # SNMPv1-only trap fields.
            if 'agentaddress' in args:
                varBinds.append((ObjectIdentifier('1.3.6.1.6.3.18.1.3.0'), IpAddress(args['agentaddress'])))
            if 'enterprise' in args:
                varBinds.append((ObjectIdentifier('1.3.6.1.6.3.1.1.4.3.0'), ObjectIdentifier(args['enterprise'])))
        if 'varbinds' in args:
            # Extra var-binds come as colon-separated (oid, type, value) triples.
            vbs = split(args['varbinds'], ':')
            while vbs:
                varBinds.append((ObjectIdentifier(vbs[0]), TYPE_MAP[vbs[1]](vbs[2])))
                vbs = vbs[3:]
        notificationType = NotificationType(ObjectIdentity(args['trapoid'])).addVarBinds(*varBinds)
        sendNotification(snmpEngine, authData, target, ContextData(), args['ntftype'], notificationType, cbFun=_cbFun, cbCtx=(oid, value))
        log.info(('notification: sending Notification to %s with credentials %s' % (authData, target)))
    # Answer the original request: echo the SET value, or serve the
    # configured 'value' option when present.
    if context['setFlag'] or ('value' not in args):
        return (oid, tag, context['origValue'])
    else:
        return (oid, tag, args['value'])
def panel_with_localization_on(qtbot):
    """Factory fixture: builds an AnalysisModuleVariablesPanel and clicks its
    'localization' checkbox before handing (settings, panel) back."""

    def _make(settings, ensemble_size):
        panel = AnalysisModuleVariablesPanel(settings, ensemble_size)
        qtbot.addWidget(panel)
        localization_box = panel.findChild(QCheckBox, name='localization')
        # Toggle localization on via a real mouse click.
        qtbot.mouseClick(localization_box, Qt.LeftButton)
        return (settings, panel)

    yield _make
class Solution(object):
    """LeetCode 405 — Convert a Number to Hexadecimal.

    Interprets ``num`` as a 32-bit two's-complement integer.
    """

    # Hex digit characters indexed by nibble value.
    _DIGITS = '0123456789abcdef'

    def toHex(self, num):
        """Return the lowercase hex string of ``num`` (32-bit two's
        complement), without leading zeros; 0 maps to '0'.

        Fixes the original's Python-2-only ``xrange`` and replaces the manual
        bit-by-bit assembly with direct nibble extraction.  Python's
        arithmetic right shift plus ``& 0xF`` yields the two's-complement
        nibbles for negative inputs as well.
        """
        # Most-significant nibble first: shifts 28, 24, ..., 0.
        nibbles = [self._DIGITS[(num >> shift) & 0xF] for shift in range(28, -1, -4)]
        return ''.join(nibbles).lstrip('0') or '0'
def get_primary_entity(options, cfg):
    """Resolve the primary copyright entity.

    Precedence: the command-line option, then the style config's
    ``copyright_primary_entity``, then — if that is empty and exactly one
    ``copyright_entity`` is configured — that single entity.  May return a
    falsy value when nothing is configured.
    """
    primary = options.primary_entity
    if not primary:
        primary = cfg.style_config['copyright_primary_entity']
        if not primary:
            entities = cfg.style_config['copyright_entity']
            if len(entities) == 1:
                (primary,) = entities
    return primary
class Blueprint(BlueprintT):
    """Named, reusable collection of web routes and static routes that can be
    registered on an app under an optional URL prefix."""

    routes: List[FutureRoute]
    static_routes: List[FutureStaticRoute]
    # Separator used when composing "<blueprint>:<view>" view names.
    view_name_separator: str = ':'

    def __init__(self, name: str, *, url_prefix: Optional[str]=None) -> None:
        self.name = name
        self.url_prefix = url_prefix
        self.routes = []
        self.static_routes = []

    def cache(self, timeout: Optional[Seconds]=None, include_headers: bool=False, key_prefix: Optional[str]=None, backend: Union[(Type[CacheBackendT], str)]=None) -> CacheT:
        """Return a cache helper; the key prefix defaults to this blueprint's name."""
        if key_prefix is None:
            key_prefix = self.name
        return Cache(timeout, include_headers, key_prefix, backend)

    def route(self, uri: str, *, name: Optional[str]=None, cors_options: Mapping[(str, ResourceOptions)]=None, base: Type[View]=View) -> RouteDecoratorRet:
        """Decorator that queues ``handler`` for ``uri``; routes are applied
        to an app only when :meth:`register` is called."""
        def _inner(handler: PageArg) -> PageArg:
            route = FutureRoute(uri=uri, name=(name or handler.__name__), handler=handler, base=base, cors_options=(cors_options or {}))
            self.routes.append(route)
            return handler
        return _inner

    def static(self, uri: str, file_or_directory: Union[(str, Path)], *, name: Optional[str]=None) -> None:
        """Queue a static route serving ``file_or_directory`` at ``uri``.

        The route name defaults to 'static' and is prefixed with the
        blueprint name when not already.
        """
        _name: str = (name or 'static')
        if not _name.startswith(self.name + '.'):
            # BUG FIX: prefix the *resolved* name.  The original interpolated
            # the raw ``name`` argument, producing e.g. "bp.None" whenever the
            # name was omitted.
            _name = f'{self.name}.{_name}'
        fut = FutureStaticRoute(uri, Path(file_or_directory), _name)
        self.static_routes.append(fut)

    def register(self, app: AppT, *, url_prefix: Optional[str]=None) -> None:
        """Apply every queued route and static route to ``app``, optionally
        under ``url_prefix`` (falls back to the blueprint's own prefix)."""
        url_prefix = url_prefix or self.url_prefix
        for route in self.routes:
            self._apply_route(app, route, url_prefix)
        for static_route in self.static_routes:
            self._apply_static_route(app.web, static_route, url_prefix)

    def _apply_route(self, app: AppT, route: FutureRoute, url_prefix: Optional[str]) -> None:
        uri = self._url_with_prefix(route.uri, url_prefix)
        # Collapse an accidental leading '//' produced by prefix joining.
        app.page(path=(uri[1:] if uri.startswith('//') else uri), name=self._view_name(route.name), cors_options=route.cors_options)(route.handler)

    def _view_name(self, name: str) -> str:
        return self.view_name_separator.join([self.name, name])

    def init_webserver(self, web: Web) -> None:
        self.on_webserver_init(web)

    def on_webserver_init(self, web: Web) -> None:
        """Subclass hook invoked when the web server initializes."""
        ...

    def _url_with_prefix(self, url: str, prefix: Optional[str]=None) -> str:
        if prefix:
            return prefix.rstrip('/') + '/' + url.lstrip('/')
        return url

    def _apply_static_route(self, web: Web, route: FutureStaticRoute, url_prefix: Optional[str]) -> None:
        uri = self._url_with_prefix(route.uri, url_prefix)
        web.add_static(uri, route.file_or_directory)

    def __repr__(self) -> str:
        return f'<{type(self).__name__}: {self.name}>'
class FixObserveTrueTest(unittest.TestCase):
    """Tests for BMGInference's ``_fix_observe_true`` graph rewrite flag."""

    def test_fix_observe_true(self) -> None:
        """Compare the generated BMG dot graph with the rewrite off (default)
        and on.

        With the flag off, the model's two observed Bernoulli draws appear as
        Sample/Observation nodes; with the flag on they are replaced by
        ExpProduct factor nodes in the expected graph below.
        """
        self.maxDiff = None
        # Two samples of the model observed to be True (tensor(1.0)).
        # NOTE(review): `observation` and `tensor` come from the surrounding
        # test module / torch — their model definitions are not visible here.
        observations = {observation(0): tensor(1.0), observation(1): tensor(1.0)}
        queries = []
        # Default behaviour: observations become Observation nodes.
        bmg = BMGInference()
        observed = bmg.to_dot(queries, observations)
        expected = '\ndigraph "graph" {\n  N00[label=0.5];\n  N01[label=Beta];\n  N02[label=Sample];\n  N03[label=1.0];\n  N04[label=Beta];\n  N05[label=Sample];\n  N06[label=Sample];\n  N07[label=2.0];\n  N08[label=Beta];\n  N09[label=Sample];\n  N10[label=Sample];\n  N11[label=Log];\n  N12[label=Log];\n  N13[label=Log];\n  N14[label="+"];\n  N15[label=complement];\n  N16[label=Log];\n  N17[label=complement];\n  N18[label=Log];\n  N19[label=complement];\n  N20[label=Log];\n  N21[label="+"];\n  N22[label=LogSumExp];\n  N23[label=Exp];\n  N24[label=ToProb];\n  N25[label=Bernoulli];\n  N26[label=Sample];\n  N27[label="Observation True"];\n  N28[label=Sample];\n  N29[label="Observation True"];\n  N00 -> N01;\n  N00 -> N01;\n  N01 -> N02;\n  N02 -> N11;\n  N02 -> N15;\n  N03 -> N04;\n  N03 -> N04;\n  N04 -> N05;\n  N04 -> N06;\n  N05 -> N12;\n  N06 -> N13;\n  N07 -> N08;\n  N07 -> N08;\n  N08 -> N09;\n  N08 -> N10;\n  N09 -> N17;\n  N10 -> N19;\n  N11 -> N14;\n  N12 -> N14;\n  N13 -> N14;\n  N14 -> N22;\n  N15 -> N16;\n  N16 -> N21;\n  N17 -> N18;\n  N18 -> N21;\n  N19 -> N20;\n  N20 -> N21;\n  N21 -> N22;\n  N22 -> N23;\n  N23 -> N24;\n  N24 -> N25;\n  N25 -> N26;\n  N25 -> N28;\n  N26 -> N27;\n  N28 -> N29;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())
        # With the private rewrite flag enabled, the Observation-of-True
        # subgraphs collapse into ExpProduct nodes.
        bmg = BMGInference()
        bmg._fix_observe_true = True
        observed = bmg.to_dot(queries, observations)
        expected = '\ndigraph "graph" {\n  N00[label=0.5];\n  N01[label=Beta];\n  N02[label=Sample];\n  N03[label=1.0];\n  N04[label=Beta];\n  N05[label=Sample];\n  N06[label=Sample];\n  N07[label=2.0];\n  N08[label=Beta];\n  N09[label=Sample];\n  N10[label=Sample];\n  N11[label=Log];\n  N12[label=Log];\n  N13[label=Log];\n  N14[label="+"];\n  N15[label=complement];\n  N16[label=Log];\n  N17[label=complement];\n  N18[label=Log];\n  N19[label=complement];\n  N20[label=Log];\n  N21[label="+"];\n  N22[label=LogSumExp];\n  N23[label=ExpProduct];\n  N24[label=ExpProduct];\n  N00 -> N01;\n  N00 -> N01;\n  N01 -> N02;\n  N02 -> N11;\n  N02 -> N15;\n  N03 -> N04;\n  N03 -> N04;\n  N04 -> N05;\n  N04 -> N06;\n  N05 -> N12;\n  N06 -> N13;\n  N07 -> N08;\n  N07 -> N08;\n  N08 -> N09;\n  N08 -> N10;\n  N09 -> N17;\n  N10 -> N19;\n  N11 -> N14;\n  N12 -> N14;\n  N13 -> N14;\n  N14 -> N22;\n  N15 -> N16;\n  N16 -> N21;\n  N17 -> N18;\n  N18 -> N21;\n  N19 -> N20;\n  N20 -> N21;\n  N21 -> N22;\n  N22 -> N23;\n  N22 -> N24;\n}\n'
        self.assertEqual(expected.strip(), observed.strip())
class TestCandidateTotalsDetail(ApiBaseTest):
    """API tests for the candidate totals detail endpoint.

    House/Senate ('H'/'S') and presidential ('P') candidates expose the same
    underlying columns under office-specific field names; these tests pin the
    per-office schema plus the endpoint's filtering and sorting behaviour.
    """

    # Output fields shared by both schemas.
    candidate_totals_fields = {'candidate_election_year': 2020, 'offsets_to_operating_expenditures': 100.0, 'political_party_committee_contributions': 110.1, 'other_disbursements': 120.0, 'other_political_committee_contributions': 130.0, 'individual_itemized_contributions': 140.0, 'individual_unitemized_contributions': 150.0, 'disbursements': 160.0, 'contributions': 170.0, 'contribution_refunds': 180.0, 'individual_contributions': 190.0, 'refunded_individual_contributions': 200.0, 'refunded_other_political_committee_contributions': 210.0, 'refunded_political_party_committee_contributions': 220.0, 'receipts': 230.0, 'coverage_start_date': None, 'coverage_end_date': None, 'transaction_coverage_date': None, 'operating_expenditures': 240.0, 'last_report_year': 2020, 'last_report_type_full': 'Q3', 'last_beginning_image_number': '123456', 'last_cash_on_hand_end_period': 250.0, 'last_debts_owed_by_committee': 260.0, 'last_debts_owed_to_committee': 270.0, 'candidate_contribution': 280.0, 'exempt_legal_accounting_disbursement': 290.0, 'federal_funds': 300.0, 'fundraising_disbursements': 310.0, 'offsets_to_fundraising_expenditures': 350.0, 'offsets_to_legal_accounting': 360.0, 'total_offsets_to_operating_expenditures': 370.0, 'other_receipts': 390.0, 'transfers_to_other_authorized_committee': 430.0, 'election_full': True, 'net_operating_expenditures': 440.0, 'net_contributions': 450.0}
    # Underlying (factory/database) names of the columns whose exposed field
    # name differs by office; each test maps them to its schema's names.
    excluded_schema_fields = {'other_loans_received': 380.0, 'loan_repayments_made': 320.0, 'repayments_loans_made_by_candidate': 400.0, 'repayments_other_loans': 410.0, 'loans_received': 330.0, 'loans_received_from_candidate': 340.0, 'transfers_from_affiliated_committee': 420.0, 'last_net_operating_expenditures': 530.0, 'last_net_contributions': 540.0}

    def test_house_senate_totals_fields(self):
        """House/Senate results expose the loan/transfer columns under the
        H/S schema names; only one of the two seeded cycles is returned."""
        # Field names the H/S schema uses for the excluded_schema_fields values.
        changed_schema_fields = {'all_other_loans': 380.0, 'loan_repayments': 320.0, 'loan_repayments_candidate_loans': 400.0, 'loan_repayments_other_loans': 410.0, 'loans': 330.0, 'loans_made_by_candidate': 340.0, 'transfers_from_other_authorized_committee': 420.0, 'last_net_operating_expenditures': 530.0, 'last_net_contributions': 540.0}
        first_candidate = utils.extend(self.excluded_schema_fields, self.candidate_totals_fields, {'candidate_id': 'H', 'cycle': 2020})
        second_candidate = utils.extend(self.excluded_schema_fields, self.candidate_totals_fields, {'candidate_id': 'H', 'cycle': 2022})
        factories.CandidateTotalsDetailFactory(**first_candidate)
        factories.CandidateTotalsDetailFactory(**second_candidate)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='H'))
        # NOTE(review): only the cycle-2020 row is expected back — presumably
        # the endpoint filters on the candidate election year; confirm
        # against the view implementation.
        fields = utils.extend(self.candidate_totals_fields, changed_schema_fields, {'candidate_id': 'H', 'cycle': 2020})
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0], fields)

    def test_presidential_totals_fields(self):
        """Presidential results keep the underlying loan/transfer column
        names; the cycle-2022 row is the one returned."""
        # For 'P' candidates the exposed names match the database names.
        changed_schema_fields = {'other_loans_received': 380.0, 'loan_repayments_made': 320.0, 'repayments_loans_made_by_candidate': 400.0, 'repayments_other_loans': 410.0, 'loans_received': 330.0, 'loans_received_from_candidate': 340.0, 'transfers_from_affiliated_committee': 420.0, 'net_operating_expenditures': 530.0, 'net_contributions': 540.0}
        first_candidate = utils.extend(self.candidate_totals_fields, self.excluded_schema_fields, {'candidate_id': 'P', 'cycle': 2020})
        second_candidate = utils.extend(self.candidate_totals_fields, self.excluded_schema_fields, {'candidate_id': 'P', 'cycle': 2022})
        factories.CandidateTotalsDetailFactory(**first_candidate)
        factories.CandidateTotalsDetailFactory(**second_candidate)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='P'))
        fields = utils.extend(self.candidate_totals_fields, changed_schema_fields, {'candidate_id': 'P', 'cycle': 2022})
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0], fields)

    def test_election_full_filter(self):
        """The election_full query parameter selects full-election vs
        per-cycle rows."""
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2020, cycle=2020, election_full=True)
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2018, cycle=2018, election_full=False)
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2016, cycle=2016, election_full=False)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='H', election_full=True))
        self.assertEqual(len(results), 1)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2020, cycle=2020, election_full=True)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2018, cycle=2018, election_full=False)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2016, cycle=2016, election_full=False)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='P', election_full=False))
        self.assertEqual(len(results), 2)

    def test_cycle_filter(self):
        """The cycle query parameter restricts results to one two-year period."""
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2020, cycle=2020, election_full=True)
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2018, cycle=2018, election_full=False)
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2018, cycle=2018, election_full=False)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='H', cycle=2018))
        self.assertEqual(len(results), 1)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2020, cycle=2020, election_full=True)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2022, cycle=2020, election_full=False)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2024, cycle=2020, election_full=False)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='P', cycle=2020))
        self.assertEqual(len(results), 3)

    def test_sort(self):
        """Results honour ascending ('cycle') and descending ('-cycle') sort."""
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2020, cycle=2020, election_full=True)
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2018, cycle=2018, election_full=False)
        factories.CandidateTotalsDetailFactory(candidate_id='H', candidate_election_year=2018, cycle=2016, election_full=False)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='H', sort='cycle'))
        self.assertEqual(len(results), 3)
        self.assertEqual(results[0]['cycle'], 2016)
        self.assertEqual(results[0]['election_full'], False)
        self.assertEqual(results[0]['candidate_election_year'], 2018)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2016, cycle=2016, election_full=True)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2018, cycle=2018, election_full=False)
        factories.CandidateTotalsDetailFactory(candidate_id='P', candidate_election_year=2022, cycle=2022, election_full=True)
        results = self._results(api.url_for(CandidateTotalsDetailView, candidate_id='P', sort='-cycle'))
        self.assertEqual(len(results), 3)
        self.assertEqual(results[0]['cycle'], 2022)
        self.assertEqual(results[0]['election_full'], True)
        self.assertEqual(results[0]['candidate_election_year'], 2022)
def make_uniform_grid(dims, extent, center=0, has_center=False):
    """Create a uniform Cartesian grid of `dims` samples spanning `extent`
    around `center`; scalar arguments are broadcast to the common number of
    dimensions.

    When `has_center` is True, the samples are shifted so that one lies
    exactly on `center` along axes with an even number of points (odd axes
    already have a centered sample).
    """
    ndims = max(np.array([arg]).shape[-1] for arg in (dims, extent, center))
    dims = (np.ones(ndims) * dims).astype('int')
    extent = (np.ones(ndims) * extent).astype('float')
    center = (np.ones(ndims) * center).astype('float')
    delta = extent / dims
    # First sample sits half a cell in from the lower edge of the extent.
    zero = center - extent / 2 + delta / 2
    if has_center:
        zero -= delta / 2 * (1 - dims % 2)
    return CartesianGrid(RegularCoords(delta, dims, zero))
class Migration(migrations.Migration):
    """Django schema migration for the ``core`` app: field alterations only
    (M2M ``blank=True`` flags and the location email template key choices)."""

    dependencies = [('core', '0005_auto__1301')]
    # Alters existing fields; no tables are created or dropped.
    operations = [migrations.AlterField(model_name='location', name='residents', field=models.ManyToManyField(related_name='residences', to=settings.AUTH_USER_MODEL, blank=True)), migrations.AlterField(model_name='locationemailtemplate', name='key', field=models.CharField(max_length=32, choices=[(b'admin_daily_update', b'Admin Daily Update'), (b'guest_daily_update', b'Guest Daily Update'), (b'invoice', b'Invoice'), (b'receipt', b'Receipt'), (b'newreservation', b'New Reservation'), (b'pre_arrival_welcome', b'Pre-Arrival Welcome'), (b'departure', b'Departure')])), migrations.AlterField(model_name='reservation', name='suppressed_fees', field=models.ManyToManyField(to='core.Fee', blank=True)), migrations.AlterField(model_name='room', name='residents', field=models.ManyToManyField(help_text=b'This field is optional.', related_name='residents', to=settings.AUTH_USER_MODEL, blank=True))]
class PointEncoder(fl.Chain):
    """Chain that encodes prompt points (coordinates + per-point type mask)
    into embeddings of size ``embedding_dim``.

    NOTE(review): the layout (coordinate encoding plus learned point-type
    embeddings, with a padding 'not a point' entry when no box is present)
    mirrors the Segment Anything prompt encoder — confirm against the
    project's documentation.
    """

    def __init__(self, embedding_dim: int=256, scale: float=1, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        """Build the chain: CoordinateEncoder -> pad -> residual add of the
        per-type embedding selected by the 'type_mask' context."""
        assert ((embedding_dim % 2) == 0), 'embedding_dim must be divisible by 2.'
        self.embedding_dim = embedding_dim
        self.scale = scale
        super().__init__(CoordinateEncoder(num_positional_features=(embedding_dim // 2), scale=scale, device=device, dtype=dtype), fl.Lambda(func=self.pad), fl.Residual(fl.UseContext(context='point_encoder', key='type_mask'), PointTypeEmbedding(embedding_dim=embedding_dim, device=device, dtype=dtype)))

    def pad(self, x: Tensor) -> Tensor:
        """When no box corners are among the prompt points, append one
        NOT_A_POINT entry (zero embedding) and extend the type mask to match."""
        type_mask: Tensor = self.use_context('point_encoder')['type_mask']
        if torch.any(((type_mask == PointType.BOX_TOP_LEFT.value) | (type_mask == PointType.BOX_BOTTOM_RIGHT.value))):
            # Box corners present: no padding needed.
            return x
        type_mask = torch.cat([type_mask, torch.full((type_mask.shape[0], 1), PointType.NOT_A_POINT.value, device=type_mask.device)], dim=1)
        self.set_context(context='point_encoder', value={'type_mask': type_mask})
        return torch.cat([x, torch.zeros((x.shape[0], 1, x.shape[(- 1)]), device=x.device)], dim=1)

    def init_context(self) -> Contexts:
        """Initial (empty) context: the type mask is set per call."""
        return {'point_encoder': {'type_mask': None}}

    def set_type_mask(self, type_mask: Int[(Tensor, '1 num_points')]) -> None:
        """Store the per-point type mask consumed by ``pad`` and the residual branch."""
        self.set_context(context='point_encoder', value={'type_mask': type_mask})

    def get_dense_positional_embedding(self, image_embedding_size: tuple[(int, int)]) -> Float[(Tensor, 'num_positional_features height width')]:
        """Return a dense positional embedding for a (height, width) feature
        grid, using normalized pixel-center coordinates in [0, 1]."""
        coordinate_encoder = self.ensure_find(layer_type=CoordinateEncoder)
        (height, width) = image_embedding_size
        grid = torch.ones((height, width), device=self.device, dtype=torch.float32)
        # cumsum - 0.5 yields coordinates at cell centers (0.5, 1.5, ...).
        y_embedding = (grid.cumsum(dim=0) - 0.5)
        x_embedding = (grid.cumsum(dim=1) - 0.5)
        y_embedding = (y_embedding / height)
        x_embedding = (x_embedding / width)
        positional_embedding = coordinate_encoder(torch.stack(tensors=[x_embedding, y_embedding], dim=(- 1))).permute(2, 0, 1).unsqueeze(dim=0)
        return positional_embedding

    def points_to_tensor(self, foreground_points: (Sequence[tuple[(float, float)]] | None)=None, background_points: (Sequence[tuple[(float, float)]] | None)=None, not_a_points: (Sequence[tuple[(float, float)]] | None)=None, box_points: (Sequence[Sequence[tuple[(float, float)]]] | None)=None) -> tuple[(Float[(Tensor, '1 num_points 2')], Int[(Tensor, '1 num_points')])]:
        """Pack point sequences into a coordinates tensor and a matching
        type mask, ordered by PointType.

        Boxes are given as [(top_left, bottom_right), ...] pairs and are split
        into their corner points.  Raises (via torch.cat on empty lists) when
        no points at all are supplied.
        """
        foreground_points = (foreground_points or [])
        background_points = (background_points or [])
        not_a_points = (not_a_points or [])
        box_points = (box_points or [])
        top_left_points = [box[0] for box in box_points]
        bottom_right_points = [box[1] for box in box_points]
        coordinates: list[Tensor] = []
        type_ids: list[Tensor] = []
        # NOTE(review): relies on PointType enumerating its members in the
        # order background, foreground, box-top-left, box-bottom-right,
        # not-a-point — confirm against the PointType definition.
        for (type_id, coords_seq) in zip(PointType, [background_points, foreground_points, top_left_points, bottom_right_points, not_a_points]):
            if (len(coords_seq) > 0):
                coords_tensor = torch.tensor(data=list(coords_seq), dtype=torch.float, device=self.device)
                coordinates.append(coords_tensor)
                point_ids = torch.tensor(data=([type_id.value] * len(coords_seq)), dtype=torch.int, device=self.device)
                type_ids.append(point_ids)
        all_coordinates = torch.cat(tensors=coordinates, dim=0).unsqueeze(dim=0)
        type_mask = torch.cat(tensors=type_ids, dim=0).unsqueeze(dim=0)
        return (all_coordinates, type_mask)
class SwaggerView(MethodView):
    """Flask ``MethodView`` carrying swagger spec attributes, with optional
    request validation against those specs before dispatch."""

    parameters = []
    responses = {}
    definitions = {}
    tags = []
    consumes = ['application/json']
    produces = ['application/json']
    schemes = []
    security = []
    deprecated = False
    operationId = None
    externalDocs = {}
    summary = None
    description = None
    validation = False
    validation_function = None
    validation_error_handler = None

    def dispatch_request(self, *args, **kwargs):
        """Validate the request against this view's specs when ``validation``
        is enabled, then delegate to the normal MethodView dispatch."""
        if self.validation:
            spec_attrs = (flasgger.constants.OPTIONAL_FIELDS + ['parameters', 'definitions', 'responses', 'summary', 'description'])
            specs = {attr: getattr(self, attr) for attr in spec_attrs}
            # Resolve marshmallow/schema objects into plain swagger definitions.
            definitions = {}
            specs.update(convert_schemas(specs, definitions))
            specs['definitions'] = definitions
            flasgger.utils.validate(specs=specs, validation_function=self.validation_function, validation_error_handler=self.validation_error_handler)
        return super(SwaggerView, self).dispatch_request(*args, **kwargs)
def extractNovicetranslationsWordpressCom(item):
    """Feed parser for novicetranslations.wordpress.com release posts.

    Extracts volume/chapter info from the post title, then maps the post to a
    known series either by title substring (for posts tagged exactly
    ['Translations']) or by post tag.  Returns a release message via
    buildReleaseMessageWithType, None for previews/posts without chapter
    info, or False when no series matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts that carry no chapter/volume info, and preview posts.
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    if (item['tags'] == ['Translations']):
        # Generic release posts: identify the series by a title substring.
        # Entries are (title substring, series name, translation type).
        titlemap = [('The Little Princess Imprisoned by the Demonic Brothers Ch', 'The Little Princess Imprisoned by the Demonic Brothers', 'translated'), ('Who moved my ashes ch', 'Who moved my ashes', 'translated'), ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel')]
        for (titlecomponent, name, tl_type) in titlemap:
            if (titlecomponent.lower() in item['title'].lower()):
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Otherwise identify the series by post tag.
    # Entries are (tag, series name, translation type); tag matching is
    # case-sensitive, unlike the title matching above.
    tagmap = [('this princess punishes your nine generations', 'this princess punishes your nine generations', 'translated'), ('the villain has blackened again', 'the villain has blackened again', 'translated'), ("don't be jealous, i will bend myself", "don't be jealous, i will bend myself", 'translated'), ('next door theres memeda', "Next Door There's Memeda", 'translated'), ('willingly baited', 'Willingly Baited', 'translated'), ('who moved my ashes', 'who moved my ashes', 'translated'), ('seeking good temptation', 'seeking good temptation', 'translated'), ('Lovable Beauty', 'Lovable Beauty', 'translated'), ('never dare to abuse the female protagonist again', 'never dare to abuse the female protagonist again', 'translated'), ('the little princess imprisoned by the demon brothers', 'the little princess imprisoned by the demon brothers', 'translated'), ('my husband with scholar syndrome', 'my husband with scholar syndrome', 'translated'), ('chasing a flower blossom', 'chasing a flower blossom', 'translated'), ('ah, senior brother is actually a sister!', 'ah, senior brother is actually a sister!', 'translated'), ('believe it or not, i already caught you', 'believe it or not, i already caught you', 'translated'), ('female lead, please let go of the white moonlight', 'female lead, please let go of the white moonlight', 'translated'), ('targeted by a feline beastman in ancient times', 'targeted by a feline beastman in ancient times', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_read_last_n_days(delta_days: int) -> Dict[(int, Tuple[(int, str)])]:
    """Map note id -> (pages read in the last ``delta_days`` days, note title).

    Only notes with at least one qualifying ``read`` row (``page > -1``,
    created on or after the cutoff timestamp) appear in the result.
    """
    assert (delta_days >= 0)
    stamp = utility.date.date_x_days_ago_stamp(abs(delta_days))
    conn = _get_connection()
    try:
        # Parameterised query instead of f-string interpolation into the SQL.
        res = conn.execute(
            'select counts.c, counts.nid, notes.title from notes '
            'join (select count(*) as c, nid from read '
            'where page > -1 and created >= ? group by nid) as counts '
            'on notes.id = counts.nid',
            (stamp,),
        ).fetchall()
    finally:
        # Close even when the query raises (the original leaked the
        # connection on error).
        conn.close()
    return {nid: (c, title) for (c, nid, title) in res}
def _modify_hypothesis_settings(settings, name, parent=None):
    """Register a hypothesis profile called ``name`` derived from ``parent``
    (the currently active profile by default) and load it.

    ``settings['phases']`` may be a {phase-name: bool} mapping; it is
    converted to the list of enabled ``Phase`` members.  Raises ValueError
    for an unknown phase name.
    """
    settings = settings.copy()
    parent = hp_settings._current_profile if parent is None else parent
    if 'phases' in settings:
        enabled = settings['phases']
        try:
            settings['phases'] = [getattr(Phase, phase) for (phase, on) in enabled.items() if on]
        except AttributeError as exc:
            raise ValueError(f"'{exc.args[0]}' is not a valid hypothesis phase setting")
    database = DirectoryBasedExampleDatabase(_get_data_folder().joinpath('hypothesis'))
    hp_settings.register_profile(name, parent=hp_settings.get_profile(parent), database=database, **settings)
    hp_settings.load_profile(name)
def test_ipaddress():
    """field.Ip round-trips v4/v6 addresses, passes None through, and
    rejects malformed input."""
    ip_field = field.Ip()
    assert (ip_field.deserialize('127.0.0.1') == ipaddress.ip_address('127.0.0.1'))
    v6 = ip_field.deserialize('::1')
    assert (v6 == ipaddress.ip_address('::1'))
    # Serialization is the inverse of deserialization.
    assert (ip_field.serialize(v6) == '::1')
    assert (ip_field.deserialize(None) is None)
    with pytest.raises(ValueError):
        ip_field.deserialize('not_an_ipaddress')
class GamePlayers():
    """Tracks the players of a game: seating order, who has folded the
    current hand, and who is permanently out (dead)."""

    def __init__(self, players: List[Player]):
        self._players: Dict[(str, Player)] = {player.id: player for player in players}
        # Seating order is the order players were supplied in.
        self._player_ids: List[str] = [player.id for player in players]
        self._folder_ids: Set[str] = set()
        self._dead_player_ids: Set[str] = set()

    def fold(self, player_id: str):
        """Mark a player as folded for the current hand."""
        if (player_id not in self._player_ids):
            raise ValueError('Unknown player id')
        self._folder_ids.add(player_id)

    def remove(self, player_id: str):
        """Fold the player and remove them from the game permanently."""
        self.fold(player_id)
        self._dead_player_ids.add(player_id)

    def reset(self):
        """Start a new hand: only dead players remain folded."""
        self._folder_ids = set(self._dead_player_ids)

    def round(self, start_player_id: str, reverse=False) -> Generator[(Player, None, None)]:
        """Yield each active player once, in seating order starting from
        ``start_player_id`` (reversed order when ``reverse`` is True)."""
        start_item = self._player_ids.index(start_player_id)
        step_multiplier = ((- 1) if reverse else 1)
        for i in range(len(self._player_ids)):
            next_item = ((start_item + (i * step_multiplier)) % len(self._player_ids))
            player_id = self._player_ids[next_item]
            if (player_id not in self._folder_ids):
                (yield self._players[player_id])

    def get(self, player_id: str) -> Player:
        """Return the player with the given id; raises ValueError if unknown."""
        try:
            return self._players[player_id]
        except KeyError:
            raise ValueError('Unknown player id')

    def get_next(self, dealer_id: str) -> Optional[Player]:
        """Return the first active player seated after ``dealer_id``, or
        None when no other active player exists."""
        if (dealer_id not in self._player_ids):
            raise ValueError('Unknown player id')
        if (dealer_id in self._folder_ids):
            raise ValueError('Inactive player')
        start_item = self._player_ids.index(dealer_id)
        for i in range((len(self._player_ids) - 1)):
            next_index = (((start_item + i) + 1) % len(self._player_ids))
            next_id = self._player_ids[next_index]
            if (next_id not in self._folder_ids):
                return self._players[next_id]
        return None

    def is_active(self, player_id: str) -> bool:
        """True when the player has not folded (and is not dead)."""
        if (player_id not in self._player_ids):
            raise ValueError('Unknown player id')
        return (player_id not in self._folder_ids)

    def count_active(self) -> int:
        return (len(self._player_ids) - len(self._folder_ids))

    def count_active_with_money(self) -> int:
        # Relies on ``active`` being a property (see note below).
        return len([player for player in self.active if (player.money > 0)])

    # BUG FIX: ``count_active_with_money`` iterates ``self.active`` as an
    # attribute, but the accessors below were plain methods, so it raised
    # TypeError.  They are restored as properties (their decorators were
    # likely lost).  TODO(review): confirm no caller invokes them as
    # ``players.active()`` etc.
    @property
    def all(self) -> List[Player]:
        """Everyone still in the game (folded or not), excluding the dead."""
        return [self._players[player_id] for player_id in self._player_ids if (player_id not in self._dead_player_ids)]

    @property
    def folders(self) -> List[Player]:
        return [self._players[player_id] for player_id in self._folder_ids]

    @property
    def dead(self) -> List[Player]:
        return [self._players[player_id] for player_id in self._dead_player_ids]

    @property
    def active(self) -> List[Player]:
        return [self._players[player_id] for player_id in self._player_ids if (player_id not in self._folder_ids)]
class OptionPlotoptionsScatterSonificationTracksActivewhen(Options):
    """Generated options wrapper for Highcharts
    ``plotOptions.scatter.sonification.tracks.activeWhen`` settings.

    NOTE(review): every name below is defined twice — a getter-style def
    followed by a setter-style def.  As written, the second ``def`` shadows
    the first, so only the setter form survives on the class.  Options
    classes in this style normally pair these with ``@property`` /
    ``@<name>.setter`` decorators, which were likely lost in extraction —
    confirm against the generator output before relying on the getters.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): this test presumably ran under a '@responses.activate'
# decorator that appears to have been stripped from this copy — confirm
# against upstream.
def test_context_manager_negative():
    """Failing logout on context-manager exit must not leak out of the
    'with' block; only the login and query HTTP calls are recorded.

    NOTE(review): this assumes `sfdc.client.__exit__` swallows exceptions
    raised by `logout` — confirm against the client implementation.
    """
    testutil.add_response('login_response_200')
    testutil.add_response('query_response_200')
    client_args = {'username': testutil.username, 'password': testutil.password, 'client_id': testutil.client_id, 'client_secret': testutil.client_secret, 'version': '37.0'}
    def logout():
        # Monkeypatched logout that always fails.
        raise Exception("Monkey patchin'...")
    with sfdc.client(**client_args) as client:
        client.query('SELECT Id, Name FROM Account LIMIT 10')
        # Replace logout so the context-manager exit path hits the failure.
        client.logout = logout
    '\n The above should have made 2 calls: login, query\n '
    assert (len(responses.calls) == 2)
def encode_sleb128(num):
    """Encode the integer *num* as a signed LEB128 (SLEB128) byte sequence.

    :param num: any Python int (arbitrary precision, positive or negative).
    :returns: a ``bytearray`` with the SLEB128 encoding; always at least
        one byte (``encode_sleb128(0) == bytearray(b'\\x00')``).

    The original emitted the first byte with a duplicated copy of the loop
    body (a hand-rolled do-while); this is the same algorithm folded into a
    single ``while True`` loop.
    """
    bs = bytearray()
    while True:
        byte = num & 0x7F
        num >>= 7
        # Encoding is complete once the remaining value is pure sign
        # extension (0 or -1) and the sign bit of this byte matches it.
        done = ((num == 0) and not (byte & 0x40)) or ((num == -1) and (byte & 0x40))
        if not done:
            byte |= 0x80  # continuation bit: more bytes follow
        bs.append(byte)
        if done:
            return bs
def test_dimensions():
    """Multi-dimensional integer channels decode with the expected shapes."""
    fpath = 'data/chap4-7/iflr/multidimensions-ints-various.dlis'
    expected = [
        [[1, 2, 3], [4, 5, 6]],
        [[1, 2], [3, 4], [5, 6]],
        [[[1, 2], [3, 4], [5, 6]],
         [[7, 8], [9, 10], [11, 12]],
         [[13, 14], [15, 16], [17, 18]],
         [[19, 20], [21, 22], [23, 24]]],
        [[1, 2]],
        [[1], [2]],
        [[1]],
        [1, 2, 3, 4],
    ]
    with dlis.load(fpath) as (f, *_):
        frame = f.object('FRAME', 'FRAME-DIMENSION', 11, 0)
        sample = frame.curves()[0]
        # Channels 1..7 hold the same values reshaped to different dimensions.
        for channel, values in enumerate(expected, start=1):
            np.testing.assert_array_equal(sample[channel], values)
# NOTE(review): the two '.parametrize' lines below look like stripped
# '@pytest.mark.parametrize' decorators — confirm against upstream.
.parametrize('value', [10, (- 10)], ids=(lambda v: ('(f = %d)' % v)))
.parametrize('expr', ['f', '2*f'])
def test_math_functions(expr, value):
    """Assigning an expression in f to a Function must match evaluating
    the same expression on the plain number."""
    mesh = UnitSquareMesh(2, 2)
    V = FunctionSpace(mesh, 'CG', 1)
    f = Function(V)
    f.assign(value)
    actual = Function(V)
    # eval() is safe here: 'expr' comes from the fixed parametrize list above.
    actual.assign(eval(expr))
    # Rebind f to the raw number so re-evaluating the same expression
    # yields the scalar reference value.
    f = value
    expect = eval(expr)
    assert np.allclose(actual.dat.data_ro, expect)
# NOTE(review): '.EventDecorator()' below appears to be the remnant of a
# stripped decorator (likely '@PETSc.Log.EventDecorator()' in Firedrake) —
# confirm against upstream.
.EventDecorator()
def derivative(form, u, du=None, coefficient_derivatives=None):
    """Compute the UFL Gateaux derivative of *form* with respect to *u*.

    :arg form: the form (or expression) to differentiate.
    :arg u: the coefficient to differentiate with respect to; a Function,
        Cofunction, scalar Constant, or SpatialCoordinate (shape derivative).
    :arg du: optional perturbation direction; when omitted a new
        :class:`Argument` with the next free number is created.
    :arg coefficient_derivatives: optional mapping of extra coefficient
        derivatives merged into the internal map.
    :raises TypeError: if *form* is a Slate tensor.
    :raises ValueError: for mixed-function misuse, a missing UFL domain, or
        a shape mismatch between u and du.
    :raises RuntimeError: when u is of an unsupported type.
    """
    if isinstance(form, firedrake.slate.TensorBase):
        raise TypeError(f'Cannot take the derivative of a {type(form).__name__}')
    u_is_x = isinstance(u, ufl.SpatialCoordinate)
    if (u_is_x or isinstance(u, Constant)):
        uc = u
    else:
        # u may be an indexed/split view; recover the single underlying coefficient.
        (uc,) = extract_coefficients(u)
    if ((not u_is_x) and (len(uc.subfunctions) > 1) and (set(extract_coefficients(form)) & set(uc.subfunctions))):
        raise ValueError('Taking derivative of form wrt u, but form contains coefficients from u.subfunctions.\nYou probably meant to write split(u) when defining your form.')
    mesh = as_domain(form)
    if (not mesh):
        raise ValueError('Expression to be differentiated has no ufl domain.\nDo you need to add a domain to your Constant?')
    # Shape derivative: differentiating with respect to the mesh coordinates.
    is_dX = (u_is_x or (u is mesh.coordinates))
    try:
        args = form.arguments()
    except AttributeError:
        # Plain expressions have no .arguments(); extract them instead.
        args = extract_arguments(form)
    # Highest argument number already present; any new Argument gets n + 1.
    n = (max((a.number() for a in args)) if args else (- 1))
    if is_dX:
        coords = mesh.coordinates
        u = ufl.SpatialCoordinate(mesh)
        V = coords.function_space()
    elif isinstance(uc, (firedrake.Function, firedrake.Cofunction)):
        V = uc.function_space()
    elif isinstance(uc, firedrake.Constant):
        if (uc.ufl_shape != ()):
            raise ValueError('Real function space of vector elements not supported')
        # Re-express the Constant as a coefficient in a Real space so UFL
        # can differentiate with respect to it.
        V = firedrake.FunctionSpace(mesh, 'Real', 0)
        x = ufl.Coefficient(V, (n + 1))
        n += 1
        form = ufl.replace(form, {u: x})
        u = x
    else:
        raise RuntimeError("Can't compute derivative for form")
    if (du is None):
        du = Argument(V, (n + 1))
    if is_dX:
        # For shape derivatives the coordinate field varies with du.
        internal_coefficient_derivatives = {coords: du}
    else:
        internal_coefficient_derivatives = {}
    if coefficient_derivatives:
        internal_coefficient_derivatives.update(coefficient_derivatives)
    if (u.ufl_shape != du.ufl_shape):
        raise ValueError('Shapes of u and du do not match.\nIf you passed an indexed part of split(u) into derivative, you need to provide an appropriate du as well.')
    return ufl.derivative(form, u, du, internal_coefficient_derivatives)
class TestGetKrbConf():
    """Unit tests for buildsys.get_krb_conf()."""

    def test_all_config_items_missing(self):
        """A config with no krb_* keys maps to an empty dict."""
        result = buildsys.get_krb_conf({'some_meaningless_other_key': 'boring_value'})
        assert result == {}

    def test_complete_config(self):
        """All three krb_* keys are translated to their short names."""
        source = {
            'some_meaningless_other_key': 'boring_value',
            'krb_ccache': 'a_ccache',
            'krb_keytab': 'a_keytab',
            'krb_principal': 'a_principal',
        }
        result = buildsys.get_krb_conf(source)
        assert result == {'ccache': 'a_ccache', 'keytab': 'a_keytab', 'principal': 'a_principal'}

    def test_krb_ccache(self):
        """krb_ccache alone yields only the 'ccache' entry."""
        source = {'some_meaningless_other_key': 'boring_value', 'krb_ccache': 'a_ccache'}
        result = buildsys.get_krb_conf(source)
        assert result == {'ccache': 'a_ccache'}

    def test_krb_ccache_uid(self):
        """The '%{uid}' placeholder in krb_ccache expands to the effective UID."""
        source = {'some_meaningless_other_key': 'boring_value', 'krb_ccache': 'a_ccache_%{uid}'}
        result = buildsys.get_krb_conf(source)
        assert result == {'ccache': ('a_ccache_%d' % os.geteuid())}

    def test_krb_keytab(self):
        """krb_keytab alone yields only the 'keytab' entry."""
        source = {'some_meaningless_other_key': 'boring_value', 'krb_keytab': 'a_keytab'}
        result = buildsys.get_krb_conf(source)
        assert result == {'keytab': 'a_keytab'}

    def test_krb_principal(self):
        """krb_principal alone yields only the 'principal' entry."""
        source = {'some_meaningless_other_key': 'boring_value', 'krb_principal': 'a_principal'}
        result = buildsys.get_krb_conf(source)
        assert result == {'principal': 'a_principal'}
class SerializableBase(abc.ABCMeta):
    """Metaclass for `Serializable` types.

    Validates and normalizes the class's `fields` declaration (either
    declared directly or inherited from a single `Serializable` parent),
    then attaches a `_meta` namespace and one accessor property per field.
    """
    def __new__(cls, name, bases, attrs):
        super_new = super(SerializableBase, cls).__new__
        serializable_bases = tuple((b for b in bases if isinstance(b, SerializableBase)))
        has_multiple_serializable_parents = (len(serializable_bases) > 1)
        is_serializable_subclass = any(serializable_bases)
        declares_fields = ('fields' in attrs)
        if (not is_serializable_subclass):
            # The root `Serializable` class itself: nothing to wire up.
            return super_new(cls, name, bases, attrs)
        elif (not declares_fields):
            if has_multiple_serializable_parents:
                raise TypeError('Cannot create subclass from multiple parent `Serializable` classes without explicit `fields` declaration.')
            else:
                # Inherit the single parent's field declaration verbatim.
                parent_serializable = serializable_bases[0]
                if hasattr(parent_serializable, '_meta'):
                    fields = parent_serializable._meta.fields
                else:
                    fields = ()
        else:
            # Normalize the declaration to a tuple of (name, sedes) tuples.
            fields = tuple((tuple(field) for field in attrs.pop('fields')))
        if fields:
            (field_names, sedes) = zip(*fields)
        else:
            (field_names, sedes) = ((), ())
        # Reject duplicate or non-identifier field names up front.
        duplicate_field_names = _get_duplicates(field_names)
        if duplicate_field_names:
            raise TypeError(f"The following fields are duplicated in the `fields` declaration: {','.join(sorted(duplicate_field_names))}")
        invalid_field_names = {field_name for field_name in field_names if (not _is_valid_identifier(field_name))}
        if invalid_field_names:
            raise TypeError(f"The following field names are not valid python identifiers: {','.join((f'`{item}`' for item in sorted(invalid_field_names)))}")
        # A subclass must redeclare (at least) every field its parents declared.
        parent_field_names = {field_name for base in serializable_bases if hasattr(base, '_meta') for field_name in base._meta.field_names}
        missing_fields = parent_field_names.difference(field_names)
        if missing_fields:
            raise TypeError(f"Subclasses of `Serializable` **must** contain a full superset of the fields defined in their parent classes. The following fields are missing: {','.join(sorted(missing_fields))}")
        # Backing attribute names must not collide with anything already
        # present on the class or anywhere in its MRO.
        reserved_namespace = set(attrs.keys()).union((attr for base in bases for parent_cls in base.__mro__ for attr in _get_class_namespace(parent_cls)))
        field_attrs = _mk_field_attrs(field_names, reserved_namespace)
        meta_namespace = {'fields': fields, 'field_attrs': field_attrs, 'field_names': field_names, 'sedes': List(sedes)}
        meta_base = attrs.pop('_meta', MetaBase)
        meta = type('Meta', (meta_base,), meta_namespace)
        attrs['_meta'] = meta
        # One property per field, delegating to its backing attribute.
        field_props = tuple(((field, _mk_field_property(field, attr)) for (field, attr) in zip(meta.field_names, meta.field_attrs)))
        return super_new(cls, name, bases, dict((field_props + tuple(attrs.items()))))
class MarketPriceDataContainer(AbstractPlayerInfoDataContainer):
    """Extracts market price series (base/buy/sell) for a single resource.

    Prices come from the galactic market when the country has access to
    it, and from the country's internal market otherwise. A small negative
    sentinel value marks which market supplied the prices so plots can
    distinguish them; ``DEFAULT_VAL`` (NaN) marks the inactive market.
    """
    DEFAULT_VAL = float('nan')
    # Sentinel keys flagging which market supplied the prices for a sample.
    galactic_market_indicator_key = 'traded_on_galactic_market'
    internal_market_indicator_key = 'traded_on_internal_market'

    def __init__(self, country_perspective, resource_name, base_price, resource_index):
        super().__init__(country_perspective=country_perspective)
        self.resource_name = resource_name
        self.base_price = base_price
        self.resource_index = resource_index

    def _iterate_budgetitems(self, cd: datamodel.CountryData) -> Iterable[Tuple[(str, float)]]:
        """Yield (key, value) price items for one country-data snapshot."""
        gs = cd.game_state
        if cd.has_galactic_market_access:
            yield from self._iter_galactic_market_price(gs, cd)
        else:
            yield from self._iter_internal_market_price(gs, cd)

    def _iter_galactic_market_price(self, gs: datamodel.GameState, cd: datamodel.CountryData) -> Iterable[Tuple[(str, float)]]:
        """Yield galactic-market prices for this resource, if it is traded."""
        market_fee = self.get_market_fee(gs)
        # Resources are paired with their config entries by index order.
        market_resources: List[datamodel.GalacticMarketResource] = sorted(gs.galactic_market_resources, key=(lambda r: r.resource_index))
        for (res, res_data) in zip(market_resources, config.CONFIG.market_resources):
            if ((res_data['name'] == self.resource_name) and (res.availability != 0)):
                yield from self._get_resource_prices(market_fee, res_data['base_price'], res.fluctuation)
                # Small negative value marks the active market; NaN the other.
                yield (self.galactic_market_indicator_key, (-0.001))
                yield (self.internal_market_indicator_key, self.DEFAULT_VAL)
                break

    def _iter_internal_market_price(self, gs: datamodel.GameState, cd: datamodel.CountryData):
        """Yield internal-market prices for this resource, if tradeable."""
        market_fee = self.get_market_fee(gs)
        res_data = None
        for r in config.CONFIG.market_resources:
            if (r['name'] == self.resource_name):
                res_data = r
                break
        if (res_data is None):
            # BUGFIX: the original message was a truncated plain string
            # ('...{self.resour') with no f-prefix; interpolate properly.
            logger.info(f'Could not find configuration for resource {self.resource_name}')
            return
        if (res_data['base_price'] is None):
            return
        # These resources are always tradeable internally, even before a
        # fluctuation entry exists for them.
        always_tradeable = ['minerals', 'food', 'consumer_goods', 'alloys']
        fluctuation = (0.0 if (self.resource_name in always_tradeable) else None)
        for resource in cd.internal_market_resources:
            if (resource.resource_name.text == self.resource_name):
                fluctuation = resource.fluctuation
                break
        if (fluctuation is None):
            # Not tradeable on the internal market (yet).
            return
        yield from self._get_resource_prices(market_fee, res_data['base_price'], fluctuation)
        yield (self.galactic_market_indicator_key, self.DEFAULT_VAL)
        yield (self.internal_market_indicator_key, (-0.001))

    def _get_resource_prices(self, market_fee: float, base_price: float, fluctuation: float) -> Iterable[Tuple[(str, float)]]:
        """Yield base, buy, and sell prices adjusted for fluctuation and fee.

        (Annotation fixed: this is a generator of (key, value) pairs, not a
        3-tuple.)
        """
        # Fluctuation is expressed in percent of the base price.
        no_fee_price = (base_price * (1 + (fluctuation / 100)))
        buy_price = (no_fee_price * (1 + market_fee))
        sell_price = (no_fee_price * (1 - market_fee))
        yield (f'{self.resource_name}_base_price', no_fee_price)
        if (buy_price != sell_price):
            # With a zero fee all three series coincide; skip the duplicates.
            yield (f'{self.resource_name}_buy_price', buy_price)
            yield (f'{self.resource_name}_sell_price', sell_price)

    def get_market_fee(self, gs):
        """Return the market fee in effect at the game state's date.

        Fees are configured as dated steps; the latest step whose date is
        not after ``gs.date`` applies. Defaults to 30% before any step.
        """
        current_fee = {'date': 0, 'fee': 0.3}
        for fee in sorted(config.CONFIG.market_fee, key=(lambda f: f['date'])):
            if (datamodel.date_to_days(fee['date']) > gs.date):
                break
            current_fee = fee
        return current_fee['fee']
class NavierStokesPressureCorrection(object):
    """Pressure-correction preconditioner wrapper for Navier-Stokes solves.

    Wraps a PETSc PC built around the operator *L* (the pressure
    Laplacian), attaching the constant null space — pressure is only
    defined up to an additive constant.
    """
    def __init__(self, L, prefix=None):
        # L: PETSc matrix for the pressure-correction operator.
        self.L = L
        self.pc = p4pyPETSc.PC().create()
        if prefix:
            # Allow runtime configuration via PETSc options with this prefix.
            self.pc.setOptionsPrefix(prefix)
        self.pc.setFromOptions()
        self.hasNullSpace = True
        # Constant-vector null space; also mark the operator symmetric so
        # PETSc may pick symmetric solvers/preconditioners.
        self.nsp = p4pyPETSc.NullSpace().create(constant=True, comm=p4pyPETSc.COMM_WORLD)
        self.L.setOption(p4pyPETSc.Mat.Option.SYMMETRIC, True)
        self.L.setNullSpace(self.nsp)
    def setUp(self, global_ksp=None, newton_its=None):
        # No per-solve setup needed for this preconditioner.
        pass
# NOTE(review): the bare '()' below looks like the remnant of a stripped
# decorator (likely '@frappe.whitelist()') — confirm against upstream.
()
def create_multiple(doctype, docname):
    """Create Lab Test document(s) from a Sales Invoice or Patient Encounter.

    Shows a success message listing the created Lab Tests; throws when
    either argument is missing.
    """
    if ((not doctype) or (not docname)):
        frappe.throw(_('Sales Invoice or Patient Encounter is required to create Lab Tests'), title=_('Insufficient Data'))
    lab_test_created = False
    if (doctype == 'Sales Invoice'):
        lab_test_created = create_lab_test_from_invoice(docname)
    elif (doctype == 'Patient Encounter'):
        lab_test_created = create_lab_test_from_encounter(docname)
    if lab_test_created:
        frappe.msgprint(_('Lab Test(s) {0} created successfully').format(lab_test_created), indicator='green')
# NOTE(review): "(scope='session')" below looks like the remnant of a
# stripped '@pytest.fixture(scope=...)' decorator — confirm against upstream.
(scope='session')
def commit_search_cls(es_version):
    """Build a CommitSearch faceted-search class appropriate for the
    Elasticsearch version under test."""
    if (es_version >= (7, 2)):
        # ES 7.2+ renamed the date-histogram 'interval' parameter.
        interval_kwargs = {'fixed_interval': '1d'}
    else:
        interval_kwargs = {'interval': 'day'}
    class CommitSearch(FacetedSearch):
        index = 'flat-git'
        fields = ('description', 'files')
        facets = {'files': TermsFacet(field='files'), 'frequency': DateHistogramFacet(field='authored_date', min_doc_count=1, **interval_kwargs), 'deletions': RangeFacet(field='stats.deletions', ranges=[('ok', (None, 1)), ('good', (1, 5)), ('better', (5, None))])}
    return CommitSearch
# NOTE(review): the '.skipif' / '.parametrize' lines below look like
# stripped '@pytest.mark.*' decorators — confirm against upstream.
.skipif((not has_mxnet), reason='needs MXNet')
.parametrize('data,n_args,kwargs_keys', [(numpy.zeros((2, 3), dtype='f'), 1, []), ([numpy.zeros((2, 3), dtype='f'), numpy.zeros((2, 3), dtype='f')], 2, []), ((numpy.zeros((2, 3), dtype='f'), numpy.zeros((2, 3), dtype='f')), 2, []), ({'a': numpy.zeros((2, 3), dtype='f'), 'b': numpy.zeros((2, 3), dtype='f')}, 0, ['a', 'b']), (ArgsKwargs((numpy.zeros((2, 3), dtype='f'), numpy.zeros((2, 3), dtype='f')), {'c': numpy.zeros((2, 3), dtype='f')}), 2, ['c'])])
def test_mxnet_wrapper_convert_inputs(data, n_args, kwargs_keys):
    """convert_inputs must split assorted input containers (array, list,
    tuple, dict, ArgsKwargs) into MXNet NDArray args/kwargs."""
    import mxnet as mx
    mx_model = mx.gluon.nn.Sequential()
    mx_model.add(mx.gluon.nn.Dense(12))
    mx_model.initialize()
    model = MXNetWrapper(mx_model)
    convert_inputs = model.attrs['convert_inputs']
    (Y, backprop) = convert_inputs(model, data, is_train=True)
    check_input_converters(Y, backprop, data, n_args, kwargs_keys, mx.nd.NDArray)
class BoundingBoxDistanceBetween(NamedTuple):
    """A measured distance between two bounding boxes, keeping references
    to both boxes so the pair can be reported alongside the distance."""
    bounding_box_distance: BoundingBoxDistance
    bounding_box_ref_1: BoundingBoxRef
    bounding_box_ref_2: BoundingBoxRef

    def get_sort_key(self):
        """Sort key delegated to the underlying distance measurement."""
        return self.bounding_box_distance.get_sort_key()

    def is_better_than(self, other: Optional['BoundingBoxDistanceBetween']) -> bool:
        """True when this measurement beats *other*; any measurement beats
        a missing one."""
        if not other:
            return True
        return self.bounding_box_distance.is_better_than(other.bounding_box_distance)
# NOTE(review): the call-style tuple below looks like the remnant of a
# stripped '@prodigy.recipe(...)' decorator — confirm against upstream.
('ner.fuzzy.manual', dataset=('The dataset to use', 'positional', None, str), spacy_model=('The base model', 'positional', None, str), source=('The source data as a JSONL file', 'positional', None, str), patterns=('Phrase patterns', 'positional', None, str), label=('One or more comma-separated labels', 'option', 'l', split_string), exclude=('Names of datasets to exclude', 'option', 'e', split_string))
def ner_fuzzy_manual(dataset: str, spacy_model: str, source: str, patterns: str, label: Optional[List[str]]=None, exclude: Optional[List[str]]=None):
    """Prodigy recipe: manual NER annotation with fuzzy phrase matching.

    Loads phrase patterns from a JSONL file, registers them with a fuzzy
    matcher (case-insensitive), and returns the recipe components for the
    'ner_manual' view with fuzzy matches applied to the stream.
    """
    nlp = spacy.load(spacy_model)
    fuzzy_matcher = FuzzyMatcher(nlp.vocab)
    patterns = JSONL(patterns)
    (phrase_patterns, line_numbers) = parse_phrase_patterns(list(patterns))
    for (pattern_label, patterns) in phrase_patterns.items():
        for (line_number, pattern) in patterns:
            # Patterns are registered under their source line number so
            # matches can later be attributed back to the pattern file.
            fuzzy_matcher.add(line_number, [nlp(pattern)], kwargs=[{'ignorecase': True}])
    stream = JSONL(source)
    stream = add_tokens(nlp, stream)
    stream = apply_fuzzy_matcher(stream, nlp, fuzzy_matcher, line_numbers)
    return {'view_id': 'ner_manual', 'dataset': dataset, 'stream': stream, 'exclude': exclude, 'config': {'lang': nlp.lang, 'labels': label}}
def _get_dynamic_fee_txn_intrinsic_gas(klass: Union[(DynamicFeeTransaction, UnsignedDynamicFeeTransaction)]) -> int:
    """Intrinsic gas for an EIP-1559 transaction: the base Istanbul cost
    plus the EIP-2930 access-list surcharges (per address, per storage key)."""
    base_gas = calculate_intrinsic_gas(ISTANBUL_TX_GAS_SCHEDULE, klass)
    address_gas = ACCESS_LIST_ADDRESS_COST_EIP_2930 * len(klass.access_list)
    slot_count = sum(len(slots) for _, slots in klass.access_list)
    storage_gas = ACCESS_LIST_STORAGE_KEY_COST_EIP_2930 * slot_count
    return base_gas + address_gas + storage_gas
# NOTE(review): '.benchmark(...)' below looks like the remnant of a
# stripped '@pytest.mark.benchmark' decorator — confirm against upstream.
.benchmark(group='import/export')
def test_benchmark_xtgregsurf_import(benchmark, tmp_path, benchmark_surface):
    """Benchmark reading a surface in the xtgregsurf format and verify the
    values survive the round trip."""
    fname = (tmp_path / 'benchmark_surface.xtgregsurf')
    fn = benchmark_surface.to_file(fname, fformat='xtgregsurf')
    surf2 = None
    def read():
        # Capture the result via the closure so it can be checked after
        # the benchmark harness has run the function repeatedly.
        nonlocal surf2
        surf2 = xtgeo.surface_from_file(fn, fformat='xtgregsurf')
    benchmark(read)
    assert_allclose(benchmark_surface.values, surf2.values)
class OptionPlotoptionsScatterSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Wrapper for the Highcharts option group
    `plotOptions.scatter.sonification.defaultInstrumentOptions.mapping.tremolo.speed`.

    NOTE(review): each option below is declared twice — once as a reader
    calling `_config_get` and once as a writer calling `_config`. This is
    the shape of an auto-generated `@property` / `@<name>.setter` pair
    whose decorators appear to have been stripped from this copy; as
    written, the second `def` shadows the first. Confirm against the
    generator output.
    """
    def mapFunction(self):
        # Reader: mapping function (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Writer.
        self._config(value, js_type=False)
    def mapTo(self):
        # Reader: data property mapped to this audio parameter (None when unset).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Writer.
        self._config(text, js_type=False)
    def max(self):
        # Reader: upper bound of the mapped range (None when unset).
        return self._config_get(None)
    def max(self, num: float):
        # Writer.
        self._config(num, js_type=False)
    def min(self):
        # Reader: lower bound of the mapped range (None when unset).
        return self._config_get(None)
    def min(self, num: float):
        # Writer.
        self._config(num, js_type=False)
    def within(self):
        # Reader: scope within which min/max are computed (None when unset).
        return self._config_get(None)
    def within(self, value: Any):
        # Writer.
        self._config(value, js_type=False)
class OptionPlotoptionsBellcurveSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Wrapper for the Highcharts option group
    `plotOptions.bellcurve.sonification.defaultInstrumentOptions.mapping.highpass.frequency`.

    NOTE(review): each option below is declared twice — once as a reader
    calling `_config_get` and once as a writer calling `_config`. This is
    the shape of an auto-generated `@property` / `@<name>.setter` pair
    whose decorators appear to have been stripped from this copy; as
    written, the second `def` shadows the first. Confirm against the
    generator output.
    """
    def mapFunction(self):
        # Reader: mapping function (None when unset).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Writer.
        self._config(value, js_type=False)
    def mapTo(self):
        # Reader: data property mapped to this audio parameter (None when unset).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Writer.
        self._config(text, js_type=False)
    def max(self):
        # Reader: upper bound of the mapped range (None when unset).
        return self._config_get(None)
    def max(self, num: float):
        # Writer.
        self._config(num, js_type=False)
    def min(self):
        # Reader: lower bound of the mapped range (None when unset).
        return self._config_get(None)
    def min(self, num: float):
        # Writer.
        self._config(num, js_type=False)
    def within(self):
        # Reader: scope within which min/max are computed (None when unset).
        return self._config_get(None)
    def within(self, value: Any):
        # Writer.
        self._config(value, js_type=False)
def test_comp5():
    """Completion requests with --use_signature_help return the expected
    keyword/subroutine results at three cursor positions."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = test_dir / 'subdir' / 'test_free.f90'
    for line, char in ((10, 22), (14, 27), (28, 15)):
        request += comp_request(file_path, line, char)
    errcode, results = run_request(request, ['--use_signature_help'])
    assert errcode == 0
    # results[0] is the initialize response; completions start at index 1.
    expected = ([1, 'DIMENSION(:)', 'KEYWORD'], [2, 'vector_create', 'SUBROUTINE'], [3, 'INTENT(IN)', 'KEYWORD'])
    assert len(results) - 1 == len(expected)
    for idx, ref in enumerate(expected):
        validate_comp(results[idx + 1], ref)
class Card(QWidget):
    """Base widget for a notification-style card.

    Layout: an image + caption column on the left, subclass-provided main
    content in the middle, and a subclass-provided action column on the
    right. Subclasses implement :meth:`add_main_content` and
    :meth:`add_actions`.
    """
    def __init__(self, context: ListContext, row: Any, parent: Any=None):
        super().__init__(parent)
        self._context = context
        self._row = row
        image_filename = self._context.get_entry_image_filename(row)
        image_text = self._context.get_entry_image_text(row)
        # Object names allow styling via the application stylesheet.
        self.setObjectName('NotificationCard')
        image_container_label = QLabel('')
        image_container_label.setPixmap(QPixmap(icon_path(image_filename)))
        image_container_label.setObjectName('NotificationCardImage')
        image_label = QLabel(image_text)
        image_label.setAlignment(Qt.AlignHCenter)
        # Left column: image with its caption, vertically centered.
        name_layout = QVBoxLayout()
        name_layout.setContentsMargins(0, 0, 0, 0)
        name_layout.addStretch(1)
        name_layout.addWidget(image_container_label)
        name_layout.addWidget(image_label)
        name_layout.addStretch(1)
        # Right column: subclass-provided action widgets.
        action_layout = QVBoxLayout()
        action_layout.setSpacing(0)
        self.add_actions(action_layout)
        # Horizontal card layout: image | main content | actions.
        self._layout = QHBoxLayout()
        self._layout.setSpacing(8)
        self._layout.setContentsMargins(10, 10, 10, 10)
        self._layout.addLayout(name_layout)
        self.add_main_content(self._layout)
        self._layout.addLayout(action_layout)
        self.setLayout(self._layout)
    def paintEvent(self, event):
        # Custom QWidget subclasses must paint PE_Widget themselves for
        # stylesheet backgrounds/borders to take effect.
        opt = QStyleOption()
        opt.initFrom(self)
        p = QPainter(self)
        self.style().drawPrimitive(QStyle.PE_Widget, opt, p, self)
    def add_main_content(self, layout: QLayout) -> None:
        """Subclasses add the card's central content to *layout*."""
        raise NotImplementedError
    def add_actions(self, layout: QLayout) -> None:
        """Subclasses add the card's action widgets to *layout*."""
        raise NotImplementedError
def log_jsonl(line_json: Any, default_level: int=logging.INFO) -> None:
    """Print *line_json* as a JSON line to stderr, gated by log level.

    The record's own ``'level'`` entry (a level name such as ``'info'``)
    selects the level; records without a usable level fall back to
    *default_level*. Output happens only when the module logger is
    enabled for that level.
    """
    try:
        level = line_json['level'].upper()
    except (KeyError, TypeError, AttributeError):
        # BUGFIX: the original caught NameError, which cannot be raised by
        # this lookup — a dict without 'level' raised an uncaught KeyError.
        # Missing key (KeyError), non-mapping input (TypeError), or a
        # non-string level value (AttributeError) all fall back.
        level = default_level
    else:
        try:
            # getLevelName maps a known name to its number; for unknown
            # names it returns a 'Level <name>' string, so int() raises.
            level = int(logging.getLevelName(level))
        except ValueError:
            level = default_level
    if logger.isEnabledFor(level):
        print(json.dumps(line_json, sort_keys=True), file=sys.stderr)
def find_cusp(a: float, b: float, lms_to_rgb: Matrix, ok_coeff: List[List[Vector]]) -> Vector:
    """Find the LC cusp of the sRGB gamut slice for the OKLab hue
    direction (a, b), returned as [lightness, chroma]."""
    max_sat = compute_max_saturation(a, b, lms_to_rgb, ok_coeff)
    # Convert the maximally-saturated color at L=1 to linear RGB; the
    # largest channel tells us how far we must scale to hit the gamut.
    red, green, blue = oklab_to_linear_rgb([1, a * max_sat, b * max_sat], lms_to_rgb)
    lightness = alg.nth_root(1.0 / max(red, green, blue), 3)
    chroma = lightness * max_sat
    return [lightness, chroma]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.