code stringlengths 281 23.7M |
|---|
class OptionPlotoptionsFunnel3dOnpointPosition(Options):
    """Highcharts ``plotOptions.funnel3d.onPoint.position`` options.

    Each option is a read/write property: the getter returns the configured
    value (``None`` when unset) and the setter writes it through
    ``self._config``.

    NOTE(review): the original defined each getter and setter as two plain
    methods with the same name, so the second ``def`` silently replaced the
    first and the getter was unreachable. Restored the ``@property`` /
    ``@<name>.setter`` pattern this kind of options class uses.
    """

    @property
    def offsetX(self):
        """Horizontal offset of the on-point position. Defaults to None."""
        return self._config_get(None)

    @offsetX.setter
    def offsetX(self, num: float):
        self._config(num, js_type=False)

    @property
    def offsetY(self):
        """Vertical offset of the on-point position. Defaults to None."""
        return self._config_get(None)

    @offsetY.setter
    def offsetY(self, num: float):
        self._config(num, js_type=False)

    @property
    def x(self):
        """X position of the point. Defaults to None."""
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Y position of the point. Defaults to None."""
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
def _wells_importer(wells: list[xtgeo.Well], tops: bool=True, incl_limit: (float | None)=None, top_prefix: str='Top', zonelist: (list | None)=None, use_undef: bool=False) -> dict:
dflist = []
for well in wells:
wp = well.get_zonation_points(tops=tops, incl_limit=incl_limit, top_prefix=top_prefix, zonelist=zonelist, use_undef=use_undef)
if (wp is not None):
dflist.append(wp)
dfr = pd.concat(dflist, ignore_index=True)
attrs = {}
for col in dfr.columns:
col_lower = col.lower()
if (col_lower == 'Zone'):
attrs[col] = 'int'
elif ((col_lower == 'ZoneName') or (col_lower == 'WellName')):
attrs[col] = 'str'
else:
attrs[col] = 'float'
return {'values': dfr, 'attributes': attrs} |
class Migration(migrations.Migration):
    """Django migration: add the ``player`` foreign key to ``PlayerBadge``.

    The FK cascades on delete and uses ``related_name='+'`` (no reverse
    accessor on ``Player``).
    """
    initial = True
    # Requires the initial migrations of both referenced apps to exist first.
    dependencies = [('player', '0001_initial'), ('badge', '0001_initial')]
    operations = [migrations.AddField(model_name='playerbadge', name='player', field=models.ForeignKey(help_text='', on_delete=django.db.models.deletion.CASCADE, related_name='+', to='player.player'))]
class OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Highcharts tremolo mapping options for x-range series sonification."""

    def depth(self) -> 'OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        """Return the sub-options object configuring tremolo depth mapping."""
        depth_cls = OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingTremoloDepth
        return self._config_sub_data('depth', depth_cls)

    def speed(self) -> 'OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        """Return the sub-options object configuring tremolo speed mapping."""
        speed_cls = OptionSeriesXrangeSonificationDefaultinstrumentoptionsMappingTremoloSpeed
        return self._config_sub_data('speed', speed_cls)
class _AssertLogsContext(object):
    """Context manager implementing an ``assertLogs``-style check.

    On entry it swaps the target logger's handlers for a capturing handler;
    on exit it restores the logger and raises ``AssertionError`` when nothing
    at or above the requested level was logged (and no exception occurred).
    """

    LOGGING_FORMAT = '%(levelname)s:%(name)s:%(message)s'

    def __init__(self, logger, level):
        # Accept either a Logger instance or a logger name.
        if isinstance(logger, logging.Logger):
            self.logger = logger
            self.logger_name = logger.name
        else:
            self.logger_name = logger
            self.logger = logging.getLogger(logger)
        # Resolve the threshold: falsy -> INFO, int -> as-is, name -> lookup
        # (the private level-name map moved between Python 2 and 3).
        if not level:
            self.level = logging.INFO
        elif isinstance(level, int):
            self.level = level
        elif six.PY2:
            self.level = logging._levelNames[level]
        else:
            self.level = logging._nameToLevel[level]

    def __enter__(self):
        capture = _CapturingHandler()
        capture.setFormatter(logging.Formatter(self.LOGGING_FORMAT))
        self.watcher = capture.watcher
        # Snapshot the logger's state so __exit__ can restore it exactly.
        self._old_handlers = self.logger.handlers[:]
        self._old_level = self.logger.level
        self._old_propagate = self.logger.propagate
        self.logger.handlers = [capture]
        self.logger.setLevel(self.level)
        self.logger.propagate = False
        return self.watcher

    def __exit__(self, exc_type, exc_value, tb):
        # Always restore the original logger configuration first.
        self.logger.handlers = self._old_handlers
        self.logger.setLevel(self._old_level)
        self.logger.propagate = self._old_propagate
        if exc_type is None and not self.watcher.records:
            raise AssertionError('No logs of level {} or higher triggered on {}'.format(logging.getLevelName(self.level), self.logger_name))
        # Never suppress an exception raised inside the with-block.
        return False
class OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Highcharts pitch-mapping options for Pareto series sonification.

    NOTE(review): the original defined getter and setter as two plain
    methods sharing a name, so each setter shadowed its getter. Restored
    the ``@property`` / ``@<name>.setter`` pattern.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the pitch. Defaults to None."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property the pitch is mapped to. Defaults to 'y'."""
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Highest note of the mapping range. Defaults to 'c6'."""
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Lowest note of the mapping range. Defaults to 'c2'."""
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        """Musical scale to snap mapped notes to. Defaults to None."""
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        """Data extent the mapping is computed within. Defaults to 'yAxis'."""
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def _reset_bentoml_home(new_bentoml_home_directory):
    """Repoint BentoML's home directory and reload config and logging.

    Updates the module-level ``DEFAULT_BENTOML_HOME``/``BENTOML_HOME``
    globals, reloads ``_config``, strips every handler and filter from the
    root logger, then re-applies BentoML's logging configuration.

    :param new_bentoml_home_directory: path to use as the new BentoML home.
    """
    global _config
    global DEFAULT_BENTOML_HOME, BENTOML_HOME
    DEFAULT_BENTOML_HOME = new_bentoml_home_directory
    BENTOML_HOME = new_bentoml_home_directory
    _config = load_config()
    # Imported lazily to avoid a circular import at module load time.
    from bentoml import configure_logging
    root = logging.getLogger()
    # BUG FIX: map() is lazy in Python 3 — the original map(...) calls were
    # never consumed, so no handler/filter was actually removed. Iterate
    # over copies explicitly (removal mutates the underlying lists).
    for handler in root.handlers[:]:
        root.removeHandler(handler)
    for log_filter in root.filters[:]:
        root.removeFilter(log_filter)
    configure_logging()
class OptionPlotoptionsAreasplineSonificationPointgrouping(Options):
    """Highcharts ``plotOptions.areaspline.sonification.pointGrouping`` options.

    NOTE(review): the original defined getter and setter as two plain
    methods sharing a name, so each setter shadowed its getter. Restored
    the ``@property`` / ``@<name>.setter`` pattern.
    """

    @property
    def algorithm(self):
        """Grouping algorithm. Defaults to 'minmax'."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled. Defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan (ms) each group covers. Defaults to 15."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property used when grouping. Defaults to 'y'."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class GroupModifyLoop(GroupTest):
    """Verify the switch rejects a group_mod that would create a loop.

    Builds a chain of groups 0 -> 1 -> 2 (each referencing the previous via
    a group action), then tries to modify group 0 to point at group 2,
    closing the cycle. The switch must answer with a group_mod_failed error
    carrying code OFPGMFC_LOOP.
    """

    def runTest(self):
        (port1,) = openflow_ports(1)
        # Group 0: plain output to a real port (chain terminator).
        msg = ofp.message.group_add(group_type=ofp.OFPGT_ALL, group_id=0, buckets=[create_bucket(actions=[ofp.action.output(port1)])])
        self.controller.message_send(msg)
        do_barrier(self.controller)
        # Group 1 forwards to group 0.
        msg = ofp.message.group_add(group_type=ofp.OFPGT_ALL, group_id=1, buckets=[create_bucket(actions=[ofp.action.group(0)])])
        self.controller.message_send(msg)
        do_barrier(self.controller)
        # Group 2 forwards to group 1.
        msg = ofp.message.group_add(group_type=ofp.OFPGT_ALL, group_id=2, buckets=[create_bucket(actions=[ofp.action.group(1)])])
        self.controller.message_send(msg)
        do_barrier(self.controller)
        # Now try to make group 0 point at group 2 => 0 -> 2 -> 1 -> 0 loop.
        msg = ofp.message.group_modify(group_type=ofp.OFPGT_ALL, group_id=0, buckets=[create_bucket(actions=[ofp.action.group(2)])])
        (response, _) = self.controller.transact(msg)
        self.assertIsInstance(response, ofp.message.group_mod_failed_error_msg)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(response.code, ofp.OFPGMFC_LOOP)
def condition_cfg(a, b, c, func):
    """Build a small test control-flow graph with two branch points.

    Shape (block indices): 0 branches to 5 or 1; 1 branches to 2 or 3;
    2 and 3 both fall through to 4; 4 and 5 both reach the return block 6.

    :param a, b, c: variable objects used in the assignments/conditions.
    :param func: callee for the call statement in block 3.
    :return: the assembled ControlFlowGraph.
    """
    # Block 0: b = 7; a = b; branch on a <= b.
    s0 = Assignment(b, Constant(7))
    s1 = Assignment(a, b)
    s2 = Branch(Condition(OperationType.less_or_equal, [a, b]))
    n0 = BasicBlock(0, [s0, s1, s2])
    # Block 1: branch on a <= c.
    s3 = Branch(Condition(OperationType.less_or_equal, [a, c]))
    n1 = BasicBlock(1, [s3])
    # Block 2: b = 8; a = b.
    s4 = Assignment(b, Constant(8))
    s5 = Assignment(a, b)
    n2 = BasicBlock(2, [s4, s5])
    # Block 3: c = func(b); a = c.
    s6 = Assignment(ListOperation([c]), Call(func, [b]))
    s7 = Assignment(a, c)
    n3 = BasicBlock(3, [s6, s7])
    # Block 4: print(a).
    s8 = Assignment(ListOperation([]), Call(ImportedFunctionSymbol('print', 66), [a]))
    n4 = BasicBlock(4, [s8])
    # Block 6: return c.
    s9 = Return([c])
    n6 = BasicBlock(6, [s9])
    # Block 5: b = 6; a = b.
    s10 = Assignment(b, Constant(6))
    s11 = Assignment(a, b)
    n5 = BasicBlock(5, [s10, s11])
    cfg = ControlFlowGraph()
    # NOTE(review): the edges out of the branch blocks (0 and 1) are added as
    # UnconditionalEdge rather than true/false-case edges — presumably
    # deliberate for this fixture; confirm against the edge types the tests
    # under inspection expect.
    cfg.add_edges_from([UnconditionalEdge(n0, n5), UnconditionalEdge(n0, n1), UnconditionalEdge(n1, n2), UnconditionalEdge(n1, n3), UnconditionalEdge(n2, n4), UnconditionalEdge(n3, n4), UnconditionalEdge(n4, n6), UnconditionalEdge(n5, n6)])
    return cfg
class TestUnion(unittest.TestCase):
    """Unit tests for the Union trait type (traits library)."""

    def test_union_incompatible_trait(self):
        """A non-trait string argument raises ValueError with a clear message."""
        with self.assertRaises(ValueError) as exception_context:
            Union(Str(), 'none')
        self.assertEqual(str(exception_context.exception), "Union trait declaration expects a trait type or an instance of trait type or None, but got 'none' instead")

    def test_list_trait_types(self):
        """Union of trait *types* accepts values of any member type only."""
        class TestClass(HasTraits):
            int_or_str_type = Union(Type, Int, Str)
        TestClass(int_or_str_type=3)
        TestClass(int_or_str_type='3.5')
        with self.assertRaises(TraitError):
            TestClass(int_or_str_type=3.5)
        with self.assertRaises(TraitError):
            TestClass(int_or_str_type=Int(3))

    def test_malformed_declaration(self):
        """Plain Python types or arbitrary values are rejected as members."""
        with self.assertRaises(ValueError):
            class TestClass(HasTraits):
                a = Union(int, Float)
            TestClass(a=2.4)
        with self.assertRaises(ValueError):
            class TestClass(HasTraits):
                a = Union([1, 2], Float)
            TestClass(a=2.4)

    def test_list_trait_instances(self):
        """Union of Instance(...) members accepts instances, not classes."""
        class TestClass(HasTraits):
            float_or_str_obj = Union(Instance(Float), Instance(Str))
        TestClass(float_or_str_obj=Float(3.5))
        TestClass(float_or_str_obj=Str('3.5'))
        with self.assertRaises(TraitError):
            TestClass(float_or_str_obj=Float)
        with self.assertRaises(TraitError):
            TestClass(float_or_str_obj=3.5)

    def test_union_with_none(self):
        """None is a valid member and a valid value."""
        class TestClass(HasTraits):
            int_or_none = Union(None, Int)
        TestClass(int_or_none=None)

    def test_union_unspecified_arguments(self):
        """An empty Union() behaves like Union(None)."""
        class TestClass(HasTraits):
            none = Union()
        TestClass(none=None)

    def test_default_value(self):
        """Default is the first member's default unless default_value is given."""
        class TestClass(HasTraits):
            atr = Union(Int(3), Float(4.1), Str('Something'))
        self.assertEqual(TestClass().atr, 3)
        class TestClass(HasTraits):
            atr = Union(Int(3), Float(4.1), Str('Something'), default_value='XYZ')
        self.assertEqual(TestClass().atr, 'XYZ')
        class TestClass(HasTraits):
            atr = Union()
        self.assertEqual(TestClass().atr, None)
        class TestClass(HasTraits):
            atr = Union(None)
        self.assertEqual(TestClass().atr, None)

    def test_default_raise_error(self):
        """Passing 'default' (instead of 'default_value') is an error."""
        with self.assertRaises(ValueError) as exception_context:
            Union(Int(), Float(), default=1.0)
        self.assertEqual(str(exception_context.exception), "Union default value should be set via 'default_value', not 'default'.")

    def test_inner_traits(self):
        """inner_traits exposes the member trait types in declaration order."""
        class TestClass(HasTraits):
            atr = Union(Float, Int, Str)
        obj = TestClass()
        (t1, t2, t3) = obj.trait('atr').inner_traits
        self.assertEqual(type(t1.trait_type), Float)
        self.assertEqual(type(t2.trait_type), Int)
        self.assertEqual(type(t3.trait_type), Str)

    def test_union_user_defined_class(self):
        """Instance(UserClass) members validate instances, not the class."""
        class TestClass(HasTraits):
            obj = Union(Instance(CustomClass), Int)
        TestClass(obj=CustomClass(value=5))
        TestClass(obj=5)
        with self.assertRaises(TraitError):
            TestClass(obj=CustomClass)

    def test_union_user_defined_type(self):
        """A user-defined trait type works as a Union member."""
        class TestClass(HasTraits):
            type_value = Union(CustomStrType, Int)
        TestClass(type_value='new string')

    def test_notification(self):
        """Assigning through a Union still fires _<name>_changed handlers."""
        class TestClass(HasTraits):
            union_attr = Union(Int)
            shadow_union_trait = None
            def _union_attr_changed(self, new):
                self.shadow_union_trait = new
        obj = TestClass(union_attr=(- 1))
        obj.union_attr = 1
        self.assertEqual(obj.shadow_union_trait, 1)

    def test_extending_union_trait(self):
        """Subclasses can widen validation by overriding validate()."""
        class UnionAllowStr(Union):
            def validate(self, obj, name, value):
                if isinstance(value, str):
                    return value
                return super().validate(obj, name, value)
        class TestClass(HasTraits):
            s = UnionAllowStr(Int, Float)
        TestClass(s='sdf')

    def test_list_inside_union_default(self):
        """A List member's default is a validating TraitList, not a plain list."""
        class HasUnionWithList(HasTraits):
            foo = Union(List(Int), Str)
        has_union = HasUnionWithList()
        value = has_union.foo
        self.assertIsInstance(value, list)
        with self.assertRaises(TraitError):
            value.append('not an integer')

    def test_constant_default(self):
        """Simple (and nested) member defaults are reported as constant."""
        class HasUnionWithList(HasTraits):
            foo = Union(Int(23), Float)
            nested = Union(Union(Str(), Bytes()), Union(Int(), Float(), None))
        has_union = HasUnionWithList()
        value = has_union.foo
        self.assertEqual(value, 23)
        self.assertEqual(has_union.trait('foo').default_value(), (DefaultValue.constant, 23))
        self.assertEqual(has_union.trait('nested').default_value(), (DefaultValue.constant, ''))
def show_sample_predictions(model: DecoderInferenceModel, dataset: CachedDataset, num_samples: int=25, beam_size: int=1):
    """Print model predictions next to ground-truth captions for a random sample.

    Uses a fixed torch seed so the sampled subset is reproducible across runs.

    :param model: inference model called with an encoding tensor and beam_size.
    :param dataset: dataset yielding (encoding, captions) pairs.
    :param num_samples: how many items to sample (default 25).
    :param beam_size: beam width forwarded to the model (default 1).
    """
    torch.manual_seed(0)
    sample_ids = torch.randperm(len(dataset))[:num_samples].tolist()
    for encoding, captions in Subset(dataset, indices=sample_ids):
        prediction = model(torch.from_numpy(encoding), beam_size=beam_size)
        print(f'Pred: {prediction}')
        print(f'True: {captions}')
class Panels():
    """Factory of panel / tab container components, bound to a report page."""

    def __init__(self, ui):
        # Keep the page handle so every created component attaches to it.
        self.page = ui.page

    def panel(self, components: List[html.Html.Html]=None, title: str=None, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: dict=None, profile: types.PROFILE_TYPE=False):
        """Create a plain panel container wrapping the given components."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        # Accept a single component as a convenience.
        if ((components is not None) and (not isinstance(components, list))):
            components = [components]
        html_panel = html.HtmlContainer.Panel(self.page, (components or []), title, color, width, height, html_code, helper, options, profile)
        html.Html.set_component_skin(html_panel)
        return html_panel

    def pills(self, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str='left', html_code: str=None, helper: str=None, options: dict=None, profile: types.PROFILE_TYPE=False):
        """Create a pill-styled tab bar (rounded tabs, no underline)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'css_tab': {'text-align': 'center', 'cursor': 'pointer', 'margin': '0 2px 0 0', 'border-radius': '5px', 'color': 'inherit'}}
        if (options is not None):
            dflt_options.update(options)
        html_tabs = html.HtmlContainer.Tabs(self.page, color, width, height, html_code, helper, dflt_options, profile)
        # Selected pill: inverted theme colors.
        html_tabs.options.css_tab_clicked = {'color': html_tabs.page.theme.greys[0], 'background': html_tabs.page.theme.colors[(- 1)]}
        html_tabs.style.css.overflow_x = 'auto'
        html_tabs.tabs_container.style.css.text_align = align
        html_tabs.style.css.white_space = 'nowrap'
        html.Html.set_component_skin(html_tabs)
        return html_tabs

    def boxes(self, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str='left', html_code: str=None, helper: str=None, options: dict=None, profile: types.PROFILE_TYPE=False):
        """Create a box-styled tab bar with a bottom border on the container."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dfl_options = {'css_tab': {'text-align': 'center', 'cursor': 'pointer', 'margin': '0 2px 0 0', 'color': 'inherit'}}
        if (options is not None):
            dfl_options.update(options)
        html_tabs = html.HtmlContainer.Tabs(self.page, color, width, height, html_code, helper, dfl_options, profile)
        html_tabs.options.css_tab_clicked = {'color': html_tabs.page.theme.greys[0], 'background': html_tabs.page.theme.colors[(- 1)]}
        html_tabs.style.css.overflow_x = 'auto'
        html_tabs.tabs_container.style.css.text_align = align
        html_tabs.tabs_container.style.css.border_bottom = ('1px solid %s' % html_tabs.page.theme.colors[(- 1)])
        html_tabs.style.css.white_space = 'nowrap'
        html.Html.set_component_skin(html_tabs)
        return html_tabs

    def tabs(self, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=False):
        """Create a standard tab bar (inline tabs underlined in the theme grey)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dfl_options = {'css_tab': {'display': 'inline-block', 'text-align': 'center', 'cursor': 'pointer', 'margin': '0 2px 5px 0', 'border-bottom': ('2px solid %s' % self.page.theme.greys[0])}}
        if (options is not None):
            dfl_options.update(options)
        html_tabs = html.HtmlContainer.Tabs(self.page, color, width, height, html_code, helper, dfl_options, profile)
        html.Html.set_component_skin(html_tabs)
        return html_tabs

    def arrows_up(self, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=False):
        """Create a tab bar whose tabs carry an upward-arrow panel style."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dfl_options = {'css_tab': {'display': 'inline-block', 'text-align': 'center', 'cursor': 'pointer', 'margin': '0 2px 0 0', 'border-bottom': ('2px solid %s' % self.page.theme.colors[(- 1)])}}
        if (options is not None):
            dfl_options.update(options)
        html_tabs = html.HtmlContainer.TabsArrowsUp(self.page, color, width, height, html_code, helper, dfl_options, profile)
        for t in html_tabs.tabs():
            t.style.add_classes.layout.panel_arrow_up()
        html_tabs.options.css_tab['color'] = 'inherit'
        html_tabs.options.css_tab['height'] = '30px'
        html_tabs.options.css_tab_clicked = {'background': self.page.theme.colors[(- 1)], 'color': self.page.theme.greys[0]}
        html.Html.set_component_skin(html_tabs)
        return html_tabs

    def arrows_down(self, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=False):
        """Create a tab bar whose tabs carry a downward-arrow panel style."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'css_tab': {'display': 'inline-block', 'text-align': 'center', 'cursor': 'pointer', 'margin': '0 2px 0 0', 'border-bottom': ('2px solid %s' % self.page.theme.greys[0])}}
        if (options is not None):
            dflt_options.update(options)
        html_tabs = html.HtmlContainer.TabsArrowsDown(self.page, color, width, height, html_code, helper, dflt_options, profile)
        for t in html_tabs.tabs():
            t.style.add_classes.layout.panel_arrow_down()
        html_tabs.options.css_tab['color'] = 'inherit'
        html_tabs.options.css_tab['height'] = '30px'
        html_tabs.options.css_tab_clicked = {'background': html_tabs.page.theme.colors[(- 1)], 'color': self.page.theme.greys[0]}
        html.Html.set_component_skin(html_tabs)
        return html_tabs

    def menu(self, color: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=False):
        """Create a menu-styled tab bar (rounded top corners, colored strip)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'css_tab': {'display': 'inline-block', 'text-align': 'center', 'cursor': 'pointer', 'margin': '0 2px 0 0', 'border-radius': '10px 10px 0 0'}}
        if (options is not None):
            dflt_options.update(options)
        html_tabs = html.HtmlContainer.Tabs(self.page, color, width, height, html_code, helper, dflt_options, profile)
        html_tabs.options.css_tab['color'] = 'inherit'
        html_tabs.options.css_tab['background'] = html_tabs.page.theme.greys[0]
        html_tabs.options.css_tab_clicked = {'color': html_tabs.page.theme.greys[0], 'background': html_tabs.page.theme.colors[(- 1)]}
        html_tabs.tabs_container.css({'border-bottom': ('2px solid %s' % html_tabs.page.theme.colors[(- 1)])})
        html.Html.set_component_skin(html_tabs)
        return html_tabs

    def sliding(self, components, title, color: str=None, align: str='center', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, helper: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=False) -> html.HtmlContainer.PanelSlide:
        """Create a collapsible (sliding) panel; plain strings become paragraphs."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        if ((components is not None) and (not isinstance(components, list))):
            _components = [components]
        else:
            _components = components
        components = []
        # Wrap raw values (no .options attribute) into markdown paragraphs.
        for component in _components:
            if (not hasattr(component, 'options')):
                components.append(self.page.ui.texts.paragraph(component, options={'markdown': True}))
            else:
                components.append(component)
        html_slide = html.HtmlContainer.PanelSlide(self.page, components, title, color, width, height, html_code, helper, (options or {}), profile)
        if (align == 'center'):
            html_slide.style.css.margin = 'auto'
            html_slide.style.css.display = 'block'
        html.Html.set_component_skin(html_slide)
        return html_slide

    def split(self, left: html.Html.Html=None, right: html.Html.Html=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(200, 'px'), left_width: types.SIZE_TYPE=(160, 'px'), resizable: bool=True, helper: str=None, options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=None) -> html.HtmlContainer.PanelSplit:
        """Create a two-pane split panel with an optionally resizable divider."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        html_split = html.HtmlContainer.PanelSplit(self.page, width, height, left_width, left, right, resizable, helper, options, profile)
        html.Html.set_component_skin(html_split)
        return html_split

    def filters(self, items=None, category: str='group', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(60, 'px'), html_code: str=None, helper: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Create a chips-based filters panel with its text input hidden."""
        options = (options or {})
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dfl_options = {'item_css': {'border': ('1px solid %s' % self.page.theme.success.light), 'border-radius': '5px', 'padding': '0 4px', 'margin-left': '5px', 'width': 'auto', 'display': 'inline-block', 'background': options.get('colored', 'inherit'), 'white-space': 'nowrap'}}
        if options:
            dfl_options.update(options)
        chip = self.page.ui.chips(items, category, width=width, height=height, html_code=html_code, helper=helper, options=dfl_options, profile=profile)
        # Filters are driven programmatically; hide the free-text input.
        chip.input.style.css.display = False
        html.Html.set_component_skin(chip)
        return chip

    def nav(self, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(100, '%'), options: types.OPTION_TYPE=None, profile: types.PROFILE_TYPE=None, helper: str=None):
        """Create a navigation panels bar (defaults to top position)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dflt_options = {'position': 'top'}
        if (options is not None):
            dflt_options.update(options)
        h_drawer = html.HtmlMenu.PanelsBar(self.page, width, height, dflt_options, helper, profile)
        html.Html.set_component_skin(h_drawer)
        return h_drawer

    def hamburger(self, components: List[html.Html.Html]=None, title: Union[(str, dict)]='', color: str=None, align: str='center', width=(100, '%'), height=(None, 'px'), html_code: str=None, helper: str=None, options: dict=None, profile: Union[(dict, bool)]=False):
        """Create a sliding panel toggled by a hamburger icon in its title bar."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        if ((components is not None) and (not isinstance(components, list))):
            _components = [components]
        else:
            _components = (components or [])
        components = []
        # Wrap raw values (no .options attribute) into markdown paragraphs.
        for component in _components:
            if (not hasattr(component, 'options')):
                components.append(self.page.ui.texts.paragraph(component, options={'markdown': True}))
            else:
                components.append(component)
        dfl_options = {'icon_expanded': '', 'expanded': False, 'icon_closed': '', 'click_type': 'icon'}
        if (options is not None):
            dfl_options.update(options)
        html_slide = html.HtmlContainer.PanelSlide(self.page, components, title, color, width, height, html_code, helper, dfl_options, profile)
        # The hamburger icon is managed manually and floated into the title.
        html_slide.icon = self.page.ui.icons.hamburger()
        html_slide.icon.options.managed = False
        html_slide.icon.style.css.float = 'right'
        html_slide.icon.style.css.margin_top = 3
        html_slide.style.css.border = ('1px solid %s' % self.page.theme.greys[2])
        html_slide._vals[1].style.css.padding = 5
        html_slide.title.add(html_slide.icon)
        html_slide.style.css.margin_top = 5
        html_slide.style.css.margin_bottom = 5
        if (align == 'center'):
            html_slide.style.css.margin = '5px auto'
            html_slide.style.css.display = 'block'
        html.Html.set_component_skin(html_slide)
        return html_slide

    def slidings(self):
        """Return the grouped sliding-panel factories bound to this instance."""
        return Slidings(self)
class module_net_proxy():
    """User-facing status strings for the net proxy module."""
    # BUG FIX: the original literal was unterminated ("Starting ... at ')
    # which is a syntax error. Closed the string after the opening quote —
    # presumably an address was interpolated after it in the original
    # source; confirm the full intended message upstream.
    proxy_starting_s_i = "Starting HTTP/HTTPS proxy at '"
    proxy_set_proxy = "Set the proxy to tunnel through the target. Visit ' to install the certificate"
    proxy_started_background = 'Proxy has been started in background and will shutdown at exit'
    proxy_started_foreground = 'Foreground proxy has been started, press Ctrl-C to stop it'
class BulkResponse(BaseModel):
    """Base model for bulk-operation responses.

    Every concrete subclass must declare both a ``succeeded`` and a
    ``failed`` field; this is enforced at class-definition time.
    """

    def __init_subclass__(cls: BaseModel, **kwargs: Any):
        super().__init_subclass__(**kwargs)
        required = ('succeeded', 'failed')
        if any(name not in cls.__fields__ for name in required):
            raise TypeError(f"Class {cls.__name__} needs both 'succeeded' and 'failed' attributes defined.")
class KnowledgeStore():
    """Process-wide singleton aggregating the knowledge storage backends."""
    # Singleton instance cache; populated on first construction.
    _instance = None

    def __new__(cls):
        if (cls._instance is None):
            cls._instance = super().__new__(cls)
            # Imported lazily to avoid a circular import at module load time.
            import aios_kernel
            # NOTE(review): knowledge_dir is a Path (dir / 'knowledge') even
            # though __singleton_init__ annotates root_dir as str — os.path
            # accepts both; confirm the intended type.
            knowledge_dir = (aios_kernel.storage.AIStorage().get_myai_dir() / 'knowledge')
            if (not os.path.exists(knowledge_dir)):
                os.makedirs(knowledge_dir)
            cls._instance.__singleton_init__(knowledge_dir)
        return cls._instance

    def __singleton_init__(self, root_dir: str):
        """One-time initialization of all stores under *root_dir*."""
        logging.info(f'will init knowledge store, root_dir={root_dir}')
        self.root = root_dir
        relation_store_dir = os.path.join(root_dir, 'relation')
        self.relation_store = ObjectRelationStore(relation_store_dir)
        object_store_dir = os.path.join(root_dir, 'object')
        self.object_store = ObjectStore(object_store_dir)
        # Chunk store and tracker share the same directory.
        chunk_store_dir = os.path.join(root_dir, 'chunk')
        self.chunk_store = ChunkStore(chunk_store_dir)
        self.chunk_tracker = ChunkTracker(chunk_store_dir)
        self.chunk_list_writer = ChunkListWriter(self.chunk_store, self.chunk_tracker)
        self.chunk_reader = ChunkReader(self.chunk_store, self.chunk_tracker)
        # Per-model vector stores, created lazily in get_vector_store().
        self.vector_store = {}

    def get_relation_store(self) -> ObjectRelationStore:
        """Return the relation store."""
        return self.relation_store

    def get_object_store(self) -> ObjectStore:
        """Return the object store."""
        return self.object_store

    def get_chunk_store(self) -> ChunkStore:
        """Return the chunk store."""
        return self.chunk_store

    def get_chunk_tracker(self) -> ChunkTracker:
        """Return the chunk tracker."""
        return self.chunk_tracker

    def get_chunk_list_writer(self) -> ChunkListWriter:
        """Return the chunk-list writer."""
        return self.chunk_list_writer

    def get_chunk_reader(self) -> ChunkReader:
        """Return the chunk reader."""
        return self.chunk_reader

    def get_vector_store(self, model_name: str) -> VectorBase:
        """Return (creating on first use) the vector store for *model_name*."""
        if (model_name not in self.vector_store):
            self.vector_store[model_name] = ChromaVectorStore(self.root, model_name)
        return self.vector_store[model_name]
def get_typing_function(tp):
    """Resolve a typing construct *tp* into a callable that coerces values.

    Dispatches on the kind of annotation (TypeVar, Any, str, mapping, tuple,
    iterable, Optional, plain callable) and, when *tp* carries ``__args__``,
    wraps the chosen callable so the argument types are applied recursively.

    :raises ValueError: for multi-constraint TypeVars or unresolvable types.
    """
    func = None
    if is_typevar(tp):
        constraints = tp.__constraints__
        if not constraints:
            # Unconstrained TypeVar: pass values through untouched.
            func = _identity_function
        elif len(constraints) == 1:
            assert (not NEW_TYPING), "Python 3.7+ forbids single constraint for `TypeVar'"
            func = get_typing_function(constraints[0])
        else:
            joined = ', '.join(getattr(c, '_name', c.__name__) for c in constraints)
            raise ValueError('Cannot resolve typing function for TypeVar({constraints}) as it declares multiple types'.format(constraints=joined))
    elif (tp == typing.Any):
        func = _identity_function
    elif issubclass_(tp, str):
        func = str
    elif is_mapping_type(tp):
        func = _apply_dict_type
    elif is_tuple_type(tp):
        func = _apply_tuple_type
    elif is_iterable_type(tp):
        func = _apply_list_type
    elif is_optional_type(tp):
        func = _apply_optional_type
    elif callable(tp):
        # Fall back to using the type itself as the coercion callable.
        func = tp
    else:
        raise ValueError('Cannot find a function to apply type "{}"'.format(tp))

    # Generic parameters (e.g. List[int]) are resolved recursively and
    # curried into the callable.
    type_args = getattr(tp, '__args__', None)
    if type_args:
        arg_funcs = [get_typing_function(arg) for arg in type_args]
        func = _partial_builder(arg_funcs)(func)
    return func
class EmailUniquenessValidator(UserValidator):
    """Rejects registration when the e-mail address is already taken."""

    def __init__(self, users):
        # The users model/table to query against.
        self.users = users

    def validate(self, user_info):
        """Raise ValidationError if user_info.email already exists (case-insensitive)."""
        matches = self.users.query.filter(func.lower(self.users.email) == user_info.email).count()
        if matches:
            raise ValidationError('email', _('%(email)s is already registered', email=user_info.email))
def downgrade():
    """Alembic downgrade: revert the three email columns from CIText back to VARCHAR."""
    op.alter_column('video_stream_moderators', 'email', existing_type=citext.CIText(), type_=sa.VARCHAR(), existing_nullable=False)
    op.alter_column('ticket_holders', 'email', existing_type=citext.CIText(), type_=sa.VARCHAR(), existing_nullable=True)
    op.alter_column('speaker', 'email', existing_type=citext.CIText(), type_=sa.VARCHAR(), existing_nullable=True)
# BUG FIX: the original began with the bare `(scope='function')`, which is a
# syntax error — clearly the remnant of a stripped `@pytest.fixture(...)`
# decorator. Restored it.
@pytest.fixture(scope='function')
def brightdevice():
    """Yield a temp directory mimicking a sysfs backlight device.

    Contains a ``brightness`` file holding 600 and a ``max_brightness``
    file holding 1000; the directory is removed when the test finishes.
    """
    with tempfile.TemporaryDirectory() as brightnessdir:
        with open(os.path.join(brightnessdir, 'brightness'), 'w') as b:
            b.write(str(600))
        with open(os.path.join(brightnessdir, 'max_brightness'), 'w') as w:
            w.write(str(1000))
        yield brightnessdir
def extract_media_info(item):
    """Render an item's media sources and streams as indented text lines.

    :param item: dict with a 'MediaSources' list (each source may carry a
        'MediaStreams' list of Video/Audio/Subtitle stream dicts).
    :return: list of display strings, one per attribute; each source block
        ends with an empty-string separator.
    """
    media_info = []
    media_sources = item['MediaSources']
    if (media_sources is not None):
        for media_source in media_sources:
            media_info.append(('Media Stream (%s)' % (media_source['Name'],)))
            media_info.append((' -Type: %s' % (media_source['Type'],)))
            media_info.append((' -Protocol: %s' % (media_source['Protocol'],)))
            media_info.append((' -Path: %s' % (media_source['Path'],)))
            media_info.append((' -IsRemote: %s' % (media_source['IsRemote'],)))
            media_info.append((' -Container: %s' % (media_source['Container'],)))
            if (media_source['BitRate'] is not None):
                # BUG FIX: the guard checks 'BitRate' but the original read
                # 'Bitrate' (wrong case), raising KeyError whenever a
                # bitrate was present.
                media_info.append(' -Bitrate: {:,}'.format(media_source['BitRate']))
            if (media_source['Size'] is not None):
                media_info.append(' -Size: {:,}'.format(media_source['Size']))
            media_info.append((' -DefaultAudioStreamIndex: %s' % (media_source['DefaultAudioStreamIndex'],)))
            media_streams = media_source['MediaStreams']
            if (media_streams is not None):
                for mediaStream in media_streams:
                    stream_type = mediaStream['Type']
                    if (stream_type == 'Video'):
                        media_info.append(' -Video Stream')
                        media_info.append(('   -Index: %s' % (mediaStream['Index'],)))
                        media_info.append(('   -Codec: %s' % (mediaStream['Codec'],)))
                        media_info.append(('   -Size: %sx%s' % (mediaStream['Width'], mediaStream['Height'])))
                        media_info.append(('   -AspectRatio: %s' % (mediaStream['AspectRatio'],)))
                        media_info.append(('   -ColorSpace: %s' % (mediaStream['ColorSpace'],)))
                        media_info.append(('   -DisplayTitle: %s' % (mediaStream['DisplayTitle'],)))
                        media_info.append(('   -IsInterlaced: %s' % (mediaStream['IsInterlaced'],)))
                        if (mediaStream['BitRate'] is not None):
                            media_info.append('   -BitRate: {:,}'.format(mediaStream['BitRate']))
                        media_info.append(('   -BitDepth: %s' % (mediaStream['BitDepth'],)))
                        media_info.append(('   -AverageFrameRate: %s' % (mediaStream['AverageFrameRate'],)))
                        media_info.append(('   -RealFrameRate: %s' % (mediaStream['RealFrameRate'],)))
                        media_info.append(('   -Profile: %s' % (mediaStream['Profile'],)))
                        media_info.append(('   -Level: %s' % (mediaStream['Level'],)))
                        media_info.append(('   -PixelFormat: %s' % (mediaStream['PixelFormat'],)))
                        media_info.append(('   -IsAnamorphic: %s' % (mediaStream['IsAnamorphic'],)))
                    if (stream_type == 'Audio'):
                        media_info.append(' -Audio Stream')
                        media_info.append(('   -Index: %s' % (mediaStream['Index'],)))
                        media_info.append(('   -Title: %s' % (mediaStream['DisplayTitle'],)))
                        media_info.append(('   -Codec: %s' % (mediaStream['Codec'],)))
                        media_info.append(('   -ChannelLayout: %s' % (mediaStream['ChannelLayout'],)))
                        media_info.append(('   -Channels: %s' % (mediaStream['Channels'],)))
                        if (mediaStream['BitRate'] is not None):
                            media_info.append('   -BitRate: {:,}'.format(mediaStream['BitRate']))
                        media_info.append(('   -SampleRate: %s' % (mediaStream['SampleRate'],)))
                        media_info.append(('   -IsDefault: %s' % (mediaStream['IsDefault'],)))
                        media_info.append(('   -IsForced: %s' % (mediaStream['IsForced'],)))
                        # BUG FIX: the IsExternal line was appended twice;
                        # dropped the accidental duplicate.
                        media_info.append(('   -IsExternal: %s' % (mediaStream['IsExternal'],)))
                    if (stream_type == 'Subtitle'):
                        media_info.append(' -Subtitle Stream')
                        media_info.append(('   -Index: %s' % (mediaStream['Index'],)))
                        media_info.append(('   -Codec: %s' % (mediaStream['Codec'],)))
                        media_info.append(('   -Language: %s' % (mediaStream['Language'],)))
                        media_info.append(('   -DisplayTitle: %s' % (mediaStream['DisplayTitle'],)))
                        media_info.append(('   -DisplayLanguage: %s' % (mediaStream['DisplayLanguage'],)))
                        media_info.append(('   -IsDefault: %s' % (mediaStream['IsDefault'],)))
                        media_info.append(('   -IsForced: %s' % (mediaStream['IsForced'],)))
                        media_info.append(('   -IsExternal: %s' % (mediaStream['IsExternal'],)))
                        media_info.append(('   -IsTextSubtitleStream: %s' % (mediaStream['IsTextSubtitleStream'],)))
            # Blank separator between sources.
            media_info.append('')
    return media_info
class CisSpidersSpiderMiddleware(object):
    """Scrapy spider middleware (default template behavior)."""

    @classmethod
    def from_crawler(cls, crawler):
        """Factory used by Scrapy to create and wire up the middleware.

        BUG FIX: Scrapy invokes this on the class, so it must be a
        classmethod — the decorator was missing.
        """
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        """Pass every response through unchanged (None = continue)."""
        return None

    def process_spider_output(self, response, result, spider):
        """Yield each item/request from the spider unchanged."""
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        """Ignore spider exceptions (None = default handling)."""
        pass

    def process_start_requests(self, start_requests, spider):
        """Yield each start request unchanged."""
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info(('Spider opened: %s' % spider.name))
class _RedisLogic(object):
    """Looks up DHCP lease definitions stored in Redis.

    Layout: one hash per MAC (per-host overrides) plus one hash per
    ``subnet|serial`` key (subnet-level defaults). Host values take
    precedence over subnet values.
    """
    _redis_client = None

    def __init__(self):
        # Imported lazily so the Redis connection settings come from the
        # runtime config module.
        from staticdhcpdlib import config
        self._redis_client = redis.Redis(decode_responses=True, **getattr(config, 'X_REDISDB_KWARGS', {}))

    def _lookupMAC(self, mac):
        """Return a Definition for *mac*, or None when unknown/misconfigured."""
        details = self._redis_client.hgetall(str(mac))
        if (not details):
            _logger.debug("Unknown MAC response for '{}'".format(mac))
            return None
        _logger.debug("Known MAC response for '{}': {!r}".format(mac, details))
        # The per-host record points at its subnet record via subnet+serial.
        subnet_serial = '{}|{}'.format(details['subnet'], details['serial'])
        details_ss = self._redis_client.hgetall(subnet_serial)
        if (not details_ss):
            _logger.warning("Unknown subnet|serial: '{}'".format(subnet_serial))
            return None
        _logger.debug("Known subnet|serial response for '{}': {!r}".format(subnet_serial, details_ss))
        # Merge 'extra' JSON blobs: subnet-level first, host-level on top.
        extra = details_ss.get('extra')
        combined_extra = ((extra and json.loads(extra)) or {})
        extra = details.get('extra')
        combined_extra.update(((extra and json.loads(extra)) or {}))
        if (not combined_extra):
            combined_extra = None
        # Comma-separated server lists are trimmed and capped at three
        # entries (DHCP option limit).
        domain_name_servers = details.get('domain_name_servers', details_ss.get('domain_name_servers'))
        if domain_name_servers:
            domain_name_servers = [v.strip() for v in domain_name_servers.split(',')][:3]
        ntp_servers = details.get('ntp_servers', details_ss.get('ntp_servers'))
        if ntp_servers:
            ntp_servers = [v.strip() for v in ntp_servers.split(',')][:3]
        # NOTE(review): details.get('lease_time', details_ss['lease_time'])
        # evaluates the subnet lookup eagerly, so a subnet record without
        # 'lease_time' raises KeyError even when the host record has one —
        # presumably lease_time is mandatory at the subnet level; confirm.
        return Definition(ip=details['ip'], lease_time=details.get('lease_time', details_ss['lease_time']), subnet=details['subnet'], serial=details['serial'], hostname=details.get('hostname'), gateways=details.get('gateway', details_ss.get('gateway')), subnet_mask=details.get('subnet_mask', details_ss.get('subnet_mask')), broadcast_address=details.get('broadcast_address', details_ss.get('broadcast_address')), domain_name=details.get('domain_name', details_ss.get('domain_name')), domain_name_servers=domain_name_servers, ntp_servers=ntp_servers, extra=combined_extra)
class TestSpotifyMaxLimits():
    """Exercise the Spotify client's max_limits toggle and its precedence
    against explicit `limit` arguments."""

    def test_turning_on_max_limits_returns_more(self, app_token):
        client = Spotify(app_token)
        (default_page,) = client.search('piano')
        with client.max_limits(True):
            (maxed_page,) = client.search('piano')
        assert default_page.limit < maxed_page.limit
        client.close()

    def test_turning_off_max_limits_returns_less(self, app_token):
        client = Spotify(app_token, max_limits_on=True)
        (maxed_page,) = client.search('piano')
        with client.max_limits(False):
            (default_page,) = client.search('piano')
        assert maxed_page.limit > default_page.limit
        client.close()

    def test_specifying_limit_kwarg_overrides_max_limits(self, app_token):
        client = Spotify(app_token, max_limits_on=True)
        (page,) = client.search('piano', limit=1)
        assert page.limit == 1
        client.close()

    def test_specifying_limit_pos_arg_overrides_max_limits(self, app_token):
        client = Spotify(app_token, max_limits_on=True)
        (page,) = client.search('piano', ('track',), None, None, 1)
        assert page.limit == 1
        client.close()

    def test_specifying_pos_args_until_limit(self, app_token):
        # Positional args up to (but not including) `limit` must not disable
        # the max_limits behaviour.
        client = Spotify(app_token, max_limits_on=True)
        (maxed_page,) = client.search('piano', ('track',), None, None)
        with client.max_limits(False):
            (default_page,) = client.search('piano', ('track',), None, None)
        assert maxed_page.limit > default_page.limit
        client.close()
_event  # NOTE(review): stray bare expression -- looks like residue of a stripped decorator or import; it has no runtime effect beyond evaluating the name.
class Event():
    # Base class for parsed events.
    def _get_thread(session, data):
        # NOTE(review): no `self`/`cls` parameter -- this was probably a
        # @staticmethod whose decorator was lost; confirm before calling on
        # an instance.
        """Return the Group or User thread referenced by data['threadKey'].

        Raises _exception.ParseError if neither key form is present.
        """
        key = data['threadKey']
        if ('threadFbId' in key):
            return _threads.Group(session=session, id=str(key['threadFbId']))
        elif ('otherUserFbId' in key):
            return _threads.User(session=session, id=str(key['otherUserFbId']))
        raise _exception.ParseError('Could not find thread data', data=data)
class AsyncDbApi2Instrumentation(AsyncAbstractInstrumentedModule):
    """Instruments async DB-API 2 `connect` calls, wrapping the resulting
    connection in an AsyncConnectionProxy."""
    connect_method = None

    async def call(self, module, method, wrapped, instance, args, kwargs):
        connection = await wrapped(*args, **kwargs)
        return AsyncConnectionProxy(connection)

    async def call_if_sampling(self, module, method, wrapped, instance, args, kwargs):
        # Always instrument connect calls, regardless of sampling decisions.
        return await self.call(module, method, wrapped, instance, args, kwargs)
class TestStarredSource():
    """Tests for serializer fields declared with source='*' (the whole
    instance/data dict is passed through instead of a single attribute)."""
    data = {'nested1': {'a': 1, 'b': 2}, 'nested2': {'c': 3, 'd': 4}}
    def setup_method(self):
        # Fixture serializers are declared per-test-run; the tests depend on
        # these exact declarations.
        class NestedSerializer1(serializers.Serializer):
            a = serializers.IntegerField()
            b = serializers.IntegerField()
        class NestedSerializer2(serializers.Serializer):
            c = serializers.IntegerField()
            d = serializers.IntegerField()
        class NestedBaseSerializer(serializers.Serializer):
            # source='*' makes each nested serializer read/write the parent's
            # whole data dict.
            nested1 = NestedSerializer1(source='*')
            nested2 = NestedSerializer2(source='*')
        class NullableNestedSerializer(serializers.Serializer):
            nested = NestedSerializer1(source='*', allow_null=True)
        class CustomField(serializers.Field):
            def to_representation(self, instance):
                return getattr(instance, 'foo', None)
            def to_internal_value(self, data):
                return {'foo': data}
        class NullableFieldSerializer(serializers.Serializer):
            field = CustomField(source='*', allow_null=True)
        self.Serializer = NestedBaseSerializer
        self.NullableNestedSerializer = NullableNestedSerializer
        self.NullableFieldSerializer = NullableFieldSerializer
    def test_nested_validate(self):
        # Validated data from starred nested serializers is flattened.
        serializer = self.Serializer(data=self.data)
        assert serializer.is_valid()
        assert (serializer.validated_data == {'a': 1, 'b': 2, 'c': 3, 'd': 4})
    def test_nested_null_validate(self):
        # None is rejected for a starred *serializer* even with allow_null.
        serializer = self.NullableNestedSerializer(data={'nested': None})
        assert (not serializer.is_valid())
    def test_nested_serialize(self):
        # A flat instance serializes back into the nested representation.
        instance = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
        serializer = self.Serializer(instance)
        assert (serializer.data == self.data)
    def test_field_validate(self):
        serializer = self.NullableFieldSerializer(data={'field': 'bar'})
        assert serializer.is_valid()
        assert (serializer.validated_data == {'foo': 'bar'})
    def test_field_null_validate(self):
        # A starred *field* with allow_null does accept None.
        serializer = self.NullableFieldSerializer(data={'field': None})
        assert serializer.is_valid()
        assert (serializer.validated_data == {'foo': None})
class TestAppFromConfig():
    """Verify App.from_config builds an App whose llm/db/embedder settings
    match the values in the YAML config files."""
    def load_config_data(self, yaml_path):
        # Load the same YAML the App is built from, to compare against.
        with open(yaml_path, 'r') as file:
            return yaml.safe_load(file)
    def test_from_chroma_config(self, mocker):
        # Stub out the Chroma client so no real vector DB is started.
        mocker.patch('embedchain.vectordb.chroma.chromadb.Client')
        yaml_path = 'configs/chroma.yaml'
        config_data = self.load_config_data(yaml_path)
        app = App.from_config(config_path=yaml_path)
        assert isinstance(app, App)
        assert (app.config.id == config_data['app']['config']['id'])
        # collect_metrics is not set in this YAML; the default is True.
        assert (app.config.collect_metrics is True)
        llm_config = config_data['llm']['config']
        assert (app.llm.config.temperature == llm_config['temperature'])
        assert (app.llm.config.max_tokens == llm_config['max_tokens'])
        assert (app.llm.config.top_p == llm_config['top_p'])
        assert (app.llm.config.stream == llm_config['stream'])
        db_config = config_data['vectordb']['config']
        assert (app.db.config.collection_name == db_config['collection_name'])
        assert (app.db.config.dir == db_config['dir'])
        assert (app.db.config.allow_reset == db_config['allow_reset'])
        embedder_config = config_data['embedder']['config']
        assert (app.embedding_model.config.model == embedder_config['model'])
        assert (app.embedding_model.config.deployment_name == embedder_config.get('deployment_name'))
    def test_from_opensource_config(self, mocker):
        mocker.patch('embedchain.vectordb.chroma.chromadb.Client')
        yaml_path = 'configs/opensource.yaml'
        config_data = self.load_config_data(yaml_path)
        # config_path passed positionally here (kwarg in the test above).
        app = App.from_config(yaml_path)
        assert isinstance(app, App)
        assert (app.config.id == config_data['app']['config']['id'])
        assert (app.config.collect_metrics == config_data['app']['config']['collect_metrics'])
        llm_config = config_data['llm']['config']
        assert (app.llm.config.model == llm_config['model'])
        assert (app.llm.config.temperature == llm_config['temperature'])
        assert (app.llm.config.max_tokens == llm_config['max_tokens'])
        assert (app.llm.config.top_p == llm_config['top_p'])
        assert (app.llm.config.stream == llm_config['stream'])
        db_config = config_data['vectordb']['config']
        assert (app.db.config.collection_name == db_config['collection_name'])
        assert (app.db.config.dir == db_config['dir'])
        assert (app.db.config.allow_reset == db_config['allow_reset'])
        embedder_config = config_data['embedder']['config']
        assert (app.embedding_model.config.deployment_name == embedder_config['deployment_name'])
class IgPgTg(IPT):
    """The IgPgTg color space: an IPT-style space with an intensity channel
    and two mirrored chroma channels, using the D65 (2 degree) white point."""
    BASE = 'xyz-d65'
    NAME = 'igpgtg'
    SERIALIZE = ('--igpgtg',)
    CHANNELS = (
        Channel('ig', 0.0, 1.0),
        Channel('pg', -1.0, 1.0, flags=FLG_MIRROR_PERCENT),
        Channel('tg', -1.0, 1.0, flags=FLG_MIRROR_PERCENT)
    )
    CHANNEL_ALIASES = {'intensity': 'ig', 'protan': 'pg', 'tritan': 'tg'}
    WHITE = WHITES['2deg']['D65']
    ACHROMATIC = Achromatic(ACHROMATIC_RESPONSE, 1e-05, 1e-05, 0.03126, 'linear', mirror=True)

    def resolve_channel(self, index: int, coords: Vector) -> float:
        """Resolve the channel at `index`, substituting a concrete value when
        the stored coordinate is NaN (undefined)."""
        value = coords[index]
        if index in (1, 2):
            if math.isnan(value):
                # Undefined chroma resolves to the ideal achromatic response
                # for the current intensity.
                return self.ACHROMATIC.get_ideal_ab(coords[0])[index - 1]
            return value
        return self.channels[index].nans if math.isnan(value) else value

    def to_base(self, coords: Vector) -> Vector:
        """Convert IgPgTg coordinates to the XYZ D65 base space."""
        return igpgtg_to_xyz(coords)

    def from_base(self, coords: Vector) -> Vector:
        """Convert XYZ D65 base coordinates to IgPgTg."""
        return xyz_to_igpgtg(coords)
class Field():
    """Base class for serializer fields.

    Handles binding to a parent serializer, pulling values out of input data,
    defaults, empty/null handling, validator execution and error-message
    resolution.  Subclasses implement to_internal_value()/to_representation().
    """
    _creation_counter = 0  # global ordinal so fields sort in declaration order
    default_error_messages = {'required': _('This field is required.'), 'null': _('This field may not be null.')}
    default_validators = []
    default_empty_html = empty
    initial = None
    def __init__(self, *, read_only=False, write_only=False, required=None, default=empty, initial=empty, source=None, label=None, help_text=None, style=None, error_messages=None, validators=None, allow_null=False):
        self._creation_counter = Field._creation_counter
        Field._creation_counter += 1
        # If `required` was not given explicitly: required unless a default
        # exists or the field is read-only.
        if (required is None):
            required = ((default is empty) and (not read_only))
        # Reject mutually incompatible option combinations.
        assert (not (read_only and write_only)), NOT_READ_ONLY_WRITE_ONLY
        assert (not (read_only and required)), NOT_READ_ONLY_REQUIRED
        assert (not (required and (default is not empty))), NOT_REQUIRED_DEFAULT
        assert (not (read_only and (self.__class__ == Field))), USE_READONLYFIELD
        self.read_only = read_only
        self.write_only = write_only
        self.required = required
        self.default = default
        self.source = source
        self.initial = (self.initial if (initial is empty) else initial)
        self.label = label
        self.help_text = help_text
        self.style = ({} if (style is None) else style)
        self.allow_null = allow_null
        # If a default was supplied, it also becomes the HTML-empty fallback.
        if (self.default_empty_html is not empty):
            if (default is not empty):
                self.default_empty_html = default
        if (validators is not None):
            self.validators = list(validators)
        # Populated later by bind().
        self.field_name = None
        self.parent = None
        # Collect error messages across the whole MRO (base classes first, so
        # subclasses override), then apply instance-level overrides.
        messages = {}
        for cls in reversed(self.__class__.__mro__):
            messages.update(getattr(cls, 'default_error_messages', {}))
        messages.update((error_messages or {}))
        self.error_messages = messages
    def __class_getitem__(cls, *args, **kwargs):
        # Allow `Field[...]` generic-style subscription; the subscript is ignored.
        return cls
    def bind(self, field_name, parent):
        """Attach this field to `parent` under `field_name` and resolve
        `source` into `source_attrs` (a dotted attribute-lookup path)."""
        assert (self.source != field_name), ("It is redundant to specify `source='%s'` on field '%s' in serializer '%s', because it is the same as the field name. Remove the `source` keyword argument." % (field_name, self.__class__.__name__, parent.__class__.__name__))
        self.field_name = field_name
        self.parent = parent
        if (self.label is None):
            self.label = field_name.replace('_', ' ').capitalize()
        if (self.source is None):
            self.source = field_name
        # source='*' means "the whole object": no attribute traversal.
        if (self.source == '*'):
            self.source_attrs = []
        else:
            self.source_attrs = self.source.split('.')
    # NOTE(review): the two `validators` defs below look like a stripped
    # @property getter / @validators.setter pair; as written, the second
    # definition shadows the first -- confirm against upstream.
    def validators(self):
        if (not hasattr(self, '_validators')):
            self._validators = self.get_validators()
        return self._validators
    def validators(self, validators):
        self._validators = validators
    def get_validators(self):
        # Copy so instances never mutate the class-level default list.
        return list(self.default_validators)
    def get_initial(self):
        """Return the initial value for form rendering (call it if callable)."""
        if callable(self.initial):
            return self.initial()
        return self.initial
    def get_value(self, dictionary):
        """Extract this field's primitive value from the incoming data,
        with special-casing for HTML form input (missing keys, '' values)."""
        if html.is_html_input(dictionary):
            if (self.field_name not in dictionary):
                # Partial updates skip absent fields entirely.
                if getattr(self.root, 'partial', False):
                    return empty
                return self.default_empty_html
            ret = dictionary[self.field_name]
            # '' from an HTML form means null (or stays '' if blanks allowed).
            if ((ret == '') and self.allow_null):
                return ('' if getattr(self, 'allow_blank', False) else None)
            elif ((ret == '') and (not self.required)):
                return ('' if getattr(self, 'allow_blank', False) else empty)
            return ret
        return dictionary.get(self.field_name, empty)
    def get_attribute(self, instance):
        """Pull this field's value off `instance` via `source_attrs`,
        re-raising lookup failures with a more helpful message."""
        try:
            return get_attribute(instance, self.source_attrs)
        except BuiltinSignatureError as exc:
            msg = 'Field source for `{serializer}.{field}` maps to a built-in function type and is invalid. Define a property or method on the `{instance}` instance that wraps the call to the built-in function.'.format(serializer=self.parent.__class__.__name__, field=self.field_name, instance=instance.__class__.__name__)
            raise type(exc)(msg)
        except (KeyError, AttributeError) as exc:
            # Fall back through default / null / skip before surfacing the error.
            if (self.default is not empty):
                return self.get_default()
            if self.allow_null:
                return None
            if (not self.required):
                raise SkipField()
            msg = 'Got {exc_type} when attempting to get a value for field `{field}` on serializer `{serializer}`.\nThe serializer field might be named incorrectly and not match any attribute or key on the `{instance}` instance.\nOriginal exception text was: {exc}.'.format(exc_type=type(exc).__name__, field=self.field_name, serializer=self.parent.__class__.__name__, instance=instance.__class__.__name__, exc=exc)
            raise type(exc)(msg)
    def get_default(self):
        """Return the default value, or raise SkipField when there is none
        (or when doing a partial update, where defaults must not apply)."""
        if ((self.default is empty) or getattr(self.root, 'partial', False)):
            raise SkipField()
        if callable(self.default):
            # Context-aware callables receive the field itself.
            if getattr(self.default, 'requires_context', False):
                return self.default(self)
            else:
                return self.default()
        return self.default
    def validate_empty_values(self, data):
        """Handle `empty` and None inputs.

        Returns (True, value) when validation should stop here with `value`,
        or (False, data) when normal validation should continue.
        """
        if self.read_only:
            return (True, self.get_default())
        if (data is empty):
            if getattr(self.root, 'partial', False):
                raise SkipField()
            if self.required:
                self.fail('required')
            return (True, self.get_default())
        if (data is None):
            if (not self.allow_null):
                self.fail('null')
            elif (self.source == '*'):
                # Starred fields let to_internal_value decide what None means.
                return (False, None)
            return (True, None)
        return (False, data)
    def run_validation(self, data=empty):
        """Full validation pipeline: empty handling, conversion, validators."""
        (is_empty_value, data) = self.validate_empty_values(data)
        if is_empty_value:
            return data
        value = self.to_internal_value(data)
        self.run_validators(value)
        return value
    def run_validators(self, value):
        """Run all validators, collecting flat errors into one ValidationError;
        dict-shaped errors are re-raised as-is."""
        errors = []
        for validator in self.validators:
            try:
                if getattr(validator, 'requires_context', False):
                    validator(value, self)
                else:
                    validator(value)
            except ValidationError as exc:
                if isinstance(exc.detail, dict):
                    raise
                errors.extend(exc.detail)
            except DjangoValidationError as exc:
                errors.extend(get_error_detail(exc))
        if errors:
            raise ValidationError(errors)
    def to_internal_value(self, data):
        # Subclass responsibility: primitive input -> validated native value.
        raise NotImplementedError('{cls}.to_internal_value() must be implemented for field {field_name}. If you do not need to support write operations you probably want to subclass `ReadOnlyField` instead.'.format(cls=self.__class__.__name__, field_name=self.field_name))
    def to_representation(self, value):
        # Subclass responsibility: native value -> primitive output.
        raise NotImplementedError('{cls}.to_representation() must be implemented for field {field_name}.'.format(cls=self.__class__.__name__, field_name=self.field_name))
    def fail(self, key, **kwargs):
        """Raise a ValidationError for the message registered under `key`."""
        try:
            msg = self.error_messages[key]
        except KeyError:
            class_name = self.__class__.__name__
            msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
            raise AssertionError(msg)
        message_string = msg.format(**kwargs)
        raise ValidationError(message_string, code=key)
    # NOTE(review): `root` and `context` below look like stripped @property
    # definitions (they are used as attributes, e.g. `self.root`, above).
    def root(self):
        # Walk up to the outermost serializer.
        root = self
        while (root.parent is not None):
            root = root.parent
        return root
    def context(self):
        return getattr(self.root, '_context', {})
    def __new__(cls, *args, **kwargs):
        # Record construction args so __deepcopy__ can re-create the field.
        instance = super().__new__(cls)
        instance._args = args
        instance._kwargs = kwargs
        return instance
    def __deepcopy__(self, memo):
        # Re-instantiate from the saved args; compiled regexes and validators
        # are shared rather than copied.
        args = [(copy.deepcopy(item) if (not isinstance(item, REGEX_TYPE)) else item) for item in self._args]
        kwargs = {key: (copy.deepcopy(value, memo) if (key not in ('validators', 'regex')) else value) for (key, value) in self._kwargs.items()}
        return self.__class__(*args, **kwargs)
    def __repr__(self):
        return representation.field_repr(self)
class PageSavedFilter(AbstractCrudObject):
    # Auto-generated Facebook Graph API node wrapper for a Page saved filter.
    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isPageSavedFilter = True
        super(PageSavedFilter, self).__init__(fbid, parent_id, api)
    class Field(AbstractObject.Field):
        # Graph API field names for this node.
        display_name = 'display_name'
        filters = 'filters'
        id = 'id'
        page_id = 'page_id'
        section = 'section'
        time_created = 'time_created'
        time_updated = 'time_updated'
    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue (or prepare) a GET request for this node.

        With `batch`, the request is queued on the batch and returned;
        with `pending`, the unexecuted request is returned; otherwise the
        request is executed immediately and its result returned.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=PageSavedFilter, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    # Graph API type of each field, used for response parsing.
    _field_types = {'display_name': 'string', 'filters': 'list<Object>', 'id': 'string', 'page_id': 'string', 'section': 'string', 'time_created': 'int', 'time_updated': 'int'}
    # NOTE(review): takes `cls` but has no @classmethod decorator -- likely
    # stripped; confirm against the generated SDK source.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class OptionPlotoptionsDumbbellSonificationTracksMappingHighpass(Options):
    # Auto-generated wrapper for the Highcharts option path
    # plotOptions.dumbbell.sonification.tracks.mapping.highpass.
    def frequency(self) -> 'OptionPlotoptionsDumbbellSonificationTracksMappingHighpassFrequency':
        """Access the highpass filter `frequency` sub-options."""
        return self._config_sub_data('frequency', OptionPlotoptionsDumbbellSonificationTracksMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsDumbbellSonificationTracksMappingHighpassResonance':
        """Access the highpass filter `resonance` sub-options."""
        return self._config_sub_data('resonance', OptionPlotoptionsDumbbellSonificationTracksMappingHighpassResonance)
def add_tree(data):
    """Register a tree described by the request payload `data`; return its id.

    `data` must carry 'id' and 'name', plus one tree source: 'newick',
    'b64pickle', or an already-built 'tree' object (else aborts with 400).
    Optional 'layouts', 'include_props' and 'exclude_props' may be given as
    lists or comma-separated strings.  A pickle snapshot of the tree is
    written to /tmp/<id>.pickle in a background thread.
    """
    tid = int(data['id'])
    name = data['name']
    nw = data.get('newick')
    bpickle = data.get('b64pickle')
    layouts = data.get('layouts', [])
    # Accept comma-separated strings as well as lists.  `isinstance` replaces
    # the original `type(x) == str` checks (idiomatic, and also accepts str
    # subclasses).
    if isinstance(layouts, str):
        layouts = layouts.split(',')
    include_props = data.get('include_props')
    if isinstance(include_props, str):
        include_props = include_props.split(',')
    exclude_props = data.get('exclude_props')
    if isinstance(exclude_props, str):
        exclude_props = exclude_props.split(',')
    # Drop any previously-registered tree with the same id.
    del_tree(tid)
    if nw is not None:
        tree = load_tree_from_newick(tid, nw)
    elif bpickle is not None:
        tree = ete_format.loads(bpickle, unpack=True)
        ops.update_sizes_all(tree)
    else:
        tree = data.get('tree')
        if not tree:
            abort(400, 'Either Newick or Tree object has to be provided.')
    tree_data = app.trees[tid] = TreeData()
    tree_data.name = name
    tree_data.style = copy_style(TreeStyle())
    tree_data.nodestyles = {}
    tree_data.include_props = include_props
    tree_data.exclude_props = exclude_props
    tree_data.layouts = retrieve_layouts(layouts)
    tree_data.timer = time()
    tree_data.searches = {}
    tree_data.selected = {}
    tree_data.active = drawer_module.get_empty_active()
    tree_data.tree = tree
    def write_tree():
        # Persist a snapshot so the tree can be recovered later.
        obj = {'name': name, 'layouts': layouts, 'tree': tree}
        with open(f'/tmp/{tid}.pickle', 'wb') as handle:
            pickle.dump(obj, handle)
    thr_write = Thread(daemon=True, target=write_tree)
    thr_write.start()
    return tid
class GSetting():
    """Singleton facade over Gio.Settings for the coverart-browser plugin.

    All instances share one `__impl` object (classic shared-state singleton);
    attribute access is delegated to it via __getattr__/__setattr__.
    """
    __instance = None
    class __impl():
        def __init__(self):
            # Enum-like namespaces for schema paths and keys.
            self.Path = self._enum(PLUGIN='org.gnome.rhythmbox.plugins.coverart_browser', RBSOURCE='org.gnome.rhythmbox.sources')
            self.RBSourceKey = self._enum(VISIBLE_COLS='visible-columns')
            self.PluginKey = self._enum(CUSTOM_STATUSBAR='custom-statusbar', DISPLAY_TEXT='display-text', DISPLAY_TEXT_POS='display-text-pos', RANDOM='random-queue', DISPLAY_TEXT_LOADING='display-text-loading', DISPLAY_TEXT_ELLIPSIZE='display-text-ellipsize', DISPLAY_TEXT_ELLIPSIZE_LENGTH='display-text-ellipsize-length', DISPLAY_FONT_SIZE='display-font-size', COVER_SIZE='cover-size', ADD_SHADOW='add-shadow', SHADOW_IMAGE='shadow-image', PANED_POSITION='paned-position', SORT_BY='sort-by', SORT_ORDER='sort-order', SORT_BY_ARTIST='sort-by-artist', SORT_ORDER_ARTIST='sort-order-artist', RATING='rating-threshold', AUTOSTART='autostart', TOOLBAR_POS='toolbar-pos', BUTTON_RELIEF='button-relief', THEME='theme', NEW_GENRE_ICON='new-genre-icon', ICON_PADDING='icon-padding', ICON_SPACING='icon-spacing', ICON_AUTOMATIC='icon-automatic', VIEW_NAME='view-name', FLOW_APPEARANCE='flow-appearance', FLOW_HIDE_CAPTION='flow-hide-caption', FLOW_SCALE='flow-scale', FLOW_BACKGROUND_COLOUR='flow-background-colour', FLOW_AUTOMATIC='flow-automatic', FLOW_WIDTH='flow-width', FLOW_MAX='flow-max-albums', WEBKIT='webkit-support', ARTIST_PANED_POSITION='artist-paned-pos', USE_FAVOURITES='use-favourites', ARTIST_INFO_PANED_POSITION='artist-info-paned-pos', LAST_GENRE_FOLDER='last-genre-folder', ENTRY_VIEW_MODE='entry-view-mode', FOLLOWING='following', ACTIVATIONS='activations', TEXT_ALIGNMENT='text-alignment')
            # Cache of Gio.Settings objects keyed by schema path.
            self.setting = {}
        def get_setting(self, path):
            """Return (and cache) the Gio.Settings object for `path`."""
            try:
                setting = self.setting[path]
            except KeyError:
                # Narrowed from a bare `except:` -- only a cache miss should
                # trigger creation; other errors must propagate.
                self.setting[path] = Gio.Settings.new(path)
                setting = self.setting[path]
            return setting
        def get_value(self, path, key):
            return self.get_setting(path)[key]
        def set_value(self, path, key, value):
            self.get_setting(path)[key] = value
        def _enum(self, **enums):
            # Build a lightweight enum-like class from keyword arguments.
            return type('Enum', (), enums)
    def __init__(self):
        # Create the shared implementation on first use only.
        if (GSetting.__instance is None):
            GSetting.__instance = GSetting.__impl()
        # Store it directly in __dict__ to bypass the delegating __setattr__.
        self.__dict__['_GSetting__instance'] = GSetting.__instance
    def __getattr__(self, attr):
        # Delegate all attribute reads to the shared implementation.
        return getattr(self.__instance, attr)
    def __setattr__(self, attr, value):
        # Delegate all attribute writes to the shared implementation.
        return setattr(self.__instance, attr, value)
def extractSteadyTranslation(item):
    """Map a feed item's tags to a release message for a known series.

    Returns None when the title has no chapter/volume info (or is a preview),
    a release message for the first matching series tag, or False when no
    known series tag is present.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    known_series = [
        'Online Game: Evil Dragon Against The Heaven',
        'In Different World With Naruto System',
        'The Alchemist God',
    ]
    for series in known_series:
        if series in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def analyze_alignment_file_coordsorted(bam, options):
    """Scan all alignments in `bam` and collect SV signatures.

    Returns (sv_signatures, translocation_signatures_all_bnds).  Alignments
    that are unmapped, secondary, or below options.min_mapq are skipped.
    Primary alignments additionally get their supplementary alignments
    gathered and analyzed for split-read signatures.
    """
    alignment_it = bam.fetch(until_eof=True)
    sv_signatures = []
    translocation_signatures_all_bnds = []
    read_nr = 0
    # Manual while/next loop (instead of a for-loop) so that StopIteration
    # and KeyboardInterrupt are caught around the iterator advance as well.
    while True:
        try:
            current_alignment = next(alignment_it)
            if (current_alignment.is_unmapped or current_alignment.is_secondary or (current_alignment.mapping_quality < options.min_mapq)):
                continue
            if current_alignment.is_supplementary:
                # Supplementary alignments: only intra-alignment indel signatures.
                (sigs, trans_sigs) = analyze_alignment_indel(current_alignment, bam, current_alignment.query_name, options)
                sv_signatures.extend(sigs)
                translocation_signatures_all_bnds.extend(trans_sigs)
            else:
                # Primary alignments: count as a processed read.
                read_nr += 1
                if ((read_nr % 10000) == 0):
                    logging.info('Processed read {0}'.format(read_nr))
                # Collect this read's supplementary alignments that pass the
                # same mapping-quality filter.
                supplementary_alignments = retrieve_other_alignments(current_alignment, bam)
                good_suppl_alns = [aln for aln in supplementary_alignments if ((not aln.is_unmapped) and (aln.mapping_quality >= options.min_mapq))]
                # Intra-alignment indel signatures...
                (sigs, trans_sigs) = analyze_alignment_indel(current_alignment, bam, current_alignment.query_name, options)
                sv_signatures.extend(sigs)
                translocation_signatures_all_bnds.extend(trans_sigs)
                # ...plus split-read signatures across the read's segments.
                (sigs, trans_sigs) = analyze_read_segments(current_alignment, good_suppl_alns, bam, options)
                sv_signatures.extend(sigs)
                translocation_signatures_all_bnds.extend(trans_sigs)
        except StopIteration:
            break
        except KeyboardInterrupt:
            # Allow the user to stop detection early but keep partial results.
            logging.warning('Execution interrupted by user. Stop detection and continue with next step..')
            break
    return (sv_signatures, translocation_signatures_all_bnds)
class TestTVTKGarbageCollection(TestGarbageCollection):
    """Check that TVTK scene objects are garbage-collected after closing."""
    # NOTE(review): the bare tuple expression below (and the similar ones
    # before the other tests) look like the arguments of stripped
    # @unittest.skipIf(...) decorators -- as written they evaluate to tuples
    # and have no effect.  Confirm against upstream.
    ((sys.platform.startswith('win') or (ETSConfig.toolkit == 'null')), f'CI with windows fails due to lack of OpenGL, or toolkit is null, got toolkit={ETSConfig.toolkit}')
    def test_tvtk_scene(self):
        # A TVTKScene closed via its `closing` flag must be collectable.
        def create_fn():
            return TVTKScene()
        def close_fn(o):
            o.closing = True
        self.check_object_garbage_collected(create_fn, close_fn)
    ((ETSConfig.toolkit in ('wx', 'null')), f'Test segfaults using WX (issue #216) and fails on null, got toolkit={ETSConfig.toolkit}')
    def test_scene(self):
        # A Scene closed via close() must be collectable.
        def create_fn():
            return Scene()
        def close_fn(o):
            o.close()
        self.check_object_garbage_collected(create_fn, close_fn)
    ((ETSConfig.toolkit in ('wx', 'null')), f'Test segfaults using WX (issue #216) and fails on null, got toolkit={ETSConfig.toolkit}')
    (bad_pyqt5, 'Test segfaults using PyQt5 with older PyFace')
    def test_decorated_scene(self):
        # A DecoratedScene (with toolbar chrome) must also be collectable.
        def create_fn():
            return DecoratedScene(parent=None)
        def close_fn(o):
            o.close()
        self.check_object_garbage_collected(create_fn, close_fn)
    def test_scene_model(self):
        # A SceneModel closed via its `closing` flag must be collectable.
        def create_fn():
            return SceneModel()
        def close_fn(o):
            o.closing = True
        self.check_object_garbage_collected(create_fn, close_fn)
def choices(choice):
    """Dispatch on the player's single-letter menu choice (case-insensitive):
    Q quits, R resets the board, L loads the last save.  Anything else is
    ignored."""
    selection = choice.upper()
    if selection == 'Q':
        quit()
    elif selection == 'R':
        board.populate()
        clear()
        print('\n The board has been reset.')
        pressEnter()
    elif selection == 'L':
        utils.readSave()
        clear()
        print('\n The last save has been loaded.')
        pressEnter()
def train(train_loader, model, optimizer, criterion, epoch, logger):
    """Run one training epoch over `train_loader`; return the average loss.

    Logs progress every `args.print_freq` batches (module-level `args`).
    """
    model.train()
    losses = AverageMeter()
    for (i, batch) in enumerate(train_loader):
        (x, y) = model.parse_batch(batch)
        y_pred = model(x)
        loss = criterion(y_pred, y)
        # Zero gradients once, just before backward.  The original also
        # called model.zero_grad() at the top of the loop, which is redundant
        # when (as usual) the optimizer holds all of the model's parameters.
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        losses.update(loss.item())
        if ((i % args.print_freq) == 0):
            logger.info('Epoch: [{0}][{1}/{2}]\tLoss {loss.val:.4f} ({loss.avg:.4f})'.format(epoch, i, len(train_loader), loss=losses))
    return losses.avg
class SigningHandler(Handler):
    """Handles SigningMessage responses from the decision maker: forwards
    signed transactions to the ledger, logs errors and invalid messages."""
    SUPPORTED_PROTOCOL = SigningMessage.protocol_id
    def setup(self) -> None:
        """Set up the handler (nothing to do)."""
    def handle(self, message: Message) -> None:
        """Dispatch an incoming signing message by its performative."""
        signing_msg = cast(SigningMessage, message)
        # Recover (or fail to recover) the dialogue this message belongs to.
        signing_dialogues = cast(SigningDialogues, self.context.signing_dialogues)
        signing_dialogue = cast(Optional[SigningDialogue], signing_dialogues.update(signing_msg))
        if (signing_dialogue is None):
            self._handle_unidentified_dialogue(signing_msg)
            return
        if (signing_msg.performative is SigningMessage.Performative.SIGNED_TRANSACTION):
            self._handle_signed_transaction(signing_msg, signing_dialogue)
        elif (signing_msg.performative is SigningMessage.Performative.ERROR):
            self._handle_error(signing_msg, signing_dialogue)
        else:
            self._handle_invalid(signing_msg, signing_dialogue)
    def teardown(self) -> None:
        """Tear down the handler (nothing to do)."""
    def _handle_unidentified_dialogue(self, signing_msg: SigningMessage) -> None:
        # Message could not be matched to a known dialogue; just log it.
        self.context.logger.info('received invalid signing message={}, unidentified dialogue.'.format(signing_msg))
    def _handle_signed_transaction(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        """Forward the freshly signed transaction to the ledger connection."""
        self.context.logger.info('transaction signing was successful.')
        ledger_api_dialogues = cast(LedgerApiDialogues, self.context.ledger_api_dialogues)
        (ledger_api_msg, ledger_api_dialogue) = ledger_api_dialogues.create(counterparty=LEDGER_API_ADDRESS, performative=LedgerApiMessage.Performative.SEND_SIGNED_TRANSACTION, signed_transaction=signing_msg.signed_transaction)
        ledger_api_dialogue = cast(LedgerApiDialogue, ledger_api_dialogue)
        # Link the ledger dialogue back to this signing dialogue for later
        # correlation of the ledger's response.
        ledger_api_dialogue.associated_signing_dialogue = signing_dialogue
        self.context.outbox.put_message(message=ledger_api_msg)
        self.context.logger.info('sending transaction to ledger.')
    def _handle_error(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        # Signing failed; log the error code.
        self.context.logger.info('transaction signing was not successful. Error_code={} in dialogue={}'.format(signing_msg.error_code, signing_dialogue))
    def _handle_invalid(self, signing_msg: SigningMessage, signing_dialogue: SigningDialogue) -> None:
        # Performative this handler does not support.
        self.context.logger.warning('cannot handle signing message of performative={} in dialogue={}.'.format(signing_msg.performative, signing_dialogue))
def extractEclipsefantasyCom(item):
    """Map a feed item's tags to a release message.

    Returns None for preview items or titles without chapter/volume info,
    a release message for the first matching tag, or False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('upillary master: peerless eldest miss', 'Supreme Pupillary Master: Peerless Eldest Miss', 'translated'),
        ('supreme pupillary master: peerless eldest miss', 'Supreme Pupillary Master: Peerless Eldest Miss', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, release_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
.django_db
def test_new_award_count_invalid_agency_type(client, monkeypatch, new_award_data, helpers, elasticsearch_award_index):
setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
helpers.mock_current_fiscal_year(monkeypatch)
resp = client.get(url.format(code='123', filter=f'?agency_type=random'))
assert (resp.status_code == status.HTTP_400_BAD_REQUEST) |
def search_results_person(params):
    """Build the Kodi directory listing of library items featuring a person.

    `params` must contain 'person_id'; the plugin handle comes from
    sys.argv[1].
    """
    handle = int(sys.argv[1])
    person_id = params.get('person_id')
    # Emby query URL; {server}/{userid}/{field_filters} are filled in later
    # by the request machinery.
    details_url = ('{server}/emby/Users/{userid}/items'
                   '?PersonIds=' + person_id +
                   '&Recursive=true'
                   '&Fields={field_filters}'
                   '&format=json')
    params['name_format'] = 'Episode|episode_name_format'
    (dir_items, detected_type, total_records) = process_directory(details_url, None, params)
    log.debug('search_results_person results: {0}', dir_items)
    log.debug('search_results_person detect_type: {0}', detected_type)
    if detected_type is not None:
        log.debug('Detected content type: {0}', detected_type)
        # Map the Emby item type onto Kodi's content-type names.
        kodi_content_types = {
            'Movie': 'movies',
            'Episode': 'episodes',
            'Series': 'tvshows',
            'Music': 'songs',
            'Audio': 'songs',
            'Musicalbum': 'songs',
        }
        content_type = kodi_content_types.get(detected_type)
        if content_type:
            xbmcplugin.setContent(handle, content_type)
    if dir_items is not None:
        xbmcplugin.addDirectoryItems(handle, dir_items)
    xbmcplugin.endOfDirectory(handle, cacheToDisc=False)
class ImportPackagesPivotExts():
    """Accessors for the pivot-table chart-renderer extension packages.

    `links` may map a package name to a pre-resolved module that `get`
    returns directly instead of constructing a new ImportModule.
    """
    def __init__(self, js: dict, css: dict, links: Optional[dict]=None):
        self._js = js
        self._css = css
        # Fix: default to an empty mapping -- the original kept None, so
        # `get()` raised TypeError ("argument of type 'NoneType' is not
        # iterable") whenever no link overrides were supplied.
        self.__linked = links if links is not None else {}
    def get(self, name: str):
        """Return the linked override for `name`, or a new ImportModule."""
        if (name in self.__linked):
            return self.__linked[name]
        return ImportModule(name, self._js, self._css, self.__linked)
    def c3(self):
        """The C3 pivot chart renderer package."""
        return self.get('pivot-c3')
    def plotly(self):
        """The Plotly pivot chart renderer package."""
        return self.get('pivot-plotly')
    def d3(self):
        """The D3 pivot chart renderer package."""
        return self.get('pivot-d3')
    def subtotal(self):
        """The subtotal pivot renderer package."""
        return self.get('subtotal')
class PluginDialog(Gtk.Dialog):
    def __init__(self, parent_window, has_headerbar):
        """Build the plugin-manager dialog: a sorted list of non-builtin
        plugins with on/off switches, plus a toolbar with preferences,
        about and help buttons.

        `has_headerbar` selects between a GtkHeaderBar (with its own close
        button) and a classic dialog with a Close button.
        """
        if has_headerbar:
            super(PluginDialog, self).__init__(use_header_bar=True, parent=parent_window, flags=Gtk.DialogFlags.MODAL)
        else:
            super(PluginDialog, self).__init__(parent=parent_window, flags=Gtk.DialogFlags.MODAL)
        self._has_headerbar = has_headerbar
        self._parent_window = parent_window
        # List of plugin rows, kept sorted by plugin name.
        listbox = Gtk.ListBox.new()
        listbox.set_sort_func(self._listbox_sort, None)
        self._listbox = listbox
        self._items = {}  # module name -> PluginListRow
        # Populate rows from libpeas, and refresh rows on (un)load.
        self._peas = Peas.Engine.get_default()
        plugins = self._peas.get_plugin_list()
        self._peas.connect_after('unload-plugin', self._on_load_unload_plugin)
        self._peas.connect_after('load-plugin', self._on_load_unload_plugin)
        for plugin in plugins:
            # Builtin/hidden plugins are not user-toggleable.
            if ((not plugin.is_builtin()) and (not plugin.is_hidden())):
                row = PluginListRow(plugin, self._switch_callback)
                self._items[plugin.get_module_name()] = row
                listbox.add(row)
        # Borrow Rhythmbox's translations for the button tooltips.
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.RB)
        def extract_text(str):
            # Translate a menu label and strip its accelerator markup
            # ("(..)" hints and "_" mnemonics).
            translation = gettext.gettext(str)
            translation = re.sub('\\(..\\)', '', translation, flags=re.DOTALL)
            translation = translation.replace('_', '')
            return translation
        # Inline toolbar with the preferences button on the left.
        toolbar = Gtk.Toolbar.new()
        context = toolbar.get_style_context()
        context.add_class(Gtk.STYLE_CLASS_INLINE_TOOLBAR)
        item = Gtk.ToolItem.new()
        btn = Gtk.Button()
        icon = Gio.ThemedIcon(name='preferences-system-symbolic')
        image = Gtk.Image()
        image.props.margin = 3
        btn.add(image)
        btn.set_tooltip_text(extract_text('_Preferences'))
        image.set_from_gicon(icon, Gtk.IconSize.BUTTON)
        box = Gtk.Box()
        box.pack_start(btn, False, False, 0)
        item.add(box)
        toolbar.insert(item, 0)
        btn.connect('clicked', self._preferences_button_clicked)
        self._preferences_button = btn
        # Linked mini button box on the right: About and Help.
        minitoolbar_box = Gtk.ButtonBox()
        context = minitoolbar_box.get_style_context()
        context.add_class('linked')
        minitoolbar_box.set_layout(Gtk.ButtonBoxStyle.START)
        btn = Gtk.Button()
        icon = Gio.ThemedIcon(name='preferences-system-details-symbolic')
        image = Gtk.Image()
        image.props.margin = 3
        btn.add(image)
        btn.set_tooltip_text(extract_text('_About'))
        image.set_from_gicon(icon, Gtk.IconSize.BUTTON)
        minitoolbar_box.add(btn)
        minitoolbar_box.child_set_property(btn, 'non-homogeneous', True)
        btn.connect('clicked', self._info_button_clicked)
        self._info_button = btn
        btn = Gtk.Button()
        icon = Gio.ThemedIcon(name='help-contents-symbolic')
        image = Gtk.Image()
        image.props.margin = 3
        btn.add(image)
        btn.set_tooltip_text(extract_text('_Help'))
        image.set_from_gicon(icon, Gtk.IconSize.BUTTON)
        minitoolbar_box.add(btn)
        minitoolbar_box.child_set_property(btn, 'non-homogeneous', True)
        btn.connect('clicked', self._help_button_clicked)
        self._help_button = btn
        # Expanding separator pushes the mini toolbar to the right edge.
        item = Gtk.SeparatorToolItem.new()
        item.props.draw = False
        toolbar.insert(item, 1)
        toolbar.child_set_property(item, 'expand', True)
        item = Gtk.ToolItem.new()
        item.add(minitoolbar_box)
        toolbar.insert(item, 2)
        # Assemble: scrollable plugin list above, toolbar below.
        contentbox = Gtk.Box()
        contentbox.set_orientation(Gtk.Orientation.VERTICAL)
        scrollwindow = Gtk.ScrolledWindow.new(None, None)
        scrollwindow.add(listbox)
        scrollwindow.props.hexpand = True
        scrollwindow.props.vexpand = True
        contentbox.pack_start(scrollwindow, True, True, 0)
        contentbox.pack_start(toolbar, False, False, 1)
        self.props.title = _('Configure Plugins')
        if (not self._has_headerbar):
            self.add_button(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
        else:
            headerbar = self.get_header_bar()
            headerbar.set_show_close_button(True)
        contentbox.show_all()
        area = self.get_content_area()
        area.add(contentbox)
        # Update button sensitivity whenever the selection changes.
        listbox.connect('row-selected', self._listbox_row_selected)
def _on_load_unload_plugin(self, engine, plugin):
module_name = plugin.get_module_name()
print(module_name)
if (module_name in self._items):
self._items[module_name].refresh()
def _listbox_sort(self, row1, row2, *args):
return (row1.plugin.get_name().lower() > row2.plugin.get_name().lower())
def _switch_callback(self, switch, plugin):
    """Load or unload `plugin` to match the state of its row's switch.

    Also selects the row so the preferences/help buttons are refreshed
    for the plugin that was just toggled.
    """
    value = switch.get_active()
    if value and not plugin.is_loaded():
        self._peas.load_plugin(plugin)
    if (not value) and plugin.is_loaded():
        self._peas.unload_plugin(plugin)
    # switch -> its container -> the Gtk.ListBoxRow holding it.
    row = switch.get_parent().get_parent()
    self._listbox.select_row(row)
    # Bug fix: the original passed the gettext alias `_` as the listbox
    # argument; pass the real listbox (the handler ignores it anyway).
    self._listbox_row_selected(self._listbox, row)
def _get_preference_widget(self, row):
    """Return the plugin's configure widget, or None if it has none.

    Plugins that do not implement PeasGtk.Configurable make
    ``create_extension`` raise, which simply means "no preferences".
    """
    try:
        ext = self._peas.create_extension(row.plugin, PeasGtk.Configurable, None)
        widget = ext.create_configure_widget()
        cl = CoverLocale()
        cl.switch_locale(cl.Locale.RB)
        return widget
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the intended fallback only.
        pass
    return None
def _listbox_row_selected(self, listbox, row):
    """Sync the preferences and help buttons with the newly selected row."""
    if not row:
        return
    # Preferences only make sense when the plugin exposes a configure widget.
    configure_widget = self._get_preference_widget(row)
    self._preferences_button.set_sensitive(bool(configure_widget))
    # Help only makes sense when the plugin declares a help URI.
    self._help_button.set_sensitive(bool(row.plugin.get_help_uri()))
def _help_button_clicked(self, *args):
    """Open the selected plugin's help URI in the default web browser."""
    selected = self._listbox.get_selected_row()
    uri = selected.plugin.get_help_uri()
    webbrowser.open(uri)
def _info_button_clicked(self, *args):
    """Show a modal About dialog for the currently selected plugin.

    Displays name, version, description, copyright, optional developer
    credits, and Help/Homepage link buttons.
    """
    if self._has_headerbar:
        dlg = Gtk.Dialog(use_header_bar=True, flags=Gtk.DialogFlags.MODAL)
        dlg.get_header_bar().set_show_close_button(True)
    else:
        dlg = Gtk.Dialog(flags=Gtk.DialogFlags.MODAL)
        dlg.add_button(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
    row = self._listbox.get_selected_row()
    title = row.plugin.get_name()
    version = row.plugin.get_version()
    dlg.props.title = _('About this plugin')
    area = dlg.get_content_area()
    widget = Gtk.Box()
    widget.set_orientation(Gtk.Orientation.VERTICAL)
    website = row.plugin.get_website()
    copyright = row.plugin.get_copyright()
    description = row.plugin.get_description()
    help = row.plugin.get_help_uri()
    pos = 0

    def get_label(label):
        # Wrapped, centred label used for each optional text row.
        label = Gtk.Label(label)
        label.set_line_wrap(True)
        label.set_justify(Gtk.Justification.CENTER)
        label.set_max_width_chars(60)
        return label

    # Plugin name in bold at the top.
    label = Gtk.Label()
    escape = GLib.markup_escape_text(title)
    label.set_markup((('<b>' + escape) + '</b>'))
    label.set_justify(Gtk.Justification.CENTER)
    label.props.margin_bottom = 5
    widget.pack_start(label, False, False, pos)
    pos += 1
    if version:
        label = get_label((_('Version: ') + version))
        label.props.margin_bottom = 5
        widget.pack_start(label, False, False, pos)
        pos += 1
    if description:
        label = get_label(description)
        label.props.margin_bottom = 5
        widget.pack_start(label, False, False, pos)
        pos += 1
    if copyright:
        label = get_label(copyright)
        label.props.margin_bottom = 5
        widget.pack_start(label, False, False, pos)
        pos += 1
    # Extra credits, shown only for this plugin's own About entry.
    if (title == _('Alternative Toolbar')):
        grid = Gtk.Grid()
        grid.props.halign = Gtk.Align.CENTER
        label = Gtk.Label(_('Developer:'))
        label.props.halign = Gtk.Align.END
        grid.attach(label, 0, 0, 1, 1)
        link = Gtk.Label()
        link.props.halign = Gtk.Align.START
        m = ' <a href=" Mohammed</a>'
        link.set_markup(m)
        grid.attach(link, 1, 0, 1, 1)
        label = Gtk.Label(_('Designer:'))
        label.props.halign = Gtk.Align.END
        grid.attach(label, 0, 1, 1, 1)
        link = Gtk.Label()
        link.props.halign = Gtk.Align.START
        m = ' <a href=" Karavasilev</a>'
        link.set_markup(m)
        grid.attach(link, 1, 1, 1, 1)
        widget.pack_start(grid, False, False, pos)
        grid.props.margin_bottom = 5
        pos += 1
    box = Gtk.Box()
    box.set_homogeneous(True)

    def launch_browser(button, uri):
        webbrowser.open(uri)

    button = Gtk.Button(_('Help'))
    if help:
        button.connect('clicked', launch_browser, help)
    else:
        button.set_sensitive(False)
    box.pack_start(button, False, True, 0)
    button = Gtk.Button(_('Homepage'))
    # Bug fix: sensitivity/URL were keyed to `help` instead of `website`,
    # so a plugin with a homepage but no help URI got a dead button (and
    # vice versa the Homepage button opened nothing meaningful).
    if website:
        button.connect('clicked', launch_browser, website)
    else:
        button.set_sensitive(False)
    box.pack_start(Gtk.Label(''), False, True, 1)
    box.pack_start(Gtk.Label(''), False, True, 2)
    box.pack_start(button, False, True, 3)
    widget.pack_start(box, False, True, pos)
    widget.show_all()
    frame = Gtk.Frame.new('')
    frame.props.margin = 8
    frame.set_shadow_type(Gtk.ShadowType.NONE)
    frame.add(widget)
    frame.show_all()
    area.add(frame)
    dlg.set_resizable(False)
    dlg.run()
    dlg.destroy()
def _preferences_button_clicked(self, *args):
    """Pop up a modal dialog hosting the selected plugin's configure widget."""
    selected = self._listbox.get_selected_row()
    widget = self._get_preference_widget(selected)
    if not widget:
        # Nothing to configure for this plugin.
        return
    if self._has_headerbar:
        dlg = Gtk.Dialog(use_header_bar=True, flags=Gtk.DialogFlags.MODAL)
        dlg.get_header_bar().set_show_close_button(True)
    else:
        dlg = Gtk.Dialog(flags=Gtk.DialogFlags.MODAL)
        dlg.add_button(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)
    dlg.props.title = selected.plugin.get_name()
    dlg.get_content_area().add(widget)
    dlg.set_resizable(False)
    dlg.run()
    dlg.destroy()
def test_data_integrity_test_duplicated_rows_json_render() -> None:
    """TestNumberOfDuplicatedRows: JSON rendering has the expected payload."""
    # Four identical rows -> three duplicates relative to the first.
    test_dataset = pd.DataFrame({'category_feature': ['1', '1', '1', '1'], 'numerical_feature': ['1', '1', '1', '1'], 'target': ['1', '1', '1', '1']})
    suite = TestSuite(tests=[TestNumberOfDuplicatedRows()])
    suite.run(current_data=test_dataset, reference_data=test_dataset, column_mapping=ColumnMapping())
    assert suite
    result_from_json = json.loads(suite.json())
    assert (result_from_json['summary']['all_passed'] is True)
    test_info = result_from_json['tests'][0]
    # Exact expected payload, including the library-rendered condition text.
    # NOTE(review): the description string 'eq=3 0.3' reads oddly — it is
    # whatever the library renders; confirm against the original fixture.
    assert (test_info == {'description': 'The number of duplicate rows is 3. The test threshold is eq=3 0.3.', 'group': 'data_integrity', 'name': 'Number of Duplicate Rows', 'parameters': {'condition': {'eq': {'absolute': 1e-12, 'relative': 0.1, 'value': 3.0}}, 'value': 3}, 'status': 'SUCCESS'})
def upgrade():
    """Alembic upgrade: add chat-related columns to `microlocations`."""
    new_columns = (
        sa.Column('is_chat_enabled', sa.Boolean(), nullable=True),
        sa.Column('is_global_event_room', sa.Boolean(), nullable=True),
        sa.Column('chat_room_id', sa.String(), nullable=True),
    )
    for column in new_columns:
        op.add_column('microlocations', column)
('cuda.concatenate.gen_function')  # NOTE(review): looks like a stripped decorator (registry registration) — confirm against original source
def gen_function(func_attrs, element_func=None, element_func_def=None):
    """Generate CUDA source for a concatenate op.

    Dispatches to the fast-concat code path when the op qualifies
    (per _is_valid_fast_cat), otherwise falls back to the generic
    backend implementation.
    """
    if _is_valid_fast_cat(func_attrs):
        return concatenate_fast.gen_function(func_attrs, concatenate_common.SRC_TEMPLATE, element_func=element_func, element_func_def=element_func_def)
    else:
        return concatenate_common.gen_function(func_attrs=func_attrs, backend_spec=CUDASpec(), element_func=element_func, element_func_def=element_func_def)
_matches('((?:\n # Dots, except ellipsis\n {alnum} \\. (?!\\.\\.)\n | # Comma, surrounded by digits (e.g., chemicals) or letters\n {alnum} , (?={alnum})\n | # Colon, surrounded by digits (e.g., time, references)\n {number} : (?={number})\n | # Hyphen, surrounded by digits (e.g., DNA endings: "5\'-ACGT-3\'") or letters\n {alnum} {apo}? {hyphen} (?={alnum}) # incl. optional apostrophe for DNA segments\n | # Apostophes, non-consecutive\n {apo} (?!{apo})\n | # ASCII single quote, surrounded by digits or letters (no dangling allowed)\n {alnum} \' (?={alnum})\n | # ASCII single quote after an s and at the token\'s end\n s \' $\n | # Terminal dimensions (superscript minus, 1, 2, and 3) attached to physical units\n # size-prefix unit-acronym dimension\n \\b [yzafpn\\u00B5mcdhkMGTPEZY]? {letter}{{1,3}} {power} $\n | # Atom counts (subscript numbers) and ionization states (optional superscript\n # 2 or 3 followed by a + or -) are attached to valid fragments of a chemical formula\n \\b (?:[A-Z][a-z]?|[\\)\\]])+ {subdigit}+ (?:[\\u00B2\\u00B3]?[\\u207A\\u207B])?\n | # Any (Unicode) letter, digit, or the underscore\n {alnum}\n )+)'.format(alnum=ALNUM, apo=APOSTROPHE, power=POWER, subdigit=SUBDIGIT, hyphen=HYPHEN, letter=LETTER, number=NUMBER))
def word_tokenizer(sentence):
    """Split a (single) sentence string into word tokens.

    NOTE(review): `word_tokenizer.split` / `word_tokenizer.match` are
    attributes apparently attached by the `_matches(...)` call above this
    function, which looks like a stripped decorator — confirm against the
    original source.
    """
    # Re-join words hyphenated across a line break before tokenizing.
    pruned = HYPHENATED_LINEBREAK.sub('\\1\\2', sentence)
    tokens = [token for span in space_tokenizer(pruned) for token in word_tokenizer.split(span) if token]
    # Detach a sentence terminal fused onto one of the last three tokens
    # (leading or trailing), unless the token is a bare terminal or '...'.
    for (idx, word) in enumerate(reversed(tokens[(- 3):]), 1):
        if ((word_tokenizer.match(word) and (not APO_MATCHER.match(word))) or any(((t in word) for t in SENTENCE_TERMINALS))):
            last = (len(word) - 1)
            if ((0 == last) or (u'...' == word)):
                pass
            elif any(((word.rfind(t) == last) for t in SENTENCE_TERMINALS)):
                # Terminal at the end: split it off as its own token.
                tokens[(- idx)] = word[:(- 1)]
                tokens.insert(((len(tokens) - idx) + 1), word[(- 1)])
            elif any(((word.find(t) == 0) for t in SENTENCE_TERMINALS)):
                # Terminal at the start: split it off as its own token.
                tokens[(- idx)] = word[0]
                tokens.insert(((len(tokens) - idx) + 1), word[1:])
            break
    # Repeatedly peel trailing ',', ';' or ':' off tokens into separate
    # tokens; restart the scan after each change since indices shift.
    dirty = True
    while dirty:
        dirty = False
        for (idx, word) in enumerate(reversed(tokens), 1):
            while ((len(word) > 1) and (word[(- 1)] in u',;:')):
                char = word[(- 1)]
                word = word[:(- 1)]
                tokens[(- idx)] = word
                tokens.insert(((len(tokens) - idx) + 1), char)
                idx += 1
                dirty = True
            if dirty:
                break
    return tokens
def plot_bar_graph(stats, benchmark_stats=None, metrics=default_metrics, extras=None, fname=None):
    """Plot a bar chart of summary metrics for a strategy (and optional benchmark).

    :param stats: statistics for the strategy
    :param benchmark_stats: optional statistics for a benchmark (second bar color)
    :param metrics: metrics to summarise; defaults to the module-level default_metrics
    :param extras: extra metrics appended after `metrics`
    :param fname: if given, the figure is also saved to this path
    :returns: the summary DataFrame that was plotted
    """
    if extras is None:
        extras = ()
    # Build a fresh list instead of `metrics += extras`: the augmented
    # assignment mutated the shared `default_metrics` default list in
    # place, so it silently grew on every call that passed `extras`.
    metrics = list(metrics) + list(extras)
    df = pfstatistics.summary(stats, benchmark_stats, metrics)
    fig = plt.figure()
    axes = fig.add_subplot(111, ylabel='Trading Metrix')
    df.plot(kind='bar', ax=axes, color=['g', 'r'])
    axes.set_xticklabels(df.index, rotation=60)
    plt.legend(loc='best')
    if fname:
        plt.savefig(fname, bbox_inches='tight')
    return df
class OptionSeriesPackedbubbleDataAccessibility(Options):
    """Accessibility options for individual packed-bubble data points.

    NOTE(review): each getter/setter pair shares a name; in this generated
    Highcharts-options family that pattern relies on property decorators
    that appear to have been stripped here — as written, each second `def`
    shadows the first. Confirm against the original source.
    """

    def description(self):
        # Getter; defaults to None when unset.
        return self._config_get(None)

    def description(self, text: str):
        # Setter for the point's accessibility description.
        self._config(text, js_type=False)

    def enabled(self):
        # Getter; defaults to None when unset.
        return self._config_get(None)

    def enabled(self, flag: bool):
        # Setter toggling accessibility for this point.
        self._config(flag, js_type=False)
('/users/{user_id}', status_code=status.HTTP_204_NO_CONTENT)  # NOTE(review): looks like a stripped route decorator (e.g. @router.delete(...)) — confirm
def remove(user_id: int, user_service: UserService=Depends(Provide[Container.user_service])):
    """Delete a user by id.

    Returns 204 on success, 404 when no such user exists.
    """
    try:
        user_service.delete_user_by_id(user_id)
    except NotFoundError:
        return Response(status_code=status.HTTP_404_NOT_FOUND)
    else:
        return Response(status_code=status.HTTP_204_NO_CONTENT)
class TestACEScct(util.ColorAssertsPyTest):
    """Round-trip conversion tests for the ACEScct color space."""

    # (input, expected acescct serialization) pairs fed to test_colors.
    COLORS = [('red', 'color(--acescct 0.51451 0.33604 0.23515)'), ('orange', 'color(--acescct 0.53009 0.48237 0.32561)'), ('yellow', 'color(--acescct 0.5508 0.55368 0.38691)'), ('green', 'color(--acescct 0.3396 0.42136 0.24647)'), ('blue', 'color(--acescct 0.30368 0.2 0.54331)'), ('indigo', 'color(--acescct 0.31401 0.1566 0.42044)'), ('violet', 'color(--acescct 0.51811 0.44881 0.53494)'), ('white', 'color(--acescct 0.55479 0.55479 0.55479)'), ('gray', 'color(--acescct 0.42855 0.42855 0.42855)'), ('black', 'color(--acescct 0.07291 0.07291 0.07291)'), ('color(--acescct 0 0.50196 0)', 'color(--acescct 0 0.50196 0)'), ('color(--acescct 0 0.50196 0 / 0.5)', 'color(--acescct 0 0.50196 0 / 0.5)'), ('color(--acescct 50% 50% 50% / 50%)', 'color(--acescct 0.77045 0.77045 0.77045 / 0.5)'), ('color(--acescct none none none / none)', 'color(--acescct none none none / none)'), ('color(--acescct 0% 0% 0%)', 'color(--acescct 0.07291 0.07291 0.07291)'), ('color(--acescct 100% 100% 100%)', 'color(--acescct 1.468 1.468 1.468)'), ('color(--acescct -100% -100% -100%)', 'color(--acescct -1.3222 -1.3222 -1.3222)')]

    .parametrize('color1,color2', COLORS)  # NOTE(review): looks like a stripped @pytest.mark.parametrize — confirm
    def test_colors(self, color1, color2):
        """Converting color1 to acescct must equal color2."""
        self.assertColorEqual(Color(color1).convert('acescct'), Color(color2))
('cuda.gemm_rcr_permute.func_decl')  # NOTE(review): looks like a stripped decorator (registry registration) — confirm
def gen_function_decl(func_attrs):
    """Render the C++ function declaration for a gemm_rcr_permute op.

    Rank information is taken from the original (pre-accessor) shapes of
    the first two inputs (input and weight).
    """
    func_name = func_attrs['name']
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    return common.FUNC_DECL_TEMPLATE.render(func_name=func_name, input_ndims=input_ndims, weight_ndims=weight_ndims, support_split_k=True)
def extractRadishthoughtstranslationsWordpressCom(item):
    """Feed-item parser for 'Radish thoughts translations'.

    Returns a release message for recognised tags, None for items without
    a parseable volume/chapter (or marked as previews), and False when no
    tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (feed tag, release name, translation type)
    tag_rules = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in tag_rules:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestPidutil(testslide.TestCase):
    """Unit tests for pidutil's lsof parsing and signal helpers.

    Bug fix: three tests referenced ``.and_assert_called_once`` without
    calling it, so the call-count assertion was never registered and those
    tests silently checked nothing about the mock.
    """

    def test_procs_holding_file_no_lsof(self):
        # No lsof binary on PATH -> DcRPMException.
        self.mock_callable(pidutil, 'which').to_return_value(None).and_assert_called_once()
        with self.assertRaises(DcRPMException):
            pidutil.procs_holding_file('/tmp/foo')

    def test__pids_holding_file_timeout(self):
        # lsof timing out yields an empty result, not a crash.
        self.mock_callable(pidutil, 'run_with_timeout').to_raise(DcRPMException()).and_assert_called_once()
        self.assertFalse(pidutil._pids_holding_file('/path/to/lsof', '/tmp/foo'))

    def test__pids_holding_file_failed(self):
        # Non-zero lsof exit yields an empty result.
        self.mock_callable(pidutil, 'run_with_timeout').to_return_value(CompletedProcess(returncode=1, stderr='oh no')).and_assert_called_once()
        self.assertFalse(pidutil._pids_holding_file('/path/to/lsof', '/tmp/foo'))

    def test__pids_holding_file_success(self):
        # 'p<pid>' lines from lsof -F output are parsed into a pid set.
        self.mock_callable(pidutil, 'run_with_timeout').to_return_value(CompletedProcess(stdout='\n'.join(['p12345', 'f1', 'p123456', 'f1']))).and_assert_called_once()
        self.assertEqual(set([12345, 123456]), pidutil._pids_holding_file('/path/to/lsof', '/tmp/a'))

    def test_send_signal_success(self):
        proc = make_mock_process(12345, [])
        self.assertTrue(pidutil.send_signal(proc, signal.SIGKILL))

    def test_send_signal_no_such_process(self):
        proc = make_mock_process(12345, [], signal_throw=True)
        self.assertFalse(pidutil.send_signal(proc, signal.SIGKILL))

    def test_send_signal_timeout(self):
        proc = make_mock_process(12345, [], wait_throw=True)
        self.assertFalse(pidutil.send_signal(proc, signal.SIGKILL))

    def test_send_signals_no_processes(self):
        procs = []
        self.assertFalse(pidutil.send_signals(procs, signal.SIGKILL))

    def test_send_signals_success(self):
        procs = [make_mock_process(12345, ['/tmp/a', '/tmp/2']), make_mock_process(54321, ['/tmp/1', '/tmp/3'])]
        self.assertTrue(pidutil.send_signals(procs, signal.SIGKILL))
        self.assertEqual(sum((p.send_signal.call_count for p in procs)), len(procs))

    def test_send_signals_signal_throws(self):
        # One process raising on send_signal is skipped; the other is waited on.
        procs = [make_mock_process(12345, ['/tmp/a', '/tmp/2'], signal_throw=True), make_mock_process(54321, ['/tmp/1', '/tmp/3'])]
        self.assertTrue(pidutil.send_signals(procs, signal.SIGKILL))
        self.assertEqual(sum((p.wait.call_count for p in procs)), 1)

    def test_send_signals_wait_throws(self):
        procs = [make_mock_process(12345, ['/tmp/a', '/tmp/2'], wait_throw=True), make_mock_process(54321, ['/tmp/1', '/tmp/3'])]
        self.assertTrue(pidutil.send_signals(procs, signal.SIGKILL))

    def test_send_signals_all_throw(self):
        procs = [make_mock_process(12345, ['/tmp/a', '/tmp/2'], signal_throw=True), make_mock_process(54321, ['/tmp/1', '/tmp/3'], wait_throw=True)]
        self.assertFalse(pidutil.send_signals(procs, signal.SIGKILL))
class TwoLevelPC(PCBase):
    """Abstract PETSc PC wrapping a two-level additive multigrid cycle.

    Subclasses implement :meth:`coarsen` to supply the coarse operator,
    its boundary conditions, and the prolongation matrix.
    """
    needs_python_pmat = False

    def coarsen(self, pc):
        """Return (coarse_operator, coarse_space_bcs, interpolation_petscmat)."""
        raise NotImplementedError

    def initialize(self, pc):
        """Build the internal PCMG object from the subclass's coarsening."""
        from firedrake.assemble import allocate_matrix, TwoFormAssembler
        (A, P) = pc.getOperators()
        appctx = self.get_appctx(pc)
        fcp = appctx.get('form_compiler_parameters')
        prefix = pc.getOptionsPrefix()
        options_prefix = (prefix + self._prefix)
        opts = PETSc.Options()
        (coarse_operator, coarse_space_bcs, interp_petscmat) = self.coarsen(pc)
        coarse_options_prefix = (options_prefix + 'mg_coarse_')
        coarse_mat_type = opts.getString((coarse_options_prefix + 'mat_type'), parameters['default_matrix_type'])
        # Assemble the coarse-level operator; reassembled on update().
        self.coarse_op = allocate_matrix(coarse_operator, bcs=coarse_space_bcs, form_compiler_parameters=fcp, mat_type=coarse_mat_type, options_prefix=coarse_options_prefix)
        self._assemble_coarse_op = TwoFormAssembler(coarse_operator, tensor=self.coarse_op, form_compiler_parameters=fcp, bcs=coarse_space_bcs).assemble
        self._assemble_coarse_op()
        coarse_opmat = self.coarse_op.petscmat
        # Configure a 2-level additive V-cycle MG preconditioner.
        pcmg = PETSc.PC().create(comm=pc.comm)
        pcmg.incrementTabLevel(1, parent=pc)
        pcmg.setType(pc.Type.MG)
        pcmg.setOptionsPrefix(options_prefix)
        pcmg.setMGLevels(2)
        pcmg.setMGType(pc.MGType.ADDITIVE)
        pcmg.setMGCycleType(pc.MGCycleType.V)
        pcmg.setMGInterpolation(1, interp_petscmat)
        pcmg.setMGRScale(1, interp_petscmat.createVecRight())
        pcmg.setOperators(A=A, P=P)
        coarse_solver = pcmg.getMGCoarseSolve()
        coarse_solver.setOperators(A=coarse_opmat, P=coarse_opmat)
        # Attach the coarse function space's DM (inactive) so solver
        # options can be resolved against it.
        coarse_space = coarse_operator.arguments()[(- 1)].function_space()
        coarse_dm = coarse_space.dm
        coarse_solver.setDM(coarse_dm)
        coarse_solver.setDMActive(False)
        pcmg.setDM(pc.getDM())
        pcmg.setFromOptions()
        self.pc = pcmg
        self._dm = coarse_dm
        prefix = coarse_solver.getOptionsPrefix()
        self._ctx_ref = self.new_snes_ctx(pc, coarse_operator, coarse_space_bcs, coarse_mat_type, fcp, options_prefix=prefix)
        # Hooks must be installed while the coarse solver reads options.
        with dmhooks.add_hooks(coarse_dm, self, appctx=self._ctx_ref, save=False):
            coarse_solver.setFromOptions()

    def update(self, pc):
        """Reassemble the coarse operator and refresh the inner PC."""
        self._assemble_coarse_op()
        self.pc.setUp()

    def apply(self, pc, X, Y):
        """Apply the two-level cycle: Y = M^{-1} X."""
        dm = self._dm
        with dmhooks.add_hooks(dm, self, appctx=self._ctx_ref):
            self.pc.apply(X, Y)

    def applyTranspose(self, pc, X, Y):
        """Apply the transpose of the two-level cycle."""
        dm = self._dm
        with dmhooks.add_hooks(dm, self, appctx=self._ctx_ref):
            self.pc.applyTranspose(X, Y)

    def view(self, pc, viewer=None):
        """Print the inner PCMG configuration to the viewer."""
        super(TwoLevelPC, self).view(pc, viewer)
        if hasattr(self, 'pc'):
            viewer.printfASCII('Two level PC\n')
            self.pc.view(viewer)
class Bmm_problem_info():
    """Symbol/argument names describing a batched-matmul (BMM) problem
    for code generation.

    NOTE(review): annotated fields with defaults suggest this was a
    @dataclass whose decorator got stripped — confirm against the original
    source.
    """
    # GEMM epilogue scalars: C = alpha * A@B + beta * bias.
    alpha_value: float = 1
    beta_value: float = 0
    # Names of the problem-size dimensions.
    problem_dim_0: str = 'M'
    problem_dim_1: str = 'N'
    problem_dim_2: str = 'K'
    batch_size: str = 'B'
    # Pointer argument names for the generated kernel signature.
    a_ptr: str = 'a_ptr'
    b_ptr: str = 'b_ptr'
    bias_ptr: str = 'd_ptr'
    c_ptr: str = 'c_ptr'
    # Per-batch strides (as expression strings).
    a_batch_stride: str = '0'
    b_batch_stride: str = '0'
    bias_batch_stride: str = '0'
    c_batch_stride: str = '0'
    # Leading dimensions (as expression strings).
    lda: str = '0'
    ldb: str = '0'
    ldbias: str = '0'
    ldc: str = '0'
    # Operand layouts (row-major vs column-major).
    a_row_major: bool = True
    b_row_major: bool = False
    c_row_major: bool = True
class port_mod_failed_error_msg(error_msg):
    """OpenFlow OFPET_PORT_MOD_FAILED error message (generated-style code).

    Wire header: version=5 (OpenFlow 1.4), type=1 (OFPT_ERROR),
    err_type=7 (OFPET_PORT_MOD_FAILED).

    NOTE(review): this looks like loxigen-generated, Python-2-era code
    (str-based wire bytes, ``''.join``) — confirm before modifying.
    """
    version = 5
    type = 1
    err_type = 7

    def __init__(self, xid=None, code=None, data=None):
        # xid stays None until assigned by the transaction layer.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to the OpenFlow wire format; length is backfilled."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a message from `reader` and return a new instance.

        NOTE(review): takes no self/cls — presumably wrapped by a stripped
        @staticmethod decorator, as is usual in this generated family.
        """
        obj = port_mod_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 7)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer `q`."""
        q.text('port_mod_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Show the symbolic OFPPMFC_* name when the code is known.
                value_name_map = {0: 'OFPPMFC_BAD_PORT', 1: 'OFPPMFC_BAD_HW_ADDR', 2: 'OFPPMFC_BAD_CONFIG', 3: 'OFPPMFC_BAD_ADVERTISE', 4: 'OFPPMFC_EPERM'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
def test_trivial_1():
    """CSE pulls a subexpression shared by four assignments into one temp."""
    cfg = ControlFlowGraph()
    # Four assignments all containing expr1 as a common subexpression.
    cfg.add_node((node := BasicBlock(0, instructions=[Assignment(Variable('a'), BinaryOperation(OperationType.plus, [expr1.copy(), Constant(1)])), Assignment(Variable('b'), BinaryOperation(OperationType.plus, [expr1.copy(), Constant(2)])), Assignment(Variable('c'), BinaryOperation(OperationType.plus, [expr1.copy(), Constant(3)])), Assignment(Variable('d'), BinaryOperation(OperationType.plus, [expr1.copy(), Constant(4)]))])))
    _run_cse(cfg)
    # expr1 should be hoisted into c0#0 and every use replaced with it.
    replacement = Variable('c0', ssa_label=0)
    assert (node.instructions == [Assignment(replacement.copy(), expr1), Assignment(Variable('a'), BinaryOperation(OperationType.plus, [replacement.copy(), Constant(1)])), Assignment(Variable('b'), BinaryOperation(OperationType.plus, [replacement.copy(), Constant(2)])), Assignment(Variable('c'), BinaryOperation(OperationType.plus, [replacement.copy(), Constant(3)])), Assignment(Variable('d'), BinaryOperation(OperationType.plus, [replacement.copy(), Constant(4)]))])
def test_slurm_weird_dir(weird_tmp_path: Path) -> None:
    """Submission scripts must quote odd folder names so the sbatch
    --output/--error paths actually resolve on disk."""
    if '\n' in weird_tmp_path.name:
        pytest.skip("test doesn't support newline in 'weird_tmp_path'")
    with mocked_slurm():
        executor = slurm.SlurmExecutor(folder=weird_tmp_path)
        job = executor.submit(test_core.do_nothing, 1, 2, blublu=3)
        job.paths.stdout.write_text('')
        job.paths.stderr.write_text('')
        # Collect every '#SBATCH key=value' directive from the script,
        # substituting the %j/%t placeholders as slurm would.
        sbatch_args = {}
        for line in job.paths.submission_file.read_text().splitlines():
            if not line.startswith('#SBATCH'):
                continue
            if '=' not in line:
                continue
            key, value = line[len('#SBATCH'):].strip().split('=', 1)
            sbatch_args[key] = value.replace('%j', job.job_id).replace('%t', '0')
        # The shell must find both files despite the weird directory name.
        subprocess.check_call('ls ' + sbatch_args['--output'], shell=True)
        subprocess.check_call('ls ' + sbatch_args['--error'], shell=True)
class OptionSeriesLollipopSonificationDefaultspeechoptionsMappingRate(Options):
    """Mapping options for the speech-rate sonification of lollipop series.

    NOTE(review): each getter/setter pair shares a name; in this generated
    Highcharts-options family that pattern relies on property decorators
    that appear to have been stripped here — as written, each second `def`
    shadows the first. Confirm against the original source.
    """

    def mapFunction(self):
        # Getter; defaults to None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter for the mapping function.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter for the data property to map the rate to.
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        # Setter for the maximum mapped rate.
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        # Setter for the minimum mapped rate.
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        # Setter for the data-range context ('series'/'chart'-style value).
        self._config(value, js_type=False)
_serializer(compat=True)  # NOTE(review): looks like a stripped decorator application (e.g. @_serializer(compat=True)) — confirm
class InferenceTask(Generic[Input]):
    """One inference request flowing through the serving pipeline.

    Carries the payload plus transport-specific context (HTTP, AWS
    Lambda, CLI) and discard/error bookkeeping.
    """
    version: int = 0
    data: Input = None
    error: Optional[InferenceResult] = None
    task_id: str = field(default_factory=(lambda : str(uuid.uuid4())))
    is_discarded: bool = False
    batch: Optional[int] = None
    # NOTE(review): the next two lines appear garbled — the attribute
    # names are missing (plausibly `http_method: Optional[str] = None`
    # and `http_headers: HTTPHeaders = HTTPHeaders()`); restore them from
    # the original source.
    Optional[str] = None
    HTTPHeaders = HTTPHeaders()
    aws_lambda_event: Optional[dict] = None
    cli_args: Optional[Sequence[str]] = None
    inference_job_args: Optional[Mapping[(str, Any)]] = None

    def discard(self, err_msg='', **context):
        """Mark the task as discarded with an InferenceError; returns self."""
        self.is_discarded = True
        self.error = InferenceError(err_msg=err_msg, **context)
        return self
.usefixtures('use_tmpdir')  # NOTE(review): looks like a stripped @pytest.mark.usefixtures — confirm
def test_run_with_defined_executable_but_missing():
    """A Job pointing at a non-existent executable must raise IOError on run."""
    executable = os.path.join(os.getcwd(), 'this/is/not/a/file')
    job = Job({'name': 'TEST_EXECUTABLE_NOT_FOUND', 'executable': executable, 'stdout': 'mkdir_out', 'stderr': 'mkdir_err'}, 0)
    with pytest.raises(IOError):
        # run() is a generator; drain it so the executable check executes.
        for _ in job.run():
            pass
class _DefaultLifespan():
    """Fallback lifespan context that simply runs the router's
    startup handlers on entry and shutdown handlers on exit."""

    def __init__(self, router: 'Router'):
        self._router = router

    async def __aenter__(self) -> None:
        await self._router.startup()

    async def __aexit__(self, *exc_info: object) -> None:
        await self._router.shutdown()

    def __call__(self: _T, app: object) -> _T:
        # The instance doubles as a lifespan factory: calling it with the
        # app just hands back the same context manager.
        return self
class PaymentMethod(QuickbooksManagedObject, QuickbooksTransactionEntity):
    """QuickbooksOnline PaymentMethod entity (e.g. cash, check, card)."""

    class_dict = {}
    qbo_object_name = 'PaymentMethod'

    def __init__(self):
        super(PaymentMethod, self).__init__()
        self.Name = ''
        self.Type = ''
        self.Active = True

    def __str__(self):
        return self.Name

    def to_ref(self):
        """Build a Ref object pointing at this payment method."""
        reference = Ref()
        reference.name = self.Name
        reference.type = self.qbo_object_name
        reference.value = self.Id
        return reference
def test():
    """Exercise checker validating a spaCy Matcher `pattern` (module-level).

    Expects NOUN + ADJ + optional token + ADJ with the correct keys and
    operators. The assert messages are learner-facing Portuguese strings
    and are part of the runtime behavior — do not translate them.
    """
    assert (len(pattern) == 4), 'A expressao deve descrever quatro tokens (quatro dicionarios).'
    assert (isinstance(pattern[0], dict) and isinstance(pattern[1], dict) and isinstance(pattern[2], dict) and isinstance(pattern[3], dict)), 'Cada item da expressao deve ser um dicionario.'
    assert ((len(pattern[0]) == 1) and (len(pattern[3]) == 1)), 'Cada item da primeira e da ultima expressao deve ter apenas uma chave'
    assert ((len(pattern[1]) == 2) and (len(pattern[2]) == 2)), 'A segunda e a terceira expressao devem ter duas chaves.'
    assert any(((pattern[0].get(key) == 'NOUN') for key in ['pos', 'POS'])), 'Voce esta fazendo a correspondencia da classe gramatical do primeiro token com o rotulo correto?'
    assert any(((pattern[1].get(key) == 'ADJ') for key in ['pos', 'POS'])), 'Voce esta fazendo a correspondencia da classe gramatical do segundo token com o rotulo correto?'
    assert any(((pattern[3].get(key) == 'ADJ') for key in ['pos', 'POS'])), 'Voce esta fazendo a correspondencia da classe gramatical do terceiro token com o rotulo correto?'
    assert (pattern[2].get('OP') == '?'), 'Voce esta usando o operador correto para o segundo token?'
    __msg__.good('Bom trabalho essas foram expressoes complexas! Vamos continuar e no proximo capitulo vamos dar uma olhada em como usar a spaCy para analises mais avancadas de textos.')
class EpisodeBatcher(MultiThreadEpisodeBatcher):
    """Deprecated multi-thread episode batcher.

    Probes one throwaway agent/env pair to discover tensor specs, builds a
    shared LocalBuffer, then spawns one ThreadWorker per thread.
    """

    def __init__(self, n_timesteps, n_slots, create_agent, agent_args, create_env, env_args, n_threads, seeds=None):
        # Emit the deprecation warning once per process.
        if (rlstructures.__deprecated_message__ == False):
            print('[DEPRECATED]: The current version of rlstructures is based on rlstructures.rl_batchers implementation. We advise you to switch your codebase for using this new batcher, and the corresponding needed adaptations')
            rlstructures.__deprecated_message__ = True
        # Probe a temporary agent/env pair solely to read the data specs.
        agent = create_agent(**agent_args)
        env = create_env(**{**env_args, 'seed': 0})
        (obs, who) = env.reset()
        (a, b, c) = agent(None, obs)
        self.n_envs = env.n_envs()
        specs_agent_state = a.specs()
        specs_agent_output = b.specs()
        specs_environment = obs.specs()
        del env
        del agent
        self.buffer = LocalBuffer(n_slots=n_slots, s_slots=n_timesteps, specs_agent_state=specs_agent_state, specs_agent_output=specs_agent_output, specs_environment=specs_environment)
        self.workers = []
        self.n_per_worker = []
        self.warning = False
        # Seed handling: None -> unseeded envs; int -> derive one seed per
        # thread; list -> one explicit seed per thread.
        if (seeds is None):
            print(('Seeds for batcher environments has not been chosen. Default' + ' is None'))
            seeds = [None for k in range(n_threads)]
        if isinstance(seeds, int):
            s = seeds
            seeds = [(s + (k * 64)) for k in range(n_threads)]
        assert (len(seeds) == n_threads), 'You have to choose one seed per thread'
        print(('[EpisodeBatcher] Creating %d threads' % n_threads))
        for k in range(n_threads):
            e_args = {**env_args, 'seed': seeds[k]}
            worker = ThreadWorker(len(self.workers), create_agent, agent_args, create_env, e_args, self.buffer)
            self.workers.append(worker)

    def close(self):
        """Shut down the workers (via the base class) and the shared buffer."""
        super().close()
        self.buffer.close()
def devices():
    """Return the serials of all devices/emulators visible to `adb devices`.

    :returns: list of device serial strings (empty when none attached)
    :raises FDroidException: if adb exits non-zero
    """
    p = SdkToolsPopen(['adb', 'devices'])
    if p.returncode != 0:
        # Typo fix in the user-facing message: 'occured' -> 'occurred'.
        raise FDroidException('An error occurred when finding devices: %s' % p.output)
    # Drop adb daemon start-up noise ('* daemon ...' lines).
    lines = [line for line in p.output.splitlines() if not line.startswith('* ')]
    # Fewer than three lines = only the header and trailing blank line,
    # i.e. no devices attached.
    if len(lines) < 3:
        return []
    # Strip the 'List of devices attached' header and the trailing blank.
    lines = lines[1:-1]
    # First whitespace-separated field of each row is the serial.
    return [line.split()[0] for line in lines]
class T():
    """Namespace of LoopIR type constructors and shared singleton instances.

    The upper-case names alias the LoopIR classes; the lower-case names
    are pre-built instances for convenient reuse.
    """
    # Class aliases.
    Num = LoopIR.Num
    F16 = LoopIR.F16
    F32 = LoopIR.F32
    F64 = LoopIR.F64
    INT8 = LoopIR.INT8
    UINT8 = LoopIR.UINT8
    UINT16 = LoopIR.UINT16
    INT32 = LoopIR.INT32
    Bool = LoopIR.Bool
    Int = LoopIR.Int
    Index = LoopIR.Index
    Size = LoopIR.Size
    Stride = LoopIR.Stride
    Error = LoopIR.Error
    Tensor = LoopIR.Tensor
    Window = LoopIR.WindowType
    type = LoopIR.type
    # Singleton instances (some with short/long spellings, e.g. i8/int8).
    R = Num()
    f16 = F16()
    f32 = F32()
    int8 = INT8()
    uint8 = UINT8()
    uint16 = UINT16()
    i8 = INT8()
    ui8 = UINT8()
    ui16 = UINT16()
    int32 = INT32()
    i32 = INT32()
    f64 = F64()
    bool = Bool()
    int = Int()
    index = Index()
    size = Size()
    stride = Stride()
    err = Error()
_custom_acc_mapper_fn(op_and_target=('call_method', 'repeat'), arg_replacement_tuples=[('input', 'input'), ('*', 'sizes')], skip_normalization_if_none=True)  # NOTE(review): looks like a stripped @ decorator registration — confirm
def repeat_mapper(node: torch.fx.Node, _: nn.Module) -> Optional[torch.fx.Node]:
    """Map a `Tensor.repeat` call_method node to the acc-ops `tile` op.

    Returns the replacement node, or None when the dims are not all
    static ints (variable dims are left unmapped).
    """
    with node.graph.inserting_before(node):
        inputs = node.kwargs['input']
        dims = node.kwargs['sizes']
        if (isinstance(dims, (list, tuple)) and (len(dims) > 0) and (not all((isinstance(x, int) for x in dims)))):
            logger.info("Not mapping repeat to an acc op. We can't handle variable dims.")
            return
        new_node = node.graph.create_node('call_function', tile, kwargs={'input': inputs, 'dims': dims}, name=f'{node.name}_repeat_map')
        # Preserve metadata (shape/dtype info) from the original node.
        new_node.meta = node.meta.copy()
        return new_node
.parametrize('primitive, digest', [(b'cowm\xc3\xb6', HexBytes('0x0f355f04c0a06eebac1d219b34c598f85a1169badee164be8afe8')), (b'', HexBytes('0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'))])
def test_keccak_primitive(primitive, digest):
    """Web3.keccak of each parametrized bytes input matches its known digest.

    NOTE(review): the bare `.parametrize(...)` line above looks like a
    stripped @pytest.mark.parametrize — confirm against the original.
    """
    assert (Web3.keccak(primitive) == digest)
def process_new_build_url(copr, add_view, url_on_success):
    """Handle the 'new build from URLs' form for the given copr project."""

    def factory(**build_options):
        # One build per line of the pasted URL list; `form` is bound by
        # the enclosing scope before process_new_build invokes us.
        urls = form.pkgs.data.split('\n')
        for url in urls:
            BuildsLogic.create_new_from_url(flask.g.user, copr, url, chroot_names=form.selected_chroots, **build_options)
        for url in urls:
            flask.flash('New build has been created: {}'.format(url), 'success')

    form = forms.BuildFormUrlFactory(copr.active_chroots)()
    return process_new_build(copr, form, factory, render_add_build, add_view, url_on_success, msg_on_success=False)
def union_manifests(manifests: List[Dict]) -> Dict[str, List[Dict]]:
    """Merge manifest dicts, concatenating the list values of shared keys.

    Per-key result order follows the order of `manifests`.

    Fixes two defects in the original implementation:
    * an empty `manifests` list made ``reduce`` raise TypeError
      (no initializer); now it returns ``{}``;
    * the first manifest providing a key was aliased into the result and
      then extended in place by ``+=``, mutating the caller's input.

    :param manifests: manifest dicts mapping keys to lists
    :returns: dict with each key's lists concatenated across manifests
    """
    unioned_dict: Dict[str, List] = {}
    for manifest in manifests:
        for key, values in manifest.items():
            # setdefault with a fresh list copies on first sight, so the
            # input manifests are never mutated.
            unioned_dict.setdefault(key, []).extend(values)
    return unioned_dict
.parametrize('prediction_column, current_data, reference_data, threshold, expected_prediction_column', ((None, pd.DataFrame({}), pd.DataFrame({}), 0.0, None), ('preds', pd.DataFrame({'preds': [1, 2, 3]}), pd.DataFrame({'preds': [1, 2, 3]}), 0.0, 'preds'), (['pred_a', 'pred_b'], pd.DataFrame({'pred_a': [1, 0, 1], 'pred_b': [1, 0, 1]}), pd.DataFrame({'pred_a': [1, 0, 1], 'pred_b': [1, 0, 1]}), 0.0, 'predicted_labels'), (['pred_a', 'pred_b', 'pred_c', 'pred_d'], pd.DataFrame({'pred_a': [0.5, 0, 0.8], 'pred_b': [0, 0.2, 0.5], 'pred_c': [0.3, 0.2, 0.5], 'pred_d': [0.1, 0.1, 0.9]}), pd.DataFrame({'pred_a': [1, 0, 0, 0], 'pred_b': [0, 1, 0, 0], 'pred_c': [0, 0, 1, 0], 'pred_d': [0, 0, 0, 1]}), 0.3, 'predicted_labels')))
def test_ensure_prediction_column_is_string(prediction_column: Optional[Union[(str, List)]], current_data: pd.DataFrame, reference_data: pd.DataFrame, threshold: float, expected_prediction_column: Optional[str]):
    """ensure_prediction_column_is_string collapses multi-column predictions.

    Parametrized (the bare `.parametrize(...)` line above looks like a
    stripped @pytest.mark.parametrize — confirm): single-column input is
    returned as-is; multi-column probabilities are collapsed into a
    'predicted_labels' column added to both frames.
    """
    result = ensure_prediction_column_is_string(prediction_column=prediction_column, current_data=current_data, reference_data=reference_data, threshold=threshold)
    assert (result == expected_prediction_column)
    if (prediction_column is not None):
        # The (possibly new) column must exist in both dataframes.
        assert (result in current_data)
        assert (result in reference_data)
class TestTargetMetricTest():
    """Tests for TargetMetricTracker crossing a target from either side.

    NOTE(review): `assertFalse`/`assertTrue`/`assertLess`/`assertGreater`
    are used as bare names, not `self.` methods, and this class has no
    TestCase base — presumably they are module-level helpers imported at
    the top of the file; confirm.
    """

    def test_target_metric_optimize_for_max(self) -> None:
        # Rising metric: target (90) is only crossed on the final update.
        metrics = [80, 81, 89, 90, 95]
        target_value = 90
        for average_type in [AverageType.SMA, AverageType.EMA]:
            target_tracker = TargetMetricTracker(target_value=target_value, window_size=3, average_type=average_type, direction=TargetMetricDirection.MAX)
            for metric in metrics[:(- 1)]:
                assertFalse(target_tracker.update_and_check_target(metric))
                assertLess(target_tracker.mean, target_value)
            assertTrue(target_tracker.update_and_check_target(metrics[(- 1)]))
            assertGreater(target_tracker.mean, target_value)

    def test_target_metric_optimize_for_min(self) -> None:
        # Falling metric: target (0.1) is only crossed on the final update.
        metrics = [0.5, 0.4, 0.15, 0.04, 0.1]
        target_value = 0.1
        for average_type in [AverageType.SMA, AverageType.EMA]:
            target_tracker = TargetMetricTracker(target_value=target_value, window_size=3, average_type=average_type, direction=TargetMetricDirection.MIN)
            for metric in metrics[:(- 1)]:
                assertFalse(target_tracker.update_and_check_target(metric))
                assertGreater(target_tracker.mean, target_value)
            assertTrue(target_tracker.update_and_check_target(metrics[(- 1)]))
            assertLess(target_tracker.mean, target_value)
class TestIndexDataAccessor(unittest.TestCase, DataAccessorMixin):
    """Unit tests for IndexDataAccessor: defaults, get/set, bounds, events."""

    def create_accessor(self):
        # Shared fixture: accessor targeting index 1 with a text value type.
        return IndexDataAccessor(index=1, value_type=TextValue())

    def test_defaults(self):
        accessor = IndexDataAccessor()
        self.assertEqual(accessor.index, 0)
        self.assertIsNone(accessor.value_type)
        self.assertIsInstance(accessor.title_type, TextValue)
        self.assertEqual(accessor.title, '0')

    def test_typical_defaults(self):
        accessor = self.create_accessor()
        self.assertIsInstance(accessor.title_type, TextValue)
        # Title defaults to the stringified index.
        self.assertEqual(accessor.title, '1')

    def test_get_value(self):
        accessor = self.create_accessor()
        obj = ['zero', 'one', 'two', 'three']
        value = accessor.get_value(obj)
        self.assertEqual(value, 'one')

    def test_get_value_out_of_bounds(self):
        accessor = self.create_accessor()
        accessor.index = 10
        obj = ['zero', 'one', 'two', 'three']
        with self.assertRaises(IndexError):
            accessor.get_value(obj)

    def test_can_set_value(self):
        accessor = self.create_accessor()
        obj = ['zero', 'one', 'two', 'three']
        can_set = accessor.can_set_value(obj)
        self.assertTrue(can_set)

    def test_can_set_value_false(self):
        # Index 1 is out of range for a one-element list.
        accessor = self.create_accessor()
        obj = ['zero']
        can_set = accessor.can_set_value(obj)
        self.assertFalse(can_set)

    def test_can_set_value_immuatble(self):
        # NOTE(review): method name typo ('immuatble' -> 'immutable');
        # left unrenamed here to avoid touching test discovery.
        accessor = self.create_accessor()
        obj = ('zero', 'one', 'two', 'three')
        can_set = accessor.can_set_value(obj)
        self.assertFalse(can_set)

    def test_set_value(self):
        accessor = self.create_accessor()
        obj = ['zero', 'one', 'two', 'three']
        accessor.set_value(obj, 'new_value')
        self.assertEqual(obj[1], 'new_value')

    def test_set_value_error(self):
        accessor = self.create_accessor()
        obj = ['zero']
        with self.assertRaises(DataViewSetError):
            accessor.set_value(obj, 'new_value')

    def test_index_updated(self):
        # Changing the index must emit exactly one 'updated' event.
        accessor = self.create_accessor()
        accessor.observe(self.accessor_observer, 'updated')
        with self.assertTraitChanges(accessor, 'updated', count=1):
            accessor.index = 2
        self.assertEqual(self.accessor_event.new, (accessor, 'value'))
@pytest.fixture(scope='function')
def succeeded_privacy_request(cache, db: Session, policy: Policy) -> PrivacyRequest:
    """Yield a completed PrivacyRequest with a cached/persisted identity; delete it on teardown."""
    pr = PrivacyRequest.create(db=db, data={
        'external_id': f'ext-{str(uuid4())}',
        'started_processing_at': datetime(2021, 10, 1),
        'finished_processing_at': datetime(2021, 10, 3),
        'requested_at': datetime(2021, 10, 1),
        'status': PrivacyRequestStatus.complete,
        # NOTE(review): the original 'origin' literal was corrupted in the source;
        # a placeholder URL is used here — confirm the expected origin format.
        'origin': 'https://example.com/',
        'policy_id': policy.id,
        'client_id': policy.client_id,
    })
    identity_kwargs = {'email': ''}
    pr.cache_identity(identity_kwargs)
    pr.persist_identity(db=db, identity=Identity(**identity_kwargs))
    yield pr
    pr.delete(db)
class OptionSeriesWindbarbDataDragdropDraghandle(Options):
    """Styling options for the drag handle shown when dragging windbarb points.

    Generated Highcharts wrapper: each option appears as a getter/setter pair
    sharing one name, mirroring the rest of this file. NOTE(review): with plain
    defs the second (setter) definition shadows the getter — presumably access
    is resolved through the Options base via _config_get/_config rather than
    plain attribute lookup; confirm against the Options base class.
    """
    def className(self):
        # CSS class name of the drag handles; Highcharts default shown.
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Fill color of the drag handles.
        return self._config_get('#fff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        # Line color of the drag handles.
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Line width of the drag handles, in pixels.
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        # Z-index for the drag handles.
        return self._config_get(901)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class ModelBase(ErsiliaBase):
    """Resolve an Ersilia model's identifier/slug pair and its local availability.

    Exactly one of ``model_id_or_slug`` or ``repo_path`` must be provided.
    """
    # NOTE(review): bare name expression kept from the original; it looks like a
    # stripped decorator or leftover reference — confirm its intent before removing.
    _ersilia_exception

    def __init__(self, model_id_or_slug=None, repo_path=None, config_json=None):
        """Initialize from either a model id/slug or a local repository path.

        Raises:
            Exception: if neither or both of ``model_id_or_slug`` / ``repo_path``
                are given.
            InvalidModelIdentifierError: if the resolved id/slug pair is invalid.
        """
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)
        if (model_id_or_slug is None) and (repo_path is None):
            # Fix: the original raised a bare Exception with no message.
            raise Exception('Either model_id_or_slug or repo_path must be provided')
        if (model_id_or_slug is not None) and (repo_path is not None):
            raise Exception('Provide only one of model_id_or_slug or repo_path, not both')
        if model_id_or_slug is not None:
            self.text = model_id_or_slug
            slugger = Slug()
            # The same string may be either a slug or a model id; derive the other.
            if slugger.is_slug(model_id_or_slug):
                self.slug = model_id_or_slug
                self.model_id = slugger.encode(self.slug)
            else:
                self.model_id = model_id_or_slug
                self.slug = slugger.decode(self.model_id)
            if not self.is_valid():
                raise InvalidModelIdentifierError(model=self.text)
        if repo_path is not None:
            self.logger.debug('Repo path specified: {0}'.format(repo_path))
            self.logger.debug('Absolute path: {0}'.format(os.path.abspath(repo_path)))
            # The directory name is taken as the model id.
            self.text = self._get_model_id_from_path(repo_path)
            self.model_id = self.text
            slug = self._get_slug_if_available(repo_path)
            # Fall back to a placeholder slug when metadata does not provide one.
            self.slug = 'my-model' if slug is None else slug

    def _get_model_id_from_path(self, repo_path):
        """Return the repository directory name, used as the model id."""
        return os.path.basename(os.path.abspath(repo_path)).rstrip('/')

    def _get_slug_if_available(self, repo_path):
        """Return the slug declared in metadata.json, or None when absent/empty."""
        metadata_json = os.path.join(repo_path, 'metadata.json')
        if not os.path.exists(metadata_json):
            return None
        with open(metadata_json, 'r') as f:
            data = json.load(f)
        # Robustness: tolerate metadata files without a 'Slug' key (the
        # original raised KeyError in that case).
        slug = data.get('Slug')
        if not slug:
            return None
        return slug

    def is_valid(self):
        """Return True when both model id and slug could be resolved."""
        return (self.model_id is not None) and (self.slug is not None)

    def _is_available_locally_from_status(self):
        """Return the DONE flag from the fetch status file, or False if missing."""
        fetch_status_file = os.path.join(self._dest_dir, self.model_id, STATUS_FILE)
        if not os.path.exists(fetch_status_file):
            self.logger.debug('No status file exists')
            is_fetched = False
        else:
            with open(fetch_status_file, 'r') as f:
                status = json.load(f)
            is_fetched = status[DONE_TAG]
        self.logger.debug('Is fetched: {0}'.format(is_fetched))
        return is_fetched

    def _is_available_locally_from_dockerhub(self):
        """Return True when the DockerHub fetch marker file exists."""
        from_dockerhub_file = os.path.join(self._dest_dir, self.model_id, IS_FETCHED_FROM_DOCKERHUB_FILE)
        return os.path.exists(from_dockerhub_file)

    def is_available_locally(self):
        """Return True when the model was fetched, via status file or DockerHub."""
        bs = self._is_available_locally_from_status()
        bd = self._is_available_locally_from_dockerhub()
        return bs or bd

    def was_fetched_from_dockerhub(self):
        """Return the 'docker_hub' flag from the marker file, or False if missing."""
        from_dockerhub_file = os.path.join(self._dest_dir, self.model_id, IS_FETCHED_FROM_DOCKERHUB_FILE)
        if not os.path.exists(from_dockerhub_file):
            return False
        with open(from_dockerhub_file, 'r') as f:
            data = json.load(f)
        return data['docker_hub']
def apply_compositing(color1: Color, color2: Color, blender: (blend_modes.Blend | None), operator: (str | bool)) -> Color:
    """Blend then composite ``color1`` (source) with ``color2`` (backdrop), in place.

    ``blender`` (optional) mixes the two coordinate lists channel by channel;
    ``operator`` selects a Porter-Duff compositing mode: a string names the
    mode, ``True`` means 'source-over', and any other value skips compositing.
    Returns ``color1`` mutated with the composited channels and alpha.
    """
    # Source and backdrop alphas/coordinates with NaNs resolved.
    csa = color1.alpha(nans=False)
    cba = color2.alpha(nans=False)
    coords1 = color1.coords(nans=False)
    coords2 = color2.coords(nans=False)
    compositor = None
    # Result alpha defaults to the source alpha when no compositing is requested.
    cra = csa
    if isinstance(operator, str):
        compositor = porter_duff.compositor(operator)(cba, csa)
        cra = alg.clamp(compositor.ao(), 0, 1)
    elif (operator is True):
        # `True` selects the default Porter-Duff mode.
        compositor = porter_duff.compositor('source-over')(cba, csa)
        cra = alg.clamp(compositor.ao(), 0, 1)
    channels = color1._space.CHANNELS
    i = 0
    # Pair each backdrop coordinate with the (optionally blended) source coordinate.
    for (cb, cr) in zip(coords2, (blender.blend(coords2, coords1) if blender else coords1)):
        cr = clip_channel(cr, channels[i])
        if compositor:
            color1[i] = compositor.co(cb, cr)
            # Un-premultiply by the result alpha, except at the 0/1 extremes.
            if (cra not in (0, 1)):
                color1[i] /= cra
        else:
            color1[i] = cr
        i += 1
    # Last slot is the alpha channel.
    color1[(- 1)] = cra
    return color1
class TestFunctions(PreqlTests):
    """Tests for Preql's fmt() string interpolation."""

    def test_fmt(self):
        """fmt() substitutes $-variables from the surrounding scope."""
        preql = self.Preql()
        preql('\n a = "hello"\n b = "world"\n\n f1 = fmt("")\n f2 = fmt("a")\n f3 = fmt("a b c $a")\n f4 = fmt("a b c $a $b!")\n f5 = fmt("$a my $b!")\n ')
        expected = {
            'f1': '',
            'f2': 'a',
            'f3': 'a b c hello',
            'f4': 'a b c hello world!',
            'f5': 'hello my world!',
        }
        for name, value in expected.items():
            assert getattr(preql, name) == value
class TestCosmTradeHandler(BaseSkillTestCase):
    """Tests for the cosm_trade handler of the tac_negotiation skill."""

    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'tac_negotiation')

    @classmethod
    def setup(cls):
        """Set up shared handler, dialogues and fixture data on the class.

        Fix: restored the @classmethod decorator — the method takes ``cls``,
        mutates class-level attributes, and chains to the base classmethod
        via ``super().setup()``.
        """
        super().setup()
        cls.cosm_trade_handler = cast(CosmTradeHandler, cls._skill.skill_context.handlers.cosm_trade)
        cls.strategy = cast(Strategy, cls._skill.skill_context.strategy)
        cls.logger = cls._skill.skill_context.logger
        cls.fipa_dialogues = cast(FipaDialogues, cls._skill.skill_context.fipa_dialogues)
        cls.cosm_trade_dialogues = cast(CosmTradeDialogues, cls._skill.skill_context.cosm_trade_dialogues)
        cls.signing_dialogues = cast(SigningDialogues, cls._skill.skill_context.signing_dialogues)
        cls.dialogue_stats = cls.cosm_trade_dialogues.dialogue_stats
        cls.ledger_id = 'some_ledger_id'
        cls.counterprty_address = COUNTERPARTY_AGENT_ADDRESS
        cls.amount_by_currency_id = {'1': 50}
        cls.quantities_by_good_id = {'2': (- 10)}
        cls.nonce = '234543'
        cls.body = {'some_key': 'some_value'}
        cls.fipa_dialogue_id = ('1', '1')
        cls.terms = Terms(cls.ledger_id, cls._skill.skill_context.agent_address, cls.counterprty_address, cls.amount_by_currency_id, cls.quantities_by_good_id, cls.nonce)
        cls.raw_message = RawMessage(ledger_id=cls.ledger_id, body=cls.terms.sender_hash.encode('utf-8'))
        cls.signed_tx = SignedTransaction(cls.ledger_id, cls.body)
        cls.cfp_query = Query([Constraint('some_attribute', ConstraintType('==', 'some_service'))], DataModel(SUPPLY_DATAMODEL_NAME, [Attribute('some_attribute', str, False, 'Some attribute descriptions.')]))
        cls.proposal = Description({'ledger_id': cls.ledger_id, 'price': 100, 'currency_id': '1', 'fee': 1, 'nonce': cls.nonce})
        cls.list_of_fipa_messages = (DialogueMessage(FipaMessage.Performative.CFP, {'query': cls.cfp_query}, True), DialogueMessage(FipaMessage.Performative.PROPOSE, {'proposal': cls.proposal}), DialogueMessage(FipaMessage.Performative.ACCEPT), DialogueMessage(FipaMessage.Performative.MATCH_ACCEPT_W_INFORM, {'info': {'address': 'some_term_sender_address'}}), DialogueMessage(FipaMessage.Performative.INFORM, {'info': {'transaction_digest': 'some_transaction_digest_body'}}))
        cls.list_of_cosm_trade_messages = (DialogueMessage(CosmTradeMessage.Performative.INFORM_SIGNED_TRANSACTION, {'signed_transaction': cls.signed_tx, 'fipa_dialogue_id': ('1', '')}),)

    @staticmethod
    def _assert_stat_state(dialogue_stats: DialogueStats, changed_agent: Optional[str]=None, changed_end_state: Optional[CosmTradeDialogue.EndState]=None) -> None:
        """Assert the dialogue end-state counters are all zero, except one.

        Fix: restored the @staticmethod decorator — callers invoke
        ``self._assert_stat_state(self.dialogue_stats, ...)``; without the
        decorator, ``self`` would be bound to ``dialogue_stats``.
        """
        if ((changed_agent is None) and (changed_end_state is None)):
            # Nothing changed: every counter on both sides must still be zero.
            unchanged_dict_1 = dialogue_stats.self_initiated
            unchanged_dict_2 = dialogue_stats.other_initiated
            for end_state_numbers in unchanged_dict_1.values():
                assert (end_state_numbers == 0)
            for end_state_numbers in unchanged_dict_2.values():
                assert (end_state_numbers == 0)
        elif ((changed_agent is not None) and (changed_end_state is not None)):
            if (changed_agent == 'self'):
                changed_dict = dialogue_stats.self_initiated
                unchanged_dict = dialogue_stats.other_initiated
            elif (changed_agent == 'other'):
                changed_dict = dialogue_stats.other_initiated
                unchanged_dict = dialogue_stats.self_initiated
            else:
                raise SyntaxError(f"changed_agent can only be 'self' or 'other'. Found {changed_agent}.")
            for end_state_numbers in unchanged_dict.values():
                assert (end_state_numbers == 0)
            # Exactly one counter (the changed end state) must have been bumped.
            for (end_state, end_state_numbers) in changed_dict.items():
                if (end_state == changed_end_state):
                    assert (end_state_numbers == 1)
                else:
                    assert (end_state_numbers == 0)
        else:
            raise SyntaxError('changed_agent and changed_end_state should either both be None, or neither.')

    def test_setup(self):
        """setup() is a no-op that sends nothing."""
        assert (self.cosm_trade_handler.setup() is None)
        self.assert_quantity_in_outbox(0)

    def test_handle_unidentified_dialogue(self):
        """An unidentified dialogue is logged and answered with a default error."""
        self.strategy._is_contract_tx = True
        incorrect_dialogue_reference = ('', '')
        incoming_message = self.build_incoming_message(message_type=CosmTradeMessage, dialogue_reference=incorrect_dialogue_reference, performative=CosmTradeMessage.Performative.INFORM_PUBLIC_KEY, public_key='some_public_key')
        with patch.object(self.logger, 'log') as mock_logger:
            self.cosm_trade_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received invalid cosm_trade message={incoming_message}, unidentified dialogue.')
        self.assert_quantity_in_outbox(1)
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=DefaultMessage, performative=DefaultMessage.Performative.ERROR, to=incoming_message.sender, sender=self.skill.skill_context.agent_address, error_code=DefaultMessage.ErrorCode.INVALID_DIALOGUE, error_msg='Invalid dialogue.', error_data={'cosm_trade_message': incoming_message.encode()})
        assert has_attributes, error_str

    def test_handle_signed_tx_i(self):
        """A signed tx with a fipa dialogue id is forwarded to the decision maker."""
        self.strategy._is_contract_tx = True
        self.strategy._ledger_id = FetchAIApi.identifier
        fipa_dialogue = cast(FipaDialogue, self.prepare_skill_dialogue(dialogues=self.fipa_dialogues, messages=self.list_of_fipa_messages[:4], is_agent_to_agent_messages=True))
        fipa_dialogue._terms = self.terms
        incoming_message = self.build_incoming_message(message_type=CosmTradeMessage, performative=CosmTradeMessage.Performative.INFORM_SIGNED_TRANSACTION, signed_transaction=self.signed_tx, fipa_dialogue_id=fipa_dialogue.dialogue_label.dialogue_reference)
        raw_tx = RawTransaction(ledger_id=self.signed_tx.ledger_id, body=self.signed_tx.body)
        with patch.object(self.logger, 'log') as mock_logger:
            self.cosm_trade_handler.handle(incoming_message)
        self.assert_quantity_in_decision_making_queue(1)
        mock_logger.assert_any_call(logging.INFO, f'received inform_signed_tx with signed_tx={self.signed_tx}')
        message = self.get_message_from_decision_maker_inbox()
        (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=SigningMessage, performative=SigningMessage.Performative.SIGN_TRANSACTION, to=self.skill.skill_context.decision_maker_address, sender=str(self.skill.skill_context.skill_id), terms=self.terms, raw_transaction=raw_tx)
        assert has_attributes, error_str
        assert (cast(SigningDialogue, self.signing_dialogues.get_dialogue(message)).associated_fipa_dialogue == fipa_dialogue)
        assert (cast(SigningDialogue, self.signing_dialogues.get_dialogue(message)).associated_cosm_trade_dialogue == self.cosm_trade_dialogues.get_dialogue(incoming_message))
        mock_logger.assert_any_call(logging.INFO, 'proposing the transaction to the decision maker. Waiting for confirmation ...')

    def test_handle_signed_tx_ii(self):
        """A signed tx without a fipa dialogue id is rejected and logged."""
        self.strategy._is_contract_tx = True
        self.strategy._ledger_id = FetchAIApi.identifier
        fipa_dialogue = cast(FipaDialogue, self.prepare_skill_dialogue(dialogues=self.fipa_dialogues, messages=self.list_of_fipa_messages[:4], is_agent_to_agent_messages=True))
        fipa_dialogue._terms = self.terms
        incoming_message = self.build_incoming_message(message_type=CosmTradeMessage, performative=CosmTradeMessage.Performative.INFORM_SIGNED_TRANSACTION, signed_transaction=self.signed_tx, fipa_dialogue_id=None)
        with patch.object(self.logger, 'log') as mock_logger:
            self.cosm_trade_handler.handle(incoming_message)
        self.assert_quantity_in_decision_making_queue(0)
        mock_logger.assert_any_call(logging.INFO, f'received inform_signed_tx with signed_tx={self.signed_tx}')
        mock_logger.assert_any_call(logging.INFO, 'inform_signed_tx must contain fipa dialogue reference.')

    def test_handle_error(self):
        """An error message closes the dialogue as FAILED."""
        self.strategy._is_contract_tx = True
        self.strategy._ledger_id = FetchAIApi.identifier
        cosm_trade_dialogue = self.prepare_skill_dialogue(dialogues=self.cosm_trade_dialogues, messages=self.list_of_cosm_trade_messages[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=cosm_trade_dialogue, performative=CosmTradeMessage.Performative.ERROR, code=1)
        self._assert_stat_state(self.dialogue_stats)
        with patch.object(self.logger, 'log') as mock_logger:
            self.cosm_trade_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received cosm_trade_api error message={incoming_message} in dialogue={cosm_trade_dialogue}.')
        self._assert_stat_state(self.dialogue_stats, 'self', CosmTradeDialogue.EndState.FAILED)

    def test_handle_end(self):
        """An end message closes the dialogue as SUCCESSFUL."""
        self.strategy._is_contract_tx = True
        self.strategy._ledger_id = FetchAIApi.identifier
        cosm_trade_dialogue = self.prepare_skill_dialogue(dialogues=self.cosm_trade_dialogues, messages=self.list_of_cosm_trade_messages[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=cosm_trade_dialogue, performative=CosmTradeMessage.Performative.END)
        self._assert_stat_state(self.dialogue_stats)
        with patch.object(self.logger, 'log') as mock_logger:
            self.cosm_trade_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received cosm_trade_api end message={incoming_message} in dialogue={cosm_trade_dialogue}.')
        self._assert_stat_state(self.dialogue_stats, 'self', CosmTradeDialogue.EndState.SUCCESSFUL)

    def test_teardown(self):
        """teardown() is a no-op that sends nothing."""
        assert (self.cosm_trade_handler.teardown() is None)
        self.assert_quantity_in_outbox(0)
class SharedLoginMiddleware(object):
    """Django middleware that keeps website and webclient logins in sync
    through shared session keys."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        self.make_shared_login(request)
        response = self.get_response(request)
        return response

    @classmethod
    def make_shared_login(cls, request):
        """Synchronize the website/webclient authenticated uids in the session.

        Fixes: restored the @classmethod decorator (the method takes ``cls``),
        and in the webclient-only branch the website uid is now set to
        ``webclient_uid`` instead of ``account.id`` — at that point ``account``
        is the (unauthenticated) request user, whose id is not the logged-in
        account's id.
        """
        csession = request.session
        account = request.user
        website_uid = csession.get('website_authenticated_uid', None)
        webclient_uid = csession.get('webclient_authenticated_uid', None)
        if not csession.session_key:
            # Ensure a session key exists before we store anything against it.
            csession.save()
        if account.is_authenticated:
            # Logged into the website: propagate the uid to both keys.
            if website_uid is None:
                csession['website_authenticated_uid'] = account.id
            if webclient_uid is None:
                csession['webclient_authenticated_uid'] = account.id
        elif webclient_uid:
            # Not logged into the website, but the webclient session is —
            # auto-login the website side with the webclient's account.
            if website_uid is None:
                csession['website_authenticated_uid'] = webclient_uid
            account = AccountDB.objects.get(id=webclient_uid)
            try:
                authenticate(autologin=account)
                login(request, account)
            except AttributeError:
                logger.log_trace()
        # Bump a nonce so the webclient can detect login-state changes;
        # wrap around to keep the counter small.
        if csession.get('webclient_authenticated_uid', None):
            csession['webclient_authenticated_nonce'] = (csession.get('webclient_authenticated_nonce', 0) + 1)
            if (csession['webclient_authenticated_nonce'] > 32):
                csession['webclient_authenticated_nonce'] = 0
class TestPCF2AttributionStageService(IsolatedAsyncioTestCase):
    """Tests for PCF2AttributionStageService container start-up and game args."""

    def setUp(self) -> None:
        """Build the stage service with a mocked MPC service and binary config."""
        self.mock_mpc_svc = MagicMock(spec=MPCService)
        self.mock_mpc_svc.onedocker_svc = MagicMock()
        self.run_id = '681ba82c-16d9-11ed-861d-0242ac120002'
        onedocker_binary_config_map = defaultdict((lambda : OneDockerBinaryConfig(tmp_directory='/test_tmp_directory/', binary_version='latest', repository_path='test_path/')))
        self.stage_svc = PCF2AttributionStageService(onedocker_binary_config_map, self.mock_mpc_svc)
        self.container_permission_id = 'test-container-permission'

    async def test_attribution_stage(self) -> None:
        """run_async starts containers with the expected arguments and records them."""
        containers = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        self.mock_mpc_svc.start_containers.return_value = containers
        private_computation_instance = self._create_pc_instance(pcs_features=set())
        binary_name = 'private_attribution/pcf2_attribution'
        test_server_ips = [f'192.0.2.{i}' for i in range(private_computation_instance.infra_config.num_mpc_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = (binary_name, ['cmd_1', 'cmd_2'])
        (await self.stage_svc.run_async(private_computation_instance, NullCertificateProvider(), NullCertificateProvider(), '', '', test_server_ips))
        self.mock_mpc_svc.start_containers.assert_called_once_with(cmd_args_list=['cmd_1', 'cmd_2'], onedocker_svc=self.mock_mpc_svc.onedocker_svc, binary_version='latest', binary_name=binary_name, timeout=None, env_vars={'ONEDOCKER_REPOSITORY_PATH': 'test_path/'}, wait_for_containers_to_start_up=True, existing_containers=None, env_vars_list=None, opa_workflow_path=None, permission=ContainerPermissionConfig(self.container_permission_id))
        self.assertEqual(containers, private_computation_instance.infra_config.instances[(- 1)].containers)
        self.assertEqual('PCF2_ATTRIBUTION', private_computation_instance.infra_config.instances[(- 1)].stage_name)

    def test_get_game_args(self) -> None:
        """Game args without feature flags use the legacy output format."""
        private_computation_instance = self._create_pc_instance(pcs_features=set())
        run_name_base = ((private_computation_instance.infra_config.instance_id + '_') + GameNames.PCF2_ATTRIBUTION.value)
        common_game_args = {'input_base_path': private_computation_instance.data_processing_output_path, 'output_base_path': private_computation_instance.pcf2_attribution_stage_output_base_path, 'num_files': private_computation_instance.infra_config.num_files_per_mpc_container, 'concurrency': private_computation_instance.infra_config.mpc_compute_concurrency, 'max_num_touchpoints': private_computation_instance.product_config.common.padding_size, 'max_num_conversions': private_computation_instance.product_config.common.padding_size, 'attribution_rules': AttributionRule.LAST_CLICK_1D.value, 'use_xor_encryption': True, 'use_postfix': True, 'log_cost': True, 'run_id': self.run_id, 'use_tls': False, 'ca_cert_path': '', 'server_cert_path': '', 'private_key_path': '', 'log_cost_s3_bucket': private_computation_instance.infra_config.log_cost_bucket, 'use_new_output_format': False}
        test_game_args = [{**common_game_args, 'run_name': (f'{run_name_base}_0' if self.stage_svc._log_cost_to_s3 else ''), 'file_start_index': 0}, {**common_game_args, 'run_name': (f'{run_name_base}_1' if self.stage_svc._log_cost_to_s3 else ''), 'file_start_index': private_computation_instance.infra_config.num_files_per_mpc_container}]
        self.assertEqual(test_game_args, self.stage_svc.get_game_args(private_computation_instance, '', ''))

    def test_get_game_args_with_feature_flags(self) -> None:
        """The reformatted-output feature flag flips the new output format on."""
        private_computation_instance = self._create_pc_instance({PCSFeature.PRIVATE_ATTRIBUTION_REFORMATTED_OUTPUT})
        run_name_base = ((private_computation_instance.infra_config.instance_id + '_') + GameNames.PCF2_ATTRIBUTION.value)
        common_game_args = {'input_base_path': private_computation_instance.data_processing_output_path, 'output_base_path': private_computation_instance.pcf2_attribution_stage_output_base_path, 'num_files': private_computation_instance.infra_config.num_files_per_mpc_container, 'concurrency': private_computation_instance.infra_config.mpc_compute_concurrency, 'max_num_touchpoints': private_computation_instance.product_config.common.padding_size, 'max_num_conversions': private_computation_instance.product_config.common.padding_size, 'attribution_rules': AttributionRule.LAST_CLICK_1D.value, 'use_xor_encryption': True, 'use_postfix': True, 'log_cost': True, 'run_id': self.run_id, 'use_tls': False, 'ca_cert_path': '', 'server_cert_path': '', 'private_key_path': '', 'log_cost_s3_bucket': private_computation_instance.infra_config.log_cost_bucket, 'use_new_output_format': True, 'pc_feature_flags': 'private_attribution_reformatted_output'}
        test_game_args = [{**common_game_args, 'run_name': (f'{run_name_base}_0' if self.stage_svc._log_cost_to_s3 else ''), 'file_start_index': 0}, {**common_game_args, 'run_name': (f'{run_name_base}_1' if self.stage_svc._log_cost_to_s3 else ''), 'file_start_index': private_computation_instance.infra_config.num_files_per_mpc_container}]
        self.assertEqual(test_game_args, self.stage_svc.get_game_args(private_computation_instance, '', ''))

    def _create_pc_instance(self, pcs_features: Set[PCSFeature]) -> PrivateComputationInstance:
        """Build a partner-role PrivateComputationInstance fixture."""
        # NOTE(review): the original `status_update_ts` value was missing (syntax
        # error); a fixed epoch timestamp is used — confirm the intended value.
        infra_config: InfraConfig = InfraConfig(instance_id='test_instance_123', role=PrivateComputationRole.PARTNER, _stage_flow_cls_name='PrivateComputationPCF2StageFlow', status=PrivateComputationInstanceStatus.PCF2_ATTRIBUTION_STARTED, status_update_ts=1600000000, instances=[], game_type=PrivateComputationGameType.ATTRIBUTION, num_pid_containers=2, num_mpc_containers=2, num_files_per_mpc_container=NUM_NEW_SHARDS_PER_FILE, status_updates=[], run_id=self.run_id, log_cost_bucket='test_log_cost_bucket', pcs_features=pcs_features, container_permission_id=self.container_permission_id)
        common: CommonProductConfig = CommonProductConfig(input_path='456', output_dir='789', padding_size=4)
        product_config: ProductConfig = AttributionConfig(common=common, attribution_rule=AttributionRule.LAST_CLICK_1D, aggregation_type=AggregationType.MEASUREMENT)
        return PrivateComputationInstance(infra_config=infra_config, product_config=product_config)
def _get_mat_type(mat_type, sub_mat_type, arguments):
if (mat_type is None):
mat_type = parameters.parameters['default_matrix_type']
if any(((V.ufl_element().family() == 'Real') for arg in arguments for V in arg.function_space())):
mat_type = 'nest'
if (mat_type not in {'matfree', 'aij', 'baij', 'nest', 'dense'}):
raise ValueError(f"Unrecognised matrix type, '{mat_type}'")
if (sub_mat_type is None):
sub_mat_type = parameters.parameters['default_sub_matrix_type']
if (sub_mat_type not in {'aij', 'baij'}):
raise ValueError(f"Invalid submatrix type, '{sub_mat_type}' (not 'aij' or 'baij')")
return (mat_type, sub_mat_type) |
@pytest.mark.django_db
@pytest.mark.parametrize('model, request_data, expected', [(Award, {'field': 'total_obligation', 'group': 'period_of_performance_start_date__fy'}, _expected_fy_aggregated)])
def test_aggregate_fy(monkeypatch, aggregate_models, model, request_data, expected):
    """Aggregating by fiscal year returns the expected rows.

    Fix: restored the stripped ``@pytest.mark`` decorators (the bare
    ``.django_db`` / ``.parametrize`` lines were syntax errors).
    """
    request = Mock()
    request.query_params = {}
    request.data = request_data
    mixin = AggregateQuerysetMixin()
    agg = mixin.aggregate(request=request, queryset=model.objects.all())
    agg_list = list(agg)  # fix: previous comprehension shadowed the mixin variable
    # Results are unordered unless the request specifies an order; sort both
    # sides by 'item' so the comparison is deterministic.
    if 'order' not in request_data:
        agg_list.sort(key=itemgetter('item'))
        expected.sort(key=itemgetter('item'))
    assert agg_list == expected
@pytest.mark.smoke
def test_parse_workflow_with_subdag():
    """A workflow YAML with a subdag yields the expected primary and secondary graphs.

    Fix: restored the stripped ``@pytest.mark.smoke`` decorator (the bare
    ``.smoke`` line was a syntax error).
    """
    filename = 'test/data/good-dags/subdag_test.yaml'
    wf = Workflow.load(filename)
    # Primary graph: all top-level nodes including the cluster lifecycle tasks.
    assert (frozenset((node.name for node in wf.specs.graphs.primary.graph.nodes())) == frozenset(['tester', 'Datacopier', 'SuccessFileSensor', 'dataproc_cluster_create', 'dataproc_cluster_destroy', 'dataproc_cluster_destroy-sentinel']))
    # Exactly one secondary (subdag) graph with its single sensor node.
    assert (len(wf.specs.graphs.secondary) == 1)
    assert (frozenset((node.name for node in wf.specs.graphs.secondary[0].graph.nodes())) == frozenset(['SubDagSuccessFileSensor']))
    print(wf.build_dag(PrimaryDagBuilder, SubDagBuilder, GeneratorBuilder))
class OptionPlotoptionsVariwideLabel(Options):
    """Series label options for variwide plots (Highcharts `plotOptions.variwide.label`).

    Generated wrapper: each option appears as a getter/setter pair sharing one
    name, mirroring the rest of this file. NOTE(review): @property/@<name>.setter
    decorators appear to be absent (consistent with the whole file) — presumably
    access is resolved through the Options base via _config_get/_config; confirm
    against the base class.
    """
    def boxesToAvoid(self):
        # Bounding boxes the label must not overlap.
        return self._config_get(None)
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)
    def connectorAllowed(self):
        # Whether a connector line from the label to the series is allowed.
        return self._config_get(False)
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)
    def connectorNeighbourDistance(self):
        # Pixel distance from the series within which a connector may attach.
        return self._config_get(24)
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)
    def enabled(self):
        # Whether the series label is shown at all.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def format(self):
        # Format string for the label text.
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        # Callback alternative to `format`.
        return self._config_get('undefined')
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def maxFontSize(self):
        return self._config_get(None)
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)
    def minFontSize(self):
        return self._config_get(None)
    def minFontSize(self, num: float):
        self._config(num, js_type=False)
    def onArea(self):
        # Whether to draw the label on the area of an area series.
        return self._config_get(None)
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)
    def style(self) -> 'OptionPlotoptionsVariwideLabelStyle':
        # Nested CSS style options object.
        return self._config_sub_data('style', OptionPlotoptionsVariwideLabelStyle)
    def useHTML(self):
        # Whether to render the label with HTML instead of SVG.
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
def upgrade():
    """Create the moderatorlog table plus one lookup index per queried column."""
    op.create_table(
        'moderatorlog',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('date', sa.BigInteger(), nullable=False),
        sa.Column('moderator_id', sa.Integer(), nullable=True),
        sa.Column('board_id', sa.Integer(), nullable=True),
        sa.Column('type', sa.Integer(), nullable=False),
        sa.Column('text', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['board_id'], ['board.id']),
        sa.ForeignKeyConstraint(['moderator_id'], ['moderator.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    # Indexes created in the same order as the original migration.
    for column in ('board_id', 'date', 'moderator_id', 'type'):
        op.create_index(op.f(f'ix_moderatorlog_{column}'), 'moderatorlog', [column], unique=False)
# NOTE(review): the registry decorator below was reconstructed from the bare
# string literal left at this position (dead code) — confirm the registration
# call against this codebase's backend registry API.
@registry.reg('cuda.gemm_rcr_bias_hardswish.gen_profiler')
def gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict):
    """Generate the CUDA profiler sources for the gemm_rcr_bias_hardswish op
    by delegating to the shared bias-activation generator with this op's
    problem-argument templates."""
    return common_bias_activation.gen_profiler(func_attrs=func_attrs, workdir=workdir, profiler_filename=profiler_filename, dim_info_dict=dim_info_dict, problem_args_template=PROBLEM_ARGS_TEMPLATE, problem_args_template_cutlass_3x=PROBLEM_ARGS_TEMPLATE_CUTLASS_3X)
def build_graph():
    """Build a small TF1 graph and return the ops to run each step.

    Creates a placeholder ``a`` (stored as a module global so callers can feed
    it), a variable updated from the reduced mean, a scalar summary, and a
    global-step increment. Returns the list of assign ops plus the step
    increment.
    """
    global a
    a = tf.placeholder(tf.float32, shape=None, name='a')
    b = tf.reduce_mean(a, name='b')
    run_ops = []
    for i in range(1):
        v = tf.Variable(dtype=tf.float32, initial_value=tf.constant(1.0), name=('v_' + str(i)))
        c = tf.add(b, v, name=('c_' + str(i)))
        assign_op = tf.assign(v, c, name=('assign_' + str(i)))
        # Creating the summary registers it in the graph's summary collection;
        # the returned tensor is unused (the original bound it to a local that
        # shadowed the builtin `sum`).
        tf.summary.scalar(name=('sum_' + str(i)), tensor=c)
        run_ops.append(assign_op)
    global_step = tf.train.get_or_create_global_step()
    global_step_inc = tf.assign_add(global_step, 1)
    run_ops.append(global_step_inc)
    return run_ops
class Races():
    """Registry of playable Race instances, exposed through a cached dict.

    Fix: restored @classmethod on the accessor methods — they take ``cls`` and
    are invoked on the class (``Races.items()``), which fails without the
    decorator.
    """
    _cached_dict = None

    Human = Race(key='human', name='Human', cunning_mod=1, desc='Your average human.')
    Dwarf = Race(key='dwarf', name='Dwarf', strength_mod=1, desc='Short and strong.')
    HalfElf = Race(key='half_elf', name='Half Elf', will_mod=1, desc='Bit less average')
    Elf = Race(key='elf', name='Elf', strength_mod=(- 1), will_mod=1, desc='Regular elves')
    Goblin = Race(key='goblin', name='Goblin', cunning_mod=1, strength_mod=(- 1), will_mod=1, desc='Small and cunning')
    Orc = Race(key='orc', name='Orc', strength_mod=2, will_mod=(- 1), desc='Tall and strong')
    Lizardman = Race(key='lizardman', name='Lizardman', cunning_mod=1, strength_mod=1, will_mod=(- 1), desc='Reptilian hunters')
    Ratman = Race(key='ratman', name='Ratman', cunning_mod=2, strength_mod=(- 1), desc='Shorter but cunning')

    @classmethod
    def _get_cached_dict(cls):
        """Build (once) and return the key -> Race mapping from class attributes."""
        if not cls._cached_dict:
            new_dict = {value.key: value for value in cls.__dict__.values() if isinstance(value, Race)}
            cls._cached_dict = new_dict
        return cls._cached_dict

    @classmethod
    def items(cls):
        """Return (key, Race) pairs."""
        return cls._get_cached_dict().items()

    @classmethod
    def values(cls):
        """Return all Race instances."""
        return cls._get_cached_dict().values()

    @classmethod
    def get(cls, key):
        """Return the Race for `key`, or None if unknown."""
        return cls._get_cached_dict().get(key)
def extractRemonwaterWordpressCom(item):
    """Parse a feed item from remonwater.wordpress.com into a release message.

    Returns None for previews or items without volume/chapter info, a release
    message for recognised tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    release_map = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, translation_type in release_map:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    # Unrecognised tags: explicitly signal "no match".
    return False
def main():
    """Ansible module entry point for the FortiOS firewall_shaper traffic_shaper resource.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, runs the configuration change, and exits with changed/diff
    status (warning on schema-version mismatches).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # The schema's primary key; its option is forced to required below.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_shaper_traffic_shaper': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option into the resource's nested spec.
    for attribute_name in module_spec['options']:
        fields['firewall_shaper_traffic_shaper']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_shaper_traffic_shaper']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    # A socket path means we were invoked over the httpapi connection plugin.
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Be explicit: default logging off when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_shaper_traffic_shaper')
        (is_error, has_changed, result, diff) = fortios_firewall_shaper(module.params, fos, module.check_mode)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    # Version mismatch is a warning on success, extra context on failure.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def test_recall_scores():
    """Recall@k on current data only (no reference dataset).

    Three users with three scored items each; users 'a' and 'c' have one
    relevant (target=1) item.  Checks the cumulative recall curve at
    k = 1, 2, 3.
    """
    frame = pd.DataFrame(
        {
            "user_id": ["a", "a", "a", "b", "b", "b", "c", "c", "c"],
            "prediction": [1.25, 1.0, 0.3, 0.9, 0.8, 0.7, 1.0, 0.5, 0.3],
            "target": [1, 0, 0, 0, 0, 0, 0, 0, 1],
        }
    )
    recall_metric = RecallTopKMetric(k=3)
    report = Report(metrics=[recall_metric])
    report.run(
        reference_data=None,
        current_data=frame,
        column_mapping=ColumnMapping(),
    )
    outcome = recall_metric.get_result()
    # One recall value per cut-off 1..k
    assert len(outcome.current) == 3
    assert outcome.current[1] == 0.5
    assert outcome.current[2] == 0.5
    assert outcome.current[3] == 1
def plot_timeline_gpu_kernels_from_trace(title: str, trace_data: Trace, ranks: Optional[List[int]]=None, iterations: Optional[List[int]]=None, streams: Optional[List[int]]=None, duration_threshold: int=1000) -> None:
    """Render a GPU-kernel timeline for the requested ranks of a Trace.

    Selects the ranks present in ``trace_data`` (intersected with ``ranks``
    when given), stacks their per-rank trace frames into one DataFrame with a
    'rank' column, and delegates plotting to ``plot_timeline_gpu_kernels``.
    """
    available = trace_data.get_all_traces().keys()
    if ranks is None:
        selected_ranks = list(available)
    else:
        # Only ranks that actually exist in the trace data.
        selected_ranks = list(set(available).intersection(set(ranks)))
    combined = pd.concat(
        [trace_data.get_trace(rank) for rank in selected_ranks],
        axis=0,
        keys=selected_ranks,
        names=["rank", "idx"],
    ).reset_index()
    plot_timeline_gpu_kernels(title, combined, trace_data.symbol_table, ranks, iterations, streams, duration_threshold)
class KafkaBackend(BroadcastBackend):
    """Broadcast backend backed by Apache Kafka (aiokafka producer/consumer)."""

    def __init__(self, url: str):
        # Bootstrap server is the network-location part of the URL.
        self._servers = [urlparse(url).netloc]
        self._consumer_channels: typing.Set = set()

    async def connect(self) -> None:
        self._producer = AIOKafkaProducer(bootstrap_servers=self._servers)
        self._consumer = AIOKafkaConsumer(bootstrap_servers=self._servers)
        await self._producer.start()
        await self._consumer.start()

    async def disconnect(self) -> None:
        await self._producer.stop()
        await self._consumer.stop()

    async def subscribe(self, channel: str) -> None:
        # aiokafka's subscribe replaces the topic set, so track every
        # subscribed channel and re-subscribe with the full set each time.
        self._consumer_channels.add(channel)
        self._consumer.subscribe(topics=self._consumer_channels)

    async def unsubscribe(self, channel: str) -> None:
        self._consumer.unsubscribe()

    async def publish(self, channel: str, message: typing.Any) -> None:
        payload = message.encode('utf8')
        await self._producer.send_and_wait(channel, payload)

    async def next_published(self) -> Event:
        record = await self._consumer.getone()
        return Event(channel=record.topic, message=record.value.decode('utf8'))
def test_transformer_pipeline_empty():
    """End-to-end check that a transformer-backed pipeline tolerates empty docs.

    Trains briefly, then mixes empty strings into nlp.update / nlp() /
    nlp.pipe calls and verifies that empty docs yield empty trf_data while a
    normal sentence is still tagged identically.
    """
    orig_config = Config().from_str(cfg_string)
    nlp = util.load_model_from_config(orig_config, auto_fill=True, validate=True)
    tagger = nlp.get_pipe('tagger')
    train_examples = []
    # Build training examples and register every tag label with the tagger.
    for t in TRAIN_DATA:
        train_examples.append(Example.from_dict(nlp.make_doc(t[0]), t[1]))
        for tag in t[1]['tags']:
            tagger.add_label(tag)
    optimizer = nlp.initialize()
    losses = {}
    empty_train_example = Example.from_dict(nlp.make_doc(''), {})
    # Baseline update, then updates with empty examples alone, appended,
    # and inserted mid-batch — none of these should raise.
    nlp.update(train_examples, sgd=optimizer, losses=losses)
    nlp.update([empty_train_example], sgd=optimizer, losses=losses)
    train_examples.append(empty_train_example)
    nlp.update(train_examples, sgd=optimizer, losses=losses)
    train_examples.insert(1, Example.from_dict(nlp.make_doc(''), {}))
    nlp.update(train_examples, sgd=optimizer, losses=losses)
    # Inference on a single empty string.
    doc = nlp('')
    _assert_empty(doc._.trf_data)
    # Inference on a batch of only empty strings.
    docs = nlp.pipe(['', ''])
    for doc in docs:
        _assert_empty(doc._.trf_data)
    # An empty batch must not raise.
    nlp.pipe([])
    # Reference tags from a normal sentence processed alone...
    doc = nlp('This is a sentence')
    normal_tags = [t.tag_ for t in doc]
    # ...must match when the same sentence is surrounded by empty docs.
    docs = list(nlp.pipe(['', 'This is a sentence', '', '']))
    _assert_empty(docs[0]._.trf_data)
    assert ([t.tag_ for t in docs[0]] == [])
    assert ([t.tag_ for t in docs[1]] == normal_tags)
    _assert_empty(docs[2]._.trf_data)
    _assert_empty(docs[3]._.trf_data)
def warning(msg, oneshot=True, end='\n'):
    """Emit a '[W]'-prefixed warning to the log file or stderr.

    With oneshot=True (the default) a message already recorded in the
    module-level ``log_history`` set is suppressed; oneshot=False always
    prints.  Uses the module-level ``args`` for the optional log file.
    """
    # Log file takes precedence over stderr when configured.
    outfile = (args.log if args.log else sys.stderr)
    if ((msg.strip() not in log_history) or (oneshot is False)):
        # Millisecond-precision timestamp; only prepended when logging to file.
        tstamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:(- 3)]
        full_msg = ('{:s}: [W] {:s}'.format(tstamp, msg) if args.log else '[W] {:s}'.format(msg))
        print(full_msg, file=outfile, end=end)
        # Remember the stripped message so later oneshot calls are suppressed.
        log_history.add(msg.strip())
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.