code stringlengths 281 23.7M |
|---|
class OptionPlotoptionsPyramidSonificationTracksMappingTremolo(Options):
    """Option wrapper for the `tremolo` mapping of pyramid sonification tracks."""

    def depth(self) -> 'OptionPlotoptionsPyramidSonificationTracksMappingTremoloDepth':
        # Returns the nested `depth` option sub-configuration.
        return self._config_sub_data('depth', OptionPlotoptionsPyramidSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionPlotoptionsPyramidSonificationTracksMappingTremoloSpeed':
        # Returns the nested `speed` option sub-configuration.
        return self._config_sub_data('speed', OptionPlotoptionsPyramidSonificationTracksMappingTremoloSpeed)
class ViewUpdater(ReadWriteDbInterface):
    """Persists plugin view templates (insert-or-update semantics)."""

    def update_view(self, plugin_name: str, content: bytes):
        """Store *content* as the template for *plugin_name*, creating the row if needed."""
        with self.get_read_write_session() as session:
            existing = session.get(WebInterfaceTemplateEntry, plugin_name)
            if existing is not None:
                existing.template = content
            else:
                session.add(WebInterfaceTemplateEntry(plugin=plugin_name, template=content))
            logging.debug(f'view updated: {plugin_name}')
class OptionSeriesAreasplinerangeSonificationDefaultspeechoptionsActivewhen(Options):
    """`activeWhen` options for areasplinerange sonification default speech options.

    NOTE(review): each option name is defined twice (a getter-style def followed
    by a setter-style def); in plain Python the later ``def`` shadows the
    earlier one, so only the setter variants survive. This looks like stripped
    ``@property`` / ``@<name>.setter`` decorators — confirm against the
    original generated source.
    """

    def crossingDown(self):
        # Getter: configured value or None.
        return self._config_get(None)

    def crossingDown(self, num: float):
        # Setter: stores a plain (non-JS) numeric value.
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        # Setter: stores a plain string value.
        self._config(text, js_type=False)
# NOTE(review): the bare `_entry_point` below is a no-op name expression — it
# looks like a stripped decorator line (e.g. ``@_entry_point``); confirm.
_entry_point
def test_intercepted_scope_non_flyte_exception():
    """Verify that user and system scopes wrap a plain ValueError with the
    expected scoped exception type, message, error code and kind."""
    value_error = ValueError('Bad value')
    # User scope: expect FlyteScopedUserException with USER metadata.
    with pytest.raises(scopes.FlyteScopedUserException) as e:
        _user_func(value_error)
    e = e.value  # unwrap pytest's ExceptionInfo to the raised exception
    assert (e.value == value_error)
    assert ('Bad value' in e.verbose_message)
    assert ('User error.' in e.verbose_message)
    assert (e.error_code == 'USER:Unknown')
    assert (e.kind == _error_models.ContainerError.Kind.NON_RECOVERABLE)
    # System scope: expect FlyteScopedSystemException with SYSTEM metadata.
    with pytest.raises(scopes.FlyteScopedSystemException) as e:
        _system_func(value_error)
    e = e.value
    assert (e.value == value_error)
    assert ('Bad value' in e.verbose_message)
    assert ('SYSTEM ERROR!' in e.verbose_message)
    assert (e.error_code == 'SYSTEM:Unknown')
    assert (e.kind == _error_models.ContainerError.Kind.RECOVERABLE)
class ConvDepthwiseBiasTestCase(unittest.TestCase):
    """Compares the compiled conv2d_depthwise_bias op against PyTorch's conv2d."""

    def test_fp16(self, batch=4):
        """Build, compile and run a depthwise conv + bias graph in fp16."""
        groups = 32
        size = (12, 12)
        target = detect_target()
        # Graph inputs use NHWC layout; channel count equals `groups` (depthwise).
        X = Tensor(shape=[IntImm(batch), *size, 32], dtype='float16', name='input_0', is_input=True)
        W = Tensor(shape=[32, 3, 3, 1], dtype='float16', name='input_1', is_input=True)
        B = Tensor(shape=[32], dtype='float16', name='input_2', is_input=True)
        OP = ops.conv2d_depthwise_bias(stride=1, pad=1, dilate=1, group=groups)
        Y = OP(X, W, B)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', 'conv2d_dw_bias')
        # Reference computation in PyTorch (NCHW layout).
        X_pt = torch.randn(batch, 32, *size).cuda().half()
        W_pt = torch.randn(32, 1, 3, 3).cuda().half()
        B_pt = torch.randn(32).cuda().half()
        Y_pt = torch.nn.functional.conv2d(X_pt, W_pt, bias=B_pt, padding=1, groups=groups)
        # Permute inputs to NHWC for the compiled module.
        x = X_pt.permute((0, 2, 3, 1)).contiguous()
        w = W_pt.permute((0, 2, 3, 1)).contiguous()
        y = torch.empty([batch, *size, 32]).cuda().half()
        module.run_with_tensors({'input_0': x, 'input_1': w, 'input_2': B_pt}, [y])
        y_transpose = y.permute((0, 3, 1, 2))
        # Non-CUDA backends get looser tolerances.
        if (target.name() == 'cuda'):
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.01, rtol=0.01))
        else:
            self.assertTrue(torch.allclose(Y_pt, y_transpose, atol=0.125, rtol=0.1))
def test_list_models(model_list):
    """Every fixture model must appear when filtering by name OR tag."""
    query = 'displayName={0} OR tags:{1}'.format(
        model_list[0].display_name, model_list[1].tags[0]
    )
    page = ml.list_models(list_filter=query)
    returned_ids = {m.model_id for m in page.iterate_all()}
    for expected in model_list:
        assert (expected.model_id in returned_ids)
def maketodo(pipfile, dbfile):
    """Print names present in *pipfile* but not yet resolved in *dbfile*.

    The first whitespace-separated token of each line is the name. Names
    matching known always-present tie-offs or clock/control routing patterns
    are suppressed from the report.
    """
    todos = set()
    with open(pipfile, 'r') as f:
        for line in f:
            tokens = line.split()
            if not tokens:
                # skip blank/whitespace-only lines instead of raising IndexError
                continue
            todos.add(tokens[0])
    with open(dbfile, 'r') as f:
        for line in f:
            tokens = line.split()
            if not tokens:
                continue
            # discard() instead of remove(): a db entry absent from the pip
            # file must not abort the whole report with a KeyError.
            todos.discard(tokens[0])
    for line in todos:
        # Tie-off wires are always present; nothing to do for them.
        if line.endswith('.VCC_WIRE'):
            continue
        if line.endswith('.GND_WIRE'):
            continue
        # Long/clock lines and control/GFAN routing are handled elsewhere.
        if re.match(r'.*\.(L[HV]B?|G?CLK)(_L)?(_B)?[0-9]', line):
            continue
        if re.match(r'^INT_[LR]\.(CTRL|GFAN)(_L)?[0-9]', line):
            continue
        print(line)
class MusicRequest(BaseModel):
    """Structured extraction schema for a user's music-playback request."""
    # Song title(s) the user asked for, if any.
    song: Optional[List[str]] = Field(default=None, description='The song(s) that the user would like to be played.')
    # Album title(s) the user asked for, if any.
    album: Optional[List[str]] = Field(default=None, description='The album(s) that the user would like to be played.')
    # Artist name(s); `examples` pairs an utterance with the expected extraction.
    artist: Optional[List[str]] = Field(default=None, description='The artist(s) whose music the user would like to hear.', examples=[('Songs by paul simon', 'paul simon')])
    # Playback action to take (play/stop/next/previous).
    action: Optional[Action] = Field(default=None, description='The action that should be taken; one of `play`, `stop`, `next`, `previous`', examples=[('Please stop the music', 'stop'), ('play something', 'play'), ('play a song', 'play'), ('next song', 'next')])
def extractL4StkHomeBlog(item):
    """Map a feed item's tags to a release message, or None/False if not applicable."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Previews and items with neither a chapter nor a volume are skipped.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    mappings = (
        ('second story online', 'Second Story Online', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
# NOTE(review): the tuple below is a no-op expression — it looks like a
# stripped skip decorator, e.g.
# ``@unittest.skipIf(no_gui_test_assistant, 'No GuiTestAssistant')``; confirm.
(no_gui_test_assistant, 'No GuiTestAssistant')
class TestAdvancedEditorAreaPane(unittest.TestCase, GuiTestAssistant):
    """GUI lifecycle tests for AdvancedEditorAreaPane."""

    def setUp(self):
        GuiTestAssistant.setUp(self)
        self.area_pane = AdvancedEditorAreaPane()

    def tearDown(self):
        # Destroy inside delete_widget so the toolkit widget is fully
        # disposed before the assistant's own teardown runs.
        if (self.area_pane.control is not None):
            with self.delete_widget(self.area_pane.control):
                self.area_pane.destroy()
        GuiTestAssistant.tearDown(self)

    def test_create_destroy(self):
        # Each GUI operation is performed inside an event-loop context.
        with self.event_loop():
            self.area_pane.create(None)
        with self.event_loop():
            self.area_pane.destroy()

    def test_create_destroy_with_editor(self):
        with self.event_loop():
            self.area_pane.create(None)
        with self.event_loop():
            editor = self.area_pane.create_editor('Hello', Editor)
        with self.event_loop():
            self.area_pane.add_editor(editor)
        with self.event_loop():
            self.area_pane.activate_editor(editor)
        with self.event_loop():
            self.area_pane.destroy()
class Coherence(PipelineStage):
    """Pipeline stage enforcing that all instances of the same SSA variable
    agree on type and aliasing."""

    name = 'coherence'

    def run(self, task: DecompilerTask) -> None:
        """Collect all variable instances from the CFG and harmonize them."""
        variables = self._collect_variables(task.graph)
        self.enforce_same_types(variables)
        self.enforce_same_aliased_value(variables)

    def _collect_variables(self, cfg: ControlFlowGraph) -> Dict[(str, Dict[(int, List[Variable])])]:
        """Group variable instances by name, then by SSA label."""
        variables = {}
        for variable in self._iter_variables(cfg):
            if (variable.name not in variables):
                variables[variable.name] = {variable.ssa_label: [variable]}
            elif (variable.ssa_label not in variables[variable.name]):
                variables[variable.name][variable.ssa_label] = [variable]
            else:
                variables[variable.name][variable.ssa_label].append(variable)
        return variables

    def _iter_variables(self, cfg: ControlFlowGraph) -> Iterator[Variable]:
        """Yield every variable used or defined by any instruction in the CFG."""
        for instruction in cfg.instructions:
            for variable in chain(instruction.requirements, instruction.definitions):
                (yield variable)

    def enforce_same_types(self, variables: Dict[(str, Dict[(int, List[Variable])])]) -> None:
        """Give all instances of a name#version the same type when they disagree."""
        for variable_name in variables.keys():
            for (variable_version, variable_instances) in variables[variable_name].items():
                variable_types = {instance.type for instance in variable_instances}
                if (len(variable_types) > 1):
                    # More than one type observed: pick the first instance's type.
                    self._set_variables_type(variable_instances)
                    info(f'[{self.name}] Harmonized {variable_name}#{variable_version} to type {variable_instances[0].type} from {variable_types}.')

    def enforce_same_aliased_value(self, variables: Dict[(str, Dict[(int, List[Variable])])]) -> None:
        """If any instance of a name is aliased, mark all its instances aliased."""
        for variable_name in variables.keys():
            aliased_values = (variable.is_aliased for variable_instances in variables[variable_name].values() for variable in variable_instances)
            is_aliased = next(aliased_values)
            for aliased_value in aliased_values:
                if (aliased_value != is_aliased):
                    # Disagreement found: force aliased=True everywhere and stop.
                    self._set_variables_aliased([instance for variable_instances in variables[variable_name].values() for instance in variable_instances])
                    info(f'[{self.name}] Set variable {variable_name} to be aliased in all of its instances.')
                    break

    def _set_variables_type(self, variables: List[Variable]) -> None:
        # Copy the first instance's type onto every instance.
        group_type = variables[0].type
        for variable in variables:
            variable._type = group_type.copy()

    def _set_variables_aliased(self, variables: List) -> None:
        for variable in variables:
            variable.is_aliased = True
class NameModel(Model):
    """Wires together the components that make up the 'name' model."""

    def get_data_generator(self, document_features_context: DocumentFeaturesContext) -> NameDataGenerator:
        """Build the data generator bound to the given feature context."""
        generator = NameDataGenerator(document_features_context=document_features_context)
        return generator

    def get_semantic_extractor(self) -> NameSemanticExtractor:
        """Build the semantic extractor for names."""
        return NameSemanticExtractor()

    def get_tei_training_data_generator(self) -> NameTeiTrainingDataGenerator:
        """Build the TEI training-data generator for names."""
        return NameTeiTrainingDataGenerator()

    def get_training_tei_parser(self) -> NameTrainingTeiParser:
        """Build the training TEI parser for names."""
        return NameTrainingTeiParser()
class TestSequenceFunctions(unittest.TestCase):
    """Tests for the BK hamming tree and sign-cast helpers.

    NOTE(review): the two ``tgtHash = (- )`` lines below are syntactically
    invalid — the negative constant was evidently lost during extraction and
    cannot be reconstructed from this view; restore it from the original file.
    """

    def __init__(self, *args, **kwargs):
        logSetup.initLogging()
        super().__init__(*args, **kwargs)

    def setUp(self):
        self.buildTestTree()

    def buildTestTree(self):
        # Fresh tree per test, populated from the shared TEST_DATA fixture.
        self.tree = hamDb.BkHammingTree()
        for (nodeId, nodeHash) in TEST_DATA:
            self.tree.insert(nodeHash, nodeId)

    def test_1(self):
        # NOTE(review): missing constant — see class docstring.
        tgtHash = (- )
        ret = self.tree.getWithinDistance(tgtHash, 2)
        self.assertEqual(ret, set([item[0] for item in TEST_DATA]))

    def test_signModification_1(self):
        # Round-tripping through unsign/sign casts must be the identity.
        x = hamDb.explicitUnsignCast(5)
        x = hamDb.explicitSignCast(x)
        self.assertEqual(x, 5)
        # NOTE(review): missing constant — see class docstring.
        tgtHash = (- )
        for hashVal in [data[1] for data in TEST_DATA]:
            x = hamDb.explicitUnsignCast(hashVal)
            x = hamDb.explicitSignCast(x)
            self.assertEqual(hashVal, x)
def extractDeeptranslations5WordpressCom(item):
    """Map a feed item's tags to a release message, or None/False if not applicable."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items lacking both chapter and volume info.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
class StalkerThumbnailCache(object):
    """Downloads and locally caches thumbnails served by a Stalker server."""

    # NOTE(review): this method takes ``cls`` but carries no @classmethod
    # decorator in the source seen here — it may have been stripped; confirm.
    def get(cls, thumbnail_full_path, login=None, password=None):
        """Return the local cache path for *thumbnail_full_path*.

        If the file is not cached yet and credentials are supplied, log in to
        the Stalker server, download the thumbnail, and store it in the local
        cache folder before returning the path.
        """
        filename = os.path.basename(thumbnail_full_path)
        logger.debug(('filename : %s' % filename))
        cache_path = os.path.expanduser(defaults.local_cache_folder)
        cached_file_full_path = os.path.join(cache_path, filename)
        url = ('%s/%s' % (defaults.stalker_server_internal_address, thumbnail_full_path))
        login_url = ('%s/login' % defaults.stalker_server_internal_address)
        logger.debug(('cache_path : %s' % cache_path))
        logger.debug(('cached_file_full_path : %s' % cached_file_full_path))
        logger.debug(('url : %s' % url))
        if ((not os.path.exists(cached_file_full_path)) and login and password):
            if (sys.version_info[0] >= 3):
                # Fix: original read ``from import CookieJar`` (a SyntaxError);
                # on Python 3 CookieJar lives in http.cookiejar.
                from http.cookiejar import CookieJar
                from urllib.request import build_opener
                from urllib.parse import urlencode
                from urllib.request import HTTPCookieProcessor
            else:
                from cookielib import CookieJar
                from urllib import urlencode
                from urllib2 import build_opener, HTTPCookieProcessor
            cj = CookieJar()
            opener = build_opener(HTTPCookieProcessor(cj))
            login_data = urlencode({'login': login, 'password': password, 'submit': True})
            if (sys.version_info[0] >= 3):
                # Fix: urllib.request requires POST data as bytes on Python 3;
                # passing the str from urlencode raises a TypeError.
                login_data = login_data.encode('utf-8')
            opener.open(login_url, login_data)
            resp = opener.open(url)
            data = resp.read()
            if (not os.path.exists(cache_path)):
                os.makedirs(cache_path)
            with open(cached_file_full_path, 'wb') as f:
                f.write(data)
        return cached_file_full_path
def require_showbase(func):
    """Decorator that only permits calling *func* once ShowBase is initialized.

    Raises:
        ConfigError: when the wrapped function is invoked before the ShowBase
            instance has been initialized.
    """
    import functools

    # Fix: the original had a bare ``(func)`` expression statement where a
    # ``@functools.wraps(func)`` decorator belongs, so the wrapper lost the
    # wrapped function's name and docstring.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if is_showbase_initialized():
            return func(*args, **kwargs)
        raise ConfigError(f"ShowBase instance has not been initialized, but a function has been called that requires it: '{func.__name__}'.")
    return wrapper
class ShamelessOniisanPageProcessor(HtmlProcessor.HtmlPageProcessor):
    """HTML page processor for the 'Shameless Oniisan' site.

    NOTE(review): the regex string in ``wantsUrl`` below is mangled — the URL
    pattern literal is unterminated (``'^ url)``) and must be restored from
    the original source; as written this line is a SyntaxError.
    """

    wanted_mimetypes = ['text/html']
    want_priority = 80
    loggerPath = 'Main.Text.ShamelessOniisan'

    def wantsUrl(url):
        if re.search('^ url):
            print(("wwsd Wants url: '%s'" % url))
            return True
        return False

    def preprocessBody(self, soup):
        # Strip spans styled with white text (hidden anti-scrape content).
        badspans = soup.find_all('span', style=re.compile('color\\W?:\\W?#ffffff', re.I))
        for bad in badspans:
            bad.decompose()
        return soup
class MeteredSocket():
    """Socket wrapper that counts bytes and operations for sends and receives."""

    def __init__(self, socket):
        self._socket = socket
        # Traffic-accounting counters.
        self._recv_bytes = 0
        self._recv_ops = 0
        self._send_bytes = 0
        self._send_ops = 0

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Delegate context-manager exit to the wrapped socket.
        return self._socket.__exit__(exc_type, exc_val, exc_tb)

    def recv(self, bufsize, flags=0):
        """Receive up to *bufsize* bytes, updating the recv counters."""
        self._recv_ops += 1
        chunk = self._socket.recv(bufsize, flags)
        self._recv_bytes += len(chunk)
        return chunk

    def recv_bytes(self):
        return self._recv_bytes

    def recv_ops(self):
        return self._recv_ops

    def send(self, data, flags=0):
        """Send *data*, updating the send counters; returns bytes sent."""
        self._send_ops += 1
        sent = self._socket.send(data, flags)
        self._send_bytes += sent
        return sent

    def send_bytes(self):
        return self._send_bytes

    def send_ops(self):
        return self._send_ops
class OptionPlotoptionsPyramid3dSonificationDefaultspeechoptionsMappingVolume(Options):
    """`volume` mapping options for pyramid3d sonification default speech options.

    NOTE(review): each option name is defined twice (getter-style then
    setter-style); the later ``def`` shadows the earlier one, which looks like
    stripped ``@property`` / ``@<name>.setter`` decorators — confirm against
    the original generated source.
    """

    def mapFunction(self):
        # Getter: configured value or None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stores a plain (non-JS) value.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class GalacticCommunityProcessor(AbstractGamestateDataProcessor):
    """Records galactic community / council membership changes as historical events."""

    ID = 'galactic_community'
    DEPENDENCIES = [CountryProcessor.ID, RulerEventProcessor.ID]

    def __init__(self):
        super().__init__()
        # Populated from dependency outputs in extract_data_from_gamestate.
        self._ruler_dict = None
        self._countries_dict = None

    def extract_data_from_gamestate(self, dependencies):
        """Entry point: diff current membership state against recorded events."""
        self._ruler_dict = dependencies[RulerEventProcessor.ID]
        self._countries_dict = dependencies[CountryProcessor.ID]
        community_dict = self._gamestate_dict.get('galactic_community')
        if (not isinstance(community_dict, dict)):
            # No community formed yet (or malformed save data) — nothing to do.
            return
        self._update_community_members(community_dict)
        self._update_council_members(community_dict)

    def _update_community_members(self, community_dict):
        # Current members get "joined" events; all other known countries "left".
        members = community_dict.get('members', [])
        if (not isinstance(members, list)):
            return
        non_members = (set(self._countries_dict.keys()) - set(members))
        matching_event_types = (datamodel.HistoricalEventType.joined_galactic_community, datamodel.HistoricalEventType.left_galactic_community)
        new_event_type = datamodel.HistoricalEventType.joined_galactic_community
        for c_id in members:
            self._update_membership_events(c_id, matching_event_types, new_event_type)
        new_event_type = datamodel.HistoricalEventType.left_galactic_community
        for c_id in non_members:
            self._update_membership_events(c_id, matching_event_types, new_event_type)

    def _update_council_members(self, community_dict):
        # Same pattern as community membership, but for the council.
        members = community_dict.get('council', [])
        if (not isinstance(members, list)):
            return
        non_members = (set(self._countries_dict.keys()) - set(members))
        matching_event_types = (datamodel.HistoricalEventType.joined_galactic_council, datamodel.HistoricalEventType.left_galactic_council)
        new_event_type = datamodel.HistoricalEventType.joined_galactic_council
        for c_id in members:
            self._update_membership_events(c_id, matching_event_types, new_event_type)
        new_event_type = datamodel.HistoricalEventType.left_galactic_council
        for c_id in non_members:
            self._update_membership_events(c_id, matching_event_types, new_event_type)

    def _update_membership_events(self, c_id, matching_event_types, new_event_type):
        """Close the country's open membership event and open a new one on change."""
        country = self._countries_dict.get(c_id)
        if (not country):
            logger.warning(f'{self._basic_info.logger_str} Could not find country with ID {c_id}')
            return
        event_is_known = country.has_met_player()
        # Latest still-open (end_date_days is NULL) join/leave event for this country.
        previous_event = self._session.query(datamodel.HistoricalEvent).filter(datamodel.HistoricalEvent.event_type.in_(matching_event_types), (datamodel.HistoricalEvent.country == country), datamodel.HistoricalEvent.end_date_days.is_(None)).order_by(datamodel.HistoricalEvent.start_date_days.desc()).first()
        if ((previous_event is None) and (new_event_type in (datamodel.HistoricalEventType.left_galactic_community, datamodel.HistoricalEventType.left_galactic_council))):
            # Never joined: recording a "left" event would be meaningless noise.
            return
        elif ((previous_event is not None) and (previous_event.event_type == new_event_type)):
            # State unchanged since the last recorded event; nothing to do.
            return
        else:
            if (previous_event is not None):
                # Close out the open event before recording the transition.
                previous_event.end_date_days = self._basic_info.date_in_days
                previous_event.event_is_known_to_player = event_is_known
                self._session.add(previous_event)
            self._session.add(datamodel.HistoricalEvent(event_type=new_event_type, leader=self._ruler_dict.get(c_id), country=country, start_date_days=self._basic_info.date_in_days, event_is_known_to_player=event_is_known))
class TensorboardLogger(ExperimentLogger):
    """ExperimentLogger backed by a torch.utils.tensorboard SummaryWriter.

    NOTE(review): the bare ``_zero_only`` lines below look like stripped
    ``@_zero_only`` decorators. Likewise, ``writer`` / ``save_dir`` /
    ``with_media_logging`` / ``name`` read like stripped ``@property`` methods
    (e.g. ``is_disabled`` tests ``self.writer is None``, which only makes
    sense if ``writer`` is a property), and ``from_xp`` takes ``cls`` with no
    ``@classmethod``. Confirm against the original module.
    """

    def __init__(self, save_dir: tp.Union[(Path, str)], with_media_logging: bool=True, name: tp.Optional[str]=None, **kwargs: tp.Any):
        self._with_media_logging = with_media_logging
        self._save_dir = str(save_dir)
        self._name = (name or 'tensorboard')
        # Remains None when the tensorboard package is not installed.
        self._writer: tp.Optional[SummaryWriter] = None
        if (SummaryWriter is not None):
            self._writer = SummaryWriter(self.save_dir, **kwargs)
        else:
            warnings.warn('tensorboard package was not found: use pip install tensorboard')

    _zero_only
    def writer(self) -> tp.Optional[SummaryWriter]:
        # Underlying SummaryWriter, or None when tensorboard is unavailable.
        return self._writer

    _zero_only
    def is_disabled(self) -> bool:
        # Disabled when no writer could be constructed.
        return (self.writer is None)

    _zero_only
    def log_hyperparams(self, params: tp.Union[(tp.Dict[(str, tp.Any)], Namespace)], metrics: tp.Optional[dict]=None) -> None:
        """Log run hyperparameters (flattened and sanitized) with optional metrics."""
        assert is_rank_zero(), 'experiment tried to log from global_rank != 0'
        if self.is_disabled():
            return
        params = _convert_params(params)
        params = _flatten_dict(params)
        params = _sanitize_params(params)
        if ((metrics is None) or (len(metrics) == 0)):
            # add_hparams requires at least one metric; use a placeholder.
            metrics = {'hparams_metrics': (- 1)}
        self.writer.add_hparams(params, metrics)

    _zero_only
    def log_metrics(self, prefix: tp.Union[(str, tp.List[str])], metrics: dict, step: tp.Optional[int]=None) -> None:
        """Log scalar (or dict-of-scalar) metrics under *prefix*."""
        assert is_rank_zero(), 'experiment tried to log from global_rank != 0'
        if self.is_disabled():
            return
        metrics = _add_prefix(metrics, prefix, self.group_separator)
        for (key, val) in metrics.items():
            if isinstance(val, torch.Tensor):
                val = val.item()
            if isinstance(val, dict):
                self.writer.add_scalars(key, val, step)
            else:
                try:
                    self.writer.add_scalar(key, val, step)
                except Exception as ex:
                    msg = f'''
 you tried to log {val} ({type(val)}) which is currently not supported. Try a dict or a scalar/tensor.'''
                    raise ValueError(msg) from ex

    _zero_only
    def log_audio(self, key: str, prefix: tp.Union[(str, tp.List[str])], audio: tp.Any, sample_rate: int, step: tp.Optional[int]=None, **kwargs: tp.Any) -> None:
        """Log an audio tensor, averaged to mono and clamped to (-0.99, 0.99)."""
        assert is_rank_zero(), 'experiment tried to log from global_rank != 0'
        if (self.is_disabled() or (not self.with_media_logging)):
            return
        assert isinstance(audio, torch.Tensor), 'Only support logging torch.Tensor as audio'
        metrics = {key: audio.mean(dim=(- 2), keepdim=True).clamp((- 0.99), 0.99)}
        metrics = _add_prefix(metrics, prefix, self.group_separator)
        for (name, media) in metrics.items():
            self.writer.add_audio(name, media, step, sample_rate, **kwargs)

    _zero_only
    def log_image(self, prefix: tp.Union[(str, tp.List[str])], key: str, image: tp.Any, step: tp.Optional[int]=None, **kwargs: tp.Any) -> None:
        """Log an image tensor under *prefix*/*key*."""
        assert is_rank_zero(), 'experiment tried to log from global_rank != 0'
        if (self.is_disabled() or (not self.with_media_logging)):
            return
        assert isinstance(image, torch.Tensor), 'Only support logging torch.Tensor as image'
        metrics = {key: image}
        metrics = _add_prefix(metrics, prefix, self.group_separator)
        for (name, media) in metrics.items():
            self.writer.add_image(name, media, step, **kwargs)

    _zero_only
    def log_text(self, prefix: tp.Union[(str, tp.List[str])], key: str, text: str, step: tp.Optional[int]=None, **kwargs: tp.Any) -> None:
        """Log a text string under *prefix*/*key*."""
        assert is_rank_zero(), 'writer tried to log from global_rank != 0'
        if (self.is_disabled() or (not self.with_media_logging)):
            return
        metrics = {key: text}
        metrics = _add_prefix(metrics, prefix, self.group_separator)
        for (name, media) in metrics.items():
            self.writer.add_text(name, media, step, **kwargs)

    def save_dir(self) -> str:
        # Directory where event files are written.
        return self._save_dir

    def with_media_logging(self) -> bool:
        # Whether audio/image/text logging is enabled.
        return self._with_media_logging

    def name(self) -> str:
        return self._name

    def from_xp(cls, with_media_logging: bool=True, name: tp.Optional[str]=None, sub_dir: tp.Optional[str]=None):
        """Alternate constructor rooting the log dir inside the current dora XP folder."""
        save_dir = (dora.get_xp().folder / 'tensorboard')
        if sub_dir:
            save_dir = (save_dir / sub_dir)
        save_dir.mkdir(exist_ok=True, parents=True)
        return TensorboardLogger(save_dir, with_media_logging, name=name)
def extract_flake8_import_order() -> Dict[(str, str)]:
    """Return the flake8-import-order error codes mapped to their messages."""
    codes = {
        'I666': 'Import statement mixes groups.',
        'I100': 'Import statements are in the wrong order.',
        'I101': 'Imported names are in the wrong order.',
        'I201': 'Missing newline between import groups.',
        'I202': 'Additional newline in a group of imports.',
    }
    return codes
class OptionPlotoptionsBulletSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
    """`gapBetweenNotes` mapping options for bullet sonification default instruments.

    NOTE(review): each option name is defined twice (getter-style then
    setter-style); the later ``def`` shadows the earlier one, which looks like
    stripped ``@property`` / ``@<name>.setter`` decorators — confirm against
    the original generated source.
    """

    def mapFunction(self):
        # Getter: configured value or None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stores a plain (non-JS) value.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def calculate_intrinsic_cost(tx: Transaction) -> Uint:
    """Compute the intrinsic gas cost of *tx*.

    The cost is the base transaction cost, plus a per-byte charge on calldata
    (cheaper for zero bytes), plus the creation surcharge when the transaction
    targets the empty address (contract creation).
    """
    data_cost = sum(
        TX_DATA_COST_PER_ZERO if byte == 0 else TX_DATA_COST_PER_NON_ZERO
        for byte in tx.data
    )
    create_cost = TX_CREATE_COST if tx.to == Bytes0(b'') else 0
    return Uint(TX_BASE_COST + data_cost + create_cost)
class NotificationList(APIView):
    """List and create notifications for the authenticated user."""

    permission_classes = [IsAuthenticated]

    def get(self, request, format=None):
        """Return every notification addressed to the requesting user."""
        queryset = Notification.objects.filter(to_user_id=request.user.id)
        serialized = NotificationSerializerGet(queryset, user=None, many=True)
        return Response(serialized.data)

    def post(self, request, format=None):
        """Create a notification from the posted payload; 400 on invalid data."""
        serializer = NotificationSerializerPost(data=request.data, user=request.user)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data, status.HTTP_201_CREATED)
# NOTE(review): the four tuple/string expressions below are no-ops as written —
# they look like stripped click decorators (e.g. ``@cli.command(short_help=...)``
# and ``@click.option(...)`` lines); confirm against the original CLI module.
(short_help='Get stock codes.')
('-n', '--name', 'names', metavar='NAME', multiple=True, help='Name of stock. Can set multiple times.')
('-m', '--market', 'markets', metavar='MARKET', multiple=True, type=click.Choice(market_codes, case_sensitive=False), help='Stock market code to get. Can set multiple times.')
('-p', '--port', metavar='PORT', help='Port number of grpc server (optional).')
_option()
def stockcode(names, markets, port):
    """Print stock codes, either for whole markets or looked up by stock name."""
    # Remember whether the user explicitly asked for markets before defaulting.
    markets_option = markets
    from koapy.backend.kiwoom_open_api_plus.core.KiwoomOpenApiPlusEntrypoint import KiwoomOpenApiPlusEntrypoint
    with KiwoomOpenApiPlusEntrypoint(port=port) as context:
        context.EnsureConnected()
        if (not markets):
            # Default market code when none was given.
            markets = ['0']
        # Union of all codes across the requested markets, de-duplicated.
        codes = set()
        for market in markets:
            codes = codes.union(set(context.GetCodeListByMarketAsList(market)))
        codes = sorted(list(codes))
        if markets_option:
            # Markets explicitly requested: dump every code.
            for code in codes:
                click.echo(code)
        else:
            def get_names():
                # Yield names from --name options, stdin ('-'), or an
                # interactive prompt (until 'exit'/EOF).
                if names:
                    if ('-' in names):
                        with click.open_file('-', 'r') as f:
                            for name in f:
                                (yield name.strip())
                    else:
                        for name in names:
                            (yield name)
                else:
                    while True:
                        try:
                            name = click.prompt('name', prompt_suffix=' >>> ')
                            name = name.strip()
                            if (name == 'exit'):
                                break
                            if name:
                                (yield name)
                        except EOFError:
                            break
            # Reverse lookup table: stock name -> code.
            all_names = [context.GetMasterCodeName(code) for code in codes]
            codes_by_name = dict(zip(all_names, codes))
            for name in get_names():
                code = codes_by_name.get(name, None)
                if code:
                    click.echo(code)
                else:
                    click.echo(('Cannot find code for given name: %s.' % name))
class TestOpenClose():
    """Device open/close behavior tests for PyK4A.

    NOTE(review): these test methods take the pytest fixture as their FIRST
    parameter with no ``self`` — as instance methods pytest would pass the
    instance there instead of the fixture. ``self`` was likely lost during
    extraction; confirm against the original test file.
    """

    def test_open_none_existing_device(device_not_exists: PyK4A):
        # Opening a device that is not present must raise.
        with pytest.raises(K4AException):
            device_not_exists.open()

    def test_open_existing_device(device: PyK4A):
        device.open()

    def test_open_twice(device: PyK4A):
        # A second open() on an already-open device must be rejected.
        device.open()
        with pytest.raises(K4AException, match='Device already opened'):
            device.open()
def request_new_link(request, useremail=None, usertoken=None):
    """Django view: (re)send an account-verification email.

    Two modes:
      * No email/token in the URL — show/handle the request form (POST looks
        up the inactive user by email and resends).
      * Email and token present — resend directly using the token.

    Errors map to templates or HTTP error responses via the except clauses.
    """
    try:
        if ((useremail is None) or (usertoken is None)):
            # Form-driven mode.
            if (request.method == 'POST'):
                form = RequestNewVerificationEmail(request.POST)
                if form.is_valid():
                    form_data: dict = form.cleaned_data
                    email = form_data['email']
                    # Raises ObjectDoesNotExist / MultipleObjectsReturned,
                    # handled below.
                    inactive_user = get_user_model().objects.get(email=email)
                    if inactive_user.is_active:
                        # Already verified — handled by the except clause.
                        raise UserAlreadyActive('User is already active')
                    else:
                        status = resend_verification_email(request, email, user=inactive_user, encoded=False)
                        if status:
                            return render(request, template_name=new_email_sent_template, context={'msg': 'You have requested another verification email!', 'minor_msg': 'Your verification link has been sent', 'status': 'Email Sent!'})
                        else:
                            logger.error('something went wrong during sending email')
            else:
                form = RequestNewVerificationEmail()
            # Invalid POST or plain GET: (re)display the request form.
            return render(request, template_name=request_new_email_template, context={'form': form})
        else:
            # Token-driven mode: resend using the provided token.
            status = resend_verification_email(request, useremail, token=usertoken)
            if status:
                return render(request, template_name=new_email_sent_template, context={'msg': 'You have requested another verification email!', 'minor_msg': 'Your verification link has been sent', 'status': 'Email Sent!'})
            else:
                messages.info(request, 'Something went wrong during sending email :(')
                logger.error('something went wrong during sending email')
    except ObjectDoesNotExist as error:
        messages.warning(request, 'User not found associated with given email!')
        logger.error(f'[ERROR]: User not found. exception: {error}')
        return HttpResponse(b'User Not Found', status=404)
    except MultipleObjectsReturned as error:
        logger.error(f'[ERROR]: Multiple users found. exception: {error}')
        return HttpResponse(b'Internal server error!', status=500)
    except KeyError as error:
        logger.error(f'[ERROR]: Key error for email in your form: {error}')
        return HttpResponse(b'Internal server error!', status=500)
    except MaxRetriesExceeded as error:
        logger.error(f'[ERROR]: Maximum retries for link has been reached. exception: {error}')
        return render(request, template_name=failed_template, context={'msg': 'You have exceeded the maximum verification requests! Contact admin.', 'status': 'Maxed out!'})
    except InvalidToken:
        return render(request, template_name=failed_template, context={'msg': 'This link is invalid or been used already, we cannot verify using this link.', 'status': 'Invalid Link'})
    except UserAlreadyActive:
        return render(request, template_name=failed_template, context={'msg': "This user's account is already active", 'status': 'Already Verified!'})
class BlogPost(Document):
    """Elasticsearch document for a blog post, auto-tagged via percolator queries."""

    content = Text()
    tags = Keyword(multi=True)

    class Index():
        # Target index for this document type.
        name = 'test-blogpost'

    def add_tags(self):
        """Extend self.tags with tags from every matching percolator query."""
        s = Search(index='test-percolator')
        s = s.query('percolate', field='query', index=self._get_index(), document=self.to_dict())
        for percolator in s:
            self.tags.extend(percolator.tags)
        # De-duplicate the accumulated tags.
        self.tags = list(set(self.tags))

    def save(self, **kwargs):
        # Tags are (re)computed on every save before persisting.
        self.add_tags()
        return super().save(**kwargs)
def run():
    """Emit a Verilog top module with one LOC-pinned GTPE2_CHANNEL per site,
    driving RXSYNC_OVRD from the fuzzer state, then record the parameters."""
    print('\nmodule top(input wire in, output wire out);\n ')
    params = {}
    sites = list(gen_sites())
    states = util.gen_fuzz_states(len(sites))
    for ((tile_name, site_name), isone) in zip(sites, states):
        params[tile_name] = (site_name, isone)
        print('\n (* KEEP, DONT_TOUCH, LOC = "{}" *)\n GTPE2_CHANNEL #(\n .RXSYNC_OVRD(1\'b{})\n ) gtpe2_channel_{} ();'.format(site_name, isone, site_name))
    print('endmodule')
    write_params(params)
class FuncParser():
    """Parses and executes inline $func(...) callables embedded in strings.

    Scans a string character by character, detecting `start_char`-prefixed
    function calls (possibly nested up to a maximum depth), parsing their
    args/kwargs, and replacing each call with the result of the mapped
    callable.
    """

    def __init__(self, callables, start_char=_START_CHAR, escape_char=_ESCAPE_CHAR, max_nesting=_MAX_NESTING, **default_kwargs):
        """Load callables from a dict or from module path(s) and validate them.

        `default_kwargs` are passed to every callable on execution (can be
        overridden per call).
        """
        if isinstance(callables, dict):
            loaded_callables = {**callables}
        else:
            loaded_callables = {}
            for module_or_path in make_iter(callables):
                # Prefer an explicit FUNCPARSER_CALLABLES mapping in the module.
                callables_mapping = variable_from_module(module_or_path, variable='FUNCPARSER_CALLABLES')
                if callables_mapping:
                    try:
                        loaded_callables.update(callables_mapping)
                    except ValueError:
                        raise ParsingError(f"Failure to parse - {module_or_path}.FUNCPARSER_CALLABLES (must be a dict {{'funcname': callable, ...}})")
                else:
                    # Fall back to collecting all callables from the module.
                    loaded_callables.update(callables_from_module(module_or_path))
        self.validate_callables(loaded_callables)
        self.callables = loaded_callables
        self.escape_char = escape_char
        self.start_char = start_char
        self.default_kwargs = default_kwargs

    def validate_callables(self, callables):
        """Assert that every callable accepts *args and **kwargs."""
        for (funcname, clble) in callables.items():
            try:
                mapping = inspect.getfullargspec(clble)
            except TypeError:
                # Some callables (e.g. builtins) cannot be introspected.
                logger.log_trace(f'Could not run getfullargspec on {funcname}: {clble}')
            else:
                assert mapping.varargs, f"Parse-func callable '{funcname}' does not support *args."
                assert mapping.varkw, f"Parse-func callable '{funcname}' does not support **kwargs."

    def execute(self, parsedfunc, raise_errors=False, **reserved_kwargs):
        """Run a parsed function; on failure return its raw string (or raise)."""
        (funcname, args, kwargs) = parsedfunc.get()
        func = self.callables.get(funcname)
        if (not func):
            if raise_errors:
                available = ', '.join((f"'{key}'" for key in self.callables))
                raise ParsingError(f"Unknown parsed function '{str(parsedfunc)}' (available: {available})")
            return str(parsedfunc)
        # Merge priority: defaults < parsed kwargs < reserved < parser-internal.
        kwargs = {**self.default_kwargs, **kwargs, **reserved_kwargs, **{'funcparser': self, 'raise_errors': raise_errors}}
        try:
            ret = func(*args, **kwargs)
            return ret
        except ParsingError:
            if raise_errors:
                raise
            return str(parsedfunc)
        except Exception:
            logger.log_trace()
            if raise_errors:
                raise
            return str(parsedfunc)

    def parse(self, string, raise_errors=False, escape=False, strip=False, return_str=True, **reserved_kwargs):
        """Parse *string*, executing every embedded function call.

        With `strip` the calls are removed, with `escape` they are escaped
        verbatim; `return_str=False` allows returning a single call's raw
        (non-string) result.
        """
        start_char = self.start_char
        escape_char = self.escape_char
        # Doubled start chars act as an escape for a literal start char.
        string = string.replace((start_char + start_char), (escape_char + start_char))
        # Parser state (per nesting level state is saved on the callstack).
        callstack = []
        double_quoted = (- 1)
        open_lparens = 0
        open_lsquare = 0
        open_lcurly = 0
        escaped = False
        current_kwarg = ''
        exec_return = ''
        curr_func = None
        fullstr = ''
        infuncstr = ''
        literal_infuncstr = False
        for char in string:
            if escaped:
                # Previous char was the escape char: take this one literally.
                if curr_func:
                    infuncstr += char
                else:
                    fullstr += char
                escaped = False
                continue
            if (char == escape_char):
                escaped = True
                continue
            if (char == start_char):
                # Start of a (possibly nested) function call.
                if curr_func:
                    if (len(callstack) >= (_MAX_NESTING - 1)):
                        # Too deep: treat as literal text (or raise).
                        if raise_errors:
                            raise ParsingError(f'Only allows for parsing nesting function defs to a max depth of {_MAX_NESTING}.')
                        infuncstr += char
                        continue
                    else:
                        # Suspend the current call: save its state and push it.
                        curr_func.current_kwarg = current_kwarg
                        curr_func.infuncstr = infuncstr
                        curr_func.double_quoted = double_quoted
                        curr_func.open_lparens = open_lparens
                        curr_func.open_lsquare = open_lsquare
                        curr_func.open_lcurly = open_lcurly
                        curr_func.rawstr = curr_func.rawstr[:(- len(infuncstr))]
                        current_kwarg = ''
                        infuncstr = ''
                        double_quoted = (- 1)
                        open_lparens = 0
                        open_lsquare = 0
                        open_lcurly = 0
                        exec_return = ''
                        literal_infuncstr = False
                        callstack.append(curr_func)
                curr_func = _ParsedFunc(prefix=char, fullstr=char)
                continue
            if (not curr_func):
                # Plain text outside any call.
                fullstr += char
                return_str = True
                continue
            curr_func.rawstr += char
            if ((exec_return != '') and (char not in ',=)')):
                # A nested call's result is being concatenated into a string.
                infuncstr += str(exec_return)
                exec_return = ''
            if (char == '"'):
                # Double quotes delimit literal strings inside arguments.
                if (double_quoted == 0):
                    infuncstr = infuncstr[1:]
                    double_quoted = (- 1)
                elif (double_quoted > 0):
                    prefix = infuncstr[0:double_quoted]
                    infuncstr = (prefix + infuncstr[(double_quoted + 1):])
                    double_quoted = (- 1)
                else:
                    infuncstr += char
                    infuncstr = infuncstr.strip()
                    double_quoted = (len(infuncstr) - 1)
                    literal_infuncstr = True
                continue
            if (double_quoted >= 0):
                # Inside a quoted literal: everything is taken verbatim.
                infuncstr += char
                continue
            if (char == '('):
                if (not curr_func.funcname):
                    # First '(' ends the function name.
                    curr_func.funcname = infuncstr
                    curr_func.fullstr += (infuncstr + char)
                    infuncstr = ''
                else:
                    infuncstr += char
                open_lparens += 1
                continue
            if (char in '[]'):
                # Track bracket nesting so ',' inside lists is not a separator.
                infuncstr += char
                open_lsquare += ((- 1) if (char == ']') else 1)
                continue
            if (char in '{}'):
                infuncstr += char
                open_lcurly += ((- 1) if (char == '}') else 1)
                continue
            if (char == '='):
                # A '=' at arg level names a kwarg.
                if (exec_return != ''):
                    infuncstr = exec_return
                current_kwarg = infuncstr.strip()
                curr_func.kwargs[current_kwarg] = ''
                curr_func.fullstr += (infuncstr + char)
                infuncstr = ''
                continue
            if (char in ',)'):
                # Argument separator or end of call.
                if (open_lparens > 1):
                    # Inside nested parentheses: literal char.
                    infuncstr += char
                    open_lparens -= (1 if (char == ')') else 0)
                    continue
                if ((open_lcurly > 0) or (open_lsquare > 0)):
                    # Inside a dict/list literal: literal char.
                    infuncstr += char
                    continue
                # Finalize the current argument (raw result or parsed string).
                if (exec_return != ''):
                    if current_kwarg:
                        curr_func.kwargs[current_kwarg] = exec_return
                    else:
                        curr_func.args.append(exec_return)
                else:
                    if (not literal_infuncstr):
                        infuncstr = infuncstr.strip()
                    if current_kwarg:
                        curr_func.kwargs[current_kwarg] = infuncstr
                    elif (literal_infuncstr or infuncstr.strip()):
                        curr_func.args.append(infuncstr)
                curr_func.fullstr += ((str(exec_return) + infuncstr) + char)
                current_kwarg = ''
                exec_return = ''
                infuncstr = ''
                literal_infuncstr = False
                if (char == ')'):
                    # End of the call: execute (or strip/escape) it.
                    open_lparens = 0
                    if strip:
                        exec_return = ''
                    elif escape:
                        exec_return = (escape_char + curr_func.fullstr)
                    else:
                        exec_return = self.execute(curr_func, raise_errors=raise_errors, **reserved_kwargs)
                    if callstack:
                        # Resume the suspended outer call with its saved state.
                        curr_func = callstack.pop()
                        current_kwarg = curr_func.current_kwarg
                        if curr_func.infuncstr:
                            infuncstr = (curr_func.infuncstr + str(exec_return))
                            exec_return = ''
                            curr_func.infuncstr = ''
                        double_quoted = curr_func.double_quoted
                        open_lparens = curr_func.open_lparens
                        open_lsquare = curr_func.open_lsquare
                        open_lcurly = curr_func.open_lcurly
                    else:
                        # Top-level call finished: splice result into output.
                        curr_func = None
                        fullstr += str(exec_return)
                        if return_str:
                            exec_return = ''
                        infuncstr = ''
                        literal_infuncstr = False
                continue
            infuncstr += char
        if curr_func:
            # Unterminated call(s): restore their raw text to the output.
            callstack.append(curr_func)
            for (inum, _) in enumerate(range(len(callstack))):
                funcstr = str(callstack.pop())
                if ((inum == 0) and funcstr.endswith(infuncstr)):
                    infuncstr = funcstr
                else:
                    infuncstr = (funcstr + infuncstr)
        if ((not return_str) and (exec_return != '')):
            # Single-call parse may return the raw (non-string) result.
            return exec_return
        fullstr += infuncstr
        return fullstr

    def parse_to_any(self, string, raise_errors=False, escape=False, strip=False, **reserved_kwargs):
        """Like parse(), but a lone call may return its raw (non-string) result."""
        return self.parse(string, raise_errors=raise_errors, escape=escape, strip=strip, return_str=False, **reserved_kwargs)
def quadrupole3d_01(ax, da, A, bx, db, B, R):
    """Auto-generated numeric kernel (common-subexpression-eliminated form).

    Computes a (6, 1, 3) block of quadrupole-type 3D Gaussian integrals for
    exponents ``ax``/``bx`` with coefficients ``da``/``db``, centers ``A`` and
    ``B``, evaluated relative to reference point ``R``.

    NOTE(review): generated code — the x0..x28 intermediates encode shared
    subexpressions; do not reorder or simplify by hand.
    # assumes A, B, R are length-3 array-likes (x, y, z) — TODO confirm with caller
    """
    result = numpy.zeros((6, 1, 3), dtype=float)
    # x0 = 1/(ax+bx); x1 = Gaussian product center along x.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + R[0])
    x4 = (x2 + B[0])
    x5 = (x3 * x4)
    x6 = (0.5 * x0)
    # x7 = reduced exponent; x8 = prefactor including the Gaussian overlap.
    x7 = ((ax * bx) * x0)
    x8 = ((((5. * da) * db) * numpy.sqrt(x0)) * numpy.exp(((- x7) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x9 = (x6 * x8)
    x10 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x11 = (- x10)
    x12 = (x11 + B[1])
    x13 = (x0 * x8)
    x14 = (x13 * ((x3 ** 2) + x6))
    x15 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x16 = (- x15)
    x17 = (x16 + B[2])
    x18 = (x11 + R[1])
    x19 = (x13 * (x5 + x6))
    x20 = (x12 * x18)
    x21 = (x20 + x6)
    x22 = (x13 * x3)
    x23 = (x16 + R[2])
    x24 = (x17 * x23)
    x25 = (x24 + x6)
    x26 = (x13 * ((x18 ** 2) + x6))
    x27 = (x13 * x23)
    x28 = (x13 * ((x23 ** 2) + x6))
    # Assemble the 6 (quadrupole components) x 1 x 3 (Cartesian) result block.
    result[(0, 0, 0)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x1) + B[0]) + R[0])) + (x3 * (x0 + (2.0 * x5))))))
    result[(0, 0, 1)] = numpy.sum(((- x12) * x14))
    result[(0, 0, 2)] = numpy.sum(((- x14) * x17))
    result[(1, 0, 0)] = numpy.sum(((- x18) * x19))
    result[(1, 0, 1)] = numpy.sum(((- x21) * x22))
    result[(1, 0, 2)] = numpy.sum((((- x17) * x18) * x22))
    result[(2, 0, 0)] = numpy.sum(((- x19) * x23))
    result[(2, 0, 1)] = numpy.sum((((- x12) * x22) * x23))
    result[(2, 0, 2)] = numpy.sum(((- x22) * x25))
    result[(3, 0, 0)] = numpy.sum(((- x26) * x4))
    result[(3, 0, 1)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x10) + B[1]) + R[1])) + (x18 * (x0 + (2.0 * x20))))))
    result[(3, 0, 2)] = numpy.sum(((- x17) * x26))
    result[(4, 0, 0)] = numpy.sum((((- x18) * x27) * x4))
    result[(4, 0, 1)] = numpy.sum(((- x21) * x27))
    result[(4, 0, 2)] = numpy.sum((((- x13) * x18) * x25))
    result[(5, 0, 0)] = numpy.sum(((- x28) * x4))
    result[(5, 0, 1)] = numpy.sum(((- x12) * x28))
    result[(5, 0, 2)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x15) + B[2]) + R[2])) + (x23 * (x0 + (2.0 * x24))))))
    return result
def validate_check_in_out_status(station: Station, attendee_data: UserCheckIn):
    """Validate that a check-in/check-out at *station* is consistent with the
    attendee's last recorded action.

    Raises:
        UnprocessableEntityError: if the attendee is already checked in at a
            check-in station, or attempts to check out without a prior check-in.
    """
    if attendee_data:
        same_station_type = attendee_data.station.station_type == station.station_type
        # Repeating the same action at the same station type is invalid.
        if same_station_type and (station.station_type == STATION_TYPE.get('check in')):
            raise UnprocessableEntityError({'attendee': attendee_data.ticket_holder_id, 'session ': attendee_data.session_id}, 'Attendee already checked in.')
        if same_station_type and (station.station_type == STATION_TYPE.get('check out')):
            raise UnprocessableEntityError({'attendee': attendee_data.ticket_holder_id, 'session ': attendee_data.session_id}, 'Attendee not check in yet.')
    elif (station.station_type == STATION_TYPE.get('check out')):
        # Bug fix: attendee_data is falsy (no prior check-in record) in this
        # branch, so dereferencing its attributes raised AttributeError
        # instead of the intended validation error.
        raise UnprocessableEntityError({'attendee': None, 'session ': None}, 'Attendee not check in yet.')
class Solution():
    def flatten(self, head: 'Node') -> 'Node':
        """Flatten a multilevel doubly linked list in depth-first order, in place.

        Each child list is spliced in directly after its parent node, with
        ``prev``/``next`` links rewired and every ``child`` pointer cleared.
        Returns the (unchanged) head node.
        """
        node = head
        pending = []  # next-pointers interrupted while descending into a child list
        while node is not None:
            if node.child is not None:
                # Splice the child list in right after the current node and
                # remember the interrupted continuation for later.
                if node.next is not None:
                    pending.append(node.next)
                node.next = node.child
                node.child.prev = node
                node.child = None
            if node.next is None and pending:
                # Reached the end of a spliced child list: reattach the
                # most recently interrupted continuation.
                tail = pending.pop()
                node.next = tail
                tail.prev = node
            node = node.next
        return head
def _fuse_gemm_reshape_permute0213(sorted_graph: List[Tensor], workdir: str=None) -> List[Tensor]:
    """Fuse gemm_rcr -> reshape -> permute(0,2,1,3) chains into gemm_rcr_permute.

    Scans the sorted graph for a ``gemm_rcr`` whose single consumer is a
    ``reshape`` whose single consumer is a 0213 ``permute``; when the chain
    matches, replaces all three ops with one fused ``gemm_rcr_permute`` op and
    removes the now-dead intermediate tensors.

    Returns the re-toposorted, sanitized graph.
    """
    sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
    for op in sorted_ops:
        if (op._attrs['op'] != 'gemm_rcr'):
            continue
        outputs = op._attrs['outputs']
        assert (len(outputs) == 1)
        gemm_output = outputs[0]
        # The gemm output must feed exactly one op (the reshape) to be fusable.
        if (len(gemm_output.dst_ops()) != 1):
            continue
        reshape_op = list(gemm_output.dst_ops())[0]
        if (reshape_op._attrs['op'] != 'reshape'):
            continue
        reshape_output = reshape_op._attrs['outputs'][0]
        if (len(reshape_output.dst_ops()) != 1):
            continue
        permute_op = list(reshape_output.dst_ops())[0]
        if (permute_op._attrs['op'] not in ('permute', 'permute0213')):
            continue
        permute_output = permute_op._attrs['outputs'][0]
        # Shape/permutation pattern checks delegated to module-level helpers.
        if (not _check_reshape(reshape_op)):
            continue
        # A generic 'permute' must carry the [0, 2, 1, 3] permutation.
        if ((permute_op._attrs['op'] != 'permute0213') and (not _check_permute(permute_op, [0, 2, 1, 3]))):
            continue
        # Middle dims of the reshaped 4D tensor parameterize the fused op.
        (_, d1, d2, _) = reshape_output.shape()
        d1_v = d1.value()
        d2_v = d2.value()
        gemm_permute_op = gemm_rcr_permute(shape=(d1_v, d2_v), layout='0213')
        (a, b) = op._attrs['inputs']
        # Detach the old gemm from its inputs before wiring in the fused op.
        transform_utils.remove_dst_op_from_tensor(a, op)
        transform_utils.remove_dst_op_from_tensor(b, op)
        new_output = gemm_permute_op(a, b)
        transform_utils.replace_tensor(permute_output, new_output)
        sorted_graph.append(new_output)
        transform_utils.remove_tensor_from_sorted_graph(gemm_output)
        transform_utils.remove_tensor_from_sorted_graph(reshape_output)
    # Re-sort once after all fusions; iteration above used the pre-computed op list.
    sorted_graph = toposort(sorted_graph)
    transform_utils.sanitize_sorted_graph(sorted_graph)
    return sorted_graph
class OptionSeriesColumnDataDatalabelsFilter(Options):
    """Config accessors for a column-series data-label filter option group.

    NOTE(review): each option appears as a getter/setter pair with the same
    name — presumably decorated with @property/@<name>.setter in the original
    source; the decorators are not visible in this extraction.
    """
    def operator(self):
        # Getter: returns the configured value, or None if unset.
        return self._config_get(None)
    def operator(self, value: Any):
        # Setter: stores a plain (non-JavaScript) value.
        self._config(value, js_type=False)
    def property(self):
        return self._config_get(None)
    def property(self, text: str):
        self._config(text, js_type=False)
def test_transacting_with_contract_no_arguments(w3, math_contract, transact, call):
    """A zero-argument contract transaction increments the counter by exactly one."""
    before = call(contract=math_contract, contract_function='counter')
    tx_hash = transact(contract=math_contract, contract_function='incrementCounter')
    receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
    assert receipt is not None
    after = call(contract=math_contract, contract_function='counter')
    assert after - before == 1
def get_lpddr4_phy_init_sequence(phy_settings, timing_settings):
    """Build the LPDDR4 PHY initialization command sequence and mode-register values.

    Derives mode registers MR1/MR2/MR3/MR11/MR12/MR13/MR14 from the PHY
    settings (CL/CWL, ODT settings, Vref ranges) and returns
    ``(init_sequence, mr)`` where ``init_sequence`` is a list of
    ``(description, address, bank, command, delay_cycles)`` tuples.
    """
    cl = phy_settings.cl
    cwl = phy_settings.cwl
    bl = 16  # burst length fixed to 16
    # Optional settings with LPDDR4-typical defaults.
    dq_odt = getattr(phy_settings, 'dq_odt', 'RZQ/2')
    ca_odt = getattr(phy_settings, 'ca_odt', 'RZQ/2')
    pull_down_drive_strength = getattr(phy_settings, 'pull_down_drive_strength', 'RZQ/2')
    vref_ca_range = getattr(phy_settings, 'vref_ca_range', 1)
    vref_ca = getattr(phy_settings, 'vref_ca', 30.4)
    vref_dq_range = getattr(phy_settings, 'vref_dq_range', 1)
    vref_dq = getattr(phy_settings, 'vref_dq', 30.4)
    def get_nwr():
        # Rows: (RL range, WL range, nWR, nRTP, frequency range in MHz).
        # NOTE(review): returns None (implicitly) if no row matches cl —
        # presumably cl is always one of the tabulated RL values; verify.
        frequency_ranges = [[(6, 6), (4, 4), 6, 8, (10, 266)], [(10, 12), (6, 8), 10, 8, (266, 533)], [(14, 16), (8, 12), 16, 8, (533, 800)], [(20, 22), (10, 18), 20, 8, (800, 1066)], [(24, 28), (12, 22), 24, 10, (1066, 1333)], [(28, 32), (14, 26), 30, 12, (1333, 1600)], [(32, 36), (16, 30), 34, 14, (1600, 1866)], [(36, 40), (18, 34), 40, 16, (1866, 2133)]]
        for ((rl, _), (wl, _), nwr, nrtp, (fmin, fmax)) in frequency_ranges:
            if (rl == cl):
                assert (wl == cwl), 'Wrong (RL, WL) combination'
                return nwr
    nwr = get_nwr()
    # ODT strength name -> MR encoding.
    odt_map = {'disable': 0, 'RZQ/1': 1, 'RZQ/2': 2, 'RZQ/3': 3, 'RZQ/4': 4, 'RZQ/5': 5, 'RZQ/6': 6}
    # Vref percentage -> MR12/MR14 code, per Vref range (0 or 1).
    vref_ranges = {0: {10.0: 0, 10.4: 1, 10.8: 2, 11.2: 3, 11.6: 4, 12.0: 5, 12.4: 6, 12.8: 7, 13.2: 8, 13.6: 9, 14.0: 10, 14.4: 11, 14.8: 12, 15.2: 13, 15.6: 14, 16.0: 15, 16.4: 16, 16.8: 17, 17.2: 18, 17.6: 19, 18.0: 20, 18.4: 21, 18.8: 22, 19.2: 23, 19.6: 24, 20.0: 25, 20.4: 26, 20.8: 27, 21.2: 28, 21.6: 29, 22.0: 30, 22.4: 31, 22.8: 32, 23.2: 33, 23.6: 34, 24.0: 35, 24.4: 36, 24.8: 37, 25.2: 38, 25.6: 39, 26.0: 40, 26.4: 41, 26.8: 42, 27.2: 43, 27.6: 44, 28.0: 45, 28.4: 46, 28.8: 47, 29.2: 48, 29.6: 49, 30.0: 50}, 1: {22.0: 0, 22.4: 1, 22.8: 2, 23.2: 3, 23.6: 4, 24.0: 5, 24.4: 6, 24.8: 7, 25.2: 8, 25.6: 9, 26.0: 10, 26.4: 11, 26.8: 12, 27.2: 13, 27.6: 14, 28.0: 15, 28.4: 16, 28.8: 17, 29.2: 18, 29.6: 19, 30.0: 20, 30.4: 21, 30.8: 22, 31.2: 23, 31.6: 24, 32.0: 25, 32.4: 26, 32.8: 27, 33.2: 28, 33.6: 29, 34.0: 30, 34.4: 31, 34.8: 32, 35.2: 33, 35.6: 34, 36.0: 35, 36.4: 36, 36.8: 37, 37.2: 38, 37.6: 39, 38.0: 40, 38.4: 41, 38.8: 42, 39.2: 43, 39.6: 44, 40.0: 45, 40.4: 46, 40.8: 47, 41.2: 48, 41.6: 49, 42.0: 50}}
    mr = {}
    # MR1: burst length, WR pre/post-amble, nWR.
    mr[1] = reg([(0, 2, {16: 0, 32: 1, 'on-the-fly': 2}[bl]), (2, 1, 1), (3, 1, 0), (4, 3, {6: 0, 10: 1, 16: 2, 20: 3, 24: 4, 30: 5, 34: 6, 40: 7}[nwr]), (7, 1, 0)])
    # MR2: read latency (CL) and write latency (CWL) encodings.
    mr[2] = reg([(0, 3, {6: 0, 10: 1, 14: 2, 20: 3, 24: 4, 28: 5, 32: 6, 36: 7}[cl]), (3, 3, {4: 0, 6: 1, 8: 2, 10: 3, 12: 4, 14: 5, 16: 6, 18: 7}[cwl]), (6, 1, 0), (7, 1, 0)])
    # MR3: pull-down drive strength and related flags.
    mr[3] = reg([(0, 1, 1), (1, 1, 0), (2, 1, 0), (3, 3, odt_map[pull_down_drive_strength]), (6, 1, 0), (7, 1, 0)])
    # MR11: DQ and CA on-die termination.
    mr[11] = reg([(0, 3, odt_map[dq_odt]), (4, 3, odt_map[ca_odt])])
    # MR12/MR14: CA/DQ Vref code and range select.
    mr[12] = reg([(0, 6, vref_ranges[vref_ca_range][vref_ca]), (6, 1, vref_ca_range)])
    mr[14] = reg([(0, 6, vref_ranges[vref_dq_range][vref_dq]), (6, 1, vref_dq_range)])
    mr[13] = 0
    from litedram.phy.lpddr4.commands import SpecialCmd, MPC
    def cmd_mr(ma):
        # Build a Mode Register Write entry for mode register *ma*.
        op = mr[ma]
        assert (ma < (2 ** 6)), 'MR address to big: {}'.format(ma)
        assert (op < (2 ** 8)), 'MR opcode to big: {}'.format(op)
        a = op
        ba = ma
        return ('Load More Register {}'.format(ma), a, ba, cmds['MODE_REGISTER'], 200)
    def ck(sec):
        # Convert a duration in seconds to an (integer) clock-cycle count.
        # NOTE(review): fmax is hard-coded to 0.0 here, which makes every
        # ck(...) delay evaluate to 0 — this looks like truncated source
        # (fmax presumably derived from the system clock frequency); confirm.
        fmax = .0
        return int(math.ceil((sec * fmax)))
    init_sequence = [('Assert reset', 0, 0, 'DFII_CONTROL_ODT', ck(1e-07)), ('Release reset', 0, 0, cmds['UNRESET'], ck(0.002)), ('Bring CKE high', 0, 0, cmds['CKE'], ck(2e-06)), *[cmd_mr(ma) for ma in sorted(mr.keys())], ('ZQ Calibration start', MPC.ZQC_START, SpecialCmd.MPC, 'DFII_COMMAND_WE|DFII_COMMAND_CS', ck(1e-06)), ('ZQ Calibration latch', MPC.ZQC_LATCH, SpecialCmd.MPC, 'DFII_COMMAND_WE|DFII_COMMAND_CS', max(8, ck(3e-08)))]
    return (init_sequence, mr)
def concat_pooling_forward(pooling: Model[(Ragged, Floats2d)], X: List[Ragged], is_train: bool):
    """Pool each document's Ragged array with a single call to *pooling*.

    All documents are concatenated into one flat Ragged, pooled once, and the
    pooled rows are split back per document. The returned backprop callback
    performs the inverse bookkeeping for the gradients.
    """
    ops_xp = pooling.ops.xp
    all_data = [doc.dataXd for doc in X]
    all_lengths = [doc.lengths for doc in X]
    rows_per_doc = [len(doc.lengths) for doc in X]
    X_flat = Ragged(ops_xp.concatenate(all_data, axis=0), ops_xp.concatenate(all_lengths, axis=0))
    (Y_pooled, pooling_backprop) = pooling(X_flat, is_train)
    # Split the pooled rows back into one array per input document.
    Y = ops_xp.split(Y_pooled, numpy.cumsum(rows_per_doc)[:(- 1)])
    def backprop(dY):
        # Concatenate per-doc pooled gradients, backprop through pooling once,
        # then slice the flat unpooled gradient back out per document.
        flat_grad = pooling_backprop(ops_xp.concatenate(dY)).dataXd
        grads = []
        offset = 0
        for doc in X:
            n_rows = doc.dataXd.shape[0]
            grads.append(Ragged(flat_grad[offset:(offset + n_rows)], doc.lengths))
            offset += n_rows
        return grads
    return (Y, backprop)
def __start_commoncrawl_extractor(warc_path, callback_on_article_extracted=None, callback_on_warc_completed=None, valid_hosts=None, start_date=None, end_date=None, strict_date=True, reuse_previously_downloaded_files=True, local_download_dir_warc=None, continue_after_error=True, show_download_progress=False, log_level=logging.ERROR, delete_warc_after_extraction=True, continue_process=True, log_pathname_fully_extracted_warcs=None, extractor_cls=CommonCrawlExtractor, fetch_images=False):
    """Instantiate *extractor_cls* and run extraction over one WARC file.

    Thin wrapper that forwards its configuration to
    ``extract_from_commoncrawl``.

    NOTE(review): the parameters ``continue_process`` and
    ``log_pathname_fully_extracted_warcs`` are accepted but not forwarded —
    the call passes the module-level ``__log_pathname_fully_extracted_warcs``
    instead; presumably intentional, but verify against callers.
    """
    commoncrawl_extractor = extractor_cls()
    commoncrawl_extractor.extract_from_commoncrawl(warc_path, callback_on_article_extracted, callback_on_warc_completed=callback_on_warc_completed, valid_hosts=valid_hosts, start_date=start_date, end_date=end_date, strict_date=strict_date, reuse_previously_downloaded_files=reuse_previously_downloaded_files, local_download_dir_warc=local_download_dir_warc, continue_after_error=continue_after_error, show_download_progress=show_download_progress, log_level=log_level, delete_warc_after_extraction=delete_warc_after_extraction, log_pathname_fully_extracted_warcs=__log_pathname_fully_extracted_warcs, fetch_images=fetch_images)
def main():
    """Manual test driver: push synthetic ResponseBatch objects over ZMQ and
    print whatever comes back on the output socket.
    """
    # Load service configuration from the repo-relative YAML file.
    with open('../config.yaml') as f:
        config_dict = yaml.full_load(f)
    config = dm.Config(**config_dict)
    # PUSH socket feeds the service input; PULL socket drains its output.
    ctx = zmq.Context()
    sock_sender = ctx.socket(zmq.PUSH)
    sock_sender.bind(config.zmq_input_address)
    sock_receiver = ctx.socket(zmq.PULL)
    sock_receiver.connect(config.zmq_output_address)
    try:
        db = plyvel.DB(config.db_file, create_if_missing=True)
    except IOError:
        raise RuntimeError('Failed to open database')
    uid_generator = uuid4_string_generator()
    responses = []
    # Build 10 synthetic batches; persist the uid -> request mapping in LevelDB.
    for _ in range(10):
        uid = next(uid_generator)
        batch_mapping = BatchMapping(batch_uid=uid, request_object_uids=['request-test-1', 'request-test-2'], source_ids=['source-id-test-1', 'source-id-test-2'])
        request_info1 = dm.RequestInfo(input=np.array([1, 2, 3, 4]), parameters={})
        request_info2 = dm.RequestInfo(input=np.array([5, 6, 7, 8]), parameters={})
        response_info1 = dm.ResponseInfo(output=np.array([1, 2, 3, 4]), picture=np.array([5, 6, 7, 8]), parameters={})
        response_info2 = dm.ResponseInfo(output=np.array([1, 2, 3, 4]), picture=np.array([5, 6, 7, 8]), parameters={})
        mini_batch1 = MiniResponseBatch(responses_info=[response_info1])
        mini_batch2 = MiniResponseBatch(responses_info=[response_info2])
        responses += [ResponseBatch(uid=uid, requests_info=[request_info1, request_info2], model=stateful_model, status=Status.CREATED, mini_batches=[mini_batch1, mini_batch2])]
        db.put(*batch_mapping.to_key_value())
    db.close()
    print(responses)
    for response in responses:
        sock_sender.send_pyobj(response)
    # Blocks forever echoing service output; intended for interactive use.
    while True:
        response_object = sock_receiver.recv_pyobj()
        print(response_object)
class ParserReflect():
    """Collects and validates parser-specification data from a dictionary.

    Scans a caller-supplied dictionary (typically a module's ``__dict__``)
    for the start symbol, ``p_error`` handler, token list, precedence table,
    and ``p_*`` grammar-rule functions, and checks them for consistency.
    Sets ``self.error`` when a fatal problem is found.
    """
    def __init__(self, pdict, log=None):
        # pdict: dictionary to scan for parser declarations.
        self.pdict = pdict
        self.start = None
        self.error_func = None
        self.tokens = None
        self.modules = set()
        self.grammar = []
        self.error = False
        if (log is None):
            self.log = PlyLogger(sys.stderr)
        else:
            self.log = log
    def get_all(self):
        """Collect all parser information from pdict."""
        self.get_start()
        self.get_error_func()
        self.get_tokens()
        self.get_precedence()
        self.get_pfunctions()
    def validate_all(self):
        """Validate everything collected; return True if any error was found."""
        self.validate_start()
        self.validate_error_func()
        self.validate_tokens()
        self.validate_precedence()
        self.validate_pfunctions()
        self.validate_modules()
        return self.error
    def signature(self):
        """Compute an MD5-based signature of the grammar specification.

        Used to detect when previously generated parser tables are stale.
        Returns a hex digest string.
        """
        try:
            from hashlib import md5
        except ImportError:
            # Pre-hashlib Python fallback.
            from md5 import md5
        try:
            sig = md5()
            if self.start:
                sig.update(self.start.encode('latin-1'))
            if self.prec:
                sig.update(''.join([''.join(p) for p in self.prec]).encode('latin-1'))
            if self.tokens:
                sig.update(' '.join(self.tokens).encode('latin-1'))
            for f in self.pfuncs:
                if f[3]:
                    # f[3] is the rule function's docstring (the grammar text).
                    sig.update(f[3].encode('latin-1'))
        except (TypeError, ValueError):
            pass
        digest = base64.b16encode(sig.digest())
        if (sys.version_info[0] >= 3):
            digest = digest.decode('latin-1')
        return digest
    def validate_modules(self):
        """Warn about p_* functions redefined within the same source module."""
        # Match the start of any p_-prefixed function definition.
        fre = re.compile('\\s*def\\s+(p_[a-zA-Z_0-9]*)\\(')
        for module in self.modules:
            try:
                (lines, linen) = inspect.getsourcelines(module)
            except OSError:
                # Source unavailable (e.g. interactive or frozen) — skip.
                continue
            counthash = {}
            for (linen, line) in enumerate(lines):
                linen += 1
                m = fre.match(line)
                if m:
                    name = m.group(1)
                    prev = counthash.get(name)
                    if (not prev):
                        counthash[name] = linen
                    else:
                        filename = inspect.getsourcefile(module)
                        self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d', filename, linen, name, prev)
    def get_start(self):
        """Read the optional 'start' symbol from pdict."""
        self.start = self.pdict.get('start')
    def validate_start(self):
        """Ensure the start symbol, if given, is a string."""
        if (self.start is not None):
            if (not isinstance(self.start, string_types)):
                self.log.error("'start' must be a string")
    def get_error_func(self):
        """Read the optional 'p_error' handler from pdict."""
        self.error_func = self.pdict.get('p_error')
    def validate_error_func(self):
        """Check that p_error is a function/method taking exactly one argument."""
        if self.error_func:
            if isinstance(self.error_func, types.FunctionType):
                ismethod = 0
            elif isinstance(self.error_func, types.MethodType):
                # Bound methods carry an implicit self that must be discounted.
                ismethod = 1
            else:
                self.log.error("'p_error' defined, but is not a function or method")
                self.error = True
                return
            eline = self.error_func.__code__.co_firstlineno
            efile = self.error_func.__code__.co_filename
            module = inspect.getmodule(self.error_func)
            self.modules.add(module)
            argcount = (self.error_func.__code__.co_argcount - ismethod)
            if (argcount != 1):
                self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
                self.error = True
    def get_tokens(self):
        """Read and sanity-check the 'tokens' list from pdict."""
        tokens = self.pdict.get('tokens')
        if (not tokens):
            self.log.error('No token list is defined')
            self.error = True
            return
        if (not isinstance(tokens, (list, tuple))):
            self.log.error('tokens must be a list or tuple')
            self.error = True
            return
        if (not tokens):
            self.log.error('tokens is empty')
            self.error = True
            return
        self.tokens = tokens
    def validate_tokens(self):
        """Reject the reserved 'error' token and warn on duplicates."""
        if ('error' in self.tokens):
            self.log.error("Illegal token name 'error'. Is a reserved word")
            self.error = True
            return
        terminals = set()
        for n in self.tokens:
            if (n in terminals):
                self.log.warning('Token %r multiply defined', n)
            terminals.add(n)
    def get_precedence(self):
        """Read the optional 'precedence' specification from pdict."""
        self.prec = self.pdict.get('precedence')
    def validate_precedence(self):
        """Validate the precedence table and expand it into self.preclist.

        Each entry becomes a (term, associativity, level) tuple, with level
        numbering starting at 1.
        """
        preclist = []
        if self.prec:
            if (not isinstance(self.prec, (list, tuple))):
                self.log.error('precedence must be a list or tuple')
                self.error = True
                return
            for (level, p) in enumerate(self.prec):
                if (not isinstance(p, (list, tuple))):
                    self.log.error('Bad precedence table')
                    self.error = True
                    return
                if (len(p) < 2):
                    self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
                    self.error = True
                    return
                assoc = p[0]
                if (not isinstance(assoc, string_types)):
                    self.log.error('precedence associativity must be a string')
                    self.error = True
                    return
                for term in p[1:]:
                    if (not isinstance(term, string_types)):
                        self.log.error('precedence items must be strings')
                        self.error = True
                        return
                    preclist.append((term, assoc, (level + 1)))
        self.preclist = preclist
    def get_pfunctions(self):
        """Collect every p_* rule function from pdict as (line, module, name, doc),
        sorted for deterministic table generation."""
        p_functions = []
        for (name, item) in self.pdict.items():
            if ((not name.startswith('p_')) or (name == 'p_error')):
                continue
            if isinstance(item, (types.FunctionType, types.MethodType)):
                line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
                module = inspect.getmodule(item)
                p_functions.append((line, module, name, item.__doc__))
        # Sort by line number first; str(module) breaks ties deterministically.
        p_functions.sort(key=(lambda p_function: (p_function[0], str(p_function[1]), p_function[2], p_function[3])))
        self.pfuncs = p_functions
    def validate_pfunctions(self):
        """Validate rule functions and parse their docstrings into self.grammar.

        Also warns about likely mistakes elsewhere in pdict, such as grammar
        rules defined without the p_ prefix.
        """
        grammar = []
        if (len(self.pfuncs) == 0):
            self.log.error('no rules of the form p_rulename are defined')
            self.error = True
            return
        for (line, module, name, doc) in self.pfuncs:
            file = inspect.getsourcefile(module)
            func = self.pdict[name]
            # Bound methods take (self, p); plain functions take (p).
            if isinstance(func, types.MethodType):
                reqargs = 2
            else:
                reqargs = 1
            if (func.__code__.co_argcount > reqargs):
                self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
                self.error = True
            elif (func.__code__.co_argcount < reqargs):
                self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
                self.error = True
            elif (not func.__doc__):
                self.log.warning('%s:%d: No documentation string specified in function %r (ignored)', file, line, func.__name__)
            else:
                try:
                    # The docstring holds the grammar production(s).
                    parsed_g = parse_grammar(doc, file, line)
                    for g in parsed_g:
                        grammar.append((name, g))
                except SyntaxError as e:
                    self.log.error(str(e))
                    self.error = True
                self.modules.add(module)
        # Heuristics: flag names that look like misdeclared grammar rules.
        for (n, v) in self.pdict.items():
            if (n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType))):
                continue
            if n.startswith('t_'):
                # Lexer rules are handled elsewhere.
                continue
            if (n.startswith('p_') and (n != 'p_error')):
                self.log.warning('%r not defined as a function', n)
            if ((isinstance(v, types.FunctionType) and (v.__code__.co_argcount == 1)) or (isinstance(v, types.MethodType) and (v.__func__.__code__.co_argcount == 2))):
                if v.__doc__:
                    try:
                        doc = v.__doc__.split(' ')
                        if (doc[1] == ':'):
                            self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix', v.__code__.co_filename, v.__code__.co_firstlineno, n)
                    except IndexError:
                        pass
        self.grammar = grammar
_session
def single_file_update(session, product_file, directory, product):
    """Download, preprocess, and clean up a single Sentinel-5 product file.

    Skips the file if a row with the same filename already exists in the
    database; otherwise downloads it into *directory*, runs preprocessing for
    the product's table, and removes the downloaded file afterwards.
    """
    filename = f"{product_file['identifier']}.nc"
    # Idempotency guard: don't re-download files we have already processed.
    if session.query(db.File).filter((db.File.filename == filename)).first():
        logger.warning('File %s already processed', filename)
        return
    logger.info('Downloading file %s', filename)
    sentinel5dl.download((product_file,), directory)
    filepath = os.path.join(directory, filename)
    preprocess.preprocess_file(filepath, product['table'], product['product'])
    # The raw download is no longer needed once preprocessing has ingested it.
    logger.info('Removing %s', filepath)
    os.remove(filepath)
def use_direct_and_iddr(p, luts, connects):
    """Emit randomized Verilog for an ILOGIC site exercising direct/IDELAY/IDDR paths.

    Randomly picks the D-input mux configuration for both the plain input path
    and the IDDR path, instantiates the corresponding primitives, and writes
    the generated Verilog into *connects*. Mutates *p* with the chosen
    parameters (used later for fuzzer result checking).
    """
    p['mux_config'] = random.choice(('direct', 'idelay', 'none'))
    p['iddr_mux_config'] = random.choice(('direct', 'idelay', 'none'))
    if (p['iddr_mux_config'] != 'none'):
        # Randomize all IDDR primitive parameters.
        p['INIT_Q1'] = random.randint(0, 1)
        p['INIT_Q2'] = random.randint(0, 1)
        p['IS_C_INVERTED'] = random.randint(0, 1)
        p['IS_D_INVERTED'] = random.randint(0, 1)
        p['SRTYPE'] = verilog.quote(random.choice(('SYNC', 'ASYNC')))
        p['DDR_CLK_EDGE'] = verilog.quote(random.choice(('OPPOSITE_EDGE', 'SAME_EDGE', 'SAME_EDGE_PIPELINED')))
        print('\n    (* KEEP, DONT_TOUCH, LOC = "{ilogic_loc}" *)\n    IDDR #(\n        .IS_D_INVERTED({IS_D_INVERTED}),\n        .IS_C_INVERTED({IS_C_INVERTED}),\n        .INIT_Q1({INIT_Q1}),\n        .INIT_Q2({INIT_Q2}),\n        .SRTYPE({SRTYPE}),\n        .DDR_CLK_EDGE({DDR_CLK_EDGE})\n    ) iddr_{site} (\n        .C({cnet}),\n        .D(iddr_d_{site}),\n        .Q1({q1}),\n        .Q2({q2})\n    );\n    '.format(cnet=luts.get_next_output_net(), q1=luts.get_next_input_net(), q2=luts.get_next_input_net(), **p), file=connects)
    # Instantiate an IDELAYE2 whenever any path needs the delayed input.
    # NOTE(review): 'tristate_feedback' is tested here and below, but the
    # random.choice above never produces it — presumably a leftover from a
    # variant of this fuzzer; the branch is dead as written. Verify.
    if ((p['iddr_mux_config'] == 'idelay') or (p['mux_config'] == 'idelay') or (p['iddr_mux_config'] == 'tristate_feedback')):
        print('\n    wire idelay_{site};\n\n    (* KEEP, DONT_TOUCH, LOC = "{idelay_loc}" *)\n    IDELAYE2 #(\n    ) idelay_site_{site} (\n        .IDATAIN({iwire}),\n        .DATAOUT(idelay_{site})\n    );'.format(**p), file=connects)
    print('\n    assign {owire} = {onet};\n    assign {twire} = {tnet};\n    '.format(onet=luts.get_next_output_net(), tnet=luts.get_next_output_net(), **p), file=connects)
    # Wire the IDDR D input according to the chosen mux configuration.
    if (p['iddr_mux_config'] == 'direct'):
        print('\n    assign iddr_d_{site} = {iwire};'.format(**p), file=connects)
    elif (p['iddr_mux_config'] == 'idelay'):
        print('\n    assign iddr_d_{site} = idelay_{site};'.format(**p), file=connects)
    elif (p['iddr_mux_config'] == 'tristate_feedback'):
        print('\n    assign iddr_d_{site} = tfb_{site} ? ofb_{site} : idelay_{site};'.format(**p), file=connects)
    elif (p['iddr_mux_config'] == 'none'):
        pass
    else:
        assert False, p['mux_config']
    # Wire the plain (non-IDDR) input path.
    if (p['mux_config'] == 'direct'):
        print('\n    assign {net} = {iwire};'.format(net=luts.get_next_input_net(), **p), file=connects)
    elif (p['mux_config'] == 'idelay'):
        print('\n    assign {net} = idelay_{site};'.format(net=luts.get_next_input_net(), **p), file=connects)
    elif (p['mux_config'] == 'none'):
        pass
    else:
        assert False, p['mux_config']
class CfgNode(_CfgNode):
    """Project-specific CfgNode that reroutes config paths and applies
    registered custom processing after every merge.

    NOTE(review): several methods here take ``cls`` or no ``self``-style
    first use (``cast_from_other_class``, ``load_yaml_with_base``) —
    presumably decorated @classmethod/@staticmethod in the original; the
    decorators are not visible in this extraction.
    """
    def cast_from_other_class(cls, other_cfg):
        """Create an instance of *cls* from another config, copying __dict__ state."""
        new_cfg = cls(other_cfg)
        # Copy instance-level (non-config) attributes as well.
        for (k, v) in other_cfg.__dict__.items():
            new_cfg.__dict__[k] = v
        return new_cfg
    def merge_from_file(self, cfg_filename: str, *args, **kwargs):
        """Merge from a YAML file, with path rerouting and custom post-processing."""
        cfg_filename = reroute_config_path(cfg_filename)
        with reroute_load_yaml_with_base():
            res = super().merge_from_file(cfg_filename, *args, **kwargs)
        self._run_custom_processing(is_dump=False)
        return res
    def merge_from_list(self, cfg_list: List[str]):
        """Merge from a flat key/value override list (CLI-style opts)."""
        override_cfg = _opts_to_dict(cfg_list)
        res = super().merge_from_other_cfg(CfgNode(override_cfg))
        self._run_custom_processing(is_dump=False)
        return res
    def dump(self, *args, **kwargs):
        """Dump the config, applying dump-time custom processing on a copy."""
        # Deep-copy so dump-time processing cannot mutate the live config.
        cfg = copy.deepcopy(self)
        cfg._run_custom_processing(is_dump=True)
        return super(CfgNode, cfg).dump(*args, **kwargs)
    def load_yaml_with_base(filename: str, *args, **kwargs):
        """Load a YAML file (with BASE inheritance) through the path rerouter."""
        filename = reroute_config_path(filename)
        with reroute_load_yaml_with_base():
            return _CfgNode.load_yaml_with_base(filename, *args, **kwargs)
    def __hash__(self):
        # Hash by serialized content so equal configs hash equally.
        return hash(self.dump())
    def _run_custom_processing(self, is_dump=False):
        """Apply every registered custom parse function, preserving frozen state."""
        frozen = self.is_frozen()
        self.defrost()
        for (name, process_func) in CONFIG_CUSTOM_PARSE_REGISTRY:
            logger.info(f'Apply config processing: {name}, is_dump={is_dump}')
            process_func(self, is_dump)
        if frozen:
            self.freeze()
    def get_default_cfg(self):
        """Return the default config corresponding to this one."""
        return resolve_default_config(self)
class HTTPError(Exception):
    """Exception carrying an HTTP status code plus optional detail.

    Attributes:
        code: numeric HTTP status code.
        reason: optional short, human-readable explanation.
        debug: optional debugging payload.
        headers: extra headers to emit with the error response.
    """
    def __init__(self, code, reason=None, debug=None, headers=None):
        # NOTE(review): the original assignment was truncated in this source
        # ("self.answer ="), which is a syntax error; the status line is
        # reconstructed here from the code. Confirm against upstream.
        self.answer = str(code)
        if (reason is None):
            message = self.answer
        else:
            message = ('%s (%s)' % (self.answer, reason))
        Exception.__init__(self, message)
        self.code = code
        self.reason = reason
        self.debug = debug
        # Bug fix: a mutable default argument ({}) would have been shared
        # across all instances; use a fresh dict per instance instead.
        self.headers = {} if headers is None else headers
class OptionSeriesLineSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Config accessors for the lowpass-frequency mapping of a line series'
    default sonification instrument.

    NOTE(review): each option appears as a getter/setter pair with the same
    name — presumably decorated with @property/@<name>.setter in the original
    source; the decorators are not visible in this extraction.
    """
    def mapFunction(self):
        # Getter: returns the configured value, or None if unset.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores a plain (non-JavaScript) value.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class List(KqlNode):
    """A node holding a list of child items joined by the subclass's operator."""
    __slots__ = ('items',)
    precedence = (Value.precedence + 1)
    operator = ''
    template = Template('$items')
    def __init__(self, items):
        self.items = items
        KqlNode.__init__(self)
    def delims(self):
        # Items render separated by the operator, padded with single spaces.
        return {'items': ' {} '.format(self.operator)}
    def __eq__(self, other):
        from .optimizer import Optimizer
        from functools import cmp_to_key
        # Only lists of the exact same node type can compare equal.
        if type(self) != type(other):
            return False
        # Order-insensitive comparison: canonicalise both sides by sorting
        # with the optimizer's ordering before comparing.
        order = cmp_to_key(Optimizer.sort_key)
        return sorted(self.items, key=order) == sorted(other.items, key=order)
def extractQuthsCom(item):
    """Map a 'Quths com' feed item to a release message.

    Returns None for preview posts or items with neither a chapter nor a
    volume; returns False when no known tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if (not (chp or vol)) or ('preview' in title_lower):
        return None
    # (feed tag, series name, translation type)
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for (tag, series_name, tl_type) in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
('calling_file, calling_module', [param('tests/test_apps/sweep_complex_defaults/my_app.py', None, id='file_path'), param(None, 'tests.test_apps.sweep_complex_defaults.my_app', id='pkg_path')])
def test_sweep_complex_defaults(hydra_restore_singletons: Any, hydra_sweep_runner: TSweepRunner, calling_file: str, calling_module: str) -> None:
    """Sweeping over 'optimizer=adam,nesterov' yields exactly two jobs, one per value."""
    with hydra_sweep_runner(calling_file=calling_file, calling_module=calling_module, config_path='conf', config_name='config.yaml', task_function=None, overrides=['optimizer=adam,nesterov']) as sweep:
        # One sweep batch with two jobs is expected.
        assert ((sweep.returns is not None) and (len(sweep.returns[0]) == 2))
        assert (sweep.returns[0][0].overrides == ['optimizer=adam'])
        assert (sweep.returns[0][1].overrides == ['optimizer=nesterov'])
def update_awards(award_tuple: Optional[tuple]=None) -> int:
    """Run the general award-update SQL, optionally restricted to given award ids.

    When *award_tuple* is provided, the statement is filtered on those ids;
    the tuple is bound three times because the SQL references it thrice.
    Returns the executor's row-count result.
    """
    if not award_tuple:
        return execute_database_statement(general_award_update_sql_string.format(predicate=''), None)
    return execute_database_statement(
        general_award_update_sql_string.format(predicate='WHERE tn.award_id IN %s'),
        [award_tuple] * 3,
    )
class CssDivModalContent(CssStyle.Style):
    """CSS style for a modal dialog's content container (flex column with shadow)."""
    _attrs = {'margin': '10%', 'padding': '5px 5px 5px 5px', 'border': '1px solid #888', 'width': '75%', 'box-shadow': '0 19px 38px rgba(0, 0, 0, 0.12), 0 15px 12px rgba(0, 0, 0, 0.22)', 'display': 'inline-flex', 'flex-direction': 'column'}
    def customize(self):
        """Apply theme-dependent background and the slide-down entrance animation."""
        # Background follows the page theme's lightest grey.
        self.css({'background-color': self.page.theme.greys[0]})
        # 0.7s slide-in from above with fade, played once.
        self.animation('epyk_modal_animatetop', {'0%': {'top': '-300px', 'opacity': '0'}, '100%': {'top': '0px', 'opacity': '1'}}, 0.7, iteration=1)
.django_db
def test_federal_account_list_pagination(client, agency_account_data, helpers):
    """Federal-account list endpoint paginates correctly with limit=2 over 3 accounts."""
    # Page 1: two results, hasNext set.
    query_params = f'?fiscal_year={helpers.get_mocked_current_fiscal_year()}&limit=2&page=1'
    resp = client.get(url.format(code='007', query_params=query_params))
    expected_result = {'fiscal_year': helpers.get_mocked_current_fiscal_year(), 'toptier_code': '007', 'messages': [], 'page_metadata': {'hasNext': True, 'hasPrevious': False, 'next': 2, 'page': 1, 'previous': None, 'limit': 2, 'total': 3}, 'results': [{'gross_outlay_amount': .0, 'name': 'FA 1', 'code': '001-0000', 'obligated_amount': 111.0, 'children': [{'gross_outlay_amount': .0, 'name': 'TA 1', 'code': '001-X-0000-000', 'obligated_amount': 111.0}]}, {'gross_outlay_amount': 100000.0, 'name': 'FA 3', 'code': '003-0000', 'obligated_amount': 100.0, 'children': [{'gross_outlay_amount': 100000.0, 'name': 'TA 6', 'code': '003-2017/2018-0000-000', 'obligated_amount': 100.0}]}]}
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json() == expected_result)
    # Page 2: the remaining result, hasPrevious set.
    query_params = f'?fiscal_year={helpers.get_mocked_current_fiscal_year()}&limit=2&page=2'
    resp = client.get(url.format(code='007', query_params=query_params))
    expected_result = {'fiscal_year': helpers.get_mocked_current_fiscal_year(), 'toptier_code': '007', 'messages': [], 'page_metadata': {'hasNext': False, 'hasPrevious': True, 'next': None, 'page': 2, 'previous': 1, 'total': 3, 'limit': 2}, 'results': [{'gross_outlay_amount': 1000000.0, 'name': 'FA 2', 'code': '002-0000', 'obligated_amount': 10.0, 'children': [{'gross_outlay_amount': 1000000.0, 'name': 'TA 5', 'code': '002-2008/2009-0000-000', 'obligated_amount': 10.0}]}]}
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json() == expected_result)
class MDHKinematicChain(KinematicChain):
    """Kinematic chain composed of modified-DH (MDH) links.

    NOTE(review): several members appear as same-named getter/setter pairs
    (``matrix``, ``links``, ``vector``) and an alternate constructor taking
    ``cls`` — presumably decorated with @property/@<name>.setter/@classmethod
    in the original attrs-based source; decorators are not visible here.
    """
    # Sequence of MDHLink objects (attrs-managed field).
    _links = attr.ib(type=Sequence[MDHLink])
    def __attrs_post_init__(self) -> None:
        # Normalize/validate whatever representation was passed for links.
        self._links = _validate_links(self._links)
    def from_parameters(cls: Any, parameters: npt.NDArray[np.float64]) -> Any:
        """Alternate constructor from a flat/stacked MDH parameter array."""
        kc = cls(parameters)
        return kc
    def matrix(self) -> npt.NDArray[np.float64]:
        """(n_links, link_size) array of each link's MDH parameter vector."""
        return np.array([l.vector for l in self._links])
    def matrix(self, value: npt.NDArray[np.float64]) -> None:
        # Setter: write each row back into the corresponding link.
        for (i, v) in enumerate(value):
            self.links[i].vector = v
    def links(self) -> Sequence[MDHLink]:
        x = self._links
        return x
    def links(self, value: Union[(Sequence[MDHLink], npt.NDArray[np.float64])]) -> None:
        self._links = _validate_links(value)
    def __len__(self) -> int:
        # Number of links in the chain.
        return len(self._links)
    def num_parameters(self) -> int:
        """Total number of scalar MDH parameters across all links."""
        return (len(self) * MDHLink._size)
    def transforms(self, q: Optional[npt.NDArray[np.float64]]=None) -> Sequence[npt.NDArray[np.float64]]:
        """Per-link transform matrices for joint configuration *q* (zeros if omitted)."""
        q = (np.zeros(len(self)) if (q is None) else q)
        transforms = [link.transform(p) for (link, p) in zip(self._links, q)]
        return transforms
    def vector(self) -> npt.NDArray[np.float64]:
        """Flattened view of the parameter matrix."""
        return self.matrix.ravel()
    def vector(self, value: npt.NDArray[np.float64]) -> None:
        # Setter: reshape a flat vector into rows of MDHLink._size parameters.
        value = np.array(value).reshape(((- 1), MDHLink._size))
        self.matrix = value
class BiometricWindow(QMainWindow):
    def __init__(self):
        """Build the service-configuration window and its widgets."""
        super().__init__()
        # IPv4 address immediately followed by "netmask" (lookahead only),
        # used to validate device IP fields.
        self.reg_exp_for_ip = '((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?=\\s*netmask)'
        self.init_ui()
def closeEvent(self, event):
can_exit = (not hasattr(self, 'p'))
if can_exit:
event.accept()
else:
create_message_box(text='Window cannot be closed when \nservice is running!', title='Message', width=200)
event.ignore()
    def init_ui(self):
        """Initialize all widgets, center the window, and show it."""
        # counter tracks how many extra device field rows have been added.
        self.counter = 0
        self.setup_window()
        self.setup_textboxes_and_label()
        self.center()
        self.show()
def setup_window(self):
self.setFixedSize(470, 550)
self.setWindowTitle('ERPNext Biometric Service')
    def setup_textboxes_and_label(self):
        """Create and lay out every label, text field, and button of the form."""
        # ERPNext connection settings (left column).
        self.create_label('API Secret', 'api_secret', 20, 0, 200, 30)
        self.create_field('textbox_erpnext_api_secret', 20, 30, 200, 30)
        self.create_label('API Key', 'api_key', 20, 60, 200, 30)
        self.create_field('textbox_erpnext_api_key', 20, 90, 200, 30)
        self.create_label('ERPNext URL', 'erpnext_url', 20, 120, 200, 30)
        self.create_field('textbox_erpnext_url', 20, 150, 200, 30)
        # Sync settings (right column).
        self.create_label('Pull Frequency (in minutes)', 'pull_frequency', 250, 0, 200, 30)
        self.create_field('textbox_pull_frequency', 250, 30, 200, 30)
        self.create_label('Import Start Date', 'import_start_date', 250, 60, 200, 30)
        self.create_field('textbox_import_start_date', 250, 90, 200, 30)
        # Date must look like DD/MM/YYYY.
        self.validate_data('^\\d{1,2}/\\d{1,2}/\\d{4}$', 'textbox_import_start_date')
        self.create_separator(210, 470)
        # Device rows: +/- buttons add or remove a device entry row.
        self.create_button('+', 'add', 390, 230, 35, 30, self.add_devices_fields)
        self.create_button('-', 'remove', 420, 230, 35, 30, self.remove_devices_fields)
        self.create_label('Device ID', 'device_id', 20, 260, 0, 30)
        self.create_label('Device IP', 'device_ip', 170, 260, 0, 30)
        self.create_label('Shift', 'shift', 320, 260, 0, 0)
        self.create_field('device_id_0', 20, 290, 145, 30)
        self.create_field('device_ip_0', 165, 290, 145, 30)
        self.validate_data(self.reg_exp_for_ip, 'device_ip_0')
        self.create_field('shift_0', 310, 290, 145, 30)
        # Action buttons; service controls stay disabled until configured.
        self.create_button('Set Configuration', 'set_conf', 20, 500, 130, 30, self.setup_local_config)
        self.create_button('Start Service', 'start_or_stop_service', 320, 500, 130, 30, self.integrate_biometric, enable=False)
        self.create_button('Running Status', 'running_status', 170, 500, 130, 30, self.get_running_status, enable=False)
        self.set_default_value_or_placeholder_of_field()
        # Pull frequency accepts integers only (validator range 10..30).
        self.onlyInt = QIntValidator(10, 30)
        self.textbox_pull_frequency.setValidator(self.onlyInt)
def set_default_value_or_placeholder_of_field(self):
    """Prefill the form from local_config.py when it exists, else show placeholders."""
    if os.path.exists('local_config.py'):
        import local_config as config
        self.textbox_erpnext_api_secret.setText(config.ERPNEXT_API_SECRET)
        self.textbox_erpnext_api_key.setText(config.ERPNEXT_API_KEY)
        self.textbox_erpnext_url.setText(config.ERPNEXT_URL)
        self.textbox_pull_frequency.setText(str(config.PULL_FREQUENCY))
        if len(config.devices):
            self.device_id_0.setText(config.devices[0]['device_id'])
            self.device_ip_0.setText(config.devices[0]['ip'])
            # NOTE(review): indexes shift_type_device_mapping by device position;
            # this assumes that list is ordered one-to-one with config.devices —
            # confirm, since get_device_details groups shifts by name instead.
            self.shift_0.setText(config.shift_type_device_mapping[0]['shift_type_name'])
        if (len(config.devices) > 1):
            # Recreate one extra row per additional configured device; each
            # add_devices_fields() call bumps self.counter to the new row index.
            for _ in range(self.counter, (len(config.devices) - 1)):
                self.add_devices_fields()
                device = getattr(self, ('device_id_' + str(self.counter)))
                ip = getattr(self, ('device_ip_' + str(self.counter)))
                shift = getattr(self, ('shift_' + str(self.counter)))
                device.setText(config.devices[self.counter]['device_id'])
                ip.setText(config.devices[self.counter]['ip'])
                shift.setText(config.shift_type_device_mapping[self.counter]['shift_type_name'])
    else:
        # No saved config: show example values as placeholders only.
        self.textbox_erpnext_api_secret.setPlaceholderText('c70ee57c7b3124c')
        self.textbox_erpnext_api_key.setPlaceholderText('fb37y8fd4uh8ac')
        self.textbox_erpnext_url.setPlaceholderText('example.erpnext.com')
        self.textbox_pull_frequency.setPlaceholderText('60')
        self.textbox_import_start_date.setPlaceholderText('DD/MM/YYYY')
def create_label(self, label_text, label_name, x, y, height, width):
    """Create a QLabel attribute named *label_name* showing *label_text* at (x, y).

    NOTE(review): the (height, width) pair is forwarded to resize(), whose Qt
    signature is resize(width, height) — the names look swapped (callers pass
    the larger, width-like value first). Kept as-is for caller compatibility.
    """
    setattr(self, label_name, QLabel(self))
    label = getattr(self, label_name)
    label.move(x, y)
    label.setText(label_text)
    # A zero in either value skips the explicit resize (e.g. the 'Shift' label).
    if (height and width):
        label.resize(height, width)
    label.show()
def create_field(self, field_name, x, y, height, width):
    """Create a QLineEdit, store it as attribute *field_name*, place and show it."""
    field = QLineEdit(self)
    setattr(self, field_name, field)
    field.move(x, y)
    # NOTE(review): Qt resize() takes (width, height); the parameter names
    # here appear swapped relative to that — values are forwarded unchanged.
    field.resize(height, width)
    field.show()
def create_separator(self, y, width):
    """Draw a thin, disabled line edit across the window as a visual separator."""
    separator = QLineEdit(self)
    setattr(self, 'separator', separator)
    separator.move(0, y)
    # 5 pixels tall, spanning *width* pixels from the left edge.
    separator.resize(width, 5)
    separator.setEnabled(False)
    separator.show()
def create_button(self, button_label, button_name, x, y, height, width, callback_function, enable=True):
    """Create a QPushButton attribute, position it, and wire its click handler."""
    button = QPushButton(button_label, self)
    setattr(self, button_name, button)
    button.move(x, y)
    # NOTE(review): Qt resize() takes (width, height); parameter names look
    # swapped relative to that — values are forwarded unchanged.
    button.resize(height, width)
    button.clicked.connect(callback_function)
    button.setEnabled(enable)
def center(self):
    """Centre the window on whichever screen currently contains the cursor."""
    desktop = QApplication.desktop()
    screen_index = desktop.screenNumber(desktop.cursor().pos())
    midpoint = desktop.screenGeometry(screen_index).center()
    geometry = self.frameGeometry()
    geometry.moveCenter(midpoint)
    self.move(geometry.topLeft())
def add_devices_fields(self):
    """Append one row of device inputs (id, ip, shift); at most five extra rows."""
    if self.counter >= 5:
        return
    self.counter += 1
    suffix = str(self.counter)
    # Each new row sits 30 px below the previous one.
    row_y = (290 + (self.counter * 30))
    self.create_field(('device_id_' + suffix), 20, row_y, 145, 30)
    self.create_field(('device_ip_' + suffix), 165, row_y, 145, 30)
    self.validate_data(self.reg_exp_for_ip, ('device_ip_' + suffix))
    self.create_field(('shift_' + suffix), 310, row_y, 145, 30)
def validate_data(self, reg_exp, field_name):
    """Attach a regular-expression validator to the named line-edit field."""
    target = getattr(self, field_name)
    target.setValidator(QRegExpValidator(QRegExp(reg_exp), target))
def remove_devices_fields(self):
    """Delete the most recently added device row, if any extra rows remain."""
    if self.counter > 0:
        suffix = str(self.counter)
        # Same removal order as the original: shift, device id, device ip.
        for prefix in ('shift_', 'device_id_', 'device_ip_'):
            getattr(self, (prefix + suffix)).deleteLater()
        self.counter -= 1
def integrate_biometric(self):
    """Toggle the background sync service: start it if stopped, stop it if running.

    The presence of ``self.p`` (the Popen handle) is used as the running flag.
    """
    button = getattr(self, 'start_or_stop_service')
    if (not hasattr(self, 'p')):
        print('Starting Service...')
        command = shlex.split('python -c "from erpnext_sync import infinite_loop; infinite_loop()"')
        self.p = subprocess.Popen(command, stdout=subprocess.PIPE)
        print('Process running at {}'.format(self.p.pid))
        button.setText('Stop Service')
        create_message_box('Service status', 'Service has been started')
        # Hidden label records the start timestamp; get_running_status uses it
        # to filter log lines.
        self.create_label(str(datetime.datetime.now()), 'service_start_time', 20, 60, 200, 30)
        self.service_start_time.setHidden(True)
        getattr(self, 'running_status').setEnabled(True)
    else:
        print('Stopping Service...')
        self.p.kill()
        # Reap the killed child so it does not linger as a zombie process.
        self.p.wait()
        del self.p
        button.setText('Start Service')
        # Fixed user-facing typo: 'stoped' -> 'stopped'.
        create_message_box('Service status', 'Service has been stopped')
        getattr(self, 'running_status').setEnabled(False)
def setup_local_config(self):
    """Write the generated configuration to local_config.py and enable Start."""
    bio_config = self.get_local_config()
    print('Setting Local Configuration...')
    if not bio_config:
        # Validation failed upstream; leave the existing file untouched.
        print('Local Configuration not updated...')
        return 0
    # Recreate the file from scratch on every save.
    if os.path.exists('local_config.py'):
        os.remove('local_config.py')
    with open('local_config.py', 'w+') as config_file:
        config_file.write(bio_config)
    print('Local Configuration Updated.')
    create_message_box('Message', 'Configuration Updated!\nClick on Start Service.')
    getattr(self, 'start_or_stop_service').setEnabled(True)
def get_device_details(self):
    """Collect the device rows currently entered in the form.

    Returns a ``(devices, shifts)`` tuple: *devices* is one dict per row and
    *shifts* groups device ids under each distinct shift name.
    """
    shift_to_devices = {}
    devices = []
    for row in range(self.counter + 1):
        suffix = str(row)
        shift_name = getattr(self, ('shift_' + suffix)).text()
        device_id = getattr(self, ('device_id_' + suffix)).text()
        devices.append({'device_id': device_id, 'ip': getattr(self, ('device_ip_' + suffix)).text(), 'punch_direction': '', 'clear_from_device_on_fetch': ''})
        shift_to_devices.setdefault(shift_name, []).append(device_id)
    shifts = [{'shift_type_name': name, 'related_device_id': ids} for (name, ids) in shift_to_devices.items()]
    return (devices, shifts)
def get_local_config(self):
    """Render config_template from the form values; return 0 if validation fails."""
    if not validate_fields(self):
        return 0
    # 'DD/MM/YYYY' -> 'YYYYMMDD' (segments reversed, separators dropped).
    start_date = self.textbox_import_start_date.text()
    formated_date = ''.join(start_date.split('/')[::-1])
    (devices, shifts) = self.get_device_details()
    return config_template.format(
        self.textbox_erpnext_api_key.text(),
        self.textbox_erpnext_api_secret.text(),
        self.textbox_erpnext_url.text(),
        self.textbox_pull_frequency.text(),
        formated_date,
        json.dumps(devices),
        json.dumps(shifts),
    )
def get_running_status(self):
    """Show a message box with log/error lines written since the service started.

    Scans logs.log and error.log under config.LOGS_DIRECTORY for the first
    entry stamped at or after the hidden service_start_time label, then
    displays everything from that entry onwards via read_file_contents.
    """
    running_status = []
    with open(('/'.join([config.LOGS_DIRECTORY]) + '/logs.log'), 'r') as f:
        index = 0
        # Find the 1-based index of the first log line at/after service start.
        for (idx, line) in enumerate(f, 1):
            logdate = convert_into_date(line.split(',')[0], '%Y-%m-%d %H:%M:%S')
            if (logdate and (logdate >= convert_into_date(self.service_start_time.text().split('.')[0], '%Y-%m-%d %H:%M:%S'))):
                index = idx
                break
    if index:
        running_status.extend(read_file_contents('logs', index))
    with open(('/'.join([config.LOGS_DIRECTORY]) + '/error.log'), 'r') as fread:
        error_index = 0
        for (error_idx, error_line) in enumerate(fread, 1):
            start_date = convert_into_date(self.service_start_time.text().split('.')[0], '%Y-%m-%d %H:%M:%S')
            # Cheap date-substring check first, then a full timestamp compare.
            if (start_date and (start_date.strftime('%Y-%m-%d') in error_line)):
                error_logdate = convert_into_date(error_line.split(',')[0], '%Y-%m-%d %H:%M:%S')
                if (error_logdate and (error_logdate >= start_date)):
                    error_index = error_idx
                    break
    if error_index:
        running_status.extend(read_file_contents('error', error_index))
    if running_status:
        create_message_box('Running status', ''.join(running_status))
    else:
        create_message_box('Running status', 'Process not yet started')
def set_icc_max(config):
    """Apply configured IccMax current limits to every power plane.

    Reads 'ICCMAX.<power source>' with a fallback to the plain 'ICCMAX'
    section; values <= 0 (or missing sections/options) are skipped silently.
    """
    for plane in CURRENT_PLANES:
        try:
            fallback_amp = config.getfloat('ICCMAX', plane, fallback=(- 1.0))
            amps = config.getfloat('ICCMAX.{:s}'.format(power['source']), plane, fallback=fallback_amp)
            if amps <= 0:
                continue
            msr_value = calc_icc_max_msr(plane, amps)
            writemsr('MSR_OC_MAILBOX', msr_value)
            if args.debug:
                # Mask to the low 10 bits before comparing with the read-back.
                msr_value &= 1023
                readback = get_icc_max(plane)[plane]
                readback_amps = calc_icc_max_amp(readback)
                match = (OK if (msr_value == readback) else ERR)
                log('[D] IccMax plane {:s} - write {:.2f} A ({:#x}) - read {:.2f} A ({:#x}) - match {}'.format(plane, amps, msr_value, readback_amps, readback, match))
        except (configparser.NoSectionError, configparser.NoOptionError):
            pass
class SnortLib(app_manager.RyuApp):
    """Ryu application that receives Snort alerts over a Unix or TCP socket.

    Parsed alert packets are re-dispatched to observers as EventAlert events.
    """

    def __init__(self):
        super(SnortLib, self).__init__()
        self.name = 'snortlib'
        # Default transport: local Unix datagram socket.
        self.config = {'unixsock': True}
        self._set_logger()
        self.sock = None
        self.nwsock = None

    def set_config(self, config):
        """Replace the transport configuration dict ('unixsock' / 'port' keys)."""
        assert isinstance(config, dict)
        self.config = config

    def start_socket_server(self):
        """Start the Unix- or network-socket listener per the current config."""
        if (not self.config.get('unixsock')):
            # TCP mode; fall back to the default port when none was given.
            if (self.config.get('port') is None):
                self.config['port'] = 51234
            self._start_recv_nw_sock(self.config.get('port'))
        else:
            self._start_recv()
        self.logger.info(self.config)

    def _recv_loop(self):
        # Datagram loop: every recv() yields one complete alert packet.
        self.logger.info('Unix socket start listening...')
        while True:
            data = self.sock.recv(BUFSIZE)
            msg = alert.AlertPkt.parser(data)
            if msg:
                self.send_event_to_observers(EventAlert(msg))

    def _start_recv(self):
        # Remove a stale socket file left over from a previous run.
        if os.path.exists(SOCKFILE):
            os.unlink(SOCKFILE)
        self.sock = hub.socket.socket(hub.socket.AF_UNIX, hub.socket.SOCK_DGRAM)
        self.sock.bind(SOCKFILE)
        hub.spawn(self._recv_loop)

    def _start_recv_nw_sock(self, port):
        # TCP listener bound on all interfaces, backlog of 5.
        self.nwsock = hub.socket.socket(hub.socket.AF_INET, hub.socket.SOCK_STREAM)
        self.nwsock.setsockopt(hub.socket.SOL_SOCKET, hub.socket.SO_REUSEADDR, 1)
        self.nwsock.bind(('0.0.0.0', port))
        self.nwsock.listen(5)
        hub.spawn(self._accept_loop_nw_sock)

    def _accept_loop_nw_sock(self):
        # One green thread per accepted connection.
        self.logger.info('Network socket server start listening...')
        while True:
            (conn, addr) = self.nwsock.accept()
            self.logger.info('Connected with %s', addr[0])
            hub.spawn(self._recv_loop_nw_sock, conn, addr)

    def _recv_loop_nw_sock(self, conn, addr):
        # Stream mode: accumulate bytes and consume fixed-size BUFSIZE records.
        buf = six.binary_type()
        while True:
            ret = conn.recv(BUFSIZE)
            if (len(ret) == 0):
                # Zero-length read means the peer closed the connection.
                self.logger.info('Disconnected from %s', addr[0])
                break
            buf += ret
            while (len(buf) >= BUFSIZE):
                data = buf[:BUFSIZE]
                msg = alert.AlertPkt.parser(data)
                if msg:
                    self.send_event_to_observers(EventAlert(msg))
                buf = buf[BUFSIZE:]

    def _set_logger(self):
        """Give this app its own '[snort]' log format, detached from the root logger."""
        self.logger.propagate = False
        hdl = logging.StreamHandler()
        fmt_str = '[snort][%(levelname)s] %(message)s'
        hdl.setFormatter(logging.Formatter(fmt_str))
        self.logger.addHandler(hdl)
class XarFile():
    """Represents a XAR archive on disk.

    Parses KEY: value attributes out of the file's text header, derives an
    alias from the filename (minus any '-<version>' suffix), and can mount
    the archive via an external XarExec helper and symlink the mountpoint.
    """

    def __init__(self, filename):
        self.filename = filename
        (root, self.extension) = os.path.splitext(self.filename)
        self.alias = os.path.basename(root)
        self._read_header()
        # Strip a trailing '-<version>' from the alias, e.g. 'pkg-3' -> 'pkg'.
        version_suffix = ('-%d' % self.version)
        if self.alias.endswith(version_suffix):
            self.alias = self.alias[:(- len(version_suffix))]

    def _read_header(self):
        """Parse header attributes from the first 4 KiB of the file.

        Raises ValueError if any attribute in required_attributes is missing.
        """
        self.attributes = {}
        # Open in binary and decode explicitly: the header is text but the
        # rest of the archive is arbitrary binary data. The original split
        # bytes with a str separator, which raises TypeError on Python 3.
        with open(self.filename, 'rb') as fh:
            header = fh.read(4096).decode('utf-8', 'replace').split('\n')
        for line in header:
            if (line == '#xar_stop'):
                break
            # Guard against empty lines before indexing (original raised
            # IndexError on a blank header line), then skip comments.
            if (line and (line[0] == '#')):
                continue
            m = attr_re.match(line)
            if m:
                self.attributes[m.group(1)] = m.group(2)
        for attr in required_attributes:
            if (attr not in self.attributes):
                raise ValueError(('Attribute %s missing from %s' % (attr, self.filename)))
        self.version = int(self.attributes['VERSION'])
        self.dependencies = []
        self.optional_dependencies = []

    def mount(self, xarexec):
        """Mount the archive with *xarexec*; set self.mountpoint and return True.

        Returns False (after logging) when the helper exits non-zero.
        """
        logger.info(('Mounting %s with %s' % (self.filename, xarexec)))
        # xarexec may carry its own arguments, so splice its split argv in
        # front. (The original nested the split list inside the argv list —
        # Popen argv items must be strings, so that call always failed.)
        proc = subprocess.Popen((xarexec.split() + ['-m', self.filename]), stdout=subprocess.PIPE)
        (stdout, _) = proc.communicate()
        if (proc.returncode != 0):
            logger.fatal(('Mount of %s failed, see stderr for details' % self.filename))
            return False
        # First line of the helper's output is the mountpoint path.
        self.mountpoint = stdout.decode('utf-8').split('\n')[0].strip()
        return True

    def symlink(self, destdir):
        """Symlink destdir/<alias> to the mountpoint, replacing an existing link."""
        dest = os.path.join(destdir, self.alias)
        logger.info(('Symlinking %s -> %s' % (self.mountpoint, dest)))
        if os.path.islink(dest):
            os.unlink(dest)
        os.symlink(self.mountpoint, dest)
class FaucetTaggedOrderedSwapVidMirrorTest(FaucetTaggedTest):
    """Integration test: an ACL that mirrors to port 3 and swaps VID 100 -> 101."""

    # ACL 1 mirrors matching VLAN-100 traffic to port 3 and outputs the
    # allowed copy with its VLAN id rewritten to 101.
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "tagged"\n 101:\n description: "tagged"\nacls:\n 1:\n - rule:\n vlan_vid: 100\n actions:\n mirror: %(port_3)d\n force_port_vlan: 1\n output:\n - swap_vid: 101\n allow: 1\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n tagged_vlans: [100]\n acl_in: 1\n %(port_2)d:\n tagged_vlans: [101]\n %(port_3)d:\n tagged_vlans: [100]\n %(port_4)d:\n tagged_vlans: [100]\n '

    def test_tagged(self):
        """Ping through the ACL and verify both the swapped and the mirrored copy."""
        (first_host, second_host, third_host) = self.hosts_name_ordered()[:3]

        def test_acl(tcpdump_host, tcpdump_filter):
            # Capture on *tcpdump_host* while first_host pings second_host
            # (static ARP avoids broadcast noise in the capture).
            tcpdump_txt = self.tcpdump_helper(tcpdump_host, tcpdump_filter, [(lambda : first_host.cmd(('arp -s %s %s' % (second_host.IP(), '01:02:03:04:05:06')))), (lambda : first_host.cmd(' '.join((self.FPINGS_ARGS_ONE, second_host.IP()))))], root_intf=True)
            self.assertTrue(re.search(('%s: ICMP echo request' % second_host.IP()), tcpdump_txt))
            self.assertTrue(re.search(tcpdump_filter, tcpdump_txt))
        # Rewritten copy arrives on VLAN 101; mirrored copy keeps VLAN 100.
        test_acl(second_host, 'vlan 101')
        test_acl(third_host, 'vlan 100')
# NOTE(review): the next line appears to have lost its '@pytest.mark' prefix
# during extraction; it should read '@pytest.mark.django_db'.
.django_db
def test_award_type(award_data_fixture, elasticsearch_award_index):
    """Matching on award type finds the fixture doc for 'A' and nothing for 'D'."""
    elasticsearch_award_index.update_index()
    # Exactly one award of type 'A' exists in the test index.
    should = {'match': {'type': 'A'}}
    query = create_query(should)
    client = elasticsearch_award_index.client
    response = client.search(index=elasticsearch_award_index.index_name, body=query)
    assert (response['hits']['total']['value'] == 1)
    # No award of type 'D' exists.
    should = {'match': {'type': 'D'}}
    query = create_query(should)
    response = client.search(index=elasticsearch_award_index.index_name, body=query)
    assert (response['hits']['total']['value'] == 0)
class MyTCPHandler(socketserver.BaseRequestHandler):
    """Replies to every TCP connection with a canned response payload file."""

    def handle(self):
        # Read (and keep, for inspection) whatever the client sent.
        self.data = self.request.recv(1024).strip()
        print('{} - Contact by {}'.format(time.strftime('[%Y/%m/%d %I:%M:%S]'), self.client_address[0]))
        # Context manager closes the payload file (the original leaked the
        # handle on every connection).
        with open('data/cnc1-response.raw', 'rb') as response:
            self.request.sendall(response.read())
        print('{} - Responded to {}'.format(time.strftime('[%Y/%m/%d %I:%M:%S]'), self.client_address[0]))
def compute_max_saturation(a: float, b: float, lms_to_rgb: Matrix, ok_coeff: list[Matrix]) -> float:
    """Estimate the maximum OKLab saturation (S = C/L) for hue direction (a, b).

    Picks the coefficient set for the gamut edge the hue direction crosses,
    evaluates a polynomial first guess, then refines it with one
    Halley's-method step.
    """
    # Select the channel (0, 1, or the fallback 2) whose edge applies.
    for channel in (0, 1):
        if alg.vdot(ok_coeff[channel][0], [a, b]) > 1:
            break
    else:
        channel = 2
    (k0, k1, k2, k3, k4) = ok_coeff[channel][1]
    (wl, wm, ws) = lms_to_rgb[channel]
    # Polynomial first approximation of the saturation.
    sat = ((((k0 + (k1 * a)) + (k2 * b)) + (k3 * (a ** 2))) + ((k4 * a) * b))
    # Per-channel linear coefficients of sat in the LMS ramp terms.
    k_l = alg.vdot(OKLAB_TO_LMS3[0][1:], [a, b])
    k_m = alg.vdot(OKLAB_TO_LMS3[1][1:], [a, b])
    k_s = alg.vdot(OKLAB_TO_LMS3[2][1:], [a, b])
    l_ = (1.0 + (sat * k_l))
    m_ = (1.0 + (sat * k_m))
    s_ = (1.0 + (sat * k_s))
    lms_l = (l_ ** 3)
    lms_m = (m_ ** 3)
    lms_s = (s_ ** 3)
    # First and second derivatives of each cubed ramp w.r.t. sat.
    l_ds = ((3.0 * k_l) * (l_ ** 2))
    m_ds = ((3.0 * k_m) * (m_ ** 2))
    s_ds = ((3.0 * k_s) * (s_ ** 2))
    l_ds2 = ((6.0 * (k_l ** 2)) * l_)
    m_ds2 = ((6.0 * (k_m ** 2)) * m_)
    s_ds2 = ((6.0 * (k_s ** 2)) * s_)
    f = (((wl * lms_l) + (wm * lms_m)) + (ws * lms_s))
    f1 = (((wl * l_ds) + (wm * m_ds)) + (ws * s_ds))
    f2 = (((wl * l_ds2) + (wm * m_ds2)) + (ws * s_ds2))
    # One Halley refinement step: x -= f*f' / (f'^2 - 0.5*f*f'').
    return (sat - ((f * f1) / ((f1 ** 2) - ((0.5 * f) * f2))))
# NOTE(review): the next line appears to have lost its '@pytest.fixture'
# prefix during extraction; it should read "@pytest.fixture(scope='function')".
(scope='function')
def privacy_notice_fr_provide_service_frontend_only(db: Session) -> Generator:
    """Yield a frontend-only, opt-in privacy notice scoped to the France region."""
    privacy_notice = PrivacyNotice.create(db=db, data={'name': 'example privacy notice us_co provide.service.operations', 'notice_key': 'example_privacy_notice_us_co_provide.service.operations', 'description': 'a sample privacy notice configuration', 'regions': [PrivacyNoticeRegion.fr], 'consent_mechanism': ConsentMechanism.opt_in, 'data_uses': ['essential.service'], 'enforcement_level': EnforcementLevel.frontend, 'displayed_in_overlay': True, 'displayed_in_privacy_center': False, 'displayed_in_api': False})
    (yield privacy_notice)
def build_neck(config):
    """Instantiate a detection/recognition neck module from *config*.

    Pops 'name' from *config* and passes the remaining keys as constructor
    arguments. Raises AssertionError for unsupported names.
    """
    from .db_fpn import DBFPN
    from .east_fpn import EASTFPN
    from .sast_fpn import SASTFPN
    from .rnn import SequenceEncoder
    from .pg_fpn import PGFPN
    from .table_fpn import TableFPN
    # Explicit name -> class mapping instead of eval() on a config-supplied
    # string (same lookup result, no arbitrary-code risk).
    neck_classes = {'DBFPN': DBFPN, 'EASTFPN': EASTFPN, 'SASTFPN': SASTFPN, 'SequenceEncoder': SequenceEncoder, 'PGFPN': PGFPN, 'TableFPN': TableFPN}
    support_dict = list(neck_classes)
    module_name = config.pop('name')
    assert (module_name in support_dict), Exception('neck only support {}'.format(support_dict))
    module_class = neck_classes[module_name](**config)
    return module_class
class CssBigIcon(CssStyle.Style):
    """Clickable inline icon styled with the theme's danger colour and big size."""

    _attrs = {'display': 'inline-block', 'margin': '0 10px 0 10px', 'cursor': 'pointer'}

    def customize(self):
        """Apply theme-dependent colour/font-size plus the hover colour."""
        base_rules = {'color': self.page.theme.danger.base, 'font-size': self.page.body.style.globals.icon.big_size()}
        self.css(base_rules)
        self.hover.css({'color': self.page.theme.danger.base})
def check_pde_args(F, J, Jp):
    """Validate the residual, Jacobian and optional preconditioner forms.

    F must be a linear form (one argument); J and, when supplied, Jp must be
    bilinear forms (two arguments). Raises TypeError or ValueError otherwise.
    """
    valid_types = (ufl.BaseForm, slate.slate.TensorBase)
    if not isinstance(F, valid_types):
        raise TypeError(("Provided residual is a '%s', not a BaseForm or Slate Tensor" % type(F).__name__))
    if len(F.arguments()) != 1:
        raise ValueError('Provided residual is not a linear form')
    # J and Jp share the same bilinear-form requirements; Jp alone may be None.
    for (form, label, optional) in ((J, 'Jacobian', False), (Jp, 'preconditioner', True)):
        if optional and (form is None):
            continue
        if not isinstance(form, valid_types):
            raise TypeError(("Provided %s is a '%s', not a BaseForm or Slate Tensor" % (label, type(form).__name__)))
        if len(form.arguments()) != 2:
            raise ValueError(('Provided %s is not a bilinear form' % label))
def extractWwwPeachblossomgroveCom(item):
    """Map a feed item to a release message for known series.

    Returns None for previews/unnumbered items, False for unknown series.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    has_number = bool(chp or vol)
    if (not has_number) or ('preview' in item['title'].lower()):
        return None
    if 'World of Hidden Phoenixes' in item['tags']:
        return buildReleaseMessageWithType(item, 'World of Hidden Phoenixes', vol, chp, frag=frag, postfix=postfix)
    return False
class BracketPlugin(object):
    """Wraps a dynamically loaded bracket plugin and runs it against a view."""

    def __init__(self, plugin, loaded):
        """Load the module named by plugin['command'] and instantiate its plugin.

        *loaded* is a set of already-imported module names, updated in place.
        Load failures are printed and leave the plugin disabled.
        """
        self.enabled = False
        self.args = (plugin['args'] if ('args' in plugin) else {})
        self.plugin = None
        if 'command' not in plugin:
            return
        module_path = plugin['command']
        try:
            module = _import_module(module_path, loaded)
            self.plugin = getattr(module, 'plugin')()
            loaded.add(module_path)
            self.enabled = True
        except Exception:
            print(('BracketHighlighter: Load Plugin Error: %s\n%s' % (plugin['command'], traceback.format_exc())))

    def is_enabled(self):
        """Return True when the plugin module loaded successfully."""
        return self.enabled

    def run_command(self, view, name, left, right, selection):
        """Run the plugin via the 'bracket_plugin_run' view command.

        Returns the (left, right, selection, nobracket, refresh_match) tuple,
        updated from the Payload when the plugin reported success.
        """
        nobracket = False
        refresh_match = False
        Payload.status = False
        # Fresh plugin instance, seeded with the current run context.
        Payload.plugin = self.plugin()
        Payload.plugin.left = left
        Payload.plugin.right = right
        Payload.plugin.view = view
        Payload.plugin.selection = selection
        Payload.plugin.nobracket = False
        Payload.plugin.refresh_match = False
        self.args['edit'] = None
        self.args['name'] = name
        Payload.args = self.args
        view.run_command('bracket_plugin_run')
        if Payload.status:
            # The plugin ran; pick up whatever it changed.
            left = Payload.plugin.left
            right = Payload.plugin.right
            selection = Payload.plugin.selection
            nobracket = Payload.plugin.nobracket
            refresh_match = Payload.plugin.refresh_match
        Payload.clear()
        return (left, right, selection, nobracket, refresh_match)
class OptionSeriesPolygonSonificationContexttracksMappingGapbetweennotes(Options):
    """Accessors for Highcharts polygon sonification gapBetweenNotes mapping options.

    NOTE(review): every option is defined twice — a no-argument getter followed
    by a one-argument setter. The @property / @<name>.setter decorators appear
    to have been stripped during extraction; as written each second def shadows
    the first.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class SimulEvalResults():
    """Reader for a SimulEval output directory.

    Loads the 'scores' JSON file when present; otherwise exposes empty/zero
    metrics with is_finished False.

    NOTE(review): quality, latency and name are read elsewhere in this class
    as attributes ('self.quality', 'self.latency.get' — no call parentheses),
    so these methods were almost certainly decorated with @property before
    extraction stripped the decorators.
    """

    def __init__(self, path: Union[(Path, str)]) -> None:
        self.path = Path(path)
        scores_path = (self.path / 'scores')
        if scores_path.exists():
            self.is_finished = True
            with open((self.path / 'scores')) as f:
                self.scores = json.load(f)
        else:
            self.is_finished = False
            self.scores = {}

    def quality(self) -> float:
        """BLEU quality score, or 0 when the run is unfinished or has no scores."""
        if self.is_finished:
            if (self.scores is None):
                return 0
            return self.scores['Quality']['BLEU']
        else:
            return 0

    def bleu(self) -> float:
        # Alias for quality (accessed as an attribute; see class NOTE).
        return self.quality

    def latency(self) -> Dict[(str, float)]:
        """Latency metrics dict ('AL', 'AL_CA', 'AP'); empty when unfinished."""
        if self.is_finished:
            return self.scores['Latency']
        else:
            return {}

    def average_lagging(self):
        return self.latency.get('AL', 0)

    def average_lagging_ca(self):
        # Computation-aware average lagging.
        return self.latency.get('AL_CA', 0)

    def average_proportion(self):
        return self.latency.get('AP', 0)

    def name(self):
        return self.path.name
class OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMapping(Options):
    """Accessors for columnpyramid sonification default speech-option mappings.

    Sub-option accessors return typed child Options objects via
    _config_sub_data; plain values go through _config_get/_config.

    NOTE(review): 'text' is defined twice (getter then setter) — the
    @property decorators appear to have been stripped during extraction.
    """

    def pitch(self) -> 'OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingPitch)

    def playDelay(self) -> 'OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingPlaydelay)

    def rate(self) -> 'OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingRate':
        return self._config_sub_data('rate', OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingRate)

    def text(self):
        return self._config_get(None)

    def text(self, text: str):
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingTime)

    def volume(self) -> 'OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesColumnpyramidSonificationDefaultspeechoptionsMappingVolume)
class SingletonMeta(ImmutableMeta):
    """Metaclass that caches one instance per constructor-argument tuple.

    Each class gets its own WeakValueDictionary, so equal argument tuples
    yield the same instance while entries disappear once no strong
    references remain.
    """

    def __new__(mcls, name, bases, namespace, **kwargs):
        new_class = super().__new__(mcls, name, bases, namespace, **kwargs)
        # Per-class cache keyed by the argument tuple.
        new_class._cache = weakref.WeakValueDictionary()
        return new_class

    def _new(cls, *args):
        """Return the cached instance for *args*, creating it on first use."""
        try:
            return cls._cache[args]
        except KeyError:
            instance = super()._new(*args)
            cls._cache[args] = instance
            return instance
# NOTE(review): the lines below appear to have lost their '@' decorator
# prefixes during extraction; they should read '@_pass_through.command(...)'
# and stacked '@_click.option(...)' decorators on map_execute_task_cmd.
_pass_through.command('pyflyte-map-execute')
_click.option('--inputs', required=True)
_click.option('--output-prefix', required=True)
_click.option('--raw-output-data-prefix', required=False)
_click.option('--max-concurrency', type=int, required=False)
_click.option('--test', is_flag=True)
_click.option('--dynamic-addl-distro', required=False)
_click.option('--dynamic-dest-dir', required=False)
_click.option('--resolver', required=True)
_click.option('--checkpoint-path', required=False)
_click.option('--prev-checkpoint', required=False)
_click.option('--experimental', is_flag=True, default=False, required=False)
_click.argument('resolver-args', type=_click.UNPROCESSED, nargs=(- 1))
def map_execute_task_cmd(inputs, output_prefix, raw_output_data_prefix, max_concurrency, test, dynamic_addl_distro, dynamic_dest_dir, resolver, resolver_args, prev_checkpoint, experimental, checkpoint_path):
    """CLI entry point for 'pyflyte-map-execute': run one shard of a map task.

    Normalizes the raw-output/checkpoint prefixes, then forwards every option
    to _execute_map_task.
    """
    logger.info(get_version_message())
    (raw_output_data_prefix, checkpoint_path, prev_checkpoint) = normalize_inputs(raw_output_data_prefix, checkpoint_path, prev_checkpoint)
    _execute_map_task(inputs=inputs, output_prefix=output_prefix, raw_output_data_prefix=raw_output_data_prefix, max_concurrency=max_concurrency, test=test, dynamic_addl_distro=dynamic_addl_distro, dynamic_dest_dir=dynamic_dest_dir, resolver=resolver, resolver_args=resolver_args, checkpoint_path=checkpoint_path, prev_checkpoint=prev_checkpoint, experimental=experimental)
class OptionSeriesAreasplineData(Options):
    """Accessors for Highcharts areaspline per-point data options.

    Sub-option accessors return typed child Options objects via
    _config_sub_data; scalar values go through _config_get/_config.

    NOTE(review): each scalar option is defined twice — a no-argument getter
    followed by a one-argument setter. The @property / @<name>.setter
    decorators appear to have been stripped during extraction; as written
    each second def shadows the first.
    """

    def accessibility(self) -> 'OptionSeriesAreasplineDataAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesAreasplineDataAccessibility)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def colorIndex(self):
        return self._config_get(None)

    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    def custom(self):
        return self._config_get(None)

    def custom(self, value: Any):
        self._config(value, js_type=False)

    def dataLabels(self) -> 'OptionSeriesAreasplineDataDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesAreasplineDataDatalabels)

    def description(self):
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def dragDrop(self) -> 'OptionSeriesAreasplineDataDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesAreasplineDataDragdrop)

    def drilldown(self):
        return self._config_get(None)

    def drilldown(self, text: str):
        self._config(text, js_type=False)

    def events(self) -> 'OptionSeriesAreasplineDataEvents':
        return self._config_sub_data('events', OptionSeriesAreasplineDataEvents)

    def id(self):
        return self._config_get(None)

    def id(self, text: str):
        self._config(text, js_type=False)

    def labelrank(self):
        return self._config_get(None)

    def labelrank(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesAreasplineDataMarker':
        return self._config_sub_data('marker', OptionSeriesAreasplineDataMarker)

    def name(self):
        return self._config_get(None)

    def name(self, text: str):
        self._config(text, js_type=False)

    def selected(self):
        # Defaults to False (point not selected).
        return self._config_get(False)

    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    def x(self):
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(None)

    def y(self, num: float):
        self._config(num, js_type=False)
def extractNeoTranslations(item):
    """Map a feed item to a release message for series handled by Neo Translations.

    Returns None for previews/unnumbered items, False for unknown series.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tags = item['tags']
    title = item['title']
    if 'The Man Picked up by the Gods'.lower() in title.lower():
        return buildReleaseMessageWithType(item, 'The Man Picked up by the Gods', vol, chp, frag=frag, postfix=postfix)
    # Tag and literal title mention both map to the same series name.
    if ('I came back but the world is still a fantasy' in tags) or ('I came back but the world is still a fantasy' in title):
        return buildReleaseMessageWithType(item, 'Kaettekite mo Fantasy!?', vol, chp, frag=frag, postfix=postfix)
    if 'Ashes and Kingdoms' in tags:
        return buildReleaseMessageWithType(item, 'Ashes and Kingdoms', vol, chp, frag=frag, postfix=postfix)
    if 'Goblin Kingdom' in tags:
        return buildReleaseMessageWithType(item, 'Goblin no Oukoku', vol, chp, frag=frag, postfix=postfix)
    return False
class modules():
    """Query and manipulate loaded kernel modules via modprobe/rmmod."""

    def __init__(self):
        self._load_command = COMMANDS['modprobe']
        self._unload_command = COMMANDS['rmmod']

    def __repr__(self):
        return ('%s' % self.__class__)

    def loaded_modules(self):
        """Return (names, deps) parsed from /proc/modules; empty when absent."""
        names = []
        dependencies = {}
        try:
            with open('/proc/modules', 'r') as proc_modules:
                for raw_line in proc_modules:
                    if not raw_line:
                        break
                    fields = raw_line.strip().split()
                    names.append(fields[0])
                    # Column 4 is '-' when nothing depends on the module;
                    # otherwise a comma-separated (trailing comma) list.
                    if fields[3] == '-':
                        dependencies[fields[0]] = []
                    else:
                        dependencies[fields[0]] = fields[3].split(',')[:(- 1)]
        except FileNotFoundError:
            pass
        return (names, dependencies)

    def load_module(self, module):
        """Load *module* with modprobe; returns runProg's (status, output)."""
        log.debug2('%s: %s %s', self.__class__, self._load_command, module)
        return runProg(self._load_command, [module])

    def unload_module(self, module):
        """Unload *module* with rmmod; returns runProg's (status, output)."""
        log.debug2('%s: %s %s', self.__class__, self._unload_command, module)
        return runProg(self._unload_command, [module])

    def get_deps(self, module, deps, ret):
        """Depth-first append *module*'s dependents, then *module*, to *ret*.

        Unknown modules are ignored; duplicates are never appended.
        """
        if module not in deps:
            return
        for dependent in deps[module]:
            self.get_deps(dependent, deps, ret)
            if dependent not in ret:
                ret.append(dependent)
        if module not in ret:
            ret.append(module)

    def get_firewall_modules(self):
        """Return firewall-related modules plus everything depending on them."""
        result = []
        (loaded, deps) = self.loaded_modules()
        self.get_deps('nf_conntrack', deps, result)
        # Move the address-family conntrack helpers towards the end of the
        # list (presumably so they are handled just before nf_conntrack).
        for bad_bad_module in ['nf_conntrack_ipv4', 'nf_conntrack_ipv6']:
            if bad_bad_module in result:
                result.remove(bad_bad_module)
                result.insert((- 1), bad_bad_module)
        prefixes = ('iptable_', 'ip6table_', 'nf_', 'xt_', 'ipt_', 'ip6t_')
        for mod in loaded:
            if (mod in ('ip_tables', 'ip6_tables', 'ebtables')) or mod.startswith(prefixes):
                self.get_deps(mod, deps, result)
        return result

    def unload_firewall_modules(self):
        """Attempt to unload every firewall module, logging failures."""
        for module in self.get_firewall_modules():
            (status, ret) = self.unload_module(module)
            if status != 0:
                log.debug1(("Failed to unload module '%s': %s" % (module, ret)))
def cMedQA2():
    """Convert cMedQA2 train/test candidate files into a prompt/output JSONL file.

    Reads the question, answer and train/test candidate CSVs from
    ./QA/cMedQA2-master/ and writes deduplicated
    {'prompt', 'output', 'source'} JSON lines to ./collect_datasets/cMedQA2.txt.
    All files are opened with context managers (the original leaked every
    handle, including the output writer).
    """
    # question id -> [content]
    questions = dict()
    with open('./QA/cMedQA2-master/question.csv') as question_file:
        questions_data = csv.reader(question_file)
        next(questions_data)  # skip header row
        for line in questions_data:
            questions[line[0]] = [line[1]]
    # answer id -> content (column 1 is the question id, unused here)
    answers = dict()
    with open('./QA/cMedQA2-master/answer.csv') as answer_file:
        answers_data = csv.reader(answer_file)
        next(answers_data)
        for line in answers_data:
            answers[line[0]] = line[2]
    used_set = set()
    with open('./collect_datasets/cMedQA2.txt', 'w') as writer:

        def _write_pair(que_id, ans_id):
            # Deduplicate on the (question, answer) id pair across both splits.
            used_id = 'que:{},ans_id{}'.format(que_id, ans_id)
            if used_id in used_set:
                return
            used_set.add(used_id)
            que = questions[que_id][0]
            # NOTE(review): .replace('', '') is a no-op; the character being
            # stripped appears to have been lost during extraction — confirm
            # against the original script.
            ans = answers[ans_id].replace('', '')
            item = {'prompt': que, 'output': ans, 'source': 'cMedQA-2'}
            writer.write(json.dumps(item, ensure_ascii=False) + '\n')

        with open('./QA/cMedQA2-master/train_candidates.txt') as train_file:
            train_data = csv.reader(train_file)
            next(train_data)  # header
            for line in train_data:
                _write_pair(line[0], line[1])
        with open('./QA/cMedQA2-master/test_candidates.txt') as test_file:
            test_data = csv.reader(test_file)
            next(test_data)  # header
            for line in test_data:
                # Keep only positive candidates (last column label != '0').
                if line[(- 1)] == '0':
                    continue
                _write_pair(line[0], line[1])
class OptionSeriesSankeyStatesSelectMarker(Options):
    """Accessors for Highcharts sankey select-state marker options.

    Defaults visible below: enabledThreshold 2, lineColor '#ffffff',
    lineWidth 0, radius 4.

    NOTE(review): each option is defined twice (getter then setter) — the
    @property / @<name>.setter decorators appear to have been stripped
    during extraction; as written each second def shadows the first.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def get_icdar_2013_detector_dataset(cache_dir=None, skip_illegible=False):
    """Download (if needed) and parse the ICDAR 2013 text-detection training set.

    Returns a list of (image_path, lines, confidence) tuples where each line
    is a list of (4x2 box array, character) pairs.

    NOTE(review): the url= string literals below were corrupted during
    extraction (the URL text is missing, leaving unbalanced quotes), so this
    block does not currently parse — the original download URLs must be
    restored. The sha256 on the first call also looks truncated.
    """
    if (cache_dir is None):
        cache_dir = tools.get_default_cache_dir()
    main_dir = os.path.join(cache_dir, 'icdar2013')
    training_images_dir = os.path.join(main_dir, 'Challenge2_Training_Task12_Images')
    training_zip_images_path = tools.download_and_verify(url=' cache_dir=main_dir, filename='Challenge2_Training_Task12_Images.zip', sha256='7a57d1699fbb92db3ad82cedaf72e1c422ddd923860d8ace8ded')
    # Extract only when the expected 229 images are not already present.
    if (len(glob.glob(os.path.join(training_images_dir, '*.jpg'))) != 229):
        with zipfile.ZipFile(training_zip_images_path) as zfile:
            zfile.extractall(training_images_dir)
    training_gt_dir = os.path.join(main_dir, 'Challenge2_Training_Task2_GT')
    training_zip_gt_path = tools.download_and_verify(url=' cache_dir=main_dir, filename='Challenge2_Training_Task2_GT.zip', sha256='4cedd5b1e33dc4354058f5967221ac85dbdf91a99b30f3ab1ecdf42786a9d027')
    if (len(glob.glob(os.path.join(training_gt_dir, '*.txt'))) != 229):
        with zipfile.ZipFile(training_zip_gt_path) as zfile:
            zfile.extractall(training_gt_dir)
    dataset = []
    for gt_filepath in glob.glob(os.path.join(training_gt_dir, '*.txt')):
        # Ground-truth files are named '<image_id>_GT.txt'.
        image_id = os.path.split(gt_filepath)[1].split('_')[0]
        image_path = os.path.join(training_images_dir, (image_id + '.jpg'))
        lines = []
        with open(gt_filepath, 'r', encoding='utf8') as f:
            current_line: typing.List[typing.Tuple[(np.ndarray, str)]] = []
            # Blank rows delimit text lines within a ground-truth file.
            for raw_row in f.read().split('\n'):
                if (raw_row == ''):
                    lines.append(current_line)
                    current_line = []
                else:
                    row = raw_row.split(' ')[5:]
                    # Last field is the quoted character; strip the quotes.
                    character = row[(- 1)][1:(- 1)]
                    if ((character == '') and skip_illegible):
                        continue
                    (x1, y1, x2, y2) = map(int, row[:4])
                    current_line.append((np.array([[x1, y1], [x2, y1], [x2, y2], [x1, y2]]), character))
        # Drop empty lines produced by consecutive blank rows.
        lines = [line for line in lines if line]
        dataset.append((image_path, lines, 1))
    return dataset
class OptionSeriesWindbarbDataMarker(Options):
    """Accessors for Highcharts windbarb per-point marker options.

    Defaults visible below: enabledThreshold 2, lineColor '#ffffff',
    lineWidth 0, radius 4. The states() accessor returns a typed child
    Options object.

    NOTE(review): each scalar option is defined twice (getter then setter) —
    the @property / @<name>.setter decorators appear to have been stripped
    during extraction; as written each second def shadows the first.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesWindbarbDataMarkerStates':
        return self._config_sub_data('states', OptionSeriesWindbarbDataMarkerStates)

    def symbol(self):
        return self._config_get(None)

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class EnglishSpeechCounter(SpeechToSpeechAgent):
    """Demo speech-to-speech agent that speaks the elapsed source length.

    Reads audio until ``wait_seconds`` of source have accumulated, then
    emits a synthesized utterance ("<N> mississippi") for the current
    length in seconds.
    """

    def __init__(self, args):
        super().__init__(args)
        self.wait_seconds = args.wait_seconds
        self.tts_model = TTSModel()

    @staticmethod
    def add_args(parser):
        # Fixed: the agent framework invokes this on the class, so it must be
        # a staticmethod -- the original bare ``def add_args(parser)`` would
        # have received the parser as ``self`` if called on an instance.
        parser.add_argument('--wait-seconds', default=1, type=int)

    def policy(self, states: Optional[AgentStates] = None):
        """Return ReadAction until enough audio has arrived, else speak the count."""
        if states is None:
            states = self.states
        if states.source_sample_rate == 0:
            # Sample rate is unknown until the first audio chunk arrives.
            length_in_seconds = 0
        else:
            length_in_seconds = round(len(states.source) / states.source_sample_rate)
        if not states.source_finished and length_in_seconds < self.wait_seconds:
            return ReadAction()
        samples, fs = self.tts_model.synthesize(f'{length_in_seconds} mississippi')
        return WriteAction(
            SpeechSegment(content=samples, sample_rate=fs, finished=states.source_finished),
            finished=states.source_finished,
        )
class OptionSeriesSankeyEvents(Options):
    """Event-handler options for a sankey series.

    NOTE(review): each event is defined twice -- a zero-argument getter
    followed by a same-named setter; only the later ``def`` survives in
    plain Python.  This generated pattern normally carries property
    decorators that appear stripped in this copy -- confirm against the
    generator's output.  Definition order is significant; do not reorder.
    Each setter stores an arbitrary callback value under the event's name.
    """
    def afterAnimate(self):
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
class TestSVSignature(unittest.TestCase):
    """Unit tests for SV signature accessors, distances, and string output."""

    def test_accessors(self):
        """get_source/get_key expose contig, span, and midpoint-based key."""
        sig = SignatureDeletion('chr1', 100, 300, 'cigar', 'read1')
        self.assertEqual(sig.get_source(), ('chr1', 100, 300))
        self.assertEqual(sig.get_key(), ('DEL', 'chr1', 200))

    def test_position_distance_to(self):
        """Distance is finite only for same type and contig."""
        base = SignatureDeletion('chr1', 100, 300, 'cigar', 'read1')
        same_contig = SignatureDeletion('chr1', 150, 200, 'cigar', 'read2')
        other_contig = SignatureDeletion('chr2', 150, 200, 'cigar', 'read2')
        other_type = SignatureInsertion(
            'chr1', 150, 200, 'cigar', 'read2',
            'ACGTAGTAGCTAGCTTTGCTAGCATTAGCGACTGCTTACGCAGCTCCCTA')
        self.assertEqual(base.position_distance_to(same_contig), 25)
        self.assertEqual(base.position_distance_to(other_contig), float('Inf'))
        self.assertEqual(base.position_distance_to(other_type), float('Inf'))

    def test_as_string(self):
        """as_string joins the fields with the given separator (default tab)."""
        sig = SignatureDeletion('chr1', 100, 300, 'cigar', 'read1')
        self.assertEqual(sig.as_string(), 'chr1\t100\t300\tDEL;cigar\tread1')
        self.assertEqual(sig.as_string(':'), 'chr1:100:300:DEL;cigar:read1')
class ErtWorkflowDocumentation(_ErtDocumentation):
    """Sphinx directive rendering documentation for ERT workflow jobs."""

    # Collected once at import time from the plugin manager.
    pm = ErtPluginManager()
    _JOBS = pm.get_documentation_for_workflows()
    _TITLE = 'Workflow jobs'
    _SECTION_ID = 'ert-workflow-jobs'

    def run(self) -> List[nodes.section]:
        """Delegate rendering to the shared job-documentation generator."""
        cls = ErtWorkflowDocumentation
        return self._generate_job_documentation(cls._JOBS, cls._SECTION_ID, cls._TITLE)
class TestXYZD65Serialize(util.ColorAssertsPyTest):
    """Serialization round-trip tests for the ``xyz-d65`` color space."""

    # (input color, to_string options, expected serialization)
    COLORS = [('color(xyz-d65 0 0.3 0.75 / 0.5)', {}, 'color(xyz-d65 0 0.3 0.75 / 0.5)'), ('color(xyz-d65 0 0.3 0.75)', {'alpha': True}, 'color(xyz-d65 0 0.3 0.75 / 1)'), ('color(xyz-d65 0 0.3 0.75 / 0.5)', {'alpha': False}, 'color(xyz-d65 0 0.3 0.75)'), ('color(xyz-d65 none 0.3 0.75)', {}, 'color(xyz-d65 0 0.3 0.75)'), ('color(xyz-d65 none 0.3 0.75)', {'none': True}, 'color(xyz-d65 none 0.3 0.75)'), ('color(xyz-d65 1.2 0.2 0)', {}, 'color(xyz-d65 1.2 0.2 0)'), ('color(xyz-d65 1.2 0.2 0)', {'fit': False}, 'color(xyz-d65 1.2 0.2 0)')]

    # Fixed: the decorator had lost its ``@pytest.mark`` prefix -- a bare
    # ``.parametrize(...)`` line is a syntax error.
    @pytest.mark.parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        """Serializing ``color1`` with ``options`` must yield ``color2``."""
        self.assertEqual(Color(color1).to_string(**options), color2)
def extractRiftxrCom(item):
    """Parse a riftxr.com feed item into a release message.

    Returns None for previews or titles without a volume/chapter, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, display name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def SegmentByPeaks(data, peaks, weights=None):
    """Replace each inter-peak segment of ``data`` with its (weighted) mean.

    ``peaks`` holds breakpoint indices; segments are ``[0, p0)``,
    ``[p0, p1)``, ..., ``[pk, len(data))``.  When ``weights`` is supplied
    and a segment has positive total weight, the weighted average is used;
    otherwise the plain mean.
    """
    segmented = np.zeros_like(data)
    starts = np.insert(peaks, 0, 0)
    ends = np.append(peaks, len(data))
    for lo, hi in zip(starts, ends):
        chunk = data[lo:hi]
        if weights is not None and weights[lo:hi].sum() > 0:
            value = np.average(chunk, weights=weights[lo:hi])
        else:
            # No weights (or all-zero weights in this segment): plain mean.
            value = np.mean(chunk)
        segmented[lo:hi] = value
    return segmented
def _passenger_prob(model, passenger):
    """Predict the survival probability for a single passenger dict."""
    frame = pd.DataFrame.from_dict([passenger], orient='columns')
    X, _ = get_feats_and_labels(prep_df(frame))
    return model.predict(X)[0]


def test_rf_directional_expectation(dummy_titanic_rf, dummy_passengers):
    """Perturbing sex/class/fare must move RF predictions in the expected direction.

    Refactored: the original repeated the build-frame/featurize/predict
    sequence seven times; it is now factored into ``_passenger_prob``.
    """
    model = dummy_titanic_rf
    p1, p2 = dummy_passengers

    # p1 is a third-class male; improving either attribute should help.
    p1_prob = _passenger_prob(model, p1)
    p1_female = {**p1, 'Name': ' Mrs. Owen', 'Sex': 'female'}
    p1_class = {**p1, 'Pclass': 1}
    assert p1_prob < _passenger_prob(model, p1_female), 'Changing gender from male to female should increase survival probability.'
    assert p1_prob < _passenger_prob(model, p1_class), 'Changing class from 3 to 1 should increase survival probability.'

    # p2 is a first-class female; worsening any attribute should hurt.
    p2_prob = _passenger_prob(model, p2)
    p2_male = {**p2, 'Name': ' Mr. John', 'Sex': 'male'}
    p2_class = {**p2, 'Pclass': 3}
    p2_fare = {**p2, 'Fare': 5}
    assert p2_prob > _passenger_prob(model, p2_male), 'Changing gender from female to male should decrease survival probability.'
    assert p2_prob > _passenger_prob(model, p2_class), 'Changing class from 1 to 3 should decrease survival probability.'
    assert p2_prob > _passenger_prob(model, p2_fare), 'Changing fare from 72 to 5 should decrease survival probability.'
def service_definition_file(servicename):
    """Return the newest gzipped service-definition path for ``servicename``.

    Matching files sort so the highest version comes last; raises
    IndexError if no definition file matches the service name.
    """
    # Fixed: the original called boto_service_definition_files() twice and
    # discarded the first result; call once and reuse it.
    candidates = fnmatch.filter(
        boto_service_definition_files(),
        '**/' + servicename + '/*/service-*.json.gz')
    candidates.sort()
    return candidates[-1]
class CommandResponder(cmdrsp.CommandResponderBase):
    """SNMP command responder that proxies incoming PDUs to a distant agent.

    Each supported request PDU type is re-issued through a matching one-shot
    command generator; the remote response (or a locally built error
    response) is relayed back to the original requester.
    """
    # Request-PDU tag -> generator used to forward that request type.
    CMDGEN_MAP = {v2c.GetRequestPDU.tagSet: cmdgen.GetCommandGenerator(), v2c.SetRequestPDU.tagSet: cmdgen.SetCommandGenerator(), v2c.GetNextRequestPDU.tagSet: cmdgen.NextCommandGeneratorSingleRun(), v2c.GetBulkRequestPDU.tagSet: cmdgen.BulkCommandGeneratorSingleRun()}
    SUPPORTED_PDU_TYPES = tuple(CMDGEN_MAP)
    def handleMgmtOperation(self, snmpEngine, stateReference, contextName, PDU, acInfo):
        # Forward the PDU to the 'distant-agent' target; the response is
        # delivered asynchronously to handleResponsePdu with cbCtx attached.
        cbCtx = (stateReference, PDU)
        contextEngineId = None
        try:
            self.CMDGEN_MAP[PDU.tagSet].sendPdu(snmpEngine, 'distant-agent', contextEngineId, contextName, PDU, self.handleResponsePdu, cbCtx)
        except error.PySnmpError:
            # Forwarding failed outright -- synthesize an error response.
            self.handleResponsePdu(snmpEngine, stateReference, 'error', None, cbCtx)
    def handleResponsePdu(self, snmpEngine, sendRequestHandle, errorIndication, PDU, cbCtx):
        # Callback for the forwarded request; relays the response upstream.
        (stateReference, reqPDU) = cbCtx
        if errorIndication:
            # Build a response from the original request and flag error status 5.
            PDU = v2c.apiPDU.getResponse(reqPDU)
            # NOTE(review): pysnmp examples usually write
            # v2c.apiPDU.setErrorStatus(PDU, 5); calling it via the PDU object
            # with the PDU repeated as the first argument looks odd -- confirm.
            PDU.setErrorStatus(PDU, 5)
        self.sendPdu(snmpEngine, stateReference, PDU)
        self.releaseStateInformation(stateReference)
def del_port(manager, system_id, bridge_name, fn):
    """Request removal of a port (selected by ``fn``) from ``bridge_name``.

    ``fn`` receives the OVSDB tables and returns the port row to delete, or
    a falsy value to abort.  Returns the result of sending the modify
    request through ``manager``.
    """
    def _delete_port(tables, *_):
        # Guard clauses: silently do nothing if bridge or port is absent.
        bridge = _get_bridge(tables, bridge_name)
        if not bridge:
            return
        target = fn(tables)
        if not target:
            return
        # Reassign the ports list so the OVSDB layer sees the mutation.
        remaining = bridge.ports
        remaining.remove(target)
        bridge.ports = remaining
    request = ovsdb_event.EventModifyRequest(system_id, _delete_port)
    return manager.send_request(request)
def main():
    """Verify the copyright header of every tracked Python file.

    Returns 1 when any file fails verification, None otherwise.
    """
    formula = {'py_files': re.compile('^(#!.*\\n)\\n*', re.MULTILINE)}
    sample = fetch_samplefile()
    failures = []
    for path in fetch_files(sample.keys()):
        if verify_file(path, sample, formula):
            continue
        rel = os.path.relpath(path)
        failures.append(rel)
        print('Copyright structure missing or incorrect for: ', rel)
    if failures:
        return 1
    print('Copyright structure intact for all python files ')
    return
class ShopifyCustomer(EcommerceCustomer):
    """Syncs a Shopify customer (plus addresses and contact) into the ERP.

    Specialization of EcommerceCustomer wired to the Shopify setting
    doctype; address/contact payloads follow Shopify's webhook schema.
    """
    def __init__(self, customer_id: str):
        # Integration settings supply e.g. the default customer group.
        self.setting = frappe.get_doc(SETTING_DOCTYPE)
        super().__init__(customer_id, CUSTOMER_ID_FIELD, MODULE_NAME)
    def sync_customer(self, customer: Dict[(str, Any)]) -> None:
        """Create/update the customer, then its addresses and contact."""
        customer_name = ((cstr(customer.get('first_name')) + ' ') + cstr(customer.get('last_name')))
        if (len(customer_name.strip()) == 0):
            # Fall back to the e-mail when Shopify has no name on file.
            customer_name = customer.get('email')
        customer_group = self.setting.customer_group
        super().sync_customer(customer_name, customer_group)
        # Shopify may supply an explicit billing address or only a default one.
        billing_address = (customer.get('billing_address', {}) or customer.get('default_address'))
        shipping_address = customer.get('shipping_address', {})
        if billing_address:
            self.create_customer_address(customer_name, billing_address, address_type='Billing', email=customer.get('email'))
        if shipping_address:
            self.create_customer_address(customer_name, shipping_address, address_type='Shipping', email=customer.get('email'))
        self.create_customer_contact(customer)
    def create_customer_address(self, customer_name, shopify_address: Dict[(str, Any)], address_type: str='Billing', email: Optional[str]=None) -> None:
        """Map a Shopify address payload to ERP fields and store it."""
        address_fields = _map_address_fields(shopify_address, customer_name, address_type, email)
        super().create_customer_address(address_fields)
    def update_existing_addresses(self, customer):
        # Refresh (or create) both address types from a customer payload.
        billing_address = (customer.get('billing_address', {}) or customer.get('default_address'))
        shipping_address = customer.get('shipping_address', {})
        customer_name = ((cstr(customer.get('first_name')) + ' ') + cstr(customer.get('last_name')))
        email = customer.get('email')
        if billing_address:
            self._update_existing_address(customer_name, billing_address, 'Billing', email)
        if shipping_address:
            self._update_existing_address(customer_name, shipping_address, 'Shipping', email)
    def _update_existing_address(self, customer_name, shopify_address: Dict[(str, Any)], address_type: str='Billing', email: Optional[str]=None) -> None:
        """Update the stored address of ``address_type``; create it if absent."""
        old_address = self.get_customer_address_doc(address_type)
        if (not old_address):
            self.create_customer_address(customer_name, shopify_address, address_type, email)
        else:
            # Keep the document's identity fields; overwrite everything else.
            exclude_in_update = ['address_title', 'address_type']
            new_values = _map_address_fields(shopify_address, customer_name, address_type, email)
            old_address.update({k: v for (k, v) in new_values.items() if (k not in exclude_in_update)})
            # Shopify payloads may lack fields the ERP considers mandatory.
            old_address.flags.ignore_mandatory = True
            old_address.save()
    def create_customer_contact(self, shopify_customer: Dict[(str, Any)]) -> None:
        """Create a Contact record; requires at least a first name and e-mail."""
        if (not (shopify_customer.get('first_name') and shopify_customer.get('email'))):
            return
        contact_fields = {'status': 'Passive', 'first_name': shopify_customer.get('first_name'), 'last_name': shopify_customer.get('last_name'), 'unsubscribed': (not shopify_customer.get('accepts_marketing'))}
        if shopify_customer.get('email'):
            contact_fields['email_ids'] = [{'email_id': shopify_customer.get('email'), 'is_primary': True}]
        # Prefer the customer-level phone, fall back to the default address's.
        phone_no = (shopify_customer.get('phone') or shopify_customer.get('default_address', {}).get('phone'))
        if validate_phone_number(phone_no, throw=False):
            contact_fields['phone_nos'] = [{'phone': phone_no, 'is_primary_phone': True}]
        super().create_customer_contact(contact_fields)
class spawnAttachForm(QDialog, Ui_SpawnAttachDialog):
    """Dialog for choosing a package name to spawn/attach to.

    Previously-used package names are persisted to a history file and
    offered in a list widget for quick re-selection.
    """
    # Single source of truth for the history file (the original repeated
    # this literal in three places).
    PACKAGE_HISTORY_PATH = './tmp/spawnPackage.txt'

    def __init__(self, parent=None):
        super(spawnAttachForm, self).__init__(parent)
        self.setupUi(self)
        self.setWindowOpacity(0.93)
        self.btnSubmit.clicked.connect(self.submit)
        self.packageName = ''
        self.packages = []
        self.listPackage.itemClicked.connect(self.packageClick)
        self.flushList()

    def flushList(self):
        """Reload the package-history list widget from disk."""
        self.packages.clear()
        self.listPackage.clear()
        if not os.path.exists(self.PACKAGE_HISTORY_PATH):
            return
        with open(self.PACKAGE_HISTORY_PATH, 'r') as packageFile:
            packageData = packageFile.read()
        for item in packageData.split('\n'):
            # Fixed: skip blank entries (the trailing newline used to add an
            # empty row to the list) as well as duplicates.
            if not item or item in self.packages:
                continue
            self.packages.append(item)
            self.listPackage.addItem(item)

    def packageClick(self, item):
        # Copy the clicked history entry into the text field.
        self.txtPackage.setText(item.text())

    def submit(self):
        """Validate the entered package name, persist it, and accept the dialog."""
        packageName = self.txtPackage.text()
        if len(packageName) <= 0:
            QMessageBox().information(self, 'hint', 'missing packageName')
            return
        self.packageName = packageName
        if packageName not in self.packages:
            self.listPackage.addItem(packageName)
            # Fixed: also record it in self.packages so repeated submits of
            # the same name are not re-added to the widget.
            self.packages.append(packageName)
        # Fixed: append to the history instead of overwriting it -- mode 'w'
        # kept only the most recent package, defeating the history list.
        with open(self.PACKAGE_HISTORY_PATH, 'a') as packageFile:
            packageFile.write(packageName + '\n')
        self.accept()
def test_slate_hybridization_wrong_option():
    """An unknown local fieldsplit type ('frog') must raise ValueError."""
    a, L, W = setup_poisson()
    w = Function(W)
    params = {'mat_type': 'matfree', 'ksp_type': 'preonly', 'pc_type': 'python', 'pc_python_type': 'firedrake.HybridizationPC', 'hybridization': {'ksp_type': 'preonly', 'pc_type': 'lu', 'localsolve': {'ksp_type': 'preonly', 'pc_type': 'fieldsplit', 'pc_fieldsplit_type': 'frog'}}}
    eq = (a == L)
    problem = LinearVariationalProblem(eq.lhs, eq.rhs, w)
    solver = LinearVariationalSolver(problem, solver_parameters=params)
    with pytest.raises(ValueError):
        from firedrake.petsc import PETSc
        # Silence PETSc's own error output while the expected failure occurs.
        PETSc.Sys.pushErrorHandler('ignore')
        solver.solve()
        # Fixed: popErrorHandler takes no arguments in the petsc4py API; the
        # original passed 'ignore', which would raise TypeError.  (Note this
        # line is unreachable when solve() raises as expected.)
        PETSc.Sys.popErrorHandler()
# Fixed: the decorator had lost its "@mark." prefix -- a bare tuple/list
# expression before the def is a syntax error, not a parametrization.
@mark.parametrize(
    ('cfg', 'expected'),
    [
        param({'_target_': 'tests.instantiate.ArgsClass', '_args_': ['${.1}', 2]}, ArgsClass(2, 2), id='config:args_only'),
        param({'_target_': 'tests.instantiate.ArgsClass', '_args_': [1], 'foo': '${._args_}'}, ArgsClass(1, foo=[1]), id='config:args+kwargs'),
        param({'_target_': 'tests.instantiate.ArgsClass', 'foo': '${._target_}'}, ArgsClass(foo='tests.instantiate.ArgsClass'), id='config:kwargs_only)'),
    ],
)
def test_instantiate_args_kwargs_with_interpolation(cfg: Any, expected: Any) -> None:
    """Interpolations inside _args_/kwargs must resolve before instantiation."""
    assert instantiate(cfg) == expected
def setup():
    """Interactively pick serial port(s) and baudrate, then connect MyCobots.

    Appends each successfully connected MyCobot to the global ``mc`` list;
    exits the process on missing devices, bad input, or connection failure.
    """
    global mc
    print('')
    available = list(serial.tools.list_ports.comports())
    for idx, port in enumerate(available):
        print('{} : {}'.format(idx, port))
    count = len(available)
    if count == 0:
        print('The connected device was not detected. Please try reconnecting.')
        exit(1)
    raw = input("\nPlease input 0 - {} to choice, you can choice many like: '2,1,3':".format(count))
    try:
        chosen = [int(token) for token in raw.split(',')]
    except Exception:
        print('Error: Input format error.')
        exit(1)
    # Port description looks like "<device> - <name>"; keep the device part.
    ports = [str(available[i]).split(' - ')[0].strip() for i in chosen]
    print(ports)
    print('')
    baud = 115200
    raw_baud = input('Please input baud(default:115200):')
    try:
        baud = int(raw_baud)
    except Exception:
        # Keep the default on empty/invalid input.
        pass
    print(baud)
    print('')
    for port in ports:
        try:
            robot = MyCobot(port, baud)
        except Exception as e:
            print(e)
            exit(1)
        mc.append(robot)
class AdminSalesInvoicesSchema(Schema):
    """JSON:API schema for admin sales invoices."""

    class Meta():
        type_ = 'admin-sales-invoices'
        self_view = 'v1.admin_sales_invoices'
        inflect = dasherize

    id = fields.String()
    identifier = fields.String()
    status = fields.String()
    amount = fields.Float()
    created_at = fields.DateTime()
    completed_at = fields.DateTime()
    event_name = fields.Method('format_event_name')
    sent_to = fields.Method('format_sent_to')

    def format_event_name(self, obj):
        # Fixed: marshmallow passes the object being serialized as the first
        # argument of a Method-field serializer; the original read
        # ``self.event`` (the Schema has no such attribute) and would fail
        # at serialization time.
        return f'{obj.event.name}'

    def format_sent_to(self, obj):
        # Fixed: same Method-field signature correction as above.
        return f'{obj.user.fullname} <{obj.user.email}>'
class ThreeDSImporter(VRMLImporter):
    """Scene importer for 3D Studio (.3ds) files built on tvtk's ThreeDSImporter."""
    # Underlying tvtk importer; normals are computed on load.
    reader = Instance(tvtk.ThreeDSImporter, args=(), kw={'compute_normals': True}, allow_none=False, record=True)
    def has_output_port(self):
        # The tvtk importer exposes an output port for downstream pipelines.
        return True
    def get_output_object(self):
        return self.reader.output_port
    def _file_name_changed(self, value):
        # Traits change handler: rebuild the reader for the new file name.
        self.reader = reader = tvtk.ThreeDSImporter(compute_normals=True)
        reader.file_name = value
        if (self.scene is not None):
            # Importers render directly into the scene's render window.
            self.reader.render_window = self.scene.render_window
        # Display name mirrors the base class style, preserving '[Hidden]'.
        name = ('3DStudio file (%s)' % basename(self.file_name))
        if ('[Hidden]' in self.name):
            name += ' [Hidden]'
        self.name = name
        # NOTE(review): base VRMLImporter may use ``self._file_path.set(value)``;
        # confirm ``trait_set`` is the intended traits API here.
        self._file_path.trait_set(value)
        self._update_reader()
        self.render()
def gist_gray(range, **traits):
_data = dict(red=[(0.0, 0.0, 0.0), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0.5, 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), 
(0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (1.0, 0., 0.)], green=[(0.0, 0.0, 0.0), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), 
(0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0.5, 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 
0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (1.0, 0., 0.)], blue=[(0.0, 0.0, 0.0), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 
0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0.5, 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 
0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (0., 0., 0.), (1.0, 0., 0.)])
return ColorMapper.from_segment_map(_data, range=range, **traits) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.