content
stringlengths
0
1.55M
<import_stmt>logging<import_stmt>time<import_from_stmt>tests.common.helpers.assertions pytest_assert<line_sep>logger=logging.getLogger(__name__)<def_stmt>join_master duthost master_vip<block_start>""" Joins DUT to Kubernetes master Args: duthost: DUT host object master_vip: VIP of high availability Kubernetes master If join fails, test will fail at the assertion to check_connected """<line_sep>logger.info("Joining DUT to Kubernetes master")<line_sep>dut_join_cmds=['sudo config kube server disable on' 'sudo config kube server ip {}'.format(master_vip) 'sudo config kube server disable off']<line_sep>duthost.shell_cmds(cmds=dut_join_cmds)<line_sep>pytest_assert(poll_for_status_change(duthost <true>) "DUT failed to successfully join Kubernetes master")<block_end><def_stmt>make_vip_unreachable duthost master_vip<block_start>""" Makes Kubernetes master VIP unreachable from SONiC DUT by configuring iptables rules. Cleans preexisting iptables rules for VIP. Args: duthost: DUT host object master_vip: VIP of high availability Kubernetes master """<line_sep>logger.info("Making Kubernetes master VIP unreachable from DUT")<line_sep>clean_vip_iptables_rules(duthost master_vip)<line_sep>duthost.shell('sudo iptables -A INPUT -s {} -j DROP'.format(master_vip))<line_sep>duthost.shell('sudo iptables -A OUTPUT -d {} -j DROP'.format(master_vip))<block_end><def_stmt>make_vip_reachable duthost master_vip<block_start>""" Makes Kubernetes master VIP reachable from SONiC DUT by removing any iptables rules associated with the VIP. Args: duthost: DUT host object master_vip: VIP of high availability Kubernetes master """<line_sep>logger.info("Making Kubernetes master VIP reachable from DUT")<line_sep>clean_vip_iptables_rules(duthost master_vip)<block_end><def_stmt>clean_vip_iptables_rules duthost master_vip<block_start>""" Removes all iptables rules associated with the VIP. 
Args: duthost: DUT host object master_vip: VIP of high availability Kubernetes master """<line_sep>iptables_rules=duthost.shell('sudo iptables -S | grep {} || true'.format(master_vip))["stdout_lines"]<line_sep>logger.info('iptables rules: {}'.format(iptables_rules))<for_stmt>line iptables_rules<block_start><if_stmt>line<block_start>duthost.shell('sudo iptables -D {}'.format(line[2:]))<block_end><block_end><block_end><def_stmt>check_connected duthost<block_start>""" Checks if the DUT already shows status 'connected' to Kubernetes master Args: duthost: DUT host object Returns: True if connected, False if not connected """<line_sep>kube_server_status=duthost.shell('show kube server')["stdout_lines"]<line_sep>logger.info("Kube server status: {}".format(kube_server_status))<for_stmt>line kube_server_status<block_start><if_stmt>line.startswith("KUBERNETES_MASTER SERVER connected")<block_start><return>line.endswith("true")<block_end><block_end>logger.info("Kubernetes server check_connected failed to check server status")<block_end><def_stmt>poll_for_status_change duthost exp_status poll_wait_secs=5 min_wait_time=20 max_wait_time=120<block_start>""" Polls to see if kube server connected status updates as expected Args: duthost: DUT host object exp_status: expected server connected status once processes are synced poll_wait_secs: seconds between each server connected status poll. Default: 5 seconds min_wait_time: seconds before starting poll of server connected status. Default: 20 seconds max_wait_time: maximum amount of time to spend polling for status change. 
Default: 120 seconds Returns: True if server connected status updates as expected by max_wait_time False if server connected status fails to update as expected by max_wait_time """<line_sep>time.sleep(min_wait_time)<line_sep>timeout_wait_secs=max_wait_time-min_wait_time<while_stmt>(timeout_wait_secs<g>0)<block_start><if_stmt>(check_connected(duthost)<eq>exp_status)<block_start>logging.info("Time taken to update Kube server status: {} seconds".format(timeout_wait_secs))<line_sep><return><true><block_end>time.sleep(poll_wait_secs)<line_sep>timeout_wait_secs<augsub>poll_wait_secs<block_end><return><false><block_end>
# -*- coding: ascii -*- # # Copyright 2007, 2008, 2009, 2010, 2011 # <NAME> or his licensors, as applicable # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ =================== Data distribution =================== This module provides tools to simplify data distribution. """<line_sep>__author__=u"<NAME>"<line_sep>__docformat__="restructuredtext en"<import_from_stmt>distutils filelist<as>_filelist<import_stmt>os<as>_os<import_stmt>posixpath<as>_posixpath<import_stmt>sys<as>_sys<import_from_stmt>_setup commands<as>_commands<def_stmt>splitpath path<block_start>""" Split a path """<line_sep>drive,path='' _os.path.normpath(path)<try_stmt><block_start>splitunc=_os.path.splitunc<block_end><except_stmt>AttributeError<block_start><pass><block_end><else_stmt><block_start>drive,path=splitunc(path)<block_end><if_stmt><not>drive<block_start>drive,path=_os.path.splitdrive(path)<block_end>elems=[]<try_stmt><block_start>sep=_os.path.sep<block_end><except_stmt>AttributeError<block_start>sep=_os.path.join('1' '2')[1:-1]<block_end><while_stmt>1<block_start>prefix,path=_os.path.split(path)<line_sep>elems.append(path)<if_stmt>prefix<in>('' sep)<block_start>drive=_os.path.join(drive prefix)<line_sep><break><block_end>path=prefix<block_end>elems.reverse()<line_sep><return>drive elems<block_end><def_stmt>finalizer installer<block_start>""" Finalize install_data """<line_sep>data_files=[]<for_stmt>item installer.data_files<block_start><if_stmt><not>isinstance(item 
Data)<block_start>data_files.append(item)<line_sep><continue><block_end>data_files.extend(item.flatten(installer))<block_end>installer.data_files=data_files<block_end><class_stmt>Data(object)<block_start>""" File list container """<def_stmt>__init__ self files target=<none> preserve=0 strip=0 prefix=<none><block_start>""" Initialization """<line_sep>self._files=files<line_sep>self._target=target<line_sep>self._preserve=preserve<line_sep>self._strip=strip<line_sep>self._prefix=prefix<line_sep>self.fixup_commands()<block_end><def_stmt>fixup_commands self<block_start><pass><block_end><def_stmt>from_templates cls *templates **kwargs<block_start>""" Initialize from template """<line_sep>files=_filelist.FileList()<for_stmt>tpl templates<block_start><for_stmt>line tpl.split(';')<block_start>files.process_template_line(line.strip())<block_end><block_end>files.sort()<line_sep>files.remove_duplicates()<line_sep>result=[]<for_stmt>filename files.files<block_start>_,elems=splitpath(filename)<if_stmt>'.svn'<in>elems<or>'.git'<in>elems<block_start><continue><block_end>result.append(filename)<block_end><return>cls(result **kwargs)<block_end>from_templates=classmethod(from_templates)<def_stmt>flatten self installer<block_start>""" Flatten the file list to (target, file) tuples """<line_sep># pylint: disable = W0613 <if_stmt>self._prefix<block_start>_,prefix=splitpath(self._prefix)<line_sep>telems=prefix<block_end><else_stmt><block_start>telems=[]<block_end>tmap={}<for_stmt>fname self._files<block_start>(_ name),target=splitpath(fname) telems<if_stmt>self._preserve<block_start><if_stmt>self._strip<block_start>name=name[max(0 min(self._strip len(name)-1)):]<block_end><if_stmt>len(name)<g>1<block_start>target=telems+name[:-1]<block_end><block_end>tmap.setdefault(_posixpath.join(*target) []).append(fname)<block_end><return>tmap.items()<block_end><block_end><class_stmt>Documentation(Data)<block_start>""" Documentation container """<def_stmt>fixup_commands 
self<block_start>_commands.add_option('install_data' 'without-docs' help_text='Do not install documentation files' inherit='install' )<line_sep>_commands.add_finalizer('install_data' 'documentation' finalizer)<block_end><def_stmt>flatten self installer<block_start>""" Check if docs should be installed at all """<if_stmt>installer.without_docs<block_start><return>[]<block_end><return>Data.flatten(self installer)<block_end><block_end><class_stmt>Manpages(Documentation)<block_start>""" Manpages container """<def_stmt>dispatch cls files<block_start>""" Automatically dispatch manpages to their target directories """<line_sep>mpmap={}<for_stmt>manpage files<block_start>normalized=_os.path.normpath(manpage)<line_sep>_,ext=_os.path.splitext(normalized)<if_stmt>ext.startswith(_os.path.extsep)<block_start>ext=ext[len(_os.path.extsep):]<block_end>mpmap.setdefault(ext []).append(manpage)<block_end><return>[cls(manpages prefix=_posixpath.join('share' 'man' 'man%s'%section ))<for>section,manpages mpmap.items()]<block_end>dispatch=classmethod(dispatch)<def_stmt>flatten self installer<block_start>""" Check if manpages are suitable """<if_stmt>_sys.platform<eq>'win32'<block_start><return>[]<block_end><return>Documentation.flatten(self installer)<block_end><block_end>
# -*- coding:UTF-8 -*- ''' MIT License Copyright (c) 2018 <NAME> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
'''<line_sep>''' ****************************************************************************** * 文 件:keyboard.py * 概 述:识别单个机械按键的单击、连击(暂未限制连击次数)、长按、短按动作,并返回事件。 * 版 本:V0.10 * 作 者:<NAME> * 日 期:2018年7月26日 * 历 史: 日期 编辑 版本 记录 2018年7月26日 <NAME> V0.10 创建文件 ` ******************************************************************************'''<class_stmt>KEYBOARD<block_start>cont=0<def_stmt>__init__ self _btnKey _tmBtn _btnDef=1 even_djlong=<none> even_lj=<none> _pull=<none><block_start>self.btn=_btnKey<if_stmt>_pull<eq>"UP"<block_start>self.btn.init(_btnKey.IN _btnKey.PULL_UP)<block_end><elif_stmt>_pull<eq>"DOWN"<block_start>self.btn.init(_btnKey.IN _btnKey.PULL_DOWN)<block_end><else_stmt><block_start>self.btn.init(_btnKey.IN)<block_end>self.btnDef=_btnDef<line_sep>self.eve_btnLon=even_djlong<line_sep>self.evn_Continuous_Clicks=even_lj<line_sep>self.btnLabDown=0# 按钮扫描记次,按下状态 self.btnLabUp=0# 按钮扫描记次,弹起状态 self.Continuous_Clicks=0# 连续点击次数 self.clock=10# 定时器时钟,单位毫秒 _tmBtn.init(freq=(1000/self.clock))<line_sep>_tmBtn.callback(self.doBtnScan)<line_sep>self.staLon=1# 长按标志字,1:长按计时,0:长按计次 self.tLon=3000# 计时或计次延时,单位毫秒 self.TIME_CONT_CLICKS=50# 连击时间间隔,按下和松开的状态保持时间长度,单位,次 <block_end>'''************************************************************************* * 功 能:按键扫描 * 说 明:定时器回调函数,用于识别当前按键是否动作,并判断其动作形式。 * 输入参数: t : 定时器无参回调函数必备,否则调用不成功。 * 输出参数:None * 返 回 值:True **************************************************************************'''<line_sep># 扫描按键,定时中断调用函数 <def_stmt>doBtnScan self t<block_start><global>cont<line_sep>self.btnLabUp=(self.btnLabUp<times>int(<not>(self.btn.value()^int(<not>(self.btnDef)))))+int(<not>(self.btn.value()^int(<not>(self.btnDef))))<line_sep>btdown=self.btnLabDown<line_sep>self.btnLabDown=(self.btnLabDown<times>int(<not>(self.btn.value()^self.btnDef)))+int(<not>(self.btn.value()^self.btnDef))<line_sep># 长按计时/计次 # t1:按键保持按下的时长 
<if_stmt>(self.btnLabDown<times>self.clock)<eq>self.tLon<block_start><if_stmt>self.staLon<eq>1<block_start><if_stmt>self.eve_btnLon<ne><none><block_start>self.eve_btnLon()# 按键长按事件,请勿在事件中执行过长时间的程序,否则会报定时器错误。 <block_end><block_end><elif_stmt>self.staLon<eq>0<block_start><if_stmt>self.eve_btnLon<ne><none><block_start>cont<augadd>1<line_sep>self.eve_btnLon(cont)# 按键长按事件,请勿在事件中执行过长时间的程序,否则会报定时器错误。 <block_end>self.btnLabDown=0<block_end><block_end><if_stmt>self.btnLabUp<g>5<block_start>cont=0<block_end># 连续点击 <if_stmt>(btdown<g>5<and>btdown<l>self.TIME_CONT_CLICKS)<and>self.btnLabUp<g>0<block_start>self.Continuous_Clicks<augadd>1<block_end><if_stmt>(self.btnLabUp<g>self.TIME_CONT_CLICKS)<and>(self.Continuous_Clicks<g>0)<or>(self.btnLabDown<g>self.TIME_CONT_CLICKS)<and>(self.Continuous_Clicks<g>0)<block_start><if_stmt>self.evn_Continuous_Clicks<ne><none><block_start>self.evn_Continuous_Clicks(self.Continuous_Clicks)# 连续点击事件,次数为1时为单击,请勿在事件中执行过长时间的程序,否则会报定时器错误。 <block_end>self.Continuous_Clicks=0<block_end><block_end><block_end>
# # SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # """ Channel sub-package of the Sionna library implementing 3GPP TR39.801 models. """<line_sep># pylint: disable=line-too-long <import_from_stmt>.antenna AntennaElement AntennaPanel PanelArray Antenna AntennaArray<import_from_stmt>.lsp LSP LSPGenerator<import_from_stmt>.rays Rays RaysGenerator<import_from_stmt>.system_level_scenario SystemLevelScenario<import_from_stmt>.rma_scenario RMaScenario<import_from_stmt>.umi_scenario UMiScenario<import_from_stmt>.uma_scenario UMaScenario<import_from_stmt>.channel_coefficients Topology ChannelCoefficientsGenerator<import_from_stmt>.system_level_channel SystemLevelChannel<import_from_stmt>.rma RMa<import_from_stmt>.uma UMa<import_from_stmt>.umi UMi<import_from_stmt>.tdl TDL<import_from_stmt>.cdl CDL<line_sep>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <import_stmt>copy<import_stmt>inspect<import_from_stmt>oslo_config cfg<import_stmt>stevedore<def_stmt>list_opts <block_start>default_config_files=['~/.project/project.conf' '~/project.conf' '/etc/project/project.conf' '/etc/project.conf' ]<line_sep>default_config_dirs=['~/.project/project.conf.d/' '~/project.conf.d/' '/etc/project/project.conf.d/' '/etc/project.conf.d/' ]<line_sep>options=[(<none> cfg.ConfigOpts._list_options_for_discovery(default_config_files default_config_dirs ))]<line_sep>ext_mgr=stevedore.ExtensionManager("oslo.config.driver" invoke_on_load=<true>)<line_sep>source_names=ext_mgr.names()<for_stmt>source_name source_names<block_start>source=ext_mgr[source_name].obj<line_sep>source_options=copy.deepcopy(source.list_options_for_discovery())<line_sep>source_description=inspect.getdoc(source)<line_sep>source_options.insert(0 cfg.StrOpt(name='driver' sample_default=source_name help=cfg._SOURCE_DRIVER_OPTION_HELP ))<line_sep>group_name='sample_{}_source'.format(source_name)<line_sep>group_help='Example of using a {} source'.format(source_name)<if_stmt>source_description<block_start>group_help='{}\n\n{}: {}'.format(group_help source_name source_description )<block_end>group=cfg.OptGroup(name=group_name help=group_help driver_option='driver' dynamic_group_owner='config_source' )<line_sep>options.append((group source_options))<block_end><return>options<block_end>
<import_stmt>calendar<import_stmt>datetime<import_stmt>re<import_stmt>sys<import_from_stmt>dateutil.relativedelta relativedelta<import_stmt>gam<import_from_stmt>gam.var *<import_from_stmt>gam controlflow<import_from_stmt>gam display<import_from_stmt>gam gapi<import_from_stmt>gam utils<import_from_stmt>gam.gapi.directory orgunits<as>gapi_directory_orgunits<def_stmt>build <block_start><return>gam.buildGAPIObject('reports')<block_end>REPORT_CHOICE_MAP={'access':'access_transparency' 'accesstransparency':'access_transparency' 'calendars':'calendar' 'customers':'customer' 'doc':'drive' 'docs':'drive' 'domain':'customer' 'enterprisegroups':'groups_enterprise' 'google+':'gplus' 'group':'groups' 'groupsenterprise':'groups_enterprise' 'hangoutsmeet':'meet' 'logins':'login' 'oauthtoken':'token' 'tokens':'token' 'usage':'usage' 'usageparameters':'usageparameters' 'users':'user' 'useraccounts':'user_accounts' }<def_stmt>showUsageParameters <block_start>rep=build()<line_sep>throw_reasons=[gapi.errors.ErrorReason.INVALID gapi.errors.ErrorReason.BAD_REQUEST]<line_sep>todrive=<false><if_stmt>len(sys.argv)<eq>3<block_start>controlflow.missing_argument_exit('user or customer' 'report usageparameters')<block_end>report=sys.argv[3].lower()<line_sep>titles=['parameter']<if_stmt>report<eq>'customer'<block_start>endpoint=rep.customerUsageReports()<line_sep>kwargs={}<block_end><elif_stmt>report<eq>'user'<block_start>endpoint=rep.userUsageReport()<line_sep>kwargs={'userKey':gam._get_admin_email()}<block_end><else_stmt><block_start>controlflow.expected_argument_exit('usageparameters' ['user' 'customer'] report)<block_end>customerId=GC_Values[GC_CUSTOMER_ID]<if_stmt>customerId<eq>MY_CUSTOMER<block_start>customerId=<none><block_end>tryDate=datetime.date.today().strftime(YYYYMMDD_FORMAT)<line_sep>all_parameters=set()<line_sep>i=4<while_stmt>i<l>len(sys.argv)<block_start>myarg=sys.argv[i].lower().replace('_' 
'')<if_stmt>myarg<eq>'todrive'<block_start>todrive=<true><line_sep>i<augadd>1<block_end><else_stmt><block_start>controlflow.invalid_argument_exit(sys.argv[i] 'gam report usageparameters')<block_end><block_end>fullDataRequired=['all']<while_stmt><true><block_start><try_stmt><block_start>result=gapi.call(endpoint 'get' throw_reasons=throw_reasons date=tryDate customerId=customerId fields='warnings,usageReports(parameters(name))' **kwargs)<line_sep>warnings=result.get('warnings' [])<line_sep>usage=result.get('usageReports')<line_sep>has_reports=bool(usage)<line_sep>fullData,tryDate=_check_full_data_available(warnings tryDate fullDataRequired has_reports)<if_stmt>fullData<l>0<block_start>print('No usage parameters available.')<line_sep>sys.exit(1)<block_end><if_stmt>has_reports<block_start><for_stmt>parameter usage[0]['parameters']<block_start>name=parameter.get('name')<if_stmt>name<block_start>all_parameters.add(name)<block_end><block_end><block_end><if_stmt>fullData<eq>1<block_start><break><block_end><block_end><except_stmt>gapi.errors.GapiInvalidError<as>e<block_start>tryDate=_adjust_date(str(e))<block_end><block_end>csvRows=[]<for_stmt>parameter sorted(all_parameters)<block_start>csvRows.append({'parameter':parameter})<block_end>display.write_csv_file(csvRows titles f'{report.capitalize()} Report Usage Parameters' todrive)<block_end>REPORTS_PARAMETERS_SIMPLE_TYPES=['intValue' 'boolValue' 'datetimeValue' 'stringValue']<def_stmt>showUsage <block_start>rep=build()<line_sep>throw_reasons=[gapi.errors.ErrorReason.INVALID gapi.errors.ErrorReason.BAD_REQUEST]<line_sep>todrive=<false><if_stmt>len(sys.argv)<eq>3<block_start>controlflow.missing_argument_exit('user or customer' 'report 
usage')<block_end>report=sys.argv[3].lower()<line_sep>titles=['date']<if_stmt>report<eq>'customer'<block_start>endpoint=rep.customerUsageReports()<line_sep>kwargs=[{}]<block_end><elif_stmt>report<eq>'user'<block_start>endpoint=rep.userUsageReport()<line_sep>kwargs=[{'userKey':'all'}]<line_sep>titles.append('user')<block_end><else_stmt><block_start>controlflow.expected_argument_exit('usage' ['user' 'customer'] report)<block_end>customerId=GC_Values[GC_CUSTOMER_ID]<if_stmt>customerId<eq>MY_CUSTOMER<block_start>customerId=<none><block_end>parameters=[]<line_sep>start_date=end_date=orgUnitId=<none><line_sep>skip_day_numbers=[]<line_sep>skip_dates=set()<line_sep>one_day=datetime.timedelta(days=1)<line_sep>i=4<while_stmt>i<l>len(sys.argv)<block_start>myarg=sys.argv[i].lower().replace('_' '')<if_stmt>myarg<eq>'startdate'<block_start>start_date=utils.get_yyyymmdd(sys.argv[i+1] returnDateTime=<true>)<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'enddate'<block_start>end_date=utils.get_yyyymmdd(sys.argv[i+1] returnDateTime=<true>)<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'todrive'<block_start>todrive=<true><line_sep>i<augadd>1<block_end><elif_stmt>myarg<in>['fields' 'parameters']<block_start>parameters=sys.argv[i+1].split(',')<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'skipdates'<block_start><for_stmt>skip sys.argv[i+1].split(',')<block_start><if_stmt>skip.find(':')<eq>-1<block_start>skip_dates.add(utils.get_yyyymmdd(skip returnDateTime=<true>))<block_end><else_stmt><block_start>skip_start,skip_end=skip.split(':' 1)<line_sep>skip_start=utils.get_yyyymmdd(skip_start returnDateTime=<true>)<line_sep>skip_end=utils.get_yyyymmdd(skip_end returnDateTime=<true>)<while_stmt>skip_start<le>skip_end<block_start>skip_dates.add(skip_start)<line_sep>skip_start<augadd>one_day<block_end><block_end><block_end>i<augadd>2<block_end><elif_stmt>myarg<eq>'skipdaysofweek'<block_start>skipdaynames=sys.argv[i+1].split(',')<line_sep>dow=[d.lower()<for>d 
calendar.day_abbr]<line_sep>skip_day_numbers=[dow.index(d)<for>d skipdaynames<if>d<in>dow]<line_sep>i<augadd>2<block_end><elif_stmt>report<eq>'user'<and>myarg<in>['orgunit' 'org' 'ou']<block_start>_,orgUnitId=gapi_directory_orgunits.getOrgUnitId(sys.argv[i+1])<line_sep>i<augadd>2<block_end><elif_stmt>report<eq>'user'<and>myarg<in>usergroup_types<block_start>users=gam.getUsersToModify(myarg sys.argv[i+1])<line_sep>kwargs=[{'userKey':user}<for>user users]<line_sep>i<augadd>2<block_end><else_stmt><block_start>controlflow.invalid_argument_exit(sys.argv[i] f'gam report usage {report}')<block_end><block_end><if_stmt>parameters<block_start>titles.extend(parameters)<line_sep>parameters=','.join(parameters)<block_end><else_stmt><block_start>parameters=<none><block_end><if_stmt><not>end_date<block_start>end_date=datetime.datetime.now()<block_end><if_stmt><not>start_date<block_start>start_date=end_date+relativedelta(months=-1)<block_end><if_stmt>orgUnitId<block_start><for_stmt>kw kwargs<block_start>kw['orgUnitID']=orgUnitId<block_end><block_end>usage_on_date=start_date<line_sep>start_date=usage_on_date.strftime(YYYYMMDD_FORMAT)<line_sep>usage_end_date=end_date<line_sep>end_date=end_date.strftime(YYYYMMDD_FORMAT)<line_sep>start_use_date=end_use_date=<none><line_sep>csvRows=[]<while_stmt>usage_on_date<le>usage_end_date<block_start><if_stmt>usage_on_date.weekday()<in>skip_day_numbers<or>usage_on_date<in>skip_dates<block_start>usage_on_date<augadd>one_day<line_sep><continue><block_end>use_date=usage_on_date.strftime(YYYYMMDD_FORMAT)<line_sep>usage_on_date<augadd>one_day<try_stmt><block_start><for_stmt>kwarg kwargs<block_start><try_stmt><block_start>usage=gapi.get_all_pages(endpoint 'get' 'usageReports' throw_reasons=throw_reasons customerId=customerId date=use_date parameters=parameters **kwarg)<block_end><except_stmt>gapi.errors.GapiBadRequestError<block_start><continue><block_end><for_stmt>entity 
usage<block_start>row={'date':use_date}<if_stmt>'userEmail'<in>entity['entity']<block_start>row['user']=entity['entity']['userEmail']<block_end><for_stmt>item entity.get('parameters' [])<block_start><if_stmt>'name'<not><in>item<block_start><continue><block_end>name=item['name']<if_stmt>name<eq>'cros:device_version_distribution'<block_start><for_stmt>cros_ver item['msgValue']<block_start>v=cros_ver['version_number']<line_sep>column_name=f'cros:num_devices_chrome_{v}'<if_stmt>column_name<not><in>titles<block_start>titles.append(column_name)<block_end>row[column_name]=cros_ver['num_devices']<block_end><block_end><else_stmt><block_start><if_stmt><not>name<in>titles<block_start>titles.append(name)<block_end><for_stmt>ptype REPORTS_PARAMETERS_SIMPLE_TYPES<block_start><if_stmt>ptype<in>item<block_start>row[name]=item[ptype]<line_sep><break><block_end><block_end><else_stmt><block_start>row[name]=''<block_end><block_end><block_end><if_stmt><not>start_use_date<block_start>start_use_date=use_date<block_end>end_use_date=use_date<line_sep>csvRows.append(row)<block_end><block_end><block_end><except_stmt>gapi.errors.GapiInvalidError<as>e<block_start>display.print_warning(str(e))<line_sep><break><block_end><block_end><if_stmt>start_use_date<block_start>report_name=f'{report.capitalize()} Usage Report - {start_use_date}:{end_use_date}'<block_end><else_stmt><block_start>report_name=f'{report.capitalize()} Usage Report - {start_date}:{end_date} - No Data'<block_end>display.write_csv_file(csvRows titles report_name todrive)<block_end><def_stmt>showReport <block_start>rep=build()<line_sep>throw_reasons=[gapi.errors.ErrorReason.INVALID]<line_sep>report=sys.argv[2].lower()<line_sep>report=REPORT_CHOICE_MAP.get(report.replace('_' '') 
report)<if_stmt>report<eq>'usage'<block_start>showUsage()<line_sep><return><block_end><if_stmt>report<eq>'usageparameters'<block_start>showUsageParameters()<line_sep><return><block_end>valid_apps=gapi.get_enum_values_minus_unspecified(rep._rootDesc['resources']['activities']['methods']['list']['parameters']['applicationName']['enum'])+['customer' 'user']<if_stmt>report<not><in>valid_apps<block_start>controlflow.expected_argument_exit('report' ', '.join(sorted(valid_apps)) report)<block_end>customerId=GC_Values[GC_CUSTOMER_ID]<if_stmt>customerId<eq>MY_CUSTOMER<block_start>customerId=<none><block_end>filters=parameters=actorIpAddress=groupIdFilter=startTime=endTime=eventName=orgUnitId=<none><line_sep>tryDate=datetime.date.today().strftime(YYYYMMDD_FORMAT)<line_sep>to_drive=<false><line_sep>userKey='all'<line_sep>fullDataRequired=<none><line_sep>i=3<while_stmt>i<l>len(sys.argv)<block_start>myarg=sys.argv[i].lower()<if_stmt>myarg<eq>'date'<block_start>tryDate=utils.get_yyyymmdd(sys.argv[i+1])<line_sep>i<augadd>2<block_end><elif_stmt>myarg<in>['orgunit' 'org' 'ou']<block_start>_,orgUnitId=gapi_directory_orgunits.getOrgUnitId(sys.argv[i+1])<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'fulldatarequired'<block_start>fullDataRequired=[]<line_sep>fdr=sys.argv[i+1].lower()<if_stmt>fdr<and>fdr<eq>'all'<block_start>fullDataRequired='all'<block_end><else_stmt><block_start>fullDataRequired=fdr.replace(',' ' 
').split()<block_end>i<augadd>2<block_end><elif_stmt>myarg<eq>'start'<block_start>startTime=utils.get_time_or_delta_from_now(sys.argv[i+1])<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'end'<block_start>endTime=utils.get_time_or_delta_from_now(sys.argv[i+1])<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'event'<block_start>eventName=sys.argv[i+1]<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'user'<block_start>userKey=sys.argv[i+1].lower()<if_stmt>userKey<ne>'all'<block_start>userKey=gam.normalizeEmailAddressOrUID(sys.argv[i+1])<block_end>i<augadd>2<block_end><elif_stmt>myarg<in>['filter' 'filters']<block_start>filters=sys.argv[i+1]<line_sep>i<augadd>2<block_end><elif_stmt>myarg<in>['fields' 'parameters']<block_start>parameters=sys.argv[i+1]<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'ip'<block_start>actorIpAddress=sys.argv[i+1]<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'groupidfilter'<block_start>groupIdFilter=sys.argv[i+1]<line_sep>i<augadd>2<block_end><elif_stmt>myarg<eq>'todrive'<block_start>to_drive=<true><line_sep>i<augadd>1<block_end><else_stmt><block_start>controlflow.invalid_argument_exit(sys.argv[i] 'gam report')<block_end><block_end><if_stmt>report<eq>'user'<block_start><while_stmt><true><block_start><try_stmt><block_start>one_page=gapi.call(rep.userUsageReport() 'get' throw_reasons=throw_reasons date=tryDate userKey=userKey customerId=customerId orgUnitID=orgUnitId fields='warnings,usageReports' maxResults=1)<line_sep>warnings=one_page.get('warnings' [])<line_sep>has_reports=bool(one_page.get('usageReports'))<line_sep>fullData,tryDate=_check_full_data_available(warnings tryDate fullDataRequired has_reports)<if_stmt>fullData<l>0<block_start>print('No user report available.')<line_sep>sys.exit(1)<block_end><if_stmt>fullData<eq>0<block_start><continue><block_end>page_message=gapi.got_total_items_msg('Users' '...\n')<line_sep>usage=gapi.get_all_pages(rep.userUsageReport() 'get' 'usageReports' page_message=page_message 
throw_reasons=throw_reasons date=tryDate userKey=userKey customerId=customerId orgUnitID=orgUnitId filters=filters parameters=parameters)<line_sep><break><block_end><except_stmt>gapi.errors.GapiInvalidError<as>e<block_start>tryDate=_adjust_date(str(e))<block_end><block_end><if_stmt><not>usage<block_start>print('No user report available.')<line_sep>sys.exit(1)<block_end>titles=['email' 'date']<line_sep>csvRows=[]<for_stmt>user_report usage<block_start><if_stmt>'entity'<not><in>user_report<block_start><continue><block_end>row={'email':user_report['entity']['userEmail'] 'date':tryDate}<for_stmt>item user_report.get('parameters' [])<block_start><if_stmt>'name'<not><in>item<block_start><continue><block_end>name=item['name']<if_stmt><not>name<in>titles<block_start>titles.append(name)<block_end><for_stmt>ptype REPORTS_PARAMETERS_SIMPLE_TYPES<block_start><if_stmt>ptype<in>item<block_start>row[name]=item[ptype]<line_sep><break><block_end><block_end><else_stmt><block_start>row[name]=''<block_end><block_end>csvRows.append(row)<block_end>display.write_csv_file(csvRows titles f'User Reports - {tryDate}' to_drive)<block_end><elif_stmt>report<eq>'customer'<block_start><while_stmt><true><block_start><try_stmt><block_start>first_page=gapi.call(rep.customerUsageReports() 'get' throw_reasons=throw_reasons customerId=customerId date=tryDate fields='warnings,usageReports')<line_sep>warnings=first_page.get('warnings' [])<line_sep>has_reports=bool(first_page.get('usageReports'))<line_sep>fullData,tryDate=_check_full_data_available(warnings tryDate fullDataRequired has_reports)<if_stmt>fullData<l>0<block_start>print('No customer report available.')<line_sep>sys.exit(1)<block_end><if_stmt>fullData<eq>0<block_start><continue><block_end>usage=gapi.get_all_pages(rep.customerUsageReports() 'get' 'usageReports' throw_reasons=throw_reasons customerId=customerId date=tryDate 
parameters=parameters)<line_sep><break><block_end><except_stmt>gapi.errors.GapiInvalidError<as>e<block_start>tryDate=_adjust_date(str(e))<block_end><block_end><if_stmt><not>usage<block_start>print('No customer report available.')<line_sep>sys.exit(1)<block_end>titles=['name' 'value' 'client_id']<line_sep>csvRows=[]<line_sep>auth_apps=list()<for_stmt>item usage[0]['parameters']<block_start><if_stmt>'name'<not><in>item<block_start><continue><block_end>name=item['name']<if_stmt>'intValue'<in>item<block_start>value=item['intValue']<block_end><elif_stmt>'msgValue'<in>item<block_start><if_stmt>name<eq>'accounts:authorized_apps'<block_start><for_stmt>subitem item['msgValue']<block_start>app={}<for_stmt>an_item subitem<block_start><if_stmt>an_item<eq>'client_name'<block_start>app['name']='App: '+subitem[an_item].replace('\n' '\\n')<block_end><elif_stmt>an_item<eq>'num_users'<block_start>app['value']=f'{subitem[an_item]} users'<block_end><elif_stmt>an_item<eq>'client_id'<block_start>app['client_id']=subitem[an_item]<block_end><block_end>auth_apps.append(app)<block_end><continue><block_end>values=[]<for_stmt>subitem item['msgValue']<block_start><if_stmt>'count'<in>subitem<block_start>mycount=myvalue=<none><for_stmt>key,value list(subitem.items())<block_start><if_stmt>key<eq>'count'<block_start>mycount=value<block_end><else_stmt><block_start>myvalue=value<block_end><if_stmt>mycount<and>myvalue<block_start>values.append(f'{myvalue}:{mycount}')<block_end><block_end>value=' '.join(values)<block_end><elif_stmt>'version_number'<in>subitem<and>'num_devices'<in>subitem<block_start>values.append(f'{subitem["version_number"]}:'<concat>f'{subitem["num_devices"]}')<block_end><else_stmt><block_start><continue><block_end>value=' '.join(sorted(values reverse=<true>))<block_end><block_end>csvRows.append({'name':name 'value':value})<block_end><for_stmt>app auth_apps# put apps at bottom <block_start>csvRows.append(app)<block_end>display.write_csv_file(csvRows titles f'Customer Report - 
{tryDate}' todrive=to_drive)<block_end><else_stmt><block_start>page_message=gapi.got_total_items_msg('Activities' '...\n')<line_sep>activities=gapi.get_all_pages(rep.activities() 'list' 'items' page_message=page_message applicationName=report userKey=userKey customerId=customerId actorIpAddress=actorIpAddress startTime=startTime endTime=endTime eventName=eventName filters=filters orgUnitID=orgUnitId groupIdFilter=groupIdFilter)<if_stmt>activities<block_start>titles=['name']<line_sep>csvRows=[]<for_stmt>activity activities<block_start>events=activity['events']<del_stmt>activity['events']<line_sep>activity_row=utils.flatten_json(activity)<line_sep>purge_parameters=<true><for_stmt>event events<block_start><for_stmt>item event.get('parameters' [])<block_start><if_stmt>set(item)<eq>{'value' 'name'}<block_start>event[item['name']]=item['value']<block_end><elif_stmt>set(item)<eq>{'intValue' 'name'}<block_start><if_stmt>item['name']<in>['start_time' 'end_time']<block_start>val=item.get('intValue')<if_stmt>val<is><not><none><block_start>val=int(val)<if_stmt>val<ge>62135683200<block_start>event[item['name']]=datetime.datetime.fromtimestamp(val-62135683200).isoformat()<block_end><block_end><block_end><else_stmt><block_start>event[item['name']]=item['intValue']<block_end><block_end><elif_stmt>set(item)<eq>{'boolValue' 'name'}<block_start>event[item['name']]=item['boolValue']<block_end><elif_stmt>set(item)<eq>{'multiValue' 'name'}<block_start>event[item['name']]=' '.join(item['multiValue'])<block_end><elif_stmt>item['name']<eq>'scope_data'<block_start>parts={}<for_stmt>message item['multiMessageValue']<block_start><for_stmt>mess message['parameter']<block_start>value=mess.get('value' ' '.join(mess.get('multiValue' [])))<line_sep>parts[mess['name']]=parts.get(mess['name'] [])+[value]<block_end><block_end><for_stmt>part,v parts.items()<block_start><if_stmt>part<eq>'scope_name'<block_start>part='scope'<block_end>event[part]=' 
'.join(v)<block_end><block_end><else_stmt><block_start>purge_parameters=<false><block_end><block_end><if_stmt>purge_parameters<block_start>event.pop('parameters' <none>)<block_end>row=utils.flatten_json(event)<line_sep>row.update(activity_row)<for_stmt>item row<block_start><if_stmt>item<not><in>titles<block_start>titles.append(item)<block_end><block_end>csvRows.append(row)<block_end><block_end>display.sort_csv_titles(['name' ] titles)<line_sep>display.write_csv_file(csvRows titles f'{report.capitalize()} Activity Report' to_drive)<block_end><block_end><block_end><def_stmt>_adjust_date errMsg<block_start>match_date=re.match('Data for dates later than (.*) is not yet '<concat>'available. Please check back later' errMsg)<if_stmt><not>match_date<block_start>match_date=re.match('Start date can not be later than (.*)' errMsg)<block_end><if_stmt><not>match_date<block_start>controlflow.system_error_exit(4 errMsg)<block_end><return>str(match_date.group(1))<block_end><def_stmt>_check_full_data_available warnings tryDate fullDataRequired has_reports<block_start>one_day=datetime.timedelta(days=1)<line_sep>tryDateTime=datetime.datetime.strptime(tryDate YYYYMMDD_FORMAT)<line_sep># move to day before if we don't have at least one usageReport <if_stmt><not>has_reports<block_start>tryDateTime<augsub>one_day<line_sep><return>(0 tryDateTime.strftime(YYYYMMDD_FORMAT))<block_end><for_stmt>warning warnings<block_start><if_stmt>warning['code']<eq>'PARTIAL_DATA_AVAILABLE'<block_start><for_stmt>app warning['data']<block_start><if_stmt>app['key']<eq>'application'<and>app['value']<ne>'docs'<and>fullDataRequired<is><not><none><and>(fullDataRequired<eq>'all'<or>app['value']<in>fullDataRequired)<block_start>tryDateTime<augsub>one_day<line_sep><return>(0 tryDateTime.strftime(YYYYMMDD_FORMAT))<block_end><block_end><block_end><elif_stmt>warning['code']<eq>'DATA_NOT_AVAILABLE'<block_start><for_stmt>app 
warning['data']<block_start><if_stmt>app['key']<eq>'application'<and>app['value']<ne>'docs'<and>(<not>fullDataRequired<or>app['value']<in>fullDataRequired)<block_start><return>(-1 tryDate)<block_end><block_end><block_end><block_end><return>(1 tryDate)<block_end>
""" ${NAME} """<import_from_future_stmt> absolute_import division print_function unicode_literals<import_stmt>logging<import_stmt>time<import_stmt>weakref<import_from_stmt>PySide QtGui<import_from_stmt>mcedit2.widgets.layout Column<line_sep>log=logging.getLogger(__name__)<class_stmt>InfoPanel(QtGui.QWidget)<block_start><def_stmt>__init__ self attrs signals **kwargs<block_start>""" Create a widget that displays a list of an object's selected attributes, named in `attrs`. The widget updates itself whenever one of the object's signals named in `signals` is emitted. If an attribute named in `attrs` is not found on `object`, the InfoPanel instance is checked for an attribute of the same name and it is used instead if found. :type attrs: list of attribute names to display :type signals: list of signals to monitor :param kwargs: args for QWidget :type kwargs: """<line_sep>QtGui.QWidget.__init__(self **kwargs)<line_sep>self.attrs=attrs<line_sep>self.signals=signals<line_sep>self.lastUpdate=time.time()<line_sep>self.labels=[QtGui.QLabel()<for>_ attrs]<line_sep>self.setLayout(Column(*self.labels))<block_end><def_stmt>updateLabels self<block_start>now=time.time()<if_stmt>now<l>self.lastUpdate+0.25<block_start><return><block_end>self.lastUpdate=now<if_stmt>self.object<block_start><for_stmt>attr,label zip(self.attrs self.labels)<block_start><try_stmt><block_start>value=getattr(self.object attr)<block_end><except_stmt>AttributeError# catches unrelated AttributeErrors in property getters... 
<block_start><try_stmt><block_start>value=getattr(self attr)<block_end><except_stmt>AttributeError<block_start>log.exception("Error updating info panel.")<line_sep>value=getattr(self attr "Attribute not found")<block_end><block_end>label.setText("%s: %s"%(attr value))<block_end><block_end><block_end>_object=<none><line_sep>@property<def_stmt>object self<block_start><return>self._object()<block_end>@object.setter<def_stmt>object self value<block_start>self._object=weakref.ref(value)<line_sep>self.updateLabels()<for_stmt>signal self.signals<block_start>signal=getattr(self.object signal <none>)<if_stmt>signal<block_start>signal.connect(self.updateLabels)<block_end><block_end><block_end>setObject=object.setter<block_end>
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
#   Copyright 2018-2019 Fetch.AI Limited
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests of the helpers module of the tac negotiation."""

from pathlib import Path

from aea.helpers.search.models import (
    Attribute,
    Constraint,
    ConstraintType,
    DataModel,
    Description,
)
from aea.test_tools.test_skill import BaseSkillTestCase

from packages.fetchai.skills.tac_negotiation.helpers import (
    DEMAND_DATAMODEL_NAME,
    SUPPLY_DATAMODEL_NAME,
    _build_goods_datamodel,
    build_goods_description,
    build_goods_query,
)

from tests.conftest import ROOT_DIR


class TestHelpers(BaseSkillTestCase):
    """Test Helper module methods of tac control."""

    path_to_skill = Path(ROOT_DIR, "packages", "fetchai", "skills", "tac_negotiation")

    @classmethod
    def setup(cls):
        """Setup the test class."""
        super().setup()

    @staticmethod
    def _expected_attributes(good_ids):
        """Build the attribute list a goods data model is expected to carry.

        One integer attribute per good id, in order, followed by the fixed
        ledger/currency/price/fee/nonce attributes. This was previously
        copy-pasted in every test method.
        """
        return [
            Attribute(good_id, int, True, "A good on offer.") for good_id in good_ids
        ] + [
            Attribute("ledger_id", str, True, "The ledger for transacting."),
            Attribute(
                "currency_id",
                str,
                True,
                "The currency for pricing and transacting the goods.",
            ),
            Attribute("price", int, False, "The price of the goods in the currency."),
            Attribute(
                "fee",
                int,
                False,
                "The transaction fee payable by the buyer in the currency.",
            ),
            Attribute(
                "nonce", str, False, "The nonce to distinguish identical descriptions."
            ),
        ]

    def test_build_goods_datamodel_supply(self):
        """Test the _build_goods_datamodel of Helpers module for a supply."""
        good_ids = ["1", "2"]
        is_supply = True
        expected_data_model = DataModel(
            SUPPLY_DATAMODEL_NAME, self._expected_attributes(good_ids)
        )

        actual_data_model = _build_goods_datamodel(good_ids, is_supply)

        assert actual_data_model == expected_data_model

    def test_build_goods_datamodel_demand(self):
        """Test the _build_goods_datamodel of Helpers module for a demand."""
        good_ids = ["1", "2"]
        is_supply = False
        expected_data_model = DataModel(
            DEMAND_DATAMODEL_NAME, self._expected_attributes(good_ids)
        )

        actual_data_model = _build_goods_datamodel(good_ids, is_supply)

        assert actual_data_model == expected_data_model

    def test_build_goods_description_supply(self):
        """Test the build_goods_description of Helpers module for supply."""
        quantities_by_good_id = {"2": 5, "3": 10}
        currency_id = "1"
        ledger_id = "some_ledger_id"
        is_supply = True

        expected_data_model = DataModel(
            SUPPLY_DATAMODEL_NAME,
            self._expected_attributes(quantities_by_good_id.keys()),
        )
        expected_values = {"currency_id": currency_id, "ledger_id": ledger_id}
        expected_values.update(quantities_by_good_id)
        expected_description = Description(expected_values, expected_data_model)

        actual_description = build_goods_description(
            quantities_by_good_id, currency_id, ledger_id, is_supply
        )

        assert actual_description == expected_description

    def test_build_goods_description_demand(self):
        """Test the build_goods_description of Helpers module for demand (same as above)."""
        quantities_by_good_id = {"2": 5, "3": 10}
        currency_id = "1"
        ledger_id = "some_ledger_id"
        is_supply = False

        expected_data_model = DataModel(
            DEMAND_DATAMODEL_NAME,
            self._expected_attributes(quantities_by_good_id.keys()),
        )
        expected_values = {"currency_id": currency_id, "ledger_id": ledger_id}
        expected_values.update(quantities_by_good_id)
        expected_description = Description(expected_values, expected_data_model)

        actual_description = build_goods_description(
            quantities_by_good_id, currency_id, ledger_id, is_supply
        )

        assert actual_description == expected_description

    def test_build_goods_query(self):
        """Test the build_goods_query of Helpers module."""
        good_ids = ["2", "3"]
        currency_id = "1"
        ledger_id = "some_ledger_id"
        is_searching_for_sellers = True

        expected_data_model = DataModel(
            SUPPLY_DATAMODEL_NAME, self._expected_attributes(good_ids)
        )
        expected_constraints = [
            Constraint("2", ConstraintType(">=", 1)),
            Constraint("3", ConstraintType(">=", 1)),
            Constraint("ledger_id", ConstraintType("==", ledger_id)),
            Constraint("currency_id", ConstraintType("==", currency_id)),
        ]

        actual_query = build_goods_query(
            good_ids, currency_id, ledger_id, is_searching_for_sellers
        )

        # With several goods the constraints sit inside a composite constraint;
        # compare (type, value) pairs order-insensitively.
        constraints = [
            (c.constraint_type.type, c.constraint_type.value)
            for c in actual_query.constraints[0].constraints
        ]
        for constraint in expected_constraints:
            assert (
                constraint.constraint_type.type,
                constraint.constraint_type.value,
            ) in constraints
        assert actual_query.model == expected_data_model

    def test_build_goods_query_1_good(self):
        """Test the build_goods_query of Helpers module where there is 1 good."""
        good_ids = ["2"]
        currency_id = "1"
        ledger_id = "some_ledger_id"
        is_searching_for_sellers = True

        expected_data_model = DataModel(
            SUPPLY_DATAMODEL_NAME, self._expected_attributes(good_ids)
        )
        expected_constraints = [
            Constraint("2", ConstraintType(">=", 1)),
            Constraint("ledger_id", ConstraintType("==", ledger_id)),
            Constraint("currency_id", ConstraintType("==", currency_id)),
        ]

        actual_query = build_goods_query(
            good_ids, currency_id, ledger_id, is_searching_for_sellers
        )

        # With a single good the constraints are top-level on the query.
        for constraint in expected_constraints:
            assert constraint in actual_query.constraints
        assert actual_query.model == expected_data_model
# Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Data processors. Adapted from
https://github.com/zihangdai/xlnet/blob/master/run_classifier.py
"""

import csv
import logging
from abc import ABC
from pathlib import Path
from typing import NamedTuple, Optional, Union, List, Dict, Type


class InputExample(NamedTuple):
    r"""A single training/test example for simple sequence classification."""
    guid: str
    r"""Unique id for the example."""
    text_a: str
    r"""string. The untokenized text of the first sequence. For single
    sequence tasks, only this sequence must be specified."""
    text_b: Optional[str]
    r"""(Optional) string. The untokenized text of the second sequence.
    Only needs to be specified for sequence pair tasks."""
    label: Optional[Union[str, float]]
    r"""(Optional) string. The label of the example. This should be
    specified for train and dev examples, but not for test examples."""


class DataProcessor:
    r"""Base class for data converters for sequence classification data sets.

    Concrete processors declare their ``labels`` and register themselves under
    one or more task names via :meth:`register`; :func:`get_processor_class`
    resolves a task name (case-insensitively) back to the processor class.
    """

    labels: List[str]
    is_regression: bool = False
    task_name: str

    # Global registry mapping lower-cased task name -> processor class.
    __task_dict__: Dict[str, Type['DataProcessor']] = {}

    def __init__(self, data_dir: str):
        self.data_dir = Path(data_dir)

    @classmethod
    def register(cls, *names):
        r"""Class decorator registering a processor under each name in `names`.

        The first name becomes the canonical :attr:`task_name`. Raises
        :exc:`ValueError` if a name is already taken.
        """
        def decorator(klass):
            for name in names:
                prev_processor = DataProcessor.__task_dict__.get(name.lower(), None)
                if prev_processor is not None:
                    raise ValueError(f"Cannot register {klass} as {name}. "
                                     f"The name is already taken by {prev_processor}")
                DataProcessor.__task_dict__[name.lower()] = klass
            klass.task_name = names[0]
            return klass

        return decorator

    def get_train_examples(self) -> List[InputExample]:
        r"""Gets a collection of `InputExample`s for the train set."""
        raise NotImplementedError

    def get_dev_examples(self) -> List[InputExample]:
        r"""Gets a collection of `InputExample`s for the dev set."""
        raise NotImplementedError

    def get_test_examples(self) -> List[InputExample]:
        r"""Gets a collection of `InputExample`s for prediction."""
        raise NotImplementedError

    @classmethod
    def _read_tsv(cls, input_file: Path,
                  quotechar: Optional[str] = None) -> List[List[str]]:
        r"""Reads a tab separated value file, skipping empty rows."""
        with input_file.open('r') as f:
            reader = csv.reader(f, delimiter="\t", quotechar=quotechar)
            return [line for line in reader if line]


def get_processor_class(task: str) -> Type[DataProcessor]:
    r"""Return the processor class registered under ``task`` (case-insensitive).

    Raises :exc:`ValueError` for an unknown task name.
    """
    klass = DataProcessor.__task_dict__.get(task.lower(), None)
    if klass is None:
        raise ValueError(f"Unsupported task {task}")
    return klass


class GLUEProcessor(DataProcessor, ABC):
    r"""Shared logic for GLUE-style TSV datasets.

    Subclasses set the file names and column indices below; regression tasks
    additionally override the :meth:`_test_label` / :meth:`_parse_label` hooks.
    """

    train_file = "train.tsv"
    dev_file = "dev.tsv"
    test_file = "test.tsv"
    label_column: int
    text_a_column: int
    # FIX: annotated plain `int` before, but _create_examples explicitly
    # handles a `None` second-sentence column for single-sequence tasks.
    text_b_column: Optional[int]
    contains_header = True
    test_text_a_column: int
    test_text_b_column: Optional[int]
    test_contains_header = True

    def __init__(self, data_dir: str):
        super().__init__(data_dir)
        # Default the test-split columns to the train/dev columns unless the
        # subclass declared them explicitly.
        if not hasattr(self, 'test_text_a_column'):
            self.test_text_a_column = self.text_a_column
        if not hasattr(self, 'test_text_b_column'):
            self.test_text_b_column = self.text_b_column

    def get_train_examples(self) -> List[InputExample]:
        return self._create_examples(
            self._read_tsv(self.data_dir / self.train_file), "train")

    def get_dev_examples(self) -> List[InputExample]:
        return self._create_examples(
            self._read_tsv(self.data_dir / self.dev_file), "dev")

    def get_test_examples(self) -> List[InputExample]:
        return self._create_examples(
            self._read_tsv(self.data_dir / self.test_file), "test")

    def _test_label(self) -> Union[str, float]:
        # Placeholder label for the unlabeled test split.
        return self.labels[0]

    def _parse_label(self, raw: str) -> Union[str, float]:
        # Hook for subclasses (e.g. regression) to convert the raw label cell.
        return raw

    def _create_examples(self, lines: List[List[str]],
                         set_type: str) -> List[InputExample]:
        r"""Creates examples for the training and dev sets.

        Previously duplicated almost verbatim in ``StsbProcessor``; the label
        handling now goes through the two hooks above instead.
        """
        examples = []
        for (i, line) in enumerate(lines):
            if i == 0 and self.contains_header and set_type != "test":
                continue
            if i == 0 and self.test_contains_header and set_type == "test":
                continue
            guid = f"{set_type}-{i}"
            a_column = (self.text_a_column if set_type != "test" else
                        self.test_text_a_column)
            b_column = (self.text_b_column if set_type != "test" else
                        self.test_text_b_column)

            # there are some incomplete lines in QNLI
            if len(line) <= a_column:
                logging.warning('Incomplete line, ignored.')
                continue
            text_a = line[a_column]

            if b_column is not None:
                if len(line) <= b_column:
                    logging.warning('Incomplete line, ignored.')
                    continue
                text_b = line[b_column]
            else:
                text_b = None

            if set_type == "test":
                label = self._test_label()
            else:
                if len(line) <= self.label_column:
                    logging.warning('Incomplete line, ignored.')
                    continue
                label = self._parse_label(line[self.label_column])
            examples.append(InputExample(guid, text_a, text_b, label))
        return examples


@DataProcessor.register("MNLI", "MNLI_matched")
class MnliMatchedProcessor(GLUEProcessor):
    labels = ["contradiction", "entailment", "neutral"]

    dev_file = "dev_matched.tsv"
    test_file = "test_matched.tsv"
    label_column = -1
    text_a_column = 8
    text_b_column = 9


@DataProcessor.register("MNLI_mismatched")
class MnliMismatchedProcessor(MnliMatchedProcessor):
    dev_file = "dev_mismatched.tsv"
    test_file = "test_mismatched.tsv"


@DataProcessor.register("STS-B", "stsb")
class StsbProcessor(GLUEProcessor):
    r"""STS-B regression task: labels are floats instead of class names."""

    labels: List[str] = []
    is_regression = True

    label_column = 9
    text_a_column = 7
    text_b_column = 8

    def _test_label(self) -> float:
        # Regression placeholder for the unlabeled test split.
        return 0.0

    def _parse_label(self, raw: str) -> float:
        return float(raw)


@DataProcessor.register("Yelp5")
class Yelp5Processor(DataProcessor):
    labels = ["1", "2", "3", "4", "5"]

    def get_train_examples(self) -> List[InputExample]:
        return self._create_examples(self.data_dir / "train.csv")

    def get_dev_examples(self) -> List[InputExample]:
        return self._create_examples(self.data_dir / "test.csv")

    def get_test_examples(self):
        raise TypeError("The Yelp 5 dataset does not have a test set.")

    @staticmethod
    def _create_examples(input_file: Path) -> List[InputExample]:
        r"""Creates examples for the training and dev sets."""
        examples = []
        with input_file.open() as f:
            reader = csv.reader(f)
            for i, line in enumerate(reader):
                label = line[0]
                # Undo the dataset's CSV-style quote escaping.
                text_a = line[1].replace('""', '"').replace('\\"', '"')
                examples.append(InputExample(
                    guid=str(i), text_a=text_a, text_b=None, label=label))
        return examples


@DataProcessor.register("IMDB")
class ImdbProcessor(DataProcessor):
    labels = ["neg", "pos"]

    def get_train_examples(self) -> List[InputExample]:
        return self._create_examples(self.data_dir / "train")

    def get_dev_examples(self) -> List[InputExample]:
        return self._create_examples(self.data_dir / "test")

    def get_test_examples(self):
        raise TypeError("The IMDB dataset does not have a test set.")

    @staticmethod
    def _create_examples(data_dir: Path) -> List[InputExample]:
        examples = []
        for label in ["neg", "pos"]:
            cur_dir = data_dir / label
            for filename in cur_dir.iterdir():
                if filename.suffix != ".txt":
                    continue
                with filename.open() as f:
                    text = f.read().strip().replace("<br />", " ")
                examples.append(InputExample(
                    guid=str(filename), text_a=text, text_b=None, label=label))
        return examples
# coding:utf-8
# Copyright (c) 2020  PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import os
import json
import sys
import argparse
import contextlib
from collections import namedtuple

import paddle.fluid as fluid
import paddlehub as hub
from paddlehub.module.module import runnable
from paddlehub.module.nlp_module import DataFormatError
from paddlehub.common.logger import logger
from paddlehub.module.module import moduleinfo, serving

import plato2_en_base.models as plato_models
from plato2_en_base.tasks.dialog_generation import DialogGeneration
from plato2_en_base.utils import check_cuda, Timer
from plato2_en_base.utils.args import parse_args


@moduleinfo(
    name="plato2_en_base",
    version="1.0.0",
    summary="A novel pre-training model for dialogue generation, incorporated with latent discrete variables for one-to-many relationship modeling.",
    author="baidu-nlp",
    author_email="",
    type="nlp/text_generation",
)
class Plato(hub.NLPPredictionModule):
    """PaddleHub module wrapping the PLATO-2 (24L) dialogue generation model."""

    def _initialize(self):
        """
        initialize with the necessary elements
        """
        # GPU is mandatory; fail fast with a clear message otherwise.
        if "CUDA_VISIBLE_DEVICES" not in os.environ:
            raise RuntimeError("The module only support GPU. Please set the environment variable CUDA_VISIBLE_DEVICES.")
        args = self.setup_args()
        self.task = DialogGeneration(args)
        self.model = plato_models.create_model(args, fluid.CUDAPlace(0))
        self.Example = namedtuple("Example", ["src", "data_id"])
        self._interactive_mode = False

    def setup_args(self):
        """
        Setup arguments: build the command-line namespace the PLATO-2 code
        expects, pointing every path at this module's bundled assets.
        """
        assets_path = os.path.join(self.directory, "assets")
        vocab_path = os.path.join(assets_path, "vocab.txt")
        init_pretraining_params = os.path.join(assets_path, "24L", "Plato")
        spm_model_file = os.path.join(assets_path, "spm.model")
        nsp_inference_model_path = os.path.join(assets_path, "24L", "NSP")
        config_path = os.path.join(assets_path, "24L.json")

        # ArgumentParser.parse_args use argv[1:], it will drop the first one arg, so the first one in sys.argv should be ""
        sys.argv = [
            "", "--model", "Plato", "--vocab_path",
            "%s" % vocab_path, "--do_lower_case", "False",
            "--init_pretraining_params", "%s" % init_pretraining_params,
            "--spm_model_file", "%s" % spm_model_file,
            "--nsp_inference_model_path", "%s" % nsp_inference_model_path,
            "--ranking_score", "nsp_score", "--do_generation", "True",
            "--batch_size", "1", "--config_path", "%s" % config_path
        ]

        parser = argparse.ArgumentParser()
        plato_models.add_cmdline_args(parser)
        DialogGeneration.add_cmdline_args(parser)
        args = parse_args(parser)

        args.load(args.config_path, "Model")
        args.run_infer = True  # only build infer program

        return args

    @serving
    def generate(self, texts):
        """
        Get the robot responses of the input texts.

        Args:
             texts(list or str): If not in the interactive mode, texts should be a list in which every element is the chat context separated with '\t'.
                                 Otherwise, texts shoule be one sentence. The module can get the context automatically.

        Returns:
             results(list): the robot responses.
        """
        if not texts:
            return []
        if self._interactive_mode:
            if isinstance(texts, str):
                self.context.append(texts.strip())
                # Keep only the most recent max_turn utterances as context.
                texts = [" [SEP] ".join(self.context[-self.max_turn:])]
            else:
                raise ValueError("In the interactive mode, the input data should be a string.")
        elif not isinstance(texts, list):
            raise ValueError("If not in the interactive mode, the input data should be a list.")

        bot_responses = []
        for i, text in enumerate(texts):
            example = self.Example(src=text.replace("\t", " [SEP] "), data_id=i)
            record = self.task.reader._convert_example_to_record(example, is_infer=True)
            data = self.task.reader._pad_batch_records([record], is_infer=True)
            pred = self.task.infer_step(self.model, data)[0]  # batch_size is 1
            bot_response = pred["response"]  # ignore data_id and score
            bot_responses.append(bot_response)

        if self._interactive_mode:
            self.context.append(bot_responses[0].strip())
        return bot_responses

    @contextlib.contextmanager
    def interactive_mode(self, max_turn=6):
        """
        Enter the interactive mode.

        Args:
            max_turn(int): the max dialogue turns. max_turn = 1 means the robot can only remember the last one utterance you have said.
        """
        self._interactive_mode = True
        self.max_turn = max_turn
        self.context = []
        # FIX: reset the mode even if the caller's `with` body raises;
        # previously an exception left the module stuck in interactive mode.
        try:
            yield
        finally:
            self.context = []
            self._interactive_mode = False

    @runnable
    def run_cmd(self, argvs):
        """
        Run as a command
        """
        self.parser = argparse.ArgumentParser(
            description='Run the %s module.' % self.name,
            prog='hub run %s' % self.name,
            usage='%(prog)s',
            add_help=True)

        self.arg_input_group = self.parser.add_argument_group(
            title="Input options", description="Input data. Required")
        self.arg_config_group = self.parser.add_argument_group(
            title="Config options",
            description="Run configuration for controlling module behavior, optional.")

        self.add_module_input_arg()

        args = self.parser.parse_args(argvs)

        try:
            input_data = self.check_input_data(args)
        # FIX: the original `except DataFormatError and RuntimeError` evaluates
        # the expression `DataFormatError and RuntimeError` to RuntimeError, so
        # DataFormatError was never caught. Catch both via a tuple.
        except (DataFormatError, RuntimeError):
            self.parser.print_help()
            return None

        results = self.generate(texts=input_data)

        return results


if __name__ == "__main__":
    module = Plato()
    for result in module.generate(["Hello", "Hello\thi, nice to meet you, my name is tom\tso your name is tom?"]):
        print(result)
    with module.interactive_mode(max_turn=3):
        while True:
            human_utterance = input()
            robot_utterance = module.generate(human_utterance)
            print("Robot: %s" % robot_utterance[0])
<import_from_stmt>twarc Twarc2 expansions<import_stmt>json<line_sep># Replace your bearer token below client=Twarc2(bearer_token="<PASSWORD>")<def_stmt>main # The followers function gets followers for specified user <block_start>followers=client.followers(user="twitterdev")<for_stmt>page followers<block_start>result=expansions.flatten(page)<for_stmt>user result# Here we are printing the full Tweet object JSON to the console <block_start>print(json.dumps(user))<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
# Generated by Django 3.2.12 on 2022-04-12 14:00 <import_from_stmt>django.db migrations<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[("product" "0167_digitalcontenturl_order_line_token") ("order" "0140_alter_orderline_old_id_and_created_at") ]<line_sep>operations=[migrations.RunSQL(""" UPDATE product_digitalcontenturl SET order_line_token = ( SELECT token FROM order_orderline WHERE product_digitalcontenturl.line_id = order_orderline.id ) WHERE line_id IS NOT NULL; """ reverse_sql=migrations.RunSQL.noop ) ]<block_end>
<import_stmt>starry<import_stmt>numpy<as>np<import_stmt>matplotlib.pyplot<as>plt<import_stmt>pytest<line_sep>@pytest.mark.parametrize("ydeg,nw" [[0 <none>] [0 10] [1 <none>] [1 10]])<def_stmt>test_system ydeg nw# Oblate map <block_start>map=starry.Map(udeg=2 ydeg=ydeg oblate=<true> nw=nw)<line_sep>map[1]=0.5<line_sep>map[2]=0.25<line_sep>map.omega=0.5<line_sep>map.beta=1.23<line_sep>map.tpole=8000<line_sep>map.f=1-2/(map.omega<power>2+2)<line_sep>map.obl=30<line_sep># Compute system flux star=starry.Primary(map r=1.5)<line_sep>planet=starry.Secondary(starry.Map(amp=0 nw=nw) porb=1.0 r=0.1 m=0)<line_sep>sys=starry.System(star planet)<line_sep>t=np.linspace(-0.1 0.1 1000)<line_sep>flux_sys=sys.flux(t integrated=<true>)<line_sep># Compute map flux manually x,y,z=sys.position(t)<line_sep>xo=x[1]/star._r<line_sep>yo=y[1]/star._r<line_sep>flux_map=map.flux(xo=xo yo=yo ro=planet._r/star._r integrated=<true>)<line_sep># Check that they agree <assert_stmt>np.allclose(flux_map flux_sys)<block_end>
<import_stmt>torch<import_stmt>numpy<as>np<import_from_stmt>..CustomModule CustomModule<class_stmt>PCA(CustomModule)<block_start><def_stmt>__init__ self n_components<block_start>""" Principle Component Analysis (PCA) n_components: int number of principle components """<line_sep>super(PCA self).__init__()<assert_stmt>n_components<g>0<line_sep>self.n_components=n_components<line_sep>self.register_buffer("_mean" <none>)<line_sep>self.register_buffer("_components" <none>)<block_end>@staticmethod<def_stmt>covar x meaned=<true> rowvar=<true> inplace=<false><block_start>""" compute covariance matrix of 'x' x: torch.Tensor, shape : [m, n] meaned: bool, default : True if True, assume 'x' has zero mean rowvar: bool, default : True if True, assume 'm' represents n_features and 'n' represents n_samples if False, assume 'm' represents n_samples and 'n' represents n_features inplace: bool, default : False if meaned is False and inplace is True, mean of 'x' will be subtracted from 'x' inplace, and will be added back to 'x' at the end, this will prevent creating a new tensor of shape [m, n] with the cost of extra computation. 
"""<if_stmt>x.dim()<g>2<block_start><raise>ValueError('x has more than 2 dimensions')<block_end><if_stmt>x.dim()<l>2<block_start>x=x.view(1 -1)<block_end><if_stmt><not>rowvar<and>x.shape[0]<ne>1<block_start>x=x.T<block_end>fact=1.0/(x.shape[1]-1)<if_stmt><not>meaned<block_start>mean=x.mean(dim=1 keepdim=<true>)<if_stmt>inplace<block_start>x.sub_(mean)<block_end><else_stmt><block_start>x=x-mean<block_end><block_end>result=fact<times>(x@x.T).squeeze()<if_stmt>inplace<and><not>meaned<block_start>x.add_(mean)<block_end><return>result<block_end><def_stmt>train self x inplace=<false><block_start>""" train PCA with 'x' x: torch.Tensor, shape : [d_vec, n_sample] inplace: bool, default : False if True, reduce the memory consumption with the cost of extra computation """<assert_stmt>x.shape[0]<ge>self.n_components<line_sep>mean=x.mean(dim=1 keepdim=<true>)#[d_vec, 1] <if_stmt>inplace<block_start>x.sub_(mean)<block_end><else_stmt><block_start>x=x-mean<block_end>x_cov=self.covar(x rowvar=<true> meaned=<true>)<if_stmt>inplace<block_start>x.add_(mean)<block_end>eig_val,eig_vec=torch.symeig(x_cov eigenvectors=<true> upper=<false>)<line_sep>sorted_eig_val,sorted_index=eig_val.sort(descending=<true>)<line_sep>sorted_eig_vec=eig_vec[: sorted_index]<line_sep>components=sorted_eig_vec[: :self.n_components].T<line_sep>self.register_buffer("_components" components)<line_sep>self.register_buffer("_mean" mean)<block_end><def_stmt>encode self x<block_start>""" reduce the dimentionality of 'x' from 'd_vec' to 'n_components' x: torch.Tensor, shape : [d_vec, n_samples], dtype : float32 return: torch.Tensor, shape : [n_components, n_samples], dtype : float32 """<assert_stmt>self._components<is><not><none><assert_stmt>x.shape[0]<eq>self._components.shape[1]<line_sep>x=x-self._mean<line_sep>y=self._components@x<line_sep><return>y<block_end><def_stmt>decode self x<block_start>""" reconstruct 'x' from 'n_components' dimentional space to 'd_vec' dimentional space x: torch.Tensor, shape : 
[n_components, n_samples], dtype : float32 return: torch.Tensor, shape : [d_vec, n_samples], dtype : float32 """<assert_stmt>self._components<is><not><none><assert_stmt>x.shape[0]<eq>self._components.shape[0]<line_sep>y=self._components.T@x<line_sep>y=y+self._mean<line_sep><return>y<block_end><block_end>
# -*- coding: utf-8 -*- # Copyright 2016 Yelp Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. <import_from_future_stmt> absolute_import<import_from_future_stmt> unicode_literals<import_stmt>os<import_stmt>signal<import_stmt>sys<import_stmt>mock<import_stmt>pytest<import_from_stmt>data_pipeline.producer Producer<import_from_stmt>data_pipeline.schematizer_clientlib.schematizer SchematizerClient<import_from_stmt>pymysqlreplication.event QueryEvent<import_stmt>replication_handler.batch.base_parse_replication_stream<import_from_stmt>replication_handler.batch.base_parse_replication_stream BaseParseReplicationStream<import_from_stmt>replication_handler.components.change_log_data_event_handler ChangeLogDataEventHandler<import_from_stmt>replication_handler.components.data_event_handler DataEventHandler<import_from_stmt>replication_handler.components.schema_event_handler SchemaEventHandler<import_from_stmt>replication_handler.models.global_event_state EventType<import_from_stmt>replication_handler.util.misc DataEvent<import_from_stmt>replication_handler.util.misc ReplicationHandlerEvent<import_from_stmt>replication_handler.util.position GtidPosition<class_stmt>BaseParseReplicationStreamTest(object)<block_start>@pytest.yield_fixture(autouse=<true>)<def_stmt>patch_zk self<block_start><with_stmt>mock.patch.object(replication_handler.batch.base_parse_replication_stream 'ZKLock')<as>mock_zk<block_start><yield>mock_zk<block_end><block_end>@pytest.fixture<def_stmt>schema_event 
self<block_start><return>mock.Mock(spec=QueryEvent)<block_end>@pytest.fixture<def_stmt>data_event self<block_start><return>mock.Mock(spec=DataEvent)<block_end>@pytest.yield_fixture<def_stmt>patch_restarter self<block_start><with_stmt>mock.patch.object(replication_handler.batch.base_parse_replication_stream 'ReplicationStreamRestarter')<as>mock_restarter<block_start><yield>mock_restarter<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_save_position self<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.save_position')<as>mock_save_position<block_start><yield>mock_save_position<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_data_handle_event self<block_start><with_stmt>mock.patch.object(DataEventHandler 'handle_event' )<as>mock_handle_event<block_start><yield>mock_handle_event<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_schema_handle_event self<block_start><with_stmt>mock.patch.object(SchemaEventHandler 'handle_event' )<as>mock_handle_event<block_start><yield>mock_handle_event<block_end><block_end>@pytest.fixture<def_stmt>producer self<block_start><return>mock.Mock(autospec=Producer)<block_end>@pytest.fixture<def_stmt>schematizer self<block_start><return>mock.Mock(autospec=SchematizerClient)<block_end>@pytest.yield_fixture<def_stmt>patch_producer self producer<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.Producer')<as>mock_producer<block_start>mock_producer.return_value.__enter__.return_value=producer<line_sep><yield>mock_producer<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_running self<block_start><with_stmt>mock.patch.object(BaseParseReplicationStream 'running' new_callable=mock.PropertyMock)<as>mock_running<block_start>mock_running.return_value=<true><line_sep><yield>mock_running<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_process_event self<block_start><with_stmt>mock.patch.object(BaseParseReplicationStream 
'process_event' )<as>mock_process_event<block_start><yield>mock_process_event<block_end><block_end>@pytest.yield_fixture(autouse=<true>)<def_stmt>patch_schematizer self schematizer<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.get_schematizer')<as>mock_schematizer<block_start>mock_schematizer.return_value=schematizer<line_sep><yield>mock_schematizer<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_db_connections self mock_db_connections<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.get_connection')<as>mock_get_db_conn<block_start>mock_get_db_conn.return_value=mock_db_connections<line_sep><yield>mock_get_db_conn<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_exit self<block_start><with_stmt>mock.patch.object(os '_exit')<as>mock_exit<block_start><yield>mock_exit<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_sys_exit self<block_start><with_stmt>mock.patch.object(sys 'exit')<as>mock_exit<block_start><yield>mock_exit<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_signal self<block_start><with_stmt>mock.patch.object(signal 'signal')<as>mock_signal<block_start><yield>mock_signal<block_end><block_end>@pytest.fixture<def_stmt>position_gtid_1 self<block_start><return>GtidPosition(gtid="fake_gtid_1")<block_end>@pytest.fixture<def_stmt>position_gtid_2 self<block_start><return>GtidPosition(gtid="fake_gtid_2")<block_end>@pytest.yield_fixture<def_stmt>patch_config 
self<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.config.env_config')<as>mock_config<block_start>mock_config.register_dry_run=<false><line_sep>mock_config.publish_dry_run=<false><line_sep>mock_config.namespace="test_namespace"<line_sep>mock_config.disable_meteorite=<false><line_sep>mock_config.changelog_mode=<false><line_sep>mock_config.topology_path='topology.yaml'<line_sep><yield>mock_config<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_config_with_small_recovery_queue_size self<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.config.env_config')<as>mock_config<block_start>mock_config.register_dry_run=<false><line_sep>mock_config.publish_dry_run=<false><line_sep>mock_config.namespace="test_namespace"<line_sep>mock_config.recovery_queue_size=1<line_sep><yield>mock_config<block_end><block_end>@pytest.yield_fixture<def_stmt>patch_config_changelog_on self patch_config<block_start>patch_config.changelog_mode=<true><line_sep><yield>patch_config<block_end><def_stmt>_different_events_builder self schema_event data_event patch_config position_gtid_1 position_gtid_2 patch_restarter patch_db_connections patch_data_handle_event patch_schema_handle_event patch_producer patch_save_position patch_exit<block_start>schema_event_with_gtid=ReplicationHandlerEvent(position=position_gtid_1 event=schema_event)<line_sep>data_event_with_gtid=ReplicationHandlerEvent(position=position_gtid_2 event=data_event)<line_sep>patch_restarter.return_value.get_stream.return_value.next.side_effect=[schema_event_with_gtid data_event_with_gtid ]<block_end><def_stmt>test_replication_stream_different_events self schema_event data_event patch_config position_gtid_1 position_gtid_2 patch_restarter patch_db_connections patch_data_handle_event patch_schema_handle_event patch_producer patch_save_position patch_exit<block_start>self._different_events_builder(schema_event data_event patch_config position_gtid_1 
position_gtid_2 patch_restarter patch_db_connections patch_data_handle_event patch_schema_handle_event patch_producer patch_save_position patch_exit)<line_sep>stream=self._init_and_run_batch()<assert_stmt>patch_schema_handle_event.call_args_list<eq>[mock.call(schema_event position_gtid_1)]<assert_stmt>patch_data_handle_event.call_args_list<eq>[mock.call(data_event position_gtid_2)]<assert_stmt>patch_schema_handle_event.call_count<eq>1<assert_stmt>patch_data_handle_event.call_count<eq>1<assert_stmt>stream.register_dry_run<is><false><assert_stmt>stream.publish_dry_run<is><false><block_end><def_stmt>test_replication_stream_same_events self data_event patch_config position_gtid_1 position_gtid_2 patch_restarter patch_db_connections patch_data_handle_event patch_producer patch_exit patch_save_position <block_start>data_event_with_gtid_1=ReplicationHandlerEvent(position=position_gtid_1 event=data_event)<line_sep>data_event_with_gtid_2=ReplicationHandlerEvent(position=position_gtid_2 event=data_event)<line_sep>patch_restarter.return_value.get_stream.return_value.next.side_effect=[data_event_with_gtid_1 data_event_with_gtid_2]<line_sep>self._init_and_run_batch()<assert_stmt>patch_data_handle_event.call_args_list<eq>[mock.call(data_event position_gtid_1) mock.call(data_event position_gtid_2)]<assert_stmt>patch_data_handle_event.call_count<eq>2<assert_stmt>patch_save_position.call_count<eq>1<block_end><def_stmt>test_register_signal_handler self patch_config patch_db_connections patch_restarter patch_signal patch_running patch_producer patch_exit <block_start>patch_running.return_value=<false><line_sep>replication_stream=self._init_and_run_batch()<line_sep># ZKLock also calls patch_signal, so we have to work around it <assert_stmt>[mock.call(signal.SIGINT replication_stream._handle_shutdown_signal) mock.call(signal.SIGTERM replication_stream._handle_shutdown_signal) ]<in>patch_signal.call_args_list<block_end><def_stmt>test_graceful_exit_if_buffer_size_mismatch self producer 
patch_config_with_small_recovery_queue_size patch_restarter patch_data_handle_event patch_db_connections patch_save_position <block_start><with_stmt>pytest.raises(SystemExit)<block_start>self._init_and_run_batch()<block_end><block_end><def_stmt>test_changelog_ON_chooses_changelog_dataevent_handler self patch_config patch_config_changelog_on producer patch_db_connections<block_start>replication_stream=self._get_parse_replication_stream()<line_sep>replication_stream.producer=producer<line_sep>replication_stream.counters=mock.MagicMock()<line_sep>handler_info=replication_stream._build_handler_map()[DataEvent]<assert_stmt>isinstance(handler_info.handler ChangeLogDataEventHandler)<block_end><def_stmt>test_without_changelog_mode_dataevent_handler_is_default self patch_config producer patch_db_connections<block_start>replication_stream=self._get_parse_replication_stream()<line_sep>replication_stream.producer=producer<line_sep>replication_stream.counters=mock.MagicMock()<line_sep>handler_info=replication_stream._build_handler_map()[DataEvent]<assert_stmt>isinstance(handler_info.handler DataEventHandler)<block_end><def_stmt>test_handle_graceful_termination_data_event self producer patch_producer patch_config patch_restarter patch_data_handle_event patch_save_position patch_exit patch_running patch_db_connections<block_start>patch_running.return_value=<false><line_sep>replication_stream=self._get_parse_replication_stream()<line_sep>replication_stream.current_event_type=EventType.DATA_EVENT<line_sep>replication_stream.run()<assert_stmt>producer.get_checkpoint_position_data.call_count<eq>1<assert_stmt>producer.flush.call_count<eq>1<assert_stmt>patch_exit.call_count<eq>1<block_end><def_stmt>test_handle_graceful_termination_schema_event self producer patch_config patch_producer patch_restarter patch_data_handle_event patch_exit patch_running 
patch_db_connections<block_start>patch_running.return_value=<false><line_sep>replication_stream=self._get_parse_replication_stream()<line_sep>replication_stream.current_event_type=EventType.SCHEMA_EVENT<line_sep>replication_stream.run()<assert_stmt>producer.get_checkpoint_position_data.call_count<eq>0<assert_stmt>producer.flush.call_count<eq>0<assert_stmt>patch_exit.call_count<eq>1<block_end><def_stmt>test_with_dry_run_options self patch_db_connections patch_restarter<block_start><with_stmt>mock.patch('replication_handler.batch.base_parse_replication_stream.config.env_config')<as>mock_config<block_start>mock_config.register_dry_run=<true><line_sep>mock_config.publish_dry_run=<false><line_sep>replication_stream=self._get_parse_replication_stream()<assert_stmt>replication_stream.register_dry_run<is><true><assert_stmt>replication_stream.publish_dry_run<is><false><block_end><block_end><def_stmt>test_zk_lock_acquired self patch_config patch_exit patch_restarter patch_db_connections patch_zk patch_process_event # ZK will exit the proc if it can't acquire a lock using sys.exit <block_start>patch_zk.side_effect=SystemExit<with_stmt>pytest.raises(SystemExit)<block_start>self._init_and_run_batch()<assert_stmt>patch_zk.assert_called_once_with("replication_handler" "test_namespace")<assert_stmt>patch_process_event.call_count<eq>0<block_end><block_end><def_stmt>test_zk_exit_on_exception self patch_config patch_restarter patch_db_connections patch_zk<block_start>patch_restarter.return_value.get_stream.return_value.__iter__.side_effect=Exception<with_stmt>pytest.raises(Exception)<block_start>self._init_and_run_batch()<assert_stmt>patch_zk.__exit__.call_count<eq>1<block_end><block_end><def_stmt>_init_and_run_batch self<block_start>replication_stream=self._get_parse_replication_stream()<line_sep>replication_stream.run()<line_sep><return>replication_stream<block_end><def_stmt>_get_parse_replication_stream self<block_start><raise>NotImplementedError()<block_end><block_end>
# SPDX-License-Identifier: MIT # Copyright (c) 2020 The Pybricks Authors """ Hardware Module: 1 Description: This tests the lights on the Ultrasonic Sensor. No external sensors are used to verify that it works. """<import_from_stmt>pybricks.pupdevices UltrasonicSensor<import_from_stmt>pybricks.parameters Port<import_from_stmt>pybricks.tools wait<import_from_stmt>urandom randint<line_sep># Initialize devices. lights=UltrasonicSensor(Port.C).lights<line_sep># Turn on all lights at full brightness. lights.on()<line_sep>wait(500)<line_sep># Turn on all lights. <for_stmt>i range(-50 150 2)<block_start>lights.on(i)<line_sep>wait(20)<block_end># Turn of all lights. lights.off()<line_sep>wait(500)<line_sep># Turn on all lights. <for_stmt>i range(50)<block_start>lights.on([randint(0 100)<for>j range(4)])<line_sep>wait(50)<block_end>
<import_stmt>unittest<import_stmt>numpy<as>np<import_stmt>prml.nn<as>nn<class_stmt>TestGaussian(unittest.TestCase)<block_start><def_stmt>test_gaussian_draw_forward self<block_start>mu=nn.array(0)<line_sep>sigma=nn.softplus(nn.array(-1))<line_sep>gaussian=nn.Gaussian(mu sigma)<line_sep>sample=[]<for_stmt>_ range(1000)<block_start>sample.append(gaussian.draw().value)<block_end>self.assertTrue(np.allclose(np.mean(sample) 0 rtol=0.1 atol=0.1) np.mean(sample))<line_sep>self.assertTrue(np.allclose(np.std(sample) gaussian.std.value 0.1 0.1))<block_end><def_stmt>test_gaussian_draw_backward self<block_start>mu=nn.array(0)<line_sep>s=nn.array(2)<line_sep>optimizer=nn.optimizer.Gradient({0:mu 1:s} 0.01)<line_sep>prior=nn.Gaussian(1 1)<for_stmt>_ range(1000)<block_start>mu.cleargrad()<line_sep>s.cleargrad()<line_sep>gaussian=nn.Gaussian(mu nn.softplus(s))<line_sep>gaussian.draw()<line_sep>loss=nn.loss.kl_divergence(gaussian prior).sum()<line_sep>optimizer.minimize(loss)<block_end>self.assertTrue(np.allclose(gaussian.mean.value 1 0.1 0.1))<line_sep>self.assertTrue(np.allclose(gaussian.std.value 1 0.1 0.1))<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
"""The module contains functions to preprocess input datasets into usable format."""<import_stmt>gc<import_stmt>gzip<import_stmt>json<import_stmt>logging<import_stmt>multiprocessing<as>mp<import_stmt>pathlib<import_stmt>sys<import_from_stmt>itertools repeat<import_stmt>numpy<as>np<import_stmt>scipy.sparse<as>smat<line_sep>logging.basicConfig(stream=sys.stdout format="%(asctime)s - %(levelname)s - %(name)s - %(message)s" datefmt="%m/%d/%Y %H:%M:%S" level=logging.INFO )<line_sep>logger=logging.getLogger(__name__)<line_sep>_FUNC=<none># place holder to Pool functions. <def_stmt>_worker_init func<block_start>"init method to invoke Pool."<line_sep><global>_FUNC<line_sep>_FUNC=func<block_end><def_stmt>_worker x<block_start>"init function to invoke pool"<line_sep><return>_FUNC(x)<block_end><def_stmt>open_file_helper filename compressed mode="rt"<block_start>""" Supports reading of gzip compressed or uncompressed file. Parameters: ---------- filename : str Name of the file to open. compressed : bool If true, treat filename as gzip compressed. mode : str Reading mode. Returns: -------- file handle to the opened file. """<line_sep><return>gzip.open(filename mode=mode)<if>compressed<else>open(filename mode)<block_end><def_stmt>_get_unique_rows_cols filename compressed delim="<@@>"<block_start>"""Function to load a json file in the format of processed session-data for qp2q. Then it returns dictionary of query<delim>prefix as r2i and next_query as c2i. 
"""<line_sep>r2i={}<line_sep>c2i={}<line_sep>logger.info("Processing file for rows and columns: {}".format(filename))<with_stmt>open_file_helper(filename compressed)<as>fp<block_start><for_stmt>line fp<block_start><try_stmt><block_start>pline=json.loads(line)<block_end><except_stmt>json.decoder.JSONDecodeError<block_start>logger.warn(f"Failed to parse: {line}")<line_sep><continue><block_end>query_prefix=delim.join([pline["prev_query"] pline["prefix"]])<line_sep>kw=pline["next_query"]<if_stmt>query_prefix<not><in>r2i<block_start>r2i[query_prefix]=1<block_end><if_stmt>kw<not><in>c2i<block_start>c2i[kw]=1<block_end><block_end><block_end><return>r2i c2i<block_end><def_stmt>_transform_file_to_matrix_qp2q filename compressed delim g_r2i g_c2i<block_start>""" Helper Function to extract qp2q matrix from input_file which was generated as a output of the function parallel_process_session_data_qp2p. Parameters: ---------- input_file: filename full filepath of input dataframe compressed: bool compressed or not delim: str delim separating query and prefix g_r2i: dictionary mapping for input items g_c2i: dictionary mapping of output item Returns: ------- qp2q count matrix """<line_sep>rows=[]<line_sep>cols=[]<line_sep>data=[]<line_sep>logger.info("Processing file for matrix: {}".format(filename))<with_stmt>open_file_helper(filename compressed)<as>fp<block_start><for_stmt>line fp<block_start><try_stmt><block_start>pline=json.loads(line)<block_end><except_stmt>json.decoder.JSONDecodeError<block_start>logger.warn(f"Failed to parse: {line}")<line_sep><continue><block_end>query_prefix=delim.join([pline["prev_query"] pline["prefix"]])<line_sep>kw=pline["next_query"]<line_sep>freq=1<line_sep>data.append(freq)<line_sep>rows.append(g_r2i[query_prefix])<line_sep>cols.append(g_c2i[kw])<block_end><block_end>matrix=smat.coo_matrix((data (rows cols)) shape=(len(g_r2i) len(g_c2i)) dtype=np.float32)<line_sep><return>matrix<block_end><def_stmt>parallel_get_qp2q_sparse_data fdir compressed 
delim="<@@>" n_jobs=4<block_start>"""Process session data to sparse matrix and dictionaries mapping rows and columns. Parameters: ---------- fdir: str path to directory having all the files in json format compressed: bool files being compressed or not delim: str delimiter between query and prefix n_jobs: int number of threads to be used Returns: ------- dictionary mapping row index to row names dictionary mapping col index to col names qp2q sparse csr matrix containing freq. of occurences. """<if_stmt>compressed<block_start>extension="*.gz"<block_end><else_stmt><block_start>extension="*.json"<block_end><if_stmt>pathlib.Path(fdir).is_dir()<block_start>files=pathlib.Path(fdir).glob(extension)<block_end><else_stmt><block_start><raise>ValueError(f"{fdir} is not a valid directory")<block_end>files=[str(f)<for>f files]<line_sep>logger.info("Getting qp2q unique rows and columns from files in {}".format(fdir))<if_stmt>n_jobs<g>1<block_start><with_stmt>mp.Pool(processes=n_jobs)<as>pool<block_start>dicts=pool.starmap(_get_unique_rows_cols zip(files repeat(compressed) repeat(delim)) )<block_end><block_end><else_stmt><block_start>dicts=[_get_unique_rows_cols(file compressed delim)<for>file files]<block_end>g_r2i={}<line_sep>g_c2i={}<for_stmt>dic dicts<block_start>g_r2i.update(dic[0])<line_sep>g_c2i.update(dic[1])<block_end>g_i2r={}<line_sep>g_i2c={}<for_stmt>i,k enumerate(g_r2i.keys())<block_start>g_r2i[k]=i<line_sep>g_i2r[i]=k<block_end><for_stmt>i,k enumerate(g_c2i.keys())<block_start>g_c2i[k]=i<line_sep>g_i2c[i]=k<block_end><del_stmt>dicts<line_sep>gc.collect()<line_sep>logger.info("Number of unique rows: {}".format(len(g_r2i)))<line_sep>logger.info("Number of unique cols: {}".format(len(g_c2i)))<if_stmt>n_jobs<g>1<block_start><with_stmt>mp.Pool(processes=n_jobs initializer=_worker_init initargs=(<lambda>x:_transform_file_to_matrix_qp2q(x compressed delim g_r2i g_c2i) ) )<as>pool<block_start>matrices=pool.map(_worker 
files)<block_end><block_end><else_stmt><block_start>matrices=[_transform_file_to_matrix_qp2q(x compressed delim g_r2i g_c2i)<for>x files]<block_end>matrices=[m.tocsr()<for>m matrices]<line_sep>qp2q_matrix=matrices[0]<for_stmt>i range(1 len(matrices))<block_start>qp2q_matrix<augadd>matrices[i]<block_end><del_stmt>matrices<line_sep>gc.collect()<line_sep><return>g_i2r g_i2c qp2q_matrix<block_end>
""" Test expression command options. Test cases: o test_expr_options: Test expression command options. """<import_from_future_stmt> print_function<import_stmt>os<import_stmt>time<import_stmt>lldb<import_stmt>lldbsuite.test.lldbutil<as>lldbutil<import_from_stmt>lldbsuite.test.lldbtest *<class_stmt>ExprOptionsTestCase(TestBase)<block_start>mydir=TestBase.compute_mydir(__file__)<def_stmt>setUp self# Call super's setUp(). <block_start>TestBase.setUp(self)<line_sep>self.main_source="main.cpp"<line_sep>self.main_source_spec=lldb.SBFileSpec(self.main_source)<line_sep>self.line=line_number('main.cpp' '// breakpoint_in_main')<line_sep>self.exe=os.path.join(os.getcwd() "a.out")<block_end><def_stmt>test_expr_options self<block_start>"""These expression command options should work as expected."""<line_sep>self.build()<line_sep># Set debugger into synchronous mode self.dbg.SetAsync(<false>)<line_sep># Create a target by the debugger. target=self.dbg.CreateTarget(self.exe)<line_sep>self.assertTrue(target VALID_TARGET)<line_sep># Set breakpoints inside main. breakpoint=target.BreakpointCreateBySourceRegex('// breakpoint_in_main' self.main_source_spec)<line_sep>self.assertTrue(breakpoint)<line_sep># Now launch the process, and do not stop at entry point. process=target.LaunchSimple(<none> <none> self.get_process_working_directory())<line_sep>self.assertTrue(process PROCESS_IS_VALID)<line_sep>threads=lldbutil.get_threads_stopped_at_breakpoint(process breakpoint)<line_sep>self.assertEqual(len(threads) 1)<line_sep>frame=threads[0].GetFrameAtIndex(0)<line_sep>options=lldb.SBExpressionOptions()<line_sep># test --language on C++ expression using the SB API's # Make sure we can evaluate a C++11 expression. 
val=frame.EvaluateExpression('foo != nullptr')<line_sep>self.assertTrue(val.IsValid())<line_sep>self.assertTrue(val.GetError().Success())<line_sep>self.DebugSBValue(val)<line_sep># Make sure it still works if language is set to C++11: options.SetLanguage(lldb.eLanguageTypeC_plus_plus_11)<line_sep>val=frame.EvaluateExpression('foo != nullptr' options)<line_sep>self.assertTrue(val.IsValid())<line_sep>self.assertTrue(val.GetError().Success())<line_sep>self.DebugSBValue(val)<line_sep># Make sure it fails if language is set to C: options.SetLanguage(lldb.eLanguageTypeC)<line_sep>val=frame.EvaluateExpression('foo != nullptr' options)<line_sep>self.assertTrue(val.IsValid())<line_sep>self.assertFalse(val.GetError().Success())<block_end><block_end>
# coding=utf-8 """ Logical permission backends module """<import_from_stmt>permission.conf settings<import_from_stmt>permission.utils.handlers registry<import_from_stmt>permission.utils.permissions perm_to_permission<line_sep>__all__=('PermissionBackend' )<class_stmt>PermissionBackend(object)<block_start>""" A handler based permission backend """<line_sep>supports_object_permissions=<true><line_sep>supports_anonymous_user=<true><line_sep>supports_inactive_user=<true><line_sep># pylint:disable=unused-argument <def_stmt>authenticate self username password<block_start>""" Always return ``None`` to prevent authentication within this backend. """<line_sep><return><none><block_end><def_stmt>has_perm self user_obj perm obj=<none><block_start>""" Check if user have permission (of object) based on registered handlers. It will raise ``ObjectDoesNotExist`` exception when the specified string permission does not exist and ``PERMISSION_CHECK_PERMISSION_PRESENCE`` is ``True`` in ``settings`` module. Parameters ---------- user_obj : django user model instance A django user model instance which be checked perm : string `app_label.codename` formatted permission string obj : None or django model instance None or django model instance for object permission Returns ------- boolean Whether the specified user have specified permission (of specified object). Raises ------ django.core.exceptions.ObjectDoesNotExist If the specified string permission does not exist and ``PERMISSION_CHECK_PERMISSION_PRESENCE`` is ``True`` in ``settings`` module. 
"""<if_stmt>settings.PERMISSION_CHECK_PERMISSION_PRESENCE# get permission instance from string permission (perm) # it raise ObjectDoesNotExists when the permission is not exists <block_start><try_stmt><block_start>perm_to_permission(perm)<block_end><except_stmt>AttributeError# Django 1.2 internally use wrong permission string thus ignore <block_start><pass><block_end><block_end># get permission handlers fot this perm cache_name='_%s_cache'%perm<if_stmt>hasattr(self cache_name)<block_start>handlers=getattr(self cache_name)<block_end><else_stmt><block_start>handlers=[h<for>h registry.get_handlers()<if>perm<in>h.get_supported_permissions()]<line_sep>setattr(self cache_name handlers)<block_end><for_stmt>handler handlers<block_start><if_stmt>handler.has_perm(user_obj perm obj=obj)<block_start><return><true><block_end><block_end><return><false><block_end><def_stmt>has_module_perms self user_obj app_label<block_start>""" Check if user have permission of specified app based on registered handlers. It will raise ``ObjectDoesNotExist`` exception when the specified string permission does not exist and ``PERMISSION_CHECK_PERMISSION_PRESENCE`` is ``True`` in ``settings`` module. Parameters ---------- user_obj : django user model instance A django user model instance which is checked app_label : string `app_label.codename` formatted permission string Returns ------- boolean Whether the specified user have specified permission. Raises ------ django.core.exceptions.ObjectDoesNotExist If the specified string permission does not exist and ``PERMISSION_CHECK_PERMISSION_PRESENCE`` is ``True`` in ``settings`` module. 
"""<line_sep># get permission handlers fot this perm cache_name='_%s_cache'%app_label<if_stmt>hasattr(self cache_name)<block_start>handlers=getattr(self cache_name)<block_end><else_stmt><block_start>handlers=[h<for>h registry.get_handlers()<if>app_label<in>h.get_supported_app_labels()]<line_sep>setattr(self cache_name handlers)<block_end><for_stmt>handler handlers<block_start><if_stmt>handler.has_module_perms(user_obj app_label)<block_start><return><true><block_end><block_end><return><false><block_end><block_end>
''' Find the largest element and place that element at the bottom of the list. Repeat for each sub-array. O(n^2) time complexity. '''<import_from_stmt>string ascii_letters<line_sep>arrays=([12 3 7 22 -12 100 1] [10 9 8 7 6 5 4 3 2 1 0] [4 1 3 9 7] [0 -1.5 1.5 1.3 -1.3 -1.01 1.01] list(reversed(ascii_letters)) )<def_stmt>selection_sort arr<block_start>""" >>> all(selection_sort(arr) or arr == sorted(arr) for arr in arrays) True """<for_stmt>i range(len(arr)-1 0 -1)<block_start>k=0<for_stmt>j range(1 i+1)<block_start><if_stmt>arr[j]<g>arr[k]<block_start>k=j<block_end><block_end>arr[i],arr[k]=arr[k] arr[i]<block_end><block_end># swap <if_stmt>__name__<eq>"__main__"<block_start><for_stmt>arr arrays<block_start>selection_sort(arr)<line_sep>print("Sorted array: ")<for_stmt>ele arr# type: ignore <block_start>print(f"\t{ele}")<block_end><block_end><block_end>
<import_stmt>os<import_stmt>shutil<import_stmt>subprocess<import_from_stmt>distutils.dir_util copy_tree<import_from_stmt>shutil copyfile<import_from_stmt>typing List Optional<import_stmt>click<import_stmt>git<import_from_stmt>omegaconf DictConfig<def_stmt>copy_objects target_dir:os.PathLike objects_to_copy:List[os.PathLike]<block_start><for_stmt>src_path objects_to_copy<block_start>trg_path=os.path.join(target_dir os.path.basename(src_path))<if_stmt>os.path.islink(src_path)<block_start>os.symlink(os.readlink(src_path) trg_path)<block_end><elif_stmt>os.path.isfile(src_path)<block_start>copyfile(src_path trg_path)<block_end><elif_stmt>os.path.isdir(src_path)<block_start>copy_tree(src_path trg_path)<block_end><else_stmt><block_start><raise>NotImplementedError(f"Unknown object type: {src_path}")<block_end><block_end><block_end><def_stmt>create_symlinks target_dir:os.PathLike symlinks_to_create:List[os.PathLike]<block_start>""" Creates symlinks to the given paths """<for_stmt>src_path symlinks_to_create<block_start>trg_path=os.path.join(target_dir os.path.basename(src_path))<if_stmt>os.path.islink(src_path)# Let's not create symlinks to symlinks # Since dropping the current symlink will break the experiment <block_start>os.symlink(os.readlink(src_path) trg_path)<block_end><else_stmt><block_start>print(f'Creating a symlink to {src_path}, so try not to delete it occasionally!')<line_sep>os.symlink(src_path trg_path)<block_end><block_end><block_end><def_stmt>is_git_repo path:os.PathLike<block_start><try_stmt><block_start>_=git.Repo(path).git_dir<line_sep><return><true><block_end><except_stmt>git.exc.InvalidGitRepositoryError<block_start><return><false><block_end><block_end><def_stmt>create_project_dir project_dir:os.PathLike objects_to_copy:List[os.PathLike] symlinks_to_create:List[os.PathLike]<block_start><if_stmt>is_git_repo(os.getcwd())<and>are_there_uncommitted_changes()<block_start><if_stmt>click.confirm("There are uncommited changes. Continue?" 
default=<false>)<block_start>print('Ok...')<block_end><else_stmt><block_start><raise>PermissionError("Cannot created a dir when there are uncommited changes")<block_end><block_end><if_stmt>os.path.exists(project_dir)<block_start><if_stmt>click.confirm(f'Dir {project_dir} already exists. Remove it?' default=<false>)<block_start>shutil.rmtree(project_dir)<block_end><else_stmt><block_start>print('User refused to delete an existing project dir.')<line_sep><raise>PermissionError("There is an existing dir and I cannot delete it.")<block_end><block_end>os.makedirs(project_dir)<line_sep>copy_objects(project_dir objects_to_copy)<line_sep>create_symlinks(project_dir symlinks_to_create)<line_sep>print(f'Created a project dir: {project_dir}')<block_end><def_stmt>get_git_hash <arrow>Optional[str]<block_start><if_stmt><not>is_git_repo(os.getcwd())<block_start><return><none><block_end><try_stmt><block_start><return>subprocess.check_output(['git' 'rev-parse' '--short' 'HEAD']).decode("utf-8").strip()<block_end><except_stmt><block_start><return><none><block_end><block_end><def_stmt>get_experiment_path master_dir:os.PathLike experiment_name:str<arrow>os.PathLike<block_start><return>os.path.join(master_dir f"{experiment_name}-{get_git_hash()}")<block_end><def_stmt>get_git_hash_suffix <arrow>str<block_start>git_hash:Optional[str]=get_git_hash()<line_sep>git_hash_suffix=""<if>git_hash<is><none><else>f"-{git_hash}"<line_sep><return>git_hash_suffix<block_end><def_stmt>are_there_uncommitted_changes <arrow>bool<block_start><return>len(subprocess.check_output('git status -s'.split()).decode("utf-8"))<g>0<block_end><def_stmt>cfg_to_args_str cfg:DictConfig use_dashes=<true><arrow>str<block_start>dashes='--'<if>use_dashes<else>''<line_sep><return>' '.join([f'{dashes}{p}={cfg[p]}'<for>p cfg])<block_end>
<import_from_stmt>....utils.byte_io_mdl ByteIO<import_from_stmt>....shared.base Base<line_sep>
# The following comments couldn't be translated into the new config version: #------------------------------------------------ #AlCaReco filtering for phi symmetry calibration: #------------------------------------------------ # # Passes events that are coming from the online phi-symmetry stream # # <import_stmt>FWCore.ParameterSet.Config<as>cms<import_stmt>HLTrigger.HLTfilters.hltHighLevel_cfi<line_sep>ecalphiSymHLT=HLTrigger.HLTfilters.hltHighLevel_cfi.hltHighLevel.clone(HLTPaths=['AlCa_EcalPhiSym*'] andOr=<true> throw=<false>)<line_sep>
<import_from_stmt>.response BoltResponse<line_sep>
<import_from_future_stmt> division<line_sep>""" critical properties of diffBragg objects which should be logged for reproducibility """<line_sep># TODO : implement a savestate and getstate for these objects # attrs of diffBragg() instances DIFFBRAGG_ATTRS=['Amatrix' 'Bmatrix' 'Ncells_abc' 'Ncells_abc_aniso' 'Ncells_def' 'Npix_to_allocate' 'Omatrix' 'Umatrix' 'beamsize_mm' 'compute_curvatures' 'default_F' 'detector_thick_mm' 'detector_thickstep_mm' 'detector_thicksteps' 'detector_twotheta_deg' 'device_Id' 'diffuse_gamma' 'diffuse_sigma' 'exposure_s' 'fluence' 'flux' 'has_anisotropic_mosaic_spread' 'interpolate' 'isotropic_ncells' 'lambda_coefficients' 'mosaic_domains' 'mosaic_spread_deg' 'no_Nabc_scale' 'nopolar' 'only_diffuse' 'only_save_omega_kahn' 'oversample' 'oversample_omega' 'phi_deg' 'phistep_deg' 'phisteps' 'point_pixel' 'polar_vector' 'polarization' 'spindle_axis' 'spot_scale' 'twotheta_axis' 'unit_cell_Adeg' 'unit_cell_tuple' 'use_diffuse' 'use_lambda_coefficients']<line_sep># properties of nanoBragg_crystal.NBcryst instances NB_CRYST_ATTRS=['anisotropic_mos_spread_deg' 'isotropic_ncells' 'miller_is_complex' 'mos_spread_deg' 'n_mos_domains' 'symbol' 'xtal_shape']<line_sep># properties of nanoBragg_beam.NBbeam instances NB_BEAM_ATTRS=['divergence' 'polarization_fraction' 'size_mm' 'number_of_sources' 'unit_s0']<line_sep>
<import_from_stmt>allennlp.data.fields Field<def_stmt>test_eq_with_inheritance <block_start><class_stmt>SubField(Field)<block_start>__slots__=["a"]<def_stmt>__init__ self a<block_start>self.a=a<block_end><block_end><class_stmt>SubSubField(SubField)<block_start>__slots__=["b"]<def_stmt>__init__ self a b<block_start>super().__init__(a)<line_sep>self.b=b<block_end><block_end><class_stmt>SubSubSubField(SubSubField)<block_start>__slots__=["c"]<def_stmt>__init__ self a b c<block_start>super().__init__(a b)<line_sep>self.c=c<block_end><block_end><assert_stmt>SubField(1)<eq>SubField(1)<assert_stmt>SubField(1)<ne>SubField(2)<assert_stmt>SubSubField(1 2)<eq>SubSubField(1 2)<assert_stmt>SubSubField(1 2)<ne>SubSubField(1 1)<assert_stmt>SubSubField(1 2)<ne>SubSubField(2 2)<assert_stmt>SubSubSubField(1 2 3)<eq>SubSubSubField(1 2 3)<assert_stmt>SubSubSubField(1 2 3)<ne>SubSubSubField(0 2 3)<block_end><def_stmt>test_eq_with_inheritance_for_non_slots_field <block_start><class_stmt>SubField(Field)<block_start><def_stmt>__init__ self a<block_start>self.a=a<block_end><block_end><assert_stmt>SubField(1)<eq>SubField(1)<assert_stmt>SubField(1)<ne>SubField(2)<block_end><def_stmt>test_eq_with_inheritance_for_mixed_field <block_start><class_stmt>SubField(Field)<block_start>__slots__=["a"]<def_stmt>__init__ self a<block_start>self.a=a<block_end><block_end><class_stmt>SubSubField(SubField)<block_start><def_stmt>__init__ self a b<block_start>super().__init__(a)<line_sep>self.b=b<block_end><block_end><assert_stmt>SubField(1)<eq>SubField(1)<assert_stmt>SubField(1)<ne>SubField(2)<assert_stmt>SubSubField(1 2)<eq>SubSubField(1 2)<assert_stmt>SubSubField(1 2)<ne>SubSubField(1 1)<assert_stmt>SubSubField(1 2)<ne>SubSubField(2 2)<block_end>
# Leo colorizer control file for kivy mode. # This file is in the public domain. # Properties for kivy mode. properties={"ignoreWhitespace":"false" "lineComment":"#" }<line_sep># Attributes dict for kivy_main ruleset. kivy_main_attributes_dict={"default":"null" "digit_re":"" "escape":"" "highlight_digits":"true" "ignore_case":"true" "no_word_sep":"" }<line_sep># Dictionary of attributes dictionaries for kivy mode. attributesDictDict={"kivy_main":kivy_main_attributes_dict }<line_sep># Keywords dict for kivy_main ruleset. kivy_main_keywords_dict={"app":"keyword2" "args":"keyword2" "canvas":"keyword1" "id":"keyword1" "root":"keyword2" "self":"keyword2" "size":"keyword1" "text":"keyword1" "x":"keyword1" "y":"keyword1" }<line_sep># Dictionary of keywords dictionaries for kivy mode. keywordsDictDict={"kivy_main":kivy_main_keywords_dict }<line_sep># Rules for kivy_main ruleset. <def_stmt>kivy_rule0 colorer s i<block_start><return>colorer.match_eol_span(s i kind="comment1" seq="#" at_line_start=<false> at_whitespace_end=<false> at_word_start=<false> delegate="" exclude_match=<false>)<block_end><def_stmt>kivy_rule1 colorer s i<block_start><return>colorer.match_span(s i kind="literal1" begin="\"" end="\"" at_line_start=<false> at_whitespace_end=<false> at_word_start=<false> delegate="kivy::literal_one" exclude_match=<false> no_escape=<false> no_line_break=<false> no_word_break=<false>)<block_end><def_stmt>kivy_rule2 colorer s i<block_start><return>colorer.match_keywords(s i)<block_end># Rules dict for kivy_main ruleset. 
rulesDict1={"\"":[kivy_rule1 ] "#":[kivy_rule0 ] "0":[kivy_rule2 ] "1":[kivy_rule2 ] "2":[kivy_rule2 ] "3":[kivy_rule2 ] "4":[kivy_rule2 ] "5":[kivy_rule2 ] "6":[kivy_rule2 ] "7":[kivy_rule2 ] "8":[kivy_rule2 ] "9":[kivy_rule2 ] "@":[kivy_rule2 ] "A":[kivy_rule2 ] "B":[kivy_rule2 ] "C":[kivy_rule2 ] "D":[kivy_rule2 ] "E":[kivy_rule2 ] "F":[kivy_rule2 ] "G":[kivy_rule2 ] "H":[kivy_rule2 ] "I":[kivy_rule2 ] "J":[kivy_rule2 ] "K":[kivy_rule2 ] "L":[kivy_rule2 ] "M":[kivy_rule2 ] "N":[kivy_rule2 ] "O":[kivy_rule2 ] "P":[kivy_rule2 ] "Q":[kivy_rule2 ] "R":[kivy_rule2 ] "S":[kivy_rule2 ] "T":[kivy_rule2 ] "U":[kivy_rule2 ] "V":[kivy_rule2 ] "W":[kivy_rule2 ] "X":[kivy_rule2 ] "Y":[kivy_rule2 ] "Z":[kivy_rule2 ] "a":[kivy_rule2 ] "b":[kivy_rule2 ] "c":[kivy_rule2 ] "d":[kivy_rule2 ] "e":[kivy_rule2 ] "f":[kivy_rule2 ] "g":[kivy_rule2 ] "h":[kivy_rule2 ] "i":[kivy_rule2 ] "j":[kivy_rule2 ] "k":[kivy_rule2 ] "l":[kivy_rule2 ] "m":[kivy_rule2 ] "n":[kivy_rule2 ] "o":[kivy_rule2 ] "p":[kivy_rule2 ] "q":[kivy_rule2 ] "r":[kivy_rule2 ] "s":[kivy_rule2 ] "t":[kivy_rule2 ] "u":[kivy_rule2 ] "v":[kivy_rule2 ] "w":[kivy_rule2 ] "x":[kivy_rule2 ] "y":[kivy_rule2 ] "z":[kivy_rule2 ] }<line_sep># x.rulesDictDict for kivy mode. rulesDictDict={"kivy_main":rulesDict1 }<line_sep># Import dict for kivy mode. importDict={}<line_sep>
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A module for the info implementation of Command."""<import_from_future_stmt> print_function<import_stmt>cr<class_stmt>InfoCommand(cr.Command)<block_start>"""The cr info command implementation."""<def_stmt>__init__ self<block_start>super(InfoCommand self).__init__()<line_sep>self.help='Print information about the cr environment'<block_end><def_stmt>AddArguments self subparsers<block_start>parser=super(InfoCommand self).AddArguments(subparsers)<line_sep>parser.add_argument('-s' '--short' dest='_short' action='store_true' default=<false> help='Short form results, useful for scripting.')<line_sep>self.ConsumeArgs(parser 'the environment')<line_sep><return>parser<block_end><def_stmt>EarlyArgProcessing self<block_start><if_stmt>getattr(cr.context.args '_short' <false>)<block_start>self.requires_build_dir=<false><block_end>cr.Command.EarlyArgProcessing(self)<block_end><def_stmt>Run self<block_start><if_stmt>cr.context.remains<block_start><for_stmt>var cr.context.remains<block_start><if_stmt>getattr(cr.context.args '_short' <false>)<block_start>val=cr.context.Find(var)<if_stmt>val<is><none><block_start>val=''<block_end>print(val)<block_end><else_stmt><block_start>print(var '=' cr.context.Find(var))<block_end><block_end><block_end><else_stmt><block_start>cr.base.client.PrintInfo()<block_end><block_end><block_end>
<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_from_future_stmt> unicode_literals<import_from_stmt>enum Enum<class_stmt>Type(Enum)<block_start>STRING='string'<line_sep>NUMBER='number'<line_sep>BOOLEAN='boolean'<line_sep>DATE='date'<line_sep>DATETIME='datetime'<line_sep>TIMEOFDAY='timeofday'<block_end>
'''OpenGL extension ARB.shader_clock This module customises the behaviour of the OpenGL.raw.GL.ARB.shader_clock to provide a more Python-friendly API Overview (from the spec) This extension exposes a 64-bit monotonically incrementing shader counter which may be used to derive local timing information within a single shader invocation. The official definition of this extension is available here: http://www.opengl.org/registry/specs/ARB/shader_clock.txt '''<import_from_stmt>OpenGL platform constant arrays<import_from_stmt>OpenGL extensions wrapper<import_stmt>ctypes<import_from_stmt>OpenGL.raw.GL _types _glgets<import_from_stmt>OpenGL.raw.GL.ARB.shader_clock *<import_from_stmt>OpenGL.raw.GL.ARB.shader_clock _EXTENSION_NAME<def_stmt>glInitShaderClockARB <block_start>'''Return boolean indicating whether this extension is available'''<import_from_stmt>OpenGL extensions<line_sep><return>extensions.hasGLExtension(_EXTENSION_NAME)<block_end>### END AUTOGENERATED SECTION
# Copyright (c) 2018 <NAME>, <NAME>, <NAME>, <NAME>, # <NAME>, <NAME>, <NAME>, <NAME>. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # ============================================================================= """ A few-shot classification model implementation that refines on unlabled refinement images. Author: <NAME> (<EMAIL>) A single episode is divided into three parts: 1) Labeled reference images (self.x_ref). 2) Unlabeled refinement images (self.x_unlabel). 3) Labeled query images (from validation) (self.x_candidate). """<import_from_future_stmt> absolute_import division print_function unicode_literals <import_stmt>numpy<as>np<import_stmt>tensorflow<as>tf<import_from_stmt>fewshot.models.nnlib cnn weight_variable concat<import_from_stmt>fewshot.models.basic_model BasicModel<import_from_stmt>fewshot.utils logger<line_sep>log=logger.get()<line_sep># Load up the LSTM cell implementation. 
<if_stmt>tf.__version__.startswith("0")<block_start>BasicLSTMCell=tf.nn.rnn_cell.BasicLSTMCell<line_sep>LSTMStateTuple=tf.nn.rnn_cell.LSTMStateTuple<block_end><else_stmt><block_start>BasicLSTMCell=tf.contrib.rnn.BasicLSTMCell<line_sep>LSTMStateTuple=tf.contrib.rnn.LSTMStateTuple<block_end><class_stmt>RefineModel(BasicModel)<block_start>"""A retrieval model with an additional refinement stage."""<def_stmt>__init__ self config nway=1 nshot=1 num_unlabel=10 candidate_size=10 is_training=<true> dtype=tf.float32<block_start>"""Initiliazer. Args: config: Model configuration object. nway: Int. Number of classes in the reference images. nshot: Int. Number of labeled reference images. num_unlabel: Int. Number of unlabeled refinement images. candidate_size: Int. Number of candidates in the query stage. is_training: Bool. Whether is in training mode. dtype: TensorFlow data type. """<line_sep>self._num_unlabel=num_unlabel<line_sep>self._x_unlabel=tf.placeholder(dtype [<none> <none> config.height config.width config.num_channel] name="x_unlabel")<line_sep>self._y_unlabel=tf.placeholder(dtype [<none> <none>] name="y_unlabel")<line_sep>super(RefineModel self).__init__(config nway=nway nshot=nshot num_test=candidate_size is_training=is_training dtype=dtype)<block_end>@property<def_stmt>x_unlabel self<block_start><return>self._x_unlabel<block_end>@property<def_stmt>y_unlabel self<block_start><return>self._y_unlabel<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start><import_from_stmt>fewshot.configs.omniglot_config OmniglotRefineConfig<line_sep>model=RefineModel(OmniglotRefineConfig())<block_end>
"""Exceptions Table Revision ID: 6245d75fa12 Revises: <PASSWORD> Create Date: 2016-08-16 11:35:38.575026 """<line_sep># revision identifiers, used by Alembic. revision='6245d75fa12'<line_sep>down_revision='e0a6af364a3f'<import_from_stmt>alembic op<import_stmt>sqlalchemy<as>sa<def_stmt>upgrade ### commands auto generated by Alembic - please adjust! ### <block_start>op.create_table('exceptions' sa.Column('id' sa.BigInteger() nullable=<false>) sa.Column('source' sa.String(length=256) nullable=<false>) sa.Column('occurred' sa.DateTime() nullable=<false>) sa.Column('ttl' sa.DateTime() nullable=<false>) sa.Column('type' sa.String(length=256) nullable=<false>) sa.Column('message' sa.String(length=512) nullable=<true>) sa.Column('stacktrace' sa.Text() nullable=<true>) sa.Column('region' sa.String(length=32) nullable=<true>) sa.Column('tech_id' sa.Integer() nullable=<true>) sa.Column('item_id' sa.Integer() nullable=<true>) sa.Column('account_id' sa.Integer() nullable=<true>) sa.ForeignKeyConstraint(['account_id'] ['account.id'] ) sa.ForeignKeyConstraint(['item_id'] ['item.id'] ) sa.ForeignKeyConstraint(['tech_id'] ['technology.id'] ) sa.PrimaryKeyConstraint('id'))<line_sep>op.create_index('ix_exceptions_account_id' 'exceptions' ['account_id'] unique=<false>)<line_sep>op.create_index('ix_exceptions_item_id' 'exceptions' ['item_id'] unique=<false>)<line_sep>op.create_index('ix_exceptions_region' 'exceptions' ['region'] unique=<false>)<line_sep>op.create_index('ix_exceptions_source' 'exceptions' ['source'] unique=<false>)<line_sep>op.create_index('ix_exceptions_tech_id' 'exceptions' ['tech_id'] unique=<false>)<line_sep>op.create_index('ix_exceptions_type' 'exceptions' ['type'] unique=<false>)<line_sep>### end Alembic commands ### <block_end><def_stmt>downgrade ### commands auto generated by Alembic - please adjust! 
### <block_start>op.drop_index('ix_exceptions_type' table_name='exceptions')<line_sep>op.drop_index('ix_exceptions_tech_id' table_name='exceptions')<line_sep>op.drop_index('ix_exceptions_source' table_name='exceptions')<line_sep>op.drop_index('ix_exceptions_region' table_name='exceptions')<line_sep>op.drop_index('ix_exceptions_item_id' table_name='exceptions')<line_sep>op.drop_index('ix_exceptions_account_id' table_name='exceptions')<line_sep>op.drop_table('exceptions')<line_sep>### end Alembic commands ### <block_end>
<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<import_stmt>tensorflow<as>tf<line_sep>tf.logging.set_verbosity(tf.logging.ERROR)<def_stmt>generator_fn noise weight_decay=2.5e-5 is_training=<true><block_start>layers=tf.contrib.layers<line_sep>framework=tf.contrib.framework<line_sep>f1=framework.arg_scope([layers.fully_connected layers.conv2d_transpose] activation_fn=tf.nn.relu normalizer_fn=layers.batch_norm weights_regularizer=layers.l2_regularizer(weight_decay))<line_sep>f2=framework.arg_scope([layers.batch_norm] is_training=is_training zero_debias_moving_mean=<true>)<with_stmt>f1 f2<block_start>net=layers.fully_connected(noise 1024)<line_sep>net=layers.fully_connected(net 7<times>7<times>256)<line_sep>net=tf.reshape(net [-1 7 7 256])<line_sep>net=layers.conv2d_transpose(net 64 [4 4] stride=2)<line_sep>net=layers.conv2d_transpose(net 32 [4 4] stride=2)<line_sep>net=layers.conv2d(net 1 4 activation_fn=tf.tanh)<line_sep><return>net<block_end><block_end><def_stmt>discriminator_fn img _ weight_decay=2.5e-5 is_training=<true><block_start>layers=tf.contrib.layers<line_sep>framework=tf.contrib.framework<with_stmt>framework.arg_scope([layers.conv2d layers.fully_connected] activation_fn=(<lambda>n:tf.nn.leaky_relu(n alpha=0.01)) weights_regularizer=layers.l2_regularizer(weight_decay) biases_regularizer=layers.l2_regularizer(weight_decay))<block_start>net=layers.conv2d(img 64 [4 4] stride=2)<line_sep>net=layers.conv2d(net 128 [4 4] stride=2)<line_sep>net=layers.flatten(net)<with_stmt>framework.arg_scope([layers.batch_norm] is_training=is_training)<block_start>net=layers.fully_connected(net 1024 normalizer_fn=layers.batch_norm)<block_end><return>layers.linear(net 1)<block_end><block_end><def_stmt>provide_data source batch_size<block_start>slim=tf.contrib.slim<line_sep>keys_to_features={'image/encoded':tf.FixedLenFeature(() tf.string default_value='') 'image/format':tf.FixedLenFeature(() tf.string default_value='raw') }<line_sep>datanum=sum(1<for>_ 
tf.python_io.tf_record_iterator(source))<line_sep>items_to_handlers={'image':slim.tfexample_decoder.Image(shape=[28 28 1] channels=1) }<line_sep>decoder=slim.tfexample_decoder.TFExampleDecoder(keys_to_features items_to_handlers)<line_sep>dataprovider=slim.dataset_data_provider.DatasetDataProvider<line_sep>reader=tf.TFRecordReader<line_sep>dataset=slim.dataset.Dataset(source reader decoder datanum <none>)<line_sep>provider=dataprovider(dataset shuffle=<true>)<line_sep>image,=provider.get(['image'])<line_sep>image=(tf.cast(image tf.float32)-128.0)/128.0<line_sep>images=tf.train.batch([image] batch_size=batch_size)<line_sep><return>images<block_end><def_stmt>run_gan TRAIN_DATA TOTAL_STEPS=400<block_start>BATCH_SIZE=32<line_sep>TOTAL_STEPS<augadd>1<line_sep>tfgan=tf.contrib.gan<line_sep>tf.reset_default_graph()<with_stmt>tf.device('/cpu:0')<block_start>real_images=provide_data(TRAIN_DATA BATCH_SIZE)<block_end>gan_model=tfgan.gan_model(generator_fn discriminator_fn real_data=real_images generator_inputs=tf.random_normal([BATCH_SIZE 64]))<line_sep>improved_wgan_loss=tfgan.gan_loss(gan_model generator_loss_fn=tfgan.losses.wasserstein_generator_loss discriminator_loss_fn=tfgan.losses.wasserstein_discriminator_loss gradient_penalty_weight=1.0)<line_sep>generator_optimizer=tf.train.AdamOptimizer(0.001 beta1=0.5)<line_sep>discriminator_optimizer=tf.train.AdamOptimizer(0.0001 beta1=0.5)<line_sep>gan_train_ops=tfgan.gan_train_ops(gan_model improved_wgan_loss generator_optimizer discriminator_optimizer)<with_stmt>tf.variable_scope('Generator' reuse=<true>)<block_start>eval_images=gan_model.generator_fn(tf.random_normal([500 64]) is_training=<false>)<block_end>visualizer=tfgan.eval.image_reshaper(eval_images[:20 <ellipsis>] num_cols=10)<line_sep>train_step_fn=tfgan.get_sequential_train_steps()<line_sep>global_step=tf.train.get_or_create_global_step()<line_sep>INTERVAL=25<with_stmt>tf.train.SingularMonitoredSession()<as>sess<block_start><for_stmt>i 
range(TOTAL_STEPS)<block_start>train_step_fn(sess gan_train_ops global_step train_step_kwargs={})<if_stmt>i%INTERVAL<eq>0<block_start>digits_np=sess.run([visualizer])<line_sep>plt.axis('off')<line_sep>plt.imshow(np.squeeze(digits_np) cmap='gray')<line_sep>plt.show()<block_end><block_end><block_end><block_end>#filename = "mnist.tfrecord" #filename = "hiragana.tfrecord" # run_gan(filename)
<import_stmt>torch<import_stmt>utility<import_stmt>data<import_stmt>model<import_stmt>loss<import_from_stmt>option args<import_from_stmt>trainer Trainer<def_stmt>print_network net<block_start>num_params=0<for_stmt>param net.parameters()<block_start>num_params<augadd>param.numel()<block_end>print(net)<line_sep>print('Total number of parameters: %d'%num_params)<block_end><def_stmt>print_setting net args<block_start>print('init this train:')<line_sep>print_network(net)<line_sep>print('training model:' args.model)<line_sep>print('scale:' args.scale)<line_sep>print('resume from ' args.resume)<line_sep>print('output patch size' args.patch_size)<line_sep>print('model setting: n_resblocks:' args.n_resblocks 'n_feats:' args.n_feats 'block_feats:' args.block_feats)<line_sep>print('optimization setting: ' args.optimizer)<line_sep>print('total epochs:' args.epochs)<line_sep>print('lr:' args.lr 'lr_decay at:' args.decay_type 'decay gamma:' args.gamma)<line_sep>print('train loss:' args.loss)<line_sep>print('save_name:' args.save)<block_end>torch.manual_seed(args.seed)<line_sep>checkpoint=utility.checkpoint(args)<if_stmt>checkpoint.ok<block_start>loader=data.Data(args)<line_sep>model=model.Model(args checkpoint)<line_sep>print_setting(model args)<line_sep>loss=loss.Loss(args checkpoint)<if><not>args.test_only<else><none><line_sep>t=Trainer(args loader model loss checkpoint)<while_stmt><not>t.terminate()<block_start>t.train()<line_sep>t.test()<block_end>checkpoint.done()<block_end>
<import_stmt>numpy<as>np<import_stmt>torch<import_stmt>torch.nn<as>nn<import_from_stmt>collections OrderedDict<def_stmt>tf2th conv_weights<block_start>"""Possibly convert HWIO to OIHW."""<if_stmt>conv_weights.ndim<eq>4<block_start>conv_weights=conv_weights.transpose([3 2 0 1])<block_end><return>torch.from_numpy(conv_weights)<block_end><def_stmt>_rename_conv_weights_for_deformable_conv_layers state_dict cfg<block_start><import_stmt>re<line_sep>layer_keys=sorted(state_dict.keys())<for_stmt>ix,stage_with_dcn enumerate(cfg.MODEL.RESNETS.STAGE_WITH_DCN 1)<block_start><if_stmt><not>stage_with_dcn<block_start><continue><block_end><for_stmt>old_key layer_keys<block_start>pattern=".*block{}.*conv2.*".format(ix)<line_sep>r=re.match(pattern old_key)<if_stmt>r<is><none><block_start><continue><block_end><for_stmt>param ["weight" "bias"]<block_start><if_stmt>old_key.find(param)<is>-1<block_start><continue><block_end><if_stmt>'unit01'<in>old_key<block_start><continue><block_end>new_key=old_key.replace("conv2.{}".format(param) "conv2.conv.{}".format(param))<line_sep>print("pattern: {}, old_key: {}, new_key: {}".format(pattern old_key new_key))<line_sep># Calculate SD conv weight w=state_dict[old_key]<line_sep>v,m=torch.var_mean(w dim=[1 2 3] keepdim=<true> unbiased=<false>)<line_sep>w=(w-m)/torch.sqrt(v+1e-10)<line_sep>state_dict[new_key]=w<del_stmt>state_dict[old_key]<block_end><block_end><block_end><return>state_dict<block_end><def_stmt>load_big_format cfg f<block_start>model=OrderedDict()<line_sep>weights=np.load(f)<line_sep>cmap={'a':1 'b':2 'c':3}<for_stmt>key,val weights.items()<block_start>old_key=key.replace('resnet/' '')<if_stmt>'root_block'<in>old_key<block_start>new_key='root.conv.weight'<block_end><elif_stmt>'/proj/standardized_conv2d/kernel'<in>old_key<block_start>key_pattern=old_key.replace('/proj/standardized_conv2d/kernel' '').replace('resnet/' '')<line_sep>bname,uname,cidx=key_pattern.split('/')<line_sep>new_key='{}.downsample.{}.conv{}.weight'.format(bname uname 
cmap[cidx])<block_end><elif_stmt>'/standardized_conv2d/kernel'<in>old_key<block_start>key_pattern=old_key.replace('/standardized_conv2d/kernel' '').replace('resnet/' '')<line_sep>bname,uname,cidx=key_pattern.split('/')<line_sep>new_key='{}.{}.conv{}.weight'.format(bname uname cmap[cidx])<block_end><elif_stmt>'/group_norm/gamma'<in>old_key<block_start>key_pattern=old_key.replace('/group_norm/gamma' '').replace('resnet/' '')<line_sep>bname,uname,cidx=key_pattern.split('/')<line_sep>new_key='{}.{}.gn{}.weight'.format(bname uname cmap[cidx])<block_end><elif_stmt>'/group_norm/beta'<in>old_key<block_start>key_pattern=old_key.replace('/group_norm/beta' '').replace('resnet/' '')<line_sep>bname,uname,cidx=key_pattern.split('/')<line_sep>new_key='{}.{}.gn{}.bias'.format(bname uname cmap[cidx])<block_end><else_stmt><block_start>print('Unknown key {}'.format(old_key))<line_sep><continue><block_end>print('Map {} -> {}'.format(key new_key))<line_sep>model[new_key]=tf2th(val)<block_end>model=_rename_conv_weights_for_deformable_conv_layers(model cfg)<line_sep><return>dict(model=model)<block_end>
<import_stmt>torch<import_stmt>auraloss<line_sep>input=torch.rand(8 2 44100)<line_sep>target=torch.rand(8 2 44100)<line_sep>loss=auraloss.freq.SumAndDifferenceSTFTLoss()<line_sep>print(loss(input target))<line_sep>
<import_from_stmt>generator.actions Actions<import_stmt>random<import_stmt>string<import_stmt>struct<import_stmt>numpy<as>np<import_stmt>math<import_stmt>datetime<as>dt<import_stmt>ctypes<def_stmt>kaprica_mixin self<block_start><if_stmt>hasattr(self 'xlat_seed')<block_start><return><block_end><def_stmt>xlat_seed seed<block_start><def_stmt>hash_string seed<block_start>H=0x314abc86<for_stmt>c seed<block_start>H=(H<times>37)&0xffffffff<line_sep>H<augxor>ord(c)<line_sep>H=((H<lshift>13)^(H<rshift>19))&0xffffffff<block_end><return>H<block_end><def_stmt>hash_iterate H<block_start>H=(H<times>3)&0xffffffff<line_sep>H=((H<lshift>13)^(H<rshift>19)^(H<rshift>21))&0xffffffff<line_sep><return>H<block_end>xmap=list(xrange(256))<line_sep>xmap_inv=list(xrange(256))<line_sep>state=hash_string(seed)<for_stmt>i xrange(255 0 -1)<block_start>j=state%i<line_sep>state=hash_iterate(state)<line_sep>xmap[i],xmap[j]=xmap[j] xmap[i]<block_end><for_stmt>i xrange(256)<block_start>xmap_inv[xmap[i]]=i<block_end>self.xlat_map=xmap<line_sep>self.xlat_map_inv=xmap_inv<block_end>self.xlat_seed=xlat_seed<line_sep>self.xlat_map=<none><line_sep>self.xlat_map_inv=<none><def_stmt>xlat_string s inverse=<false><block_start><if_stmt>inverse<block_start><return>''.join([chr(self.xlat_map_inv[ord(c)])<for>c s])<block_end><return>''.join([chr(self.xlat_map[ord(c)])<for>c s])<block_end>self.xlat_string=xlat_string<def_stmt>read delim=<none> length=<none> expect=<none><block_start><if_stmt>self.xlat_map<block_start><if_stmt>delim<block_start>delim=self.xlat_string(delim)<block_end><if_stmt>expect<block_start>expect=self.xlat_string(expect)<block_end><block_end><return>self._original_read(delim=delim length=length expect=expect)<block_end>self._original_read=self.read<line_sep>self.read=read<def_stmt>write 
s<block_start><if_stmt>self.xlat_map<block_start><if_stmt>s<block_start>s=self.xlat_string(s)<block_end><block_end><return>self._original_write(s)<block_end>self._original_write=self.write<line_sep>self.write=write<block_end><def_stmt>random_word max_size=10 min_size=2<block_start><if_stmt>random.randint(0 4)<block_start>characters=string.letters+string.digits<block_end><else_stmt><block_start>characters=string.letters<block_end>max_size=max_size<if>max_size<ge>min_size<else>min_size<line_sep>max_size=random.randint(min_size max_size)<line_sep><return>("".join(random.choice(characters)<for>c range(max_size))).lower()<block_end><def_stmt>random_text max_words=10 min_words=3<block_start>max_words=max_words<if>max_words<ge>min_words<else>min_words<line_sep>text=''<for_stmt>x xrange(random.randint(min_words max_words))<block_start>text<augadd>random_word()+' '<block_end><return>text<block_end><class_stmt>Cs10Fun(Actions)<block_start><def_stmt>_read self data<block_start>self.read(length=len(data) expect=data)<block_end><def_stmt>_read_array self array=<none><block_start><if_stmt><not>array<block_start>array=self._cur_array<block_end><if_stmt><not>len(array)<block_start>self._read("Empty Array\n")<block_end><else_stmt><block_start>self._read("Array data\n")<line_sep>temp_list=[]<for_stmt>val array<block_start>temp_list.append("%d, "%val)<block_end>temp_list[-1]=temp_list[-1][:-2]<line_sep>self._read("["+''.join(temp_list)+']\n')<block_end><block_end><def_stmt>start self#self.delay(100) <block_start>kaprica_mixin(self)<line_sep>self.xlat_seed('2e0715f1709c')<line_sep>self.write(self.magic_page[:4])<line_sep>self.read(length=0x1000 expect=self.magic_page)<line_sep>self._cur_array=[]<line_sep>self._read("Welcome to CS10Fun!\n")<line_sep>self._read("Today's lesson is on sorting! 
We're going to cover 4 types of sorts.\n")<line_sep>self._read("Before we begin, create an array of ints\n")<line_sep>self.menu()<line_sep>self.enter_array()<block_end><def_stmt>menu self<block_start>self._read("Main Menu\n")<line_sep>self._read("1. Enter Array\n")<line_sep>self._read("2. Multiply Array\n")<line_sep>self._read("3. Try Insertion Sort\n")<line_sep>self._read("4. Try Selection Sort\n")<line_sep>self._read("5. Try Heap Sort\n")<line_sep>self._read("6. Try Merge Sort\n")<line_sep>self._read("7. Reprint Array\n")<line_sep>self._read("0. Exit\n")<line_sep>self._read(" -----\n")<line_sep>self._read("Current Number of items in array = %d\n"%len(self._cur_array))<line_sep>#self._read_array() self._read(" -----\n")<line_sep>self._read("Make a selection\n")<block_end><def_stmt>enter_array self<block_start>self.write('1\n')<line_sep>self._read("Enter a list of numbers to sort. End the list with ';;'\n")<line_sep>self._cur_array=[]<for_stmt>x xrange(random.randint(1 100))<block_start>self._cur_array.append(random.randint(-1000000000 1000000000))<block_end>self.write(','.join([str(x)<for>x self._cur_array])+',;;\n')<line_sep>self._read("New Array\n")<line_sep>self._read("Number of items in array = %d\n"%len(self._cur_array))<line_sep>self._read_array()<block_end><def_stmt>multiply_array self<block_start>self.write('2\n')<if_stmt>len(self._cur_array)<g>10000<block_start>self._read("Array is too long. Can't multiply any more\n")<block_end><elif_stmt>len(self._cur_array)<block_start>self._read("Quick Grow! Enter a list multiplier. 
End number with ';'\n")<line_sep>multiplier=random.randint(1 3)<while_stmt>multiplier<times>len(self._cur_array)<g>1024<and>multiplier<times>len(self._cur_array)<le>1048<block_start>multiplier=random.randint(1 3)<block_end>self.write("%d;\n"%multiplier)<line_sep>self._cur_array<augmul>multiplier<block_end>self._read("Multiplied Array\n")<line_sep>self._read("Number of items in array = %d\n"%len(self._cur_array))<line_sep>self._read_array()<block_end><def_stmt>insert_sort self<block_start>self.write('3\n')<line_sep>self._read_array(sorted(self._cur_array))<line_sep>#self.read(expect='Insertion sort takes [\d]+ operations\n', expect_format='pcre', delim='\n') self.read(delim='\n')<block_end><def_stmt>selection_sort self<block_start>self.write('4\n')<line_sep>self._read_array(sorted(self._cur_array))<line_sep>#self.read(expect='Selection sort takes [\d]+ operations\n', expect_format='pcre', delim='\n') self.read(delim='\n')<block_end><def_stmt>heap_sort self<block_start>self.write('5\n')<line_sep>self._read_array(sorted(self._cur_array))<line_sep>#self.read(expect='Heap sort takes [\d]+ operations\n', expect_format='pcre', delim='\n') self.read(delim='\n')<block_end><def_stmt>merge_sort self<block_start>self.write('6\n')<line_sep>self._read_array(sorted(self._cur_array))<line_sep>#self.read(expect='Merge sort takes [\d]+ operations\n', expect_format='pcre', delim='\n') self.read(delim='\n')<block_end><def_stmt>reprint_array self<block_start>self.write('7\n')<line_sep>self._read("Current Array\n")<line_sep>self._read("Number of items in array = %d\n"%len(self._cur_array))<line_sep>self._read_array()<block_end><def_stmt>exit self<block_start>self.write('0\n')<line_sep>self._read("Thanks for joining us\n")<line_sep>self._read("See you next time\n")<block_end><block_end>
<import_stmt>numpy<as>np<class_stmt>BatchGenerator(object)<block_start>'''Generator for returning shuffled batches. data_x -- list of input matrices data_y -- list of output matrices batch_size -- size of batch input_size -- input width output_size -- output width mini -- create subsequences for truncating backprop mini_len -- truncated backprop window'''<def_stmt>__init__ self data_x data_y batch_size input_size output_size mini=<true> mini_len=200<block_start>self.input_size=input_size<line_sep>self.output_size=output_size<line_sep>self.data_x=data_x<line_sep>self.data_y=data_y<line_sep>self.batch_size=batch_size<line_sep>self.batch_count=len(range(0 len(self.data_x) self.batch_size))<line_sep>self.batch_length=<none><line_sep>self.mini=mini<line_sep>self.mini_len=mini_len<block_end><def_stmt>batch self<block_start><while_stmt><true><block_start>idxs=np.arange(0 len(self.data_x))<line_sep>np.random.shuffle(idxs)<line_sep># np.random.shuffle(idxs) shuff_x=[]<line_sep>shuff_y=[]<for_stmt>i idxs<block_start>shuff_x.append(self.data_x[i])<line_sep>shuff_y.append(self.data_y[i])<block_end><for_stmt>batch_idx range(0 len(self.data_x) self.batch_size)<block_start>input_batch=[]<line_sep>output_batch=[]<for_stmt>j xrange(batch_idx min(batch_idx+self.batch_size len(self.data_x)) 1)<block_start>input_batch.append(shuff_x[j])<line_sep>output_batch.append(shuff_y[j])<block_end>input_batch,output_batch,seq_len=self.pad(input_batch output_batch)<line_sep><yield>input_batch output_batch seq_len<block_end><block_end><block_end><def_stmt>pad self sequence_X sequence_Y<block_start>current_batch=len(sequence_X)<line_sep>padding_X=[0]<times>self.input_size<line_sep>padding_Y=[0]<times>self.output_size<line_sep>lens=[sequence_X[i].shape[0]<for>i range(len(sequence_X))]<line_sep># lens2 = [sequence_Y[i].shape[0] for i in range(len(sequence_Y))] # max_lens=max(lens)<line_sep># max_lens2 = max(lens2) # # assert max_lens == max_lens2 # print(max_lens) <for_stmt>i,x 
enumerate(lens)<block_start>length=x<line_sep>a=list(sequence_X[i])<line_sep>b=list(sequence_Y[i])<while_stmt>length<l>max_lens<block_start>a.append(padding_X)<line_sep>b.append(padding_Y)<line_sep>length<augadd>1<block_end><if_stmt>self.mini<block_start><while_stmt>length%self.mini_len<ne>0<block_start>a.append(padding_X)<line_sep>b.append(padding_Y)<line_sep>length<augadd>1<block_end><block_end>sequence_X[i]=np.array(a)<line_sep>sequence_Y[i]=np.array(b)<line_sep># for x in minis: # mini_X.append(np.array(a[x:min(x+self.mini,x)])) # mini_Y.append(np.array(b[x:min(x+self.mini,x)])) # print sequence_X[i].shape # print sequence_Y[i].shape <block_end># assert all(x.shape == (max_lens, self.input_size) for x in sequence_X) # assert all(y.shape == (max_lens, self.output_size) for y in sequence_Y) sequence_X=np.vstack([np.expand_dims(x 1)<for>x sequence_X])<line_sep>sequence_Y=np.vstack([np.expand_dims(y 1)<for>y sequence_Y])<if_stmt><not>self.mini<block_start>mini_batches=1<line_sep>max_lens=max(lens)<block_end><else_stmt><block_start>mini_batches=length/self.mini_len<line_sep>max_lens=self.mini_len<block_end>sequence_X=np.reshape(sequence_X [current_batch<times>mini_batches max_lens self.input_size])<line_sep>sequence_Y=np.reshape(sequence_Y [current_batch<times>mini_batches max_lens self.output_size])<line_sep><return>sequence_X sequence_Y max_lens<block_end><block_end>
#@PydevCodeAnalysisIgnore # XXX This module needs cleanup. <import_from_stmt>ctypes *<line_sep>DWORD=c_ulong<line_sep>WORD=c_ushort<line_sep>BYTE=c_byte<line_sep>ULONG=c_ulong<line_sep>LONG=c_long<line_sep>LARGE_INTEGER=c_longlong<line_sep>ULARGE_INTEGER=c_ulonglong<line_sep>HANDLE=c_ulong# in the header files: void * HWND=HANDLE<line_sep>HDC=HANDLE<line_sep>HMODULE=HANDLE<line_sep>HINSTANCE=HANDLE<line_sep>HRGN=HANDLE<line_sep>HTASK=HANDLE<line_sep>HKEY=HANDLE<line_sep>HPEN=HANDLE<line_sep>HGDIOBJ=HANDLE<line_sep>HMENU=HANDLE<line_sep>LCID=DWORD<line_sep>WPARAM=c_uint<line_sep>LPARAM=c_long<line_sep>BOOL=c_long<line_sep>VARIANT_BOOL=c_short<line_sep>LPCOLESTR=LPOLESTR=OLESTR=c_wchar_p<line_sep>LPCWSTR=LPWSTR=c_wchar_p<line_sep>LPCSTR=LPSTR=c_char_p<class_stmt>RECT(Structure)<block_start>_fields_=[("left" c_long) ("top" c_long) ("right" c_long) ("bottom" c_long)]<block_end>RECTL=RECT<class_stmt>POINT(Structure)<block_start>_fields_=[("x" c_long) ("y" c_long)]<block_end>POINTL=POINT<class_stmt>SIZE(Structure)<block_start>_fields_=[("cx" c_long) ("cy" c_long)]<block_end>SIZEL=SIZE<def_stmt>RGB red green blue<block_start><return>red+(green<lshift>8)+(blue<lshift>16)<block_end><class_stmt>FILETIME(Structure)<block_start>_fields_=[("dwLowDateTime" DWORD) ("dwHighDateTime" DWORD)]<block_end><class_stmt>MSG(Structure)<block_start>_fields_=[("hWnd" HWND) ("message" c_uint) ("wParam" WPARAM) ("lParam" LPARAM) ("time" DWORD) ("pt" POINT)]<block_end>MAX_PATH=260<class_stmt>WIN32_FIND_DATAA(Structure)<block_start>_fields_=[("dwFileAttributes" DWORD) ("ftCreationTime" FILETIME) ("ftLastAccessTime" FILETIME) ("ftLastWriteTime" FILETIME) ("nFileSizeHigh" DWORD) ("nFileSizeLow" DWORD) ("dwReserved0" DWORD) ("dwReserved1" DWORD) ("cFileName" c_char<times>MAX_PATH) ("cAlternameFileName" c_char<times>14)]<block_end><class_stmt>WIN32_FIND_DATAW(Structure)<block_start>_fields_=[("dwFileAttributes" DWORD) ("ftCreationTime" FILETIME) ("ftLastAccessTime" FILETIME) ("ftLastWriteTime" 
FILETIME) ("nFileSizeHigh" DWORD) ("nFileSizeLow" DWORD) ("dwReserved0" DWORD) ("dwReserved1" DWORD) ("cFileName" c_wchar<times>MAX_PATH) ("cAlternameFileName" c_wchar<times>14)]<block_end>
# coding: utf-8 <import_from_stmt>pathlib Path<import_stmt>pytest<import_from_stmt>AxonDeepSeg.visualization.visualize visualize_training<class_stmt>TestCore(object)<block_start><def_stmt>setup self# Get the directory where this current file is saved <block_start>self.fullPath=Path(__file__).resolve().parent<line_sep># Move up to the test directory, "test/" self.testPath=self.fullPath.parent<line_sep>self.pathModel=(self.testPath/'__test_files__'/'__test_model__'/'Model')<block_end><def_stmt>teardown self<block_start><pass><block_end># --------------visualize_training tests-------------- # @pytest.mark.unit<def_stmt>test_visualize_training_runs_successfully self<block_start><assert_stmt>visualize_training(str(self.pathModel))<block_end><block_end>
# Copyright 2020 Makani Technologies LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities for checks."""<import_from_stmt>makani.analysis.checks base_check<import_from_stmt>makani.lib.python import_util<line_sep># TODO: Move this to //analysis/checks/base_check.py <def_stmt>LoadListOfChecks path_to_checks<block_start>"""Load the ListOfChecks object given the path to its file and class. Args: path_to_checks: A string specifying the location of the checks. E.g. makani.analysis.my_checks.MyCheck. Returns: The ListOfChecks object. """<line_sep>cls=import_util.ImportClass(path_to_checks)<line_sep><return>cls(for_log=<true>)<block_end><def_stmt>LoadJsonCheck path_to_check parameters_json<block_start>r"""Load the Check object given the path to its classpath and parameters. Args: path_to_check: A string specifying the location of the checks. E.g. makani.analysis.my_checks.MyCheck parameters_json: A JSON serialized string of the parameters needed to instantiate the class. E.g. "{\"for_log\": true, \"warning_ranges\": {\"ranges\": [0, 180]}, \"normal_ranges\": {\"ranges\": [80, 150]}}" Returns: The Check object. """<line_sep>cls=import_util.ImportClass(path_to_check)<line_sep>parameters=base_check.ParseCheckSpecs(parameters_json)<line_sep><return>cls(**parameters)<block_end><def_stmt>LoadCheck path_to_check params<block_start>"""Load the ListOfChecks object given the path to its file and class. Args: path_to_check: A string specifying the location of the checks. E.g. 
makani.analysis.my_checks.MyCheck. params: A string specifying parameters to be passed into the check. Returns: The CheckItem object. """<line_sep>cls=import_util.ImportClass(path_to_check)<line_sep><return>cls(**params)<block_end>
# Back compatibility -- use broad subdirectory for new code <import_from_stmt>bcbio.broad.metrics *<line_sep>
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>os<import_stmt>pickle<as>pkl<import_from_stmt>argparse ArgumentParser<import_from_stmt>collections OrderedDict<import_from_stmt>typing Dict<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>build_index load_model<import_from_stmt>omegaconf DictConfig OmegaConf<import_from_stmt>nemo.utils logging<try_stmt><block_start><import_stmt>faiss<block_end><except_stmt>ModuleNotFoundError<block_start>logging.warning("Faiss is required for building the index. 
Please install faiss-gpu")<block_end>device=torch.device('cuda'<if>torch.cuda.is_available()<else>'cpu')<def_stmt>get_query_embedding query model<block_start>"""Use entity linking encoder to get embedding for index query"""<line_sep>model_input=model.tokenizer(query add_special_tokens=<true> padding=<true> truncation=<true> max_length=512 return_token_type_ids=<true> return_attention_mask=<true> )<line_sep>query_emb=model.forward(input_ids=torch.LongTensor([model_input["input_ids"]]).to(device) token_type_ids=torch.LongTensor([model_input["token_type_ids"]]).to(device) attention_mask=torch.LongTensor([model_input["attention_mask"]]).to(device) )<line_sep><return>query_emb<block_end><def_stmt>query_index query:str cfg:DictConfig model:object index:object pca:object idx2id:dict id2string:dict <arrow>Dict<block_start>""" Query the nearest neighbor index of entities to find the concepts in the index dataset that are most similar to the query. Args: query (str): entity to look up in the index cfg (DictConfig): config object to specifiy query parameters model (EntityLinkingModel): entity linking encoder model index (object): faiss index pca (object): sklearn pca transformation to be applied to queries idx2id (dict): dictionary mapping unique concept dataset index to its CUI id2string (dict): dictionary mapping each unqiue CUI to a representative english description of the concept Returns: A dictionary with the concept ids of the index's most similar entities as the keys and a tuple containing the string representation of that concept and its cosine similarity to the query as the values. 
"""<line_sep>query_emb=get_query_embedding(query model).detach().cpu().numpy()<if_stmt>cfg.apply_pca<block_start>query_emb=pca.transform(query_emb)<block_end>dist,neighbors=index.search(query_emb.astype(np.float32) cfg.query_num_factor<times>cfg.top_n)<line_sep>dist,neighbors=dist[0] neighbors[0]<line_sep>unique_ids=OrderedDict()<line_sep>neighbor_idx=0<line_sep># Many of nearest neighbors could map to the same concept id, their idx is their unique identifier <while_stmt>len(unique_ids)<l>cfg.top_n<and>neighbor_idx<l>len(neighbors)<block_start>concept_id_idx=neighbors[neighbor_idx]<line_sep>concept_id=idx2id[concept_id_idx]<line_sep># Only want one instance of each unique concept <if_stmt>concept_id<not><in>unique_ids<block_start>concept=id2string[concept_id]<line_sep>unique_ids[concept_id]=(concept 1-dist[neighbor_idx])<block_end>neighbor_idx<augadd>1<block_end>unique_ids=dict(unique_ids)<line_sep><return>unique_ids<block_end><def_stmt>main cfg:DictConfig restore:bool<block_start>""" Loads faiss index and allows commandline queries to the index. Builds new index if one hasn't been built yet. Args: cfg: Config file specifying index parameters restore: Whether to restore model weights trained by the user. Otherwise will load weights used before self alignment pretraining. """<if_stmt><not>os.path.isfile(cfg.index.index_save_name)<or>(cfg.apply_pca<and><not>os.path.isfile(cfg.index.pca.pca_save_name)<or><not>os.path.isfile(cfg.index.idx_to_id))<block_start>logging.warning("Either no index and/or no mapping from entity idx to ids exists. 
Please run `build_index.py`")<line_sep><return><block_end>logging.info("Loading entity linking encoder model")<line_sep>model=load_model(cfg.model restore)<line_sep>logging.info("Loading index and associated files")<line_sep>index=faiss.read_index(cfg.index.index_save_name)<line_sep>idx2id=pkl.load(open(cfg.index.idx_to_id "rb"))<line_sep>id2string=pkl.load(open(cfg.index.id_to_string "rb"))# Should be created during dataset prep <if_stmt>cfg.index.apply_pca<block_start>pca=pkl.load(open(cfg.index.pca.pca_save_name "rb"))<block_end><while_stmt><true><block_start>query=input("enter index query: ")<line_sep>output=query_index(query cfg.top_n cfg.index model index pca idx2id id2string)<if_stmt>query<eq>"exit"<block_start><break><block_end><for_stmt>concept_id output<block_start>concept_details=output[concept_id]<line_sep>concept_id="C"+str(concept_id).zfill(7)<line_sep>print(concept_id concept_details)<block_end>print("----------------\n")<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>parser=ArgumentParser()<line_sep>parser.add_argument("--restore" action="store_true" help="Whether to restore encoder model weights from nemo path")<line_sep>parser.add_argument("--project_dir" required=<false> type=str default=".")<line_sep>parser.add_argument("--cfg" required=<false> type=str default="./conf/umls_medical_entity_linking_config.yaml")<line_sep>args=parser.parse_args()<line_sep>cfg=OmegaConf.load(args.cfg)<line_sep>cfg.project_dir=args.project_dir<line_sep>main(cfg args.restore)<block_end>
# Copyright (C) 2022 Intel Corporation # # SPDX-License-Identifier: MIT <import_from_stmt>drf_spectacular.extensions OpenApiFilterExtension OpenApiAuthenticationExtension<import_from_stmt>drf_spectacular.plumbing build_parameter_type<import_from_stmt>drf_spectacular.utils OpenApiParameter<line_sep># https://drf-spectacular.readthedocs.io/en/latest/customization.html?highlight=OpenApiFilterExtension#step-5-extensions <class_stmt>OrganizationFilterExtension(OpenApiFilterExtension)<block_start>"""Describe OrganizationFilterBackend filter"""<line_sep>target_class='cvat.apps.iam.filters.OrganizationFilterBackend'<line_sep>priority=1<def_stmt>get_schema_operation_parameters self auto_schema *args **kwargs<block_start>"""Describe query parameters"""<line_sep><return>[build_parameter_type(name=self.target.organization_slug required=<false> location=OpenApiParameter.QUERY description=self.target.organization_slug_description schema={'type':'string'} ) build_parameter_type(name=self.target.organization_id required=<false> location=OpenApiParameter.QUERY description=self.target.organization_id_description schema={'type':'string'} )]<block_end><block_end><class_stmt>SignatureAuthenticationScheme(OpenApiAuthenticationExtension)<block_start>target_class='cvat.apps.iam.authentication.SignatureAuthentication'<line_sep>name='SignatureAuthentication'# name used in the schema <def_stmt>get_security_definition self auto_schema<block_start><return>{'type':'apiKey' 'in':'query' 'name':'sign' }<block_end><block_end>
<import_from_stmt>decimal Decimal<import_stmt>simplejson<as>json<import_stmt>requests<import_from_stmt>.converter RatesNotAvailableError DecimalFloatMismatchError<class_stmt>BtcConverter(object)<block_start>""" Get bit coin rates and convertion """<def_stmt>__init__ self force_decimal=<false><block_start>self._force_decimal=force_decimal<block_end><def_stmt>_decode_rates self response use_decimal=<false><block_start><if_stmt>self._force_decimal<or>use_decimal<block_start>decoded_data=json.loads(response.text use_decimal=<true>)<block_end><else_stmt><block_start>decoded_data=response.json()<block_end><return>decoded_data<block_end><def_stmt>get_latest_price self currency<block_start>""" Get Lates price of one bitcoin to valid Currency 1BTC => X USD """<line_sep>url='https://api.coindesk.com/v1/bpi/currentprice/{}.json'.format(currency)<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=response.json()<line_sep>price=data.get('bpi').get(currency {}).get('rate_float' <none>)<if_stmt>self._force_decimal<block_start><return>Decimal(price)<block_end><return>price<block_end><return><none><block_end><def_stmt>get_previous_price self currency date_obj<block_start>""" Get Price for one bit coin on given date """<line_sep>start=date_obj.strftime('%Y-%m-%d')<line_sep>end=date_obj.strftime('%Y-%m-%d')<line_sep>url=('https://api.coindesk.com/v1/bpi/historical/close.json'<concat>'?start={}&end={}&currency={}'.format(start end currency))<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=response.json()<line_sep>price=data.get('bpi' {}).get(start <none>)<if_stmt>self._force_decimal<block_start><return>Decimal(price)<block_end><return>price<block_end><raise>RatesNotAvailableError("BitCoin Rates Source Not Ready For Given date")<block_end><def_stmt>get_previous_price_list self currency start_date end_date<block_start>""" Get List of prices between two dates 
"""<line_sep>start=start_date.strftime('%Y-%m-%d')<line_sep>end=end_date.strftime('%Y-%m-%d')<line_sep>url=('https://api.coindesk.com/v1/bpi/historical/close.json'<concat>'?start={}&end={}&currency={}'.format(start end currency))<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=self._decode_rates(response)<line_sep>price_dict=data.get('bpi' {})<line_sep><return>price_dict<block_end><return>{}<block_end><def_stmt>convert_to_btc self amount currency<block_start>""" Convert X amount to Bit Coins """<if_stmt>isinstance(amount Decimal)<block_start>use_decimal=<true><block_end><else_stmt><block_start>use_decimal=self._force_decimal<block_end>url='https://api.coindesk.com/v1/bpi/currentprice/{}.json'.format(currency)<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=response.json()<line_sep>price=data.get('bpi').get(currency {}).get('rate_float' <none>)<if_stmt>price<block_start><if_stmt>use_decimal<block_start>price=Decimal(price)<block_end><try_stmt><block_start>converted_btc=amount/price<line_sep><return>converted_btc<block_end><except_stmt>TypeError<block_start><raise>DecimalFloatMismatchError("convert_to_btc requires amount parameter is of type Decimal when force_decimal=True")<block_end><block_end><block_end><raise>RatesNotAvailableError("BitCoin Rates Source Not Ready For Given date")<block_end><def_stmt>convert_btc_to_cur self coins currency<block_start>""" Convert X bit coins to valid currency amount """<if_stmt>isinstance(coins Decimal)<block_start>use_decimal=<true><block_end><else_stmt><block_start>use_decimal=self._force_decimal<block_end>url='https://api.coindesk.com/v1/bpi/currentprice/{}.json'.format(currency)<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=response.json()<line_sep>price=data.get('bpi').get(currency {}).get('rate_float' 
<none>)<if_stmt>price<block_start><if_stmt>use_decimal<block_start>price=Decimal(price)<block_end><try_stmt><block_start>converted_amount=coins<times>price<line_sep><return>converted_amount<block_end><except_stmt>TypeError<block_start><raise>DecimalFloatMismatchError("convert_btc_to_cur requires coins parameter is of type Decimal when force_decimal=True")<block_end><block_end><block_end><raise>RatesNotAvailableError("BitCoin Rates Source Not Ready For Given date")<block_end><def_stmt>convert_to_btc_on self amount currency date_obj<block_start>""" Convert X amount to BTC based on given date rate """<if_stmt>isinstance(amount Decimal)<block_start>use_decimal=<true><block_end><else_stmt><block_start>use_decimal=self._force_decimal<block_end>start=date_obj.strftime('%Y-%m-%d')<line_sep>end=date_obj.strftime('%Y-%m-%d')<line_sep>url=('https://api.coindesk.com/v1/bpi/historical/close.json'<concat>'?start={}&end={}&currency={}'.format(start end currency))<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=response.json()<line_sep>price=data.get('bpi' {}).get(start <none>)<if_stmt>price<block_start><if_stmt>use_decimal<block_start>price=Decimal(price)<block_end><try_stmt><block_start>converted_btc=amount/price<line_sep><return>converted_btc<block_end><except_stmt>TypeError<block_start><raise>DecimalFloatMismatchError("convert_to_btc_on requires amount parameter is of type Decimal when force_decimal=True")<block_end><block_end><block_end><raise>RatesNotAvailableError("BitCoin Rates Source Not Ready For Given Date")<block_end><def_stmt>convert_btc_to_cur_on self coins currency date_obj<block_start>""" Convert X BTC to valid currency amount based on given date """<if_stmt>isinstance(coins 
Decimal)<block_start>use_decimal=<true><block_end><else_stmt><block_start>use_decimal=self._force_decimal<block_end>start=date_obj.strftime('%Y-%m-%d')<line_sep>end=date_obj.strftime('%Y-%m-%d')<line_sep>url=('https://api.coindesk.com/v1/bpi/historical/close.json'<concat>'?start={}&end={}&currency={}'.format(start end currency))<line_sep>response=requests.get(url)<if_stmt>response.status_code<eq>200<block_start>data=response.json()<line_sep>price=data.get('bpi' {}).get(start <none>)<if_stmt>price<block_start><if_stmt>use_decimal<block_start>price=Decimal(price)<block_end><try_stmt><block_start>converted_btc=coins<times>price<line_sep><return>converted_btc<block_end><except_stmt>TypeError<block_start><raise>DecimalFloatMismatchError("convert_btc_to_cur_on requires amount parameter is of type Decimal when force_decimal=True")<block_end><block_end><block_end><raise>RatesNotAvailableError("BitCoin Rates Source Not Ready For Given Date")<block_end><def_stmt>get_symbol self<block_start>""" Here is Unicode symbol for bitcoin """<line_sep><return>"\u0E3F"<block_end><block_end>_Btc_Converter=BtcConverter()<line_sep>get_btc_symbol=_Btc_Converter.get_symbol<line_sep>convert_btc_to_cur_on=_Btc_Converter.convert_btc_to_cur_on<line_sep>convert_to_btc_on=_Btc_Converter.convert_to_btc_on<line_sep>convert_btc_to_cur=_Btc_Converter.convert_btc_to_cur<line_sep>convert_to_btc=_Btc_Converter.convert_to_btc<line_sep>get_latest_price=_Btc_Converter.get_latest_price<line_sep>get_previous_price=_Btc_Converter.get_previous_price<line_sep>get_previous_price_list=_Btc_Converter.get_previous_price_list<line_sep>
<import_from_stmt>django.utils.translation ugettext_lazy<as>_<import_from_stmt>mayan.apps.authentication.link_conditions condition_user_is_authenticated<import_from_stmt>mayan.apps.navigation.classes Link Separator Text<import_from_stmt>mayan.apps.navigation.utils factory_condition_queryset_access<import_from_stmt>.icons icon_current_user_details icon_group_create icon_group_delete_single icon_group_delete_multiple icon_group_edit icon_group_list icon_group_setup icon_group_user_list icon_user_create icon_user_edit icon_user_group_list icon_user_list icon_user_delete_single icon_user_delete_multiple icon_user_set_options icon_user_setup <import_from_stmt>.link_conditions condition_user_is_not_superuser<import_from_stmt>.permissions permission_group_create permission_group_delete permission_group_edit permission_group_view permission_user_create permission_user_delete permission_user_edit permission_user_view <import_from_stmt>.utils get_user_label_text<line_sep># Current user link_current_user_details=Link(args='request.user.id' condition=condition_user_is_authenticated icon=icon_current_user_details text=_('User details') view='user_management:user_details')<line_sep># Group link_group_create=Link(icon=icon_group_create permissions=(permission_group_create ) text=_('Create new group') view='user_management:group_create')<line_sep>link_group_delete_single=Link(args='object.id' icon=icon_group_delete_single permissions=(permission_group_delete ) tags='dangerous' text=_('Delete') view='user_management:group_delete_single')<line_sep>link_group_delete_multiple=Link(icon=icon_group_delete_multiple tags='dangerous' text=_('Delete') view='user_management:group_delete_multiple')<line_sep>link_group_edit=Link(args='object.id' icon=icon_group_edit permissions=(permission_group_edit ) text=_('Edit') view='user_management:group_edit')<line_sep>link_group_list=Link(condition=factory_condition_queryset_access(app_label='auth' model_name='Group' 
object_permission=permission_group_view ) icon=icon_group_list text=_('Groups') view='user_management:group_list')<line_sep>link_group_user_list=Link(args='object.id' icon=icon_group_user_list permissions=(permission_group_edit ) text=_('Users') view='user_management:group_members')<line_sep>link_group_setup=Link(condition=factory_condition_queryset_access(app_label='auth' model_name='Group' callback=condition_user_is_not_superuser object_permission=permission_group_view view_permission=permission_group_create) icon=icon_group_setup text=_('Groups') view='user_management:group_list')<line_sep># User link_user_create=Link(condition=condition_user_is_authenticated icon=icon_user_create permissions=(permission_user_create ) text=_('Create new user') view='user_management:user_create')<line_sep>link_user_delete_single=Link(args='object.id' condition=condition_user_is_authenticated icon=icon_user_delete_single permissions=(permission_user_delete ) tags='dangerous' text=_('Delete') view='user_management:user_delete_single')<line_sep>link_user_delete_multiple=Link(icon=icon_user_delete_multiple tags='dangerous' text=_('Delete') view='user_management:user_delete_multiple')<line_sep>link_user_edit=Link(args='object.id' condition=condition_user_is_authenticated icon=icon_user_edit permissions=(permission_user_edit ) text=_('Edit') view='user_management:user_edit')<line_sep>link_user_group_list=Link(args='object.id' condition=condition_user_is_authenticated icon=icon_user_group_list permissions=(permission_user_edit ) text=_('Groups') view='user_management:user_groups')<line_sep>link_user_list=Link(icon=icon_user_list text=_('Users') condition=factory_condition_queryset_access(app_label='auth' model_name='User' callback=condition_user_is_authenticated object_permission=permission_user_view view_permission=permission_user_create) view='user_management:user_list')<line_sep>link_user_set_options=Link(args='object.id' condition=condition_user_is_authenticated 
icon=icon_user_set_options permissions=(permission_user_edit ) text=_('User options') view='user_management:user_options')<line_sep>link_user_setup=Link(condition=factory_condition_queryset_access(app_label='auth' model_name='User' object_permission=permission_user_view view_permission=permission_user_create ) icon=icon_user_setup text=_('Users') view='user_management:user_list')<line_sep>separator_user_label=Separator()<line_sep>text_user_label=Text(html_extra_classes='menu-user-name' text=get_user_label_text)<line_sep>
#! /usr/bin/env python3 """ Maigret entrypoint """<import_stmt>asyncio<import_from_stmt>.maigret main<if_stmt>__name__<eq>"__main__"<block_start>asyncio.run(main())<block_end>
__all__=["GuestUserPool" "GuestUser" "NormalUserPool" "NormalUser" "GoldUserPool" "GoldUser" "GoldUserStatus" ]<import_from_stmt>.gold_user_pool GoldUserPool GoldUser GoldUserStatus<import_from_stmt>.guest_user_pool GuestUserPool GuestUser<import_from_stmt>.normal_user_pool NormalUserPool NormalUser<line_sep>
"""Exercise the cocrawler Burner thread pool by parsing the same string many times."""
import sys
import logging
import functools
import asyncio

import cocrawler.burner as burner
import cocrawler.parse as parse
import cocrawler.stats as stats

test_threadcount = 2
loop = asyncio.get_event_loop()
b = burner.Burner(test_threadcount, loop, 'parser')
queue = asyncio.Queue()


def parse_all(name, string):
    """Parse *string* two ways and report any disagreement in link counts."""
    links1, _ = parse.find_html_links(string, url=name)
    links2, embeds2 = parse.find_html_links_and_embeds(string, url=name)
    combined = links2.union(embeds2)

    if len(links1) != len(combined):
        print('{} had different link counts of {} and {}'.format(name, len(links1), len(combined)))
        print(' extra in links: {!r}'.format(links1.difference(combined)))
        print(' extra in links and embeds: {!r}'.format(combined.difference(links1)))

    # Burner expects a tuple of stats back from the worker.
    return 1,


async def work():
    """Worker coroutine: burn one parse job per queue entry, forever."""
    while True:
        name = await queue.get()
        job = functools.partial(parse_all, name, ' ' * 10000)
        await b.burn(job)
        queue.task_done()


async def crawl():
    """Spawn workers, drain the queue, then cancel any stragglers."""
    workers = [asyncio.Task(work(), loop=loop) for _ in range(test_threadcount)]
    print('queue count is {}'.format(queue.qsize()))
    await queue.join()
    print('join is done')
    for w in workers:
        if not w.done():
            w.cancel()


# Main program:
for _ in range(10000):
    queue.put_nowait('foo')

print('Queue size is {}, beginning work.'.format(queue.qsize()))

try:
    loop.run_until_complete(crawl())
    print('exit run until complete')
except KeyboardInterrupt:
    sys.stderr.flush()
    print('\nInterrupt. Exiting cleanly.\n')
finally:
    loop.stop()
    loop.run_forever()
    loop.close()

levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
logging.basicConfig(level=levels[3])

stats.report()
parse.report()
import numpy as np
from ..utils.constants import *
from ..utils.vector3 import vec3
from ..geometry import Primitive, Collider


class Sphere(Primitive):
    """Sphere primitive: a renderable object backed by one spherical collider."""

    def __init__(self, center, material, radius, max_ray_depth=5, shadow=True):
        super().__init__(center, material, max_ray_depth, shadow=shadow)
        self.collider_list += [Sphere_Collider(assigned_primitive=self, center=center, radius=radius)]
        # The sphere is its own bounding sphere.
        self.bounded_sphere_radius = radius

    def get_uv(self, hit):
        # UV mapping lives on the collider that produced the hit.
        return hit.collider.get_uv(hit)


class Sphere_Collider(Collider):
    """Vectorized ray/sphere intersection and shading helpers."""

    def __init__(self, radius, **kwargs):
        super().__init__(**kwargs)
        self.radius = radius

    def intersect(self, O, D):
        """Intersect rays with origins O and directions D against the sphere.

        Returns an array with the hit distance and the hit orientation
        (UPWARDS = outside hit, UPDOWN = inside hit, FARAWAY = miss).
        """
        # Quadratic t^2 + b t + c = 0 derived from |O + t D - C|^2 = r^2
        # (assumes D is normalised -- TODO confirm in the caller).
        b = 2 * D.dot(O - self.center)
        c = self.center.square_length() + O.square_length() - 2 * self.center.dot(O) - (self.radius * self.radius)
        disc = (b ** 2) - (4 * c)
        sq = np.sqrt(np.maximum(0, disc))
        h0 = (-b - sq) / 2
        h1 = (-b + sq) / 2
        # Nearest root in front of the ray, else the far root.
        h = np.where((h0 > 0) & (h0 < h1), h0, h1)
        # BUG FIX (cleanup): the original computed this mask into an unused
        # local `pred` and then re-evaluated it inside pred1/pred2; compute
        # it once and reuse.
        hit_mask = (disc > 0) & (h > 0)
        M = (O + D * h)
        NdotD = ((M - self.center) * (1. / self.radius)).dot(D)
        pred1 = hit_mask & (NdotD > 0)  # normal along ray: hit from inside
        pred2 = hit_mask & (NdotD < 0)  # normal against ray: hit from outside
        pred3 = True
        # return an array with hit distance and the hit orientation
        return np.select([pred1, pred2, pred3],
                         [[h, np.tile(UPDOWN, h.shape)],
                          [h, np.tile(UPWARDS, h.shape)],
                          FARAWAY])

    def get_Normal(self, hit):
        # M = intersection point; unit outward normal.
        return (hit.point - self.center) * (1. / self.radius)

    def get_uv(self, hit):
        """Spherical (longitude/latitude) UV coordinates, each in [0, 1]."""
        M_C = (hit.point - self.center) / self.radius
        phi = np.arctan2(M_C.z, M_C.x)
        theta = np.arcsin(M_C.y)
        u = (phi + np.pi) / (2 * np.pi)
        v = (theta + np.pi / 2) / np.pi
        return u, v
"""Tests various methods of the Download class.

All the methods that start with test are used to test
a certain function. The test method will have the name
of the method being tested separated by an underscore.

If the method to be tested is extract_content, the test
method name will be test_extract_content.
"""
from hashlib import md5
from os import remove

from downloader_cli.download import Download

TEST_URL = "http://172.16.17.32/5MB.zip"


def test__extract_border_icon():
    """Test the _extract_border_icon method"""
    download = Download(TEST_URL)

    icon_one = download._extract_border_icon("#")
    icon_two = download._extract_border_icon("[]")
    icon_none = download._extract_border_icon("")
    icon_more = download._extract_border_icon("sdafasdfasdf")

    assert icon_one == ('#', '#'), "Should be ('#', '#')"
    # BUG FIX: the expected-value message was malformed ("('[', '])").
    assert icon_two == ('[', ']'), "Should be ('[', ']')"
    # Empty or over-long inputs fall back to the default '|' borders.
    assert icon_none == ('|', '|'), "Should be ('|', '|')"
    assert icon_more == ('|', '|'), "Should be ('|', '|')"


def test__build_headers():
    """Test the _build_headers method"""
    download = Download(TEST_URL)
    download._build_headers(1024)
    header_built = download.headers
    assert header_built == {"Range": "bytes={}-".format(1024)}, "Should be 1024"


def test__preprocess_conn():
    """Test the _preprocess_conn method"""
    download = Download(TEST_URL)
    download._preprocess_conn()
    # The remote 5MB.zip fixture is exactly 5 MiB.
    assert download.f_size == 5242880, "Should be 5242880"


def test__format_size():
    """Test the function that formats the size"""
    download = Download(TEST_URL)
    size, unit = download._format_size(255678999)

    # Size should be 243.83449459075928 and unit should be `MB`.
    size = int(size)
    assert size == 243, "Should be 243"
    assert unit == "MB", "Should be MB"


def test__format_time():
    """Test the format time function that formats the
    passed time into a readable value.
    """
    download = Download(TEST_URL)

    time, unit = download._format_time(2134991)
    # Time should be 9 days
    assert int(time) == 9, "Should be 9"
    assert unit == "d", "Should be d"

    time, unit = download._format_time(245)
    # Time should be 4 minutes
    assert int(time) == 4, "Should be 4"
    assert unit == "m", "Should be m"


def test_file_integrity():
    """Test the integrity of the downloaded file.

    We will test the 5MB.zip file which has a hash of
    `eb08885e3082037a12a42308c521fa3c`.
    """
    HASH = "eb08885e3082037a12a42308c521fa3c"
    download = Download(TEST_URL)
    download.download()

    # Once download is done, check the integrity.
    # BUG FIX: close the file handle before removing the file (the original
    # leaked it via open(...).read()).
    with open("5MB.zip", "rb") as downloaded:
        _hash = md5(downloaded.read()).hexdigest()
    assert _hash == HASH, "Integrity check failed for 5MB.zip"

    # Remove the file now
    remove(download.basename)
import os
import ipaddress
import numpy as np
import pandas as pd
import datetime
import boto3
import gzip
import json

from signal_processing import signalProcess

BUCKET_NAME = os.environ.get("BUCKET_NAME", None)
VPC_FLOW_LOGS_PATH = os.environ.get("VPC_FLOW_LOGS_PATH", None)
FINDINGS_PATH = os.environ.get("FINDINGS_PATH", None)
TMP_DOWNLOAD_DIR = "/tmp/s3_download"
# Column layout of the default VPC flow log format (plus the date prefix).
FLOW_COLUMNS = [
    "date",
    "version",
    "account-id",
    "interface-id",
    "srcaddr",
    "dstaddr",
    "srcport",
    "dstport",
    "protocol",
    "packets",
    "bytes",
    "start",
    "end",
    "action",
    "log-status",
]


def cloud_sniper_beaconing_detection(event, context):
    """Lambda entry point: load VPC flow logs from S3, look for periodic
    (beaconing) traffic and write the findings back to S3.

    Returns the list of finding dicts.
    """
    bucket_name = BUCKET_NAME
    vpc_flow_logs_path = VPC_FLOW_LOGS_PATH
    findings_path = FINDINGS_PATH

    df = load_data(bucket_name, vpc_flow_logs_path)
    print(f"Number of raw records: {len(df.index)}")
    version = df.version.iloc[0]  # constant
    account_id = df["account-id"].iloc[0]  # constant
    df = filter_format_data(df)
    print(f"Number of records after filtering missing data: {len(df.index)}")
    df = sort_data(df)
    print(f"Number of records after filtering by time: {len(df.index)}")
    df = filter_useless_data(df)
    print(f"Number of records after filtering by port: {len(df.index)}")
    df = filter_unfrequent_data(df)
    print(f"Number of records after filtering unfrequent: {len(df.index)}")
    res = find_beacons(df)

    # Enrich every finding with the constant account/provider metadata.
    new_fields = {
        "hits": "",
        "cloud.provider": "aws",
        "event.type": "beaconing",
        "cloud.account.name": "",
        "interface.vpc.id": "",
        "protocol": "",
        "version": version,
        "cloud.account.id": account_id,
    }
    list(map(lambda x: x.update(new_fields), res))
    print(f"Result: {res}")
    save_results(bucket_name, findings_path, res)
    return res


def load_data(s3_bucket, s3_vpc_flow_logs_path):
    """Download every .log/.log.gz object under the prefix and parse all of
    them (header line skipped) into a single DataFrame."""
    s3 = boto3.resource('s3')
    bucket = s3.Bucket(name=s3_bucket)
    prefix = s3_vpc_flow_logs_path
    if prefix.startswith("/"):
        prefix = prefix[1:]
    if not prefix.endswith("/"):
        prefix += "/"
    if not os.path.exists(TMP_DOWNLOAD_DIR):
        os.mkdir(TMP_DOWNLOAD_DIR)
    for i, s3_file_obj in enumerate(bucket.objects.filter(Prefix=prefix)):
        if s3_file_obj.key.endswith(".log.gz"):
            extension = "log.gz"
        elif s3_file_obj.key.endswith(".log"):
            extension = "log"
        else:
            continue
        # Zero-padded index keeps the processing order stable below.
        bucket.download_file(s3_file_obj.key, TMP_DOWNLOAD_DIR + "/%06d" % i + "." + extension)
    data = []
    for fname in sorted(os.listdir(TMP_DOWNLOAD_DIR)):
        if fname.endswith(".log.gz"):
            open_ = gzip.open
            decode = True  # gzip yields bytes
        elif fname.endswith(".log"):
            open_ = open
            decode = False
        else:
            continue
        with open_(os.path.join(TMP_DOWNLOAD_DIR, fname), 'r') as fd:
            first_line = True
            for line in fd:
                if first_line:
                    first_line = False
                    continue
                if decode:
                    line = line.decode("utf-8").strip().split(" ")
                else:
                    line = line.strip().split(" ")
                data.append(line)
    # Files may or may not carry the leading date column.
    if data and (len(data[0]) == len(FLOW_COLUMNS)):
        df = pd.DataFrame(data, columns=FLOW_COLUMNS)
        df.drop(['date'], axis=1, inplace=True)
    else:
        df = pd.DataFrame(data, columns=FLOW_COLUMNS[1:])
    return df


def filter_format_data(df):
    """Drop rows without addresses and coerce numeric columns."""
    df = df[df.srcaddr != "-"]
    df = df[df.dstaddr != "-"]
    df.drop(["version", "srcport"], axis=1, inplace=True)
    # BUG FIX (cleanup): this replace was duplicated in the original.
    df = df.replace("-", np.nan)
    numeric_cols = ["dstport", "protocol", "packets", "bytes", "start", "end"]
    df[numeric_cols] = df[numeric_cols].apply(pd.to_numeric)
    return df


def sort_data(df):
    """Add a datetime column derived from 'start' and sort chronologically."""
    df['datetime'] = pd.to_datetime(df.start, unit='s')
    # TODO: should we process just the last hours?
    df = df.set_index('datetime')
    df.sort_index(inplace=True)
    return df.reset_index(level=0)


def filter_useless_data(df):
    """Keep only flows that can plausibly be beacons.

    Requirements:
      * srcaddr must be a private IP
      * dstport <= 1024 and != 123 (NTP is periodic by design)
    """
    if df.empty:
        return df
    df = df[df.srcaddr.map(lambda x: ipaddress.ip_address(x).is_private)]
    df = df[df.dstport <= 1024]
    df = df[df.dstport != 123]
    return df


def filter_unfrequent_data(df):
    """Remove (src, dst, port) communications seen in fewer than 6 snippets."""
    selection = df.groupby(["srcaddr", "dstaddr", "dstport"])
    df = selection.filter(lambda x: len(x) >= 6)
    df = df.reset_index(level=0)
    return df


def find_beacons(df):
    """Run the periodicity detector per (src, dst, port) flow and build findings."""
    res = []
    time_fmt = "%Y-%m-%dT%H:%M:%S.%f"
    groups = df.groupby(["srcaddr", "dstaddr", "dstport"])
    data_in = {"data": {}, "time": {}}
    for (srcaddr, dstaddr, port), traffic in groups:
        k = (srcaddr, dstaddr, port)
        data_in["data"][k] = traffic.bytes
        data_in["time"][k] = traffic.datetime
    lrner = signalProcess(data_in, options_in=None)
    output = lrner.getPrimaryPeriods()
    for (srcaddr, dstaddr, port) in output["powers"]:
        if output["powers"][(srcaddr, dstaddr, port)][0] is not None:
            # BUG FIX: bind k *before* using it. The original printed
            # data_in["time"][k] with k still holding the key left over from
            # the loop above (wrong flow's timestamps, or NameError when the
            # groupby is empty).
            k = (srcaddr, dstaddr, port)
            print(data_in["time"][k])
            start_time = data_in["time"][k].iloc[0].strftime(time_fmt)[:-3] + 'Z'
            end_time = data_in["time"][k].iloc[-1].strftime(time_fmt)[:-3] + 'Z'
            res.append({"source.ip": srcaddr,
                        "destination.ip": dstaddr,
                        "destination.port": int(port),
                        "timestamp": start_time,
                        "event.end": end_time,
                        "event.start": start_time})
    return res


def save_results(bucket_name, findings_path, res):
    """Serialize the findings to a timestamped JSON object in S3."""
    now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    s3_resource = boto3.resource('s3')
    bucket = s3_resource.Bucket(name=bucket_name)
    if findings_path.startswith("/"):
        findings_path = findings_path[1:]
    if findings_path.endswith("/"):
        findings_path = findings_path[:-1]
    (bucket.Object(key=f"{findings_path}/beaconing_detection_{now}.json")
           .put(Body=bytes(json.dumps(res).encode('UTF-8'))))


if __name__ == "__main__":
    print(json.dumps(cloud_sniper_beaconing_detection(None, None), indent=4))
"""Module to hold the instance of the cache"""# NOTE(review): the 'flask.ext' namespace was removed in Flask 1.0, so this import only works on pre-1.0 Flask; the extension's successor package is 'flask_caching' -- confirm the pinned Flask version before changing this import. <import_from_stmt>flask.ext.cache Cache<line_sep># Shared Cache instance; presumably bound to the application elsewhere via cache.init_app(app) -- verify against the app factory. cache=Cache()<line_sep>
import re
import logging

from django import forms
from django.forms import ModelForm
from django.utils.translation import ugettext as _
from django.contrib.localflavor.us.forms import USStateSelect, USPhoneNumberField

from models import (Preference, ShippingWeight, ShippingPrice, ShippingItem,
                    TaxState, DnsShop, EmailNotification)
from preferences.models import ShopPolicies
from auth.models import User
from users.models import Profile


class GeneralPreferenceForm(ModelForm):
    """Basic store identity: name, contact e-mail and phone."""

    email = forms.EmailField(required=False)
    phone = USPhoneNumberField(required=False)

    class Meta:
        model = Preference
        fields = ['name_store', 'email', 'phone']


class ProfileForm(ModelForm):
    """Shop owner's mailing address; blank values are rejected explicitly."""

    state = forms.CharField(widget=USStateSelect)

    class Meta:
        model = Profile
        fields = ['street_address', 'zip', 'city', 'state', 'country']

    def clean_zip(self):
        zip_code = self.cleaned_data.get("zip", "")
        if not zip_code.strip():
            raise forms.ValidationError("Zip is a required field.")
        # US ZIP or ZIP+4.
        if not re.match("[0-9]{5}(-[0-9]{4})?$", zip_code):
            raise forms.ValidationError(
                "Invalid Zip code. Valid formats are XXXXX or XXXXX-XXXX")
        return zip_code

    def clean_country(self):
        country = self.cleaned_data.get("country", "")
        if not country.strip():
            raise forms.ValidationError("Country is a required field.")
        return country

    def clean_street_address(self):
        street = self.cleaned_data.get("street_address", "")
        if not street.strip():
            raise forms.ValidationError("Street is a required field.")
        return street

    def clean_city(self):
        city = self.cleaned_data.get("city", "")
        if not city.strip():
            raise forms.ValidationError("City is a required field.")
        return city


class TaxesPreferenceForm(ModelForm):
    """Store-level tax switches."""

    class Meta:
        model = Preference
        fields = ['taxes_same_state_store', 'taxes_to_shipping_fees']


class TaxStateForm(ModelForm):
    """Create a per-state tax rate; a state may only appear once per shop."""

    # state = forms.CharField(widget=USStateSelect)
    tax = forms.DecimalField(
        help_text=_("Enter a state tax rate number (between 1 and 100)"))

    class Meta:
        model = TaxState
        exclude = ['shop']

    def __init__(self, shop, *args, **kwargs):
        self.shop = shop
        super(TaxStateForm, self).__init__(*args, **kwargs)

    def clean_state(self):
        state = self.cleaned_data['state']
        try:
            TaxState.objects.get(shop=self.shop, state=state)
        except TaxState.DoesNotExist:
            return state
        raise forms.ValidationError(_("A tax for state %s already exists." % state))

    def clean_tax(self):
        tax = self.cleaned_data['tax']
        if tax < 0:
            raise forms.ValidationError(_("A tax has to be more or equal 0%"))
        elif tax > 100:
            raise forms.ValidationError(_("A tax has to be less than 100%"))
        return tax


class TaxStateEditForm(ModelForm):
    """Edit the rate of an existing state tax (state itself is fixed)."""

    class Meta:
        model = TaxState
        exclude = ['shop', 'state']

    def __init__(self, shop, *args, **kwargs):
        self.shop = shop
        super(TaxStateEditForm, self).__init__(*args, **kwargs)

    def clean_tax(self):
        tax = self.cleaned_data['tax']
        if tax < 0:
            raise forms.ValidationError(_("A tax has to be more or equal 0%"))
        elif tax > 100:
            raise forms.ValidationError(_("A tax has to be less than 100%"))
        return tax


class AuctionsPreferenceForm(ModelForm):
    """Auction behaviour switches and defaults."""

    class Meta:
        model = Preference
        fields = ['allow_sessions', 'allow_open_auctions', 'default_days',
                  'open_auto_extend', 'session_auto_extend']


class DnsShopForm(ModelForm):
    """Attach a DNS name to a shop; names are globally unique."""

    class Meta:
        model = DnsShop
        exclude = ['shop']

    def clean_dns(self):
        dns = self.cleaned_data['dns']
        try:
            DnsShop.objects.get(dns=dns)
        except DnsShop.DoesNotExist:
            return dns
        raise forms.ValidationError(_("A shop with that dns already exists."))


class ShippingWeightForm(ModelForm):
    class Meta:
        model = ShippingWeight
        exclude = ['shop']


class ShippingPriceForm(ModelForm):
    class Meta:
        model = ShippingPrice
        exclude = ['shop']


class ShippingItemForm(ModelForm):
    class Meta:
        model = ShippingItem
        exclude = ['shop']


class EmailNotificationForm(ModelForm):
    class Meta:
        model = EmailNotification
        fields = ['subject', 'body']


class ShopPoliciesForm(ModelForm):
    class Meta:
        model = ShopPolicies
        fields = ['refund_policy', 'privacy_policy', 'terms_of_service']


class MarketingForm(ModelForm):
    """Google Analytics configuration (UA-XXXX-X style account numbers)."""

    class Meta:
        model = Preference
        fields = ['google_analytics_account_number']

    def clean_google_analytics_account_number(self):
        account_number = self.cleaned_data['google_analytics_account_number']
        if re.match(r"^\w{2}\-\d{4,8}\-\d$", account_number) is None:
            raise forms.ValidationError('Invalid analitycs account number')
        return account_number


class UsernameChangeForm(forms.ModelForm):
    """Rename a user account, enforcing the alphanumeric username policy."""

    username = forms.RegexField(
        label=_("Username"),
        max_length=30,
        regex=r'^\w+$',
        help_text=_("Required. 30 characters or fewer. Alphanumeric characters only (letters, digits and underscores)."),
        error_message=_("This value must contain only letters, numbers and underscores."))

    class Meta:
        model = User
        fields = ['username']
from datetime import datetime
from logging import info
from pathlib import Path
from typing import List

import requests
from celery import shared_task
from django.conf import settings
from django.contrib.admin.options import get_content_type_for_model
from requests import HTTPError
from tika import parser

from web.datasets.adapters import (
    to_citycouncil_bid,
    to_citycouncil_contract,
    to_citycouncil_expense,
    to_citycouncil_revenue,
)
from web.datasets.models import (
    CityCouncilBid,
    CityCouncilContract,
    CityCouncilExpense,
    CityCouncilRevenue,
    File,
    SyncInformation,
)
from web.datasets.services import get_s3_client

client = get_s3_client(settings)


class WebserviceException(Exception):
    """Raised when the City Council webservice answers with an error payload."""


@shared_task
def content_from_file(file_pk=None, path=None, keep_file=True):
    """Extract text content (via Tika) from a File record or a local path.

    When called with *file_pk*, the S3 copy is downloaded, parsed, cached on
    the model, and the temporary download is removed.
    """
    if not any([file_pk, path]):
        raise Exception("Ou `file_pk` ou `path` devem ser informados.")

    a_file = None
    if file_pk:
        a_file = File.objects.get(pk=file_pk)
        if a_file.content is not None:
            return a_file.content  # already extracted -- reuse the cache
        path = client.download_file(a_file.s3_file_path)
        keep_file = False

    if not Path(path).exists():
        info(f"Arquivo {path} não encontrado.")
        return

    raw = parser.from_file(path)
    if not keep_file:
        Path(path).unlink()

    if a_file:
        a_file.content = raw["content"] or ""
        a_file.save()
        return a_file.content
    return raw["content"]


@shared_task
def backup_file(file_id):
    """Upload a not-yet-backed-up File to S3 and store the resulting URL."""
    try:
        file_obj = File.objects.get(pk=file_id, s3_url__isnull=True)
    except File.DoesNotExist:
        info(f"O arquivo ({file_id}) não existe ou já possui backup.")
        return

    if not file_obj.url and not file_obj.local_path:
        info(f"O arquivo ({file_id}) não tem URL ou não existe localmente.")
        return

    model_name = file_obj.content_object._meta.model_name
    relative_file_path = (
        f"{model_name}/{file_obj.created_at.year}/"
        f"{file_obj.created_at.month}/{file_obj.created_at.day}/"
    )
    location = file_obj.local_path or file_obj.url
    s3_url, s3_file_path = client.upload_file(
        location, relative_file_path, prefix=file_obj.checksum
    )
    file_obj.s3_file_path = s3_file_path
    file_obj.s3_url = s3_url
    file_obj.save()
    return s3_url


@shared_task
def get_city_council_updates(formatted_date):
    """Request the updates of *formatted_date* from the City Council webservice.

    Records the attempt in SyncInformation and raises on HTTP errors or
    application-level ("erro") failures.
    """
    target_date = datetime.strptime(formatted_date, "%Y-%m-%d").date()
    sync_info, _ = SyncInformation.objects.get_or_create(
        date=target_date, source="camara", defaults={"succeed": False}
    )
    response = requests.get(
        settings.CITY_COUNCIL_WEBSERVICE_ENDPOINT,
        params={
            "data": formatted_date,  # format: yyyy-mm-dd
            "token": settings.CITY_COUNCIL_WEBSERVICE_TOKEN,
        },
        headers={"User-Agent": "Maria Quitéria"},
    )
    try:
        response.raise_for_status()
        sync_info.succeed = True
    except HTTPError:
        sync_info.succeed = False
        sync_info.save()
        # BUG FIX: re-raise the caught exception instead of `raise HTTPError`,
        # which raised a brand-new, empty instance and discarded the status
        # code and traceback.
        raise

    response = response.json()
    sync_info.response = response
    if response.get("erro"):
        sync_info.succeed = False
        sync_info.save()
        raise WebserviceException(response["erro"])
    sync_info.save()
    return response


@shared_task(ignore_result=True)
def distribute_city_council_objects_to_sync(payload):
    """Receive the webservice payload and fire one task per record.

    The webservice returns lists of actions (insertions, updates and
    deletions) with the records affected by each. Every record is handled in
    its own task so failures can be treated independently.
    """
    action_methods = {
        "inclusoesContrato": add_citycouncil_contract,
        "alteracoesContrato": update_citycouncil_contract,
        "exclusoesContrato": remove_citycouncil_contract,
        "inclusoesLicitacao": add_citycouncil_bid,
        "alteracoesLicitacao": update_citycouncil_bid,
        "exclusoesLicitacao": remove_citycouncil_bid,
        "inclusoesReceita": add_citycouncil_revenue,
        "alteracoesReceita": update_citycouncil_revenue,
        "exclusoesReceita": remove_citycouncil_revenue,
        "inclusoesDespesa": add_citycouncil_expense,
        "alteracoesDespesa": update_citycouncil_expense,
        "exclusoesDespesa": remove_citycouncil_expense,
    }
    for action_name, records in payload.items():
        info(f"{action_name}: {len(records)} registros")
        task = action_methods.get(action_name)
        if action_name.startswith("exclusoes"):
            task.delay(records)  # deletions are processed as one batch
        else:
            for record in records:
                task.delay(record)


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def save_citycouncil_files(files, object, url_key):
    """Persist the files attached to a webservice record."""
    if not files:
        return
    content_type = get_content_type_for_model(object)
    # Imported here to avoid a circular import at module load time.
    from web.datasets.management.commands._file import save_file

    # BUG FIX (cleanup): the original re-checked `if files:` here, which is
    # always true after the early return above.
    for file_ in files:
        save_file(file_[url_key], content_type, object.pk)


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def add_citycouncil_bid(record):
    """Create a CityCouncilBid from a webservice record (idempotent)."""
    new_item = to_citycouncil_bid(record)
    new_item["crawled_at"] = datetime.now()
    new_item["crawled_from"] = settings.CITY_COUNCIL_WEBSERVICE_ENDPOINT
    bid, _ = CityCouncilBid.objects.get_or_create(
        external_code=new_item["external_code"], defaults=new_item
    )
    save_citycouncil_files(record.get("arquivos"), bid, "caminhoArqLic")
    return bid


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def update_citycouncil_bid(record):
    """Apply webservice changes to an existing CityCouncilBid."""
    bid = CityCouncilBid.objects.get(external_code=record["codLic"])
    updated_item = to_citycouncil_bid(record)
    for key, value in updated_item.items():
        setattr(bid, key, value)
    bid.save()
    save_citycouncil_files(record.get("arquivos"), bid, "caminhoArqLic")
    return bid


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def remove_citycouncil_bid(records: List[dict]):
    """Soft-delete the bids listed in *records*."""
    to_be_removed = [record["codLic"] for record in records]
    CityCouncilBid.objects.filter(external_code__in=to_be_removed).update(excluded=True)


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def add_citycouncil_contract(record):
    """Create a CityCouncilContract from a webservice record (idempotent)."""
    new_item = to_citycouncil_contract(record)
    new_item["crawled_at"] = datetime.now()
    new_item["crawled_from"] = settings.CITY_COUNCIL_WEBSERVICE_ENDPOINT
    contract, _ = CityCouncilContract.objects.get_or_create(
        external_code=new_item["external_code"], defaults=new_item
    )
    save_citycouncil_files(record.get("arquivos"), contract, "caminho")
    return contract


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def update_citycouncil_contract(record):
    """Apply webservice changes to an existing CityCouncilContract."""
    contract = CityCouncilContract.objects.get(external_code=record["codCon"])
    updated_item = to_citycouncil_contract(record)
    for key, value in updated_item.items():
        setattr(contract, key, value)
    contract.save()
    save_citycouncil_files(record.get("arquivos"), contract, "caminho")
    return contract


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def remove_citycouncil_contract(records: List[dict]):
    """Soft-delete the contracts listed in *records*."""
    to_be_removed = [record["codCon"] for record in records]
    CityCouncilContract.objects.filter(external_code__in=to_be_removed).update(excluded=True)


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def add_citycouncil_revenue(record):
    """Create a CityCouncilRevenue from a webservice record (idempotent)."""
    new_item = to_citycouncil_revenue(record)
    new_item["crawled_at"] = datetime.now()
    new_item["crawled_from"] = settings.CITY_COUNCIL_WEBSERVICE_ENDPOINT
    revenue, _ = CityCouncilRevenue.objects.get_or_create(
        external_code=new_item["external_code"], defaults=new_item
    )
    return revenue


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def update_citycouncil_revenue(record):
    """Apply webservice changes to an existing CityCouncilRevenue."""
    revenue = CityCouncilRevenue.objects.get(external_code=record["codLinha"])
    updated_item = to_citycouncil_revenue(record)
    for key, value in updated_item.items():
        setattr(revenue, key, value)
    revenue.save()
    return revenue


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def remove_citycouncil_revenue(records: List[dict]):
    """Soft-delete the revenues listed in *records*."""
    to_be_removed = [record["codLinha"] for record in records]
    CityCouncilRevenue.objects.filter(external_code__in=to_be_removed).update(excluded=True)


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def add_citycouncil_expense(record):
    """Create a CityCouncilExpense from a webservice record (idempotent)."""
    new_item = to_citycouncil_expense(record)
    new_item["crawled_at"] = datetime.now()
    new_item["crawled_from"] = settings.CITY_COUNCIL_WEBSERVICE_ENDPOINT
    expense, _ = CityCouncilExpense.objects.get_or_create(
        external_file_code=new_item["external_file_code"],
        external_file_line=new_item["external_file_line"],
        number=new_item["number"],
        phase=new_item["phase"],
        defaults=new_item,
    )
    return expense


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def update_citycouncil_expense(record):
    """Apply webservice changes to an existing CityCouncilExpense."""
    expense = CityCouncilExpense.objects.get(
        external_file_code=record["codArquivo"],
        external_file_line=record["codLinha"],
    )
    updated_item = to_citycouncil_expense(record)
    for key, value in updated_item.items():
        setattr(expense, key, value)
    expense.save()
    return expense


@shared_task(retry_kwargs={"max_retries": 1}, ignore_result=True)
def remove_citycouncil_expense(records: List[dict]):
    """Soft-delete the expenses listed in *records*."""
    for record in records:
        CityCouncilExpense.objects.filter(
            external_file_code=record["codigo"], external_file_line=record["linha"]
        ).update(excluded=True)
import sys
import time


def main():
    """Write to stdout and stderr, pause for a debugger, then print."""
    print('this is stdout', end='', file=sys.stdout, flush=True)
    print('this is stderr', end='', file=sys.stderr, flush=True)

    # Give the debugger some time to add a breakpoint.
    time.sleep(5)
    for _ in range(1):
        time.sleep(0.5)

    print('this is print')


main()
import sys
from datetime import datetime

import yfinance as yf


def get_ticker_symbol(ticker: yf.Ticker) -> str:
    """Return the ticker's symbol, or "" if the lookup fails."""
    try:
        return ticker.get_info()['symbol']
    except KeyError:
        # BUG FIX: the original caught ImportError, which a dict lookup never
        # raises, so a ticker without a 'symbol' key crashed the caller.
        return ""


def get_stock_state(ticker: yf.Ticker) -> dict:
    """Return today's OHLCV row as a dict, stamped with the current time.

    BUG FIX: the return annotation was the literal `{}` (a dict *instance*),
    not the `dict` type.
    """
    stock_info = ticker.history("1d").iloc[0].to_dict()
    stock_info['Time'] = datetime.now().strftime("%H:%M:%S")
    # Callers only care about the price/volume fields.
    del stock_info['Dividends']
    del stock_info['Stock Splits']
    return stock_info


# Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
def previous_high(ticker: yf.Ticker, time_period: str) -> float:
    """Highest 'High' over *time_period*, excluding the last two rows.

    NOTE(review): the range stops at len-2, so the two most recent bars are
    ignored -- presumably to exclude today's still-moving bar; confirm intent.
    """
    high = 0
    stock_history = ticker.history(time_period)
    for i in range(0, len(stock_history) - 2):
        temp_high = stock_history.iloc[i].to_dict()['High']
        if temp_high > high:
            high = temp_high
    return high


# Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
def calculate_sma(ticker: yf.Ticker, time_period="1mo", interval="1d") -> float:
    """Simple moving average of closes (last bar excluded); sys.maxsize when empty."""
    stock_history = ticker.history(period=time_period, interval=interval)
    summation = 0
    time_period_days = 0
    for i in range(0, len(stock_history) - 1):
        summation += stock_history.iloc[i].to_dict()['Close']
        time_period_days += 1
    if time_period_days > 0:
        return summation / time_period_days
    return sys.maxsize


# Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
def calculate_ema(ticker: yf.Ticker, time_period="1mo") -> float:
    """Exponential moving average of the latest close, seeded from the SMA."""
    stock_history = ticker.history(period=time_period)
    weight = 2.5 / (1 + len(stock_history))
    latest_close = stock_history.iloc[len(stock_history) - 1].to_dict()['Close']
    return latest_close * weight + calculate_sma(ticker, time_period) * (1 - weight)


# Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
def calculate_previous_ema(ticker: yf.Ticker, time_period="1mo", days_previous=1) -> float:
    """EMA as of *days_previous* bars ago, seeded from the current SMA."""
    time_period_days = len(ticker.history(period=time_period))
    stock_history = ticker.history(period=time_period)
    weight = 2.5 / (1 + time_period_days)
    prev_close = stock_history.iloc[time_period_days - days_previous - 1].to_dict()['Close']
    return prev_close * weight + calculate_sma(ticker, time_period) * (1 - weight)


def get_high2current_price_change_percent(ticker: yf.Ticker) -> float:
    """Fractional change from today's high to the current close (<= 0)."""
    stock_info = ticker.history("1d").iloc[0].to_dict()
    return (stock_info['Close'] - stock_info['High']) / stock_info['High']


def get_direction(ticker: yf.Ticker) -> float:
    """Fractional close-to-close change of the last two 1-minute bars."""
    stock_history = ticker.history(period="1d", interval="1m")
    last = stock_history.iloc[len(stock_history) - 1].to_dict()['Close']
    prev = stock_history.iloc[len(stock_history) - 2].to_dict()['Close']
    return (last - prev) / prev
# MIT licensed
# Copyright (c) 2020 lilydjwg <<EMAIL>>, et al.
# Copyright (c) 2017 <NAME> <<EMAIL>>, et al.

from flaky import flaky
import pytest

# Every test here is async and talks to the live Debian package service.
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]


@flaky(max_runs=10)
async def test_debianpkg(get_version):
    """A plain lookup returns the full Debian version string."""
    conf = {"source": "debianpkg"}
    assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7-1"


@flaky(max_runs=10)
async def test_debianpkg_strip_release(get_version):
    """strip_release drops the Debian revision suffix."""
    conf = {"source": "debianpkg", "strip_release": 1}
    assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7"


@flaky(max_runs=10)
async def test_debianpkg_suite(get_version):
    """Pinning a suite returns that suite's version."""
    conf = {"source": "debianpkg", "suite": "buster"}
    assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.6-1"
<import_from_stmt>gazette.spiders.base.fecam FecamGazetteSpider<class_stmt>ScChapecoSpider(FecamGazetteSpider)<block_start>name="sc_chapeco"<line_sep>FECAM_QUERY="cod_entidade:71"<line_sep>TERRITORY_ID="4204202"<block_end>
<import_from_stmt>collections defaultdict<import_from_stmt>...account.models Address CustomerEvent User<import_from_stmt>..core.dataloaders DataLoader<class_stmt>AddressByIdLoader(DataLoader)<block_start>context_key="address_by_id"<def_stmt>batch_load self keys<block_start>address_map=Address.objects.in_bulk(keys)<line_sep><return>[address_map.get(address_id)<for>address_id keys]<block_end><block_end><class_stmt>UserByUserIdLoader(DataLoader)<block_start>context_key="user_by_id"<def_stmt>batch_load self keys<block_start>user_map=User.objects.in_bulk(keys)<line_sep><return>[user_map.get(user_id)<for>user_id keys]<block_end><block_end><class_stmt>CustomerEventsByUserLoader(DataLoader)<block_start>context_key="customer_events_by_user"<def_stmt>batch_load self keys<block_start>events=CustomerEvent.objects.filter(user_id__in=keys)<line_sep>events_by_user_map=defaultdict(list)<for_stmt>event events<block_start>events_by_user_map[event.user_id].append(event)<block_end><return>[events_by_user_map.get(user_id [])<for>user_id keys]<block_end><block_end>
# Time: O(n) # Space: O(1) <import_stmt>collections<class_stmt>Solution(object)<block_start><def_stmt>maxConsecutiveAnswers self answerKey k<block_start>""" :type answerKey: str :type k: int :rtype: int """<line_sep>result=max_count=0<line_sep>count=collections.Counter()<for_stmt>i xrange(len(answerKey))<block_start>count[answerKey[i]]<augadd>1<line_sep>max_count=max(max_count count[answerKey[i]])<if_stmt>result-max_count<ge>k<block_start>count[answerKey[i-result]]<augsub>1<block_end><else_stmt><block_start>result<augadd>1<block_end><block_end><return>result<block_end><block_end>
"""The eliqonline component."""<line_sep>
<import_from_stmt>backpack.core.derivatives.conv1d Conv1DDerivatives<import_from_stmt>backpack.extensions.secondorder.diag_ggn.convnd BatchDiagGGNConvND DiagGGNConvND <class_stmt>DiagGGNConv1d(DiagGGNConvND)<block_start><def_stmt>__init__ self<block_start>super().__init__(derivatives=Conv1DDerivatives() params=["bias" "weight"])<block_end><block_end><class_stmt>BatchDiagGGNConv1d(BatchDiagGGNConvND)<block_start><def_stmt>__init__ self<block_start>super().__init__(derivatives=Conv1DDerivatives() params=["bias" "weight"])<block_end><block_end>
args_global=['server_addr' 'port' 'timeout' 'verbose' 'dry_run' 'conn_retries' 'is_server' 'rpc_plugin' 'called_rpc_name' 'func' 'client']<def_stmt>strip_globals kwargs<block_start><for_stmt>arg args_global<block_start>kwargs.pop(arg <none>)<block_end><block_end><def_stmt>remove_null kwargs<block_start>keys=[]<for_stmt>key,value kwargs.items()<block_start><if_stmt>value<is><none><block_start>keys.append(key)<block_end><block_end><for_stmt>key keys<block_start>kwargs.pop(key <none>)<block_end><block_end><def_stmt>apply_defaults kwargs **defaults<block_start><for_stmt>key,value defaults.items()<block_start><if_stmt>key<not><in>kwargs<block_start>kwargs[key]=value<block_end><block_end><block_end><def_stmt>group_as kwargs name values<block_start>group={}<for_stmt>arg values<block_start><if_stmt>arg<in>kwargs<and>kwargs[arg]<is><not><none><block_start>group[arg]=kwargs.pop(arg <none>)<block_end><block_end>kwargs[name]=group<block_end>
<import_stmt>os<import_from_stmt>djangular utils<import_from_stmt>django.test SimpleTestCase<class_stmt>SiteAndPathUtilsTest(SimpleTestCase)<block_start>site_utils=utils.SiteAndPathUtils()<def_stmt>test_djangular_root self<block_start>current_dir=os.path.dirname(os.path.abspath(__file__))<line_sep>djangular_dir=os.path.dirname(current_dir)<line_sep>self.assertEqual(djangular_dir self.site_utils.get_djangular_root())<block_end><block_end>
<import_from_stmt>encoded.searches.mixins CartAggsToFacetsMixin<import_from_stmt>snosearch.responses BasicQueryResponseWithFacets<import_from_stmt>snosearch.responses BasicMatrixResponseWithFacets<class_stmt>CartQueryResponseWithFacets(CartAggsToFacetsMixin BasicQueryResponseWithFacets)<block_start>''' Like BasicQueryResponseWithFacets but uses CartAggsToFacetsMixin instead of AggsToFacetsMixin. '''<def_stmt>__init__ self results query_builder *args **kwargs<block_start>super().__init__(results query_builder *args **kwargs)<block_end><block_end><class_stmt>CartMatrixResponseWithFacets(CartAggsToFacetsMixin BasicMatrixResponseWithFacets)<block_start>''' Like BasicMatrixResponseWithFacets but uses CartAggsToFacetsMixin instead of AggsToFacetsMixin. '''<def_stmt>__init__ self results query_builder *args **kwargs<block_start>super().__init__(results query_builder *args **kwargs)<block_end><block_end>
# License: Apache 2.0. See LICENSE file in root directory. # Copyright(c) 2021 Intel Corporation. All Rights Reserved. # test:device L500* # test:device D400* <import_stmt>pyrealsense2<as>rs<import_from_stmt>rspy.stopwatch Stopwatch<import_from_stmt>rspy test log<import_stmt>time<import_stmt>platform<line_sep># Start depth + color streams and measure the time from stream opened until first frame arrived using sensor API. # Verify that the time do not exceeds the maximum time allowed # Note - Using Windows Media Foundation to handle power management between USB actions take time (~27 ms) <def_stmt>time_to_first_frame sensor profile max_delay_allowed<block_start>""" Wait for the first frame for 'max_delay_allowed' + 1 extra second If the frame arrives it will return the seconds it took since open() call If no frame it will return 'max_delay_allowed' """<line_sep>first_frame_time=max_delay_allowed<line_sep>open_call_stopwatch=Stopwatch()<def_stmt>frame_cb frame<block_start><nonlocal>first_frame_time open_call_stopwatch<if_stmt>first_frame_time<eq>max_delay_allowed<block_start>first_frame_time=open_call_stopwatch.get_elapsed()<block_end><block_end>open_call_stopwatch.reset()<line_sep>sensor.open(profile)<line_sep>sensor.start(frame_cb)<line_sep># Wait condition: # 1. first frame did not arrive yet # 2. timeout of 'max_delay_allowed' + 1 extra second reached. 
<while_stmt>first_frame_time<eq>max_delay_allowed<and>open_call_stopwatch.get_elapsed()<l>max_delay_allowed+1<block_start>time.sleep(0.05)<block_end>sensor.stop()<line_sep>sensor.close()<line_sep><return>first_frame_time<block_end># The device starts at D0 (Operational) state, allow time for it to get into idle state time.sleep(3)<line_sep>##################################################################################################### test.start("Testing device creation time on "+platform.system()+" OS")<line_sep>device_creation_stopwatch=Stopwatch()<line_sep>dev=test.find_first_device_or_exit()<line_sep>device_creation_time=device_creation_stopwatch.get_elapsed()<line_sep>max_time_for_device_creation=1.5<line_sep>print("Device creation time is: {:.3f} [sec] max allowed is: {:.1f} [sec] ".format(device_creation_time max_time_for_device_creation))<line_sep>test.check(device_creation_time<l>max_time_for_device_creation)<line_sep>test.finish()<line_sep># Set maximum delay for first frame according to product line product_line=dev.get_info(rs.camera_info.product_line)<if_stmt>product_line<eq>"D400"<block_start>max_delay_for_depth_frame=1.5<line_sep>max_delay_for_color_frame=1.5<block_end><elif_stmt>product_line<eq>"L500"<block_start>max_delay_for_depth_frame=2.5# L515 depth frame has a 1.5 seconds built in delay at the FW side + 1.0 second for LRS max_delay_for_color_frame=1.5<block_end><else_stmt><block_start>log.f("This test support only D400 + L515 devices")<block_end>ds=dev.first_depth_sensor()<line_sep>cs=dev.first_color_sensor()<line_sep>dp=next(p<for>p ds.profiles<if>p.fps()<eq>30<and>p.stream_type()<eq>rs.stream.depth<and>p.format()<eq>rs.format.z16)<line_sep>cp=next(p<for>p cs.profiles<if>p.fps()<eq>30<and>p.stream_type()<eq>rs.stream.color<and>p.format()<eq>rs.format.rgb8)<line_sep>##################################################################################################### test.start("Testing first depth frame delay on "+product_line+" device - 
"+platform.system()+" OS")<line_sep>first_depth_frame_delay=time_to_first_frame(ds dp max_delay_for_depth_frame)<line_sep>print("Time until first depth frame is: {:.3f} [sec] max allowed is: {:.1f} [sec] ".format(first_depth_frame_delay max_delay_for_depth_frame))<line_sep>test.check(first_depth_frame_delay<l>max_delay_for_depth_frame)<line_sep>test.finish()<line_sep>##################################################################################################### test.start("Testing first color frame delay on "+product_line+" device - "+platform.system()+" OS")<line_sep>first_color_frame_delay=time_to_first_frame(cs cp max_delay_for_color_frame)<line_sep>print("Time until first color frame is: {:.3f} [sec] max allowed is: {:.1f} [sec] ".format(first_color_frame_delay max_delay_for_color_frame))<line_sep>test.check(first_color_frame_delay<l>max_delay_for_color_frame)<line_sep>test.finish()<line_sep>##################################################################################################### test.print_results_and_exit()<line_sep>
"""Tests for profiles module."""<import_stmt>mock<import_from_stmt>google.apputils basetest<import_stmt>profiles<class_stmt>ProfilesModuleTest(basetest.TestCase)<block_start><def_stmt>testGenerateUUID self<block_start>self.assertIsInstance(profiles.GenerateUUID('a') str)<line_sep>self.assertTrue(profiles.GenerateUUID('a').isupper())<line_sep>self.assertEqual(profiles.GenerateUUID('a') profiles.GenerateUUID('a'))<block_end><def_stmt>testValidatePayload self<block_start>payload={}<with_stmt>self.assertRaises(profiles.PayloadValidationError)<block_start>profiles.ValidatePayload(payload)<block_end>payload.update({profiles.PAYLOADKEYS_IDENTIFIER:'a' profiles.PAYLOADKEYS_DISPLAYNAME:'a' profiles.PAYLOADKEYS_TYPE:'com.apple.welcome.to.1984'})<line_sep>profiles.ValidatePayload(payload)<line_sep>self.assertEqual(payload.get(profiles.PAYLOADKEYS_UUID) profiles.GenerateUUID('a'))<line_sep>self.assertEqual(payload.get(profiles.PAYLOADKEYS_ENABLED) <true>)<line_sep>self.assertEqual(payload.get(profiles.PAYLOADKEYS_VERSION) 1)<block_end><block_end><class_stmt>ProfileClassTest(basetest.TestCase)<block_start>"""Tests for the Profile class."""<def_stmt>_GetValidProfile self include_payload=<true><block_start>profile=profiles.Profile()<line_sep>profile.Set(profiles.PAYLOADKEYS_DISPLAYNAME 'Acme Corp Config Profile')<line_sep>profile.Set(profiles.PAYLOADKEYS_IDENTIFIER 'com.acme.configprofile')<line_sep>profile.Set(profiles.PAYLOADKEYS_ORG 'Acme Corp')<line_sep>profile.Set(profiles.PAYLOADKEYS_SCOPE ['System' 'User'])<line_sep>profile.Set(profiles.PAYLOADKEYS_TYPE 'Configuration')<if_stmt>include_payload<block_start>profile.AddPayload(self._GetValidPayload())<block_end><return>profile<block_end><def_stmt>_GetValidPayload self<block_start>test_payload={profiles.PAYLOADKEYS_IDENTIFIER:'com.test.payload' profiles.PAYLOADKEYS_DISPLAYNAME:'Test Payload' profiles.PAYLOADKEYS_TYPE:'com.apple.welcome.to.1984'}<line_sep><return>test_payload<block_end><def_stmt>testInit 
self<block_start>"""Test the __init__ method."""<line_sep>profile=profiles.Profile()<line_sep>self.assertIsNotNone(profile._profile)<line_sep>self.assertEqual(profile._profile[profiles.PAYLOADKEYS_CONTENT] [])<block_end><def_stmt>testGet self<block_start>profile=profiles.Profile()<line_sep>profile._profile['TestKey']='TestValue'<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_CONTENT) [])<line_sep>self.assertEqual(profile.Get('TestKey') 'TestValue')<block_end><def_stmt>testSet self<block_start>profile=profiles.Profile()<line_sep>profile.Set('TestKey' 'TestValue')<line_sep>profile.Set('OtherKey' 'OtherValue')<line_sep>self.assertEqual(profile._profile['TestKey'] 'TestValue')<line_sep>self.assertEqual(profile._profile['OtherKey'] 'OtherValue')<block_end><def_stmt>testStr self<block_start>profile=self._GetValidProfile()<line_sep>self.assertEqual(profile.__str__() 'Acme Corp Config Profile')<block_end><def_stmt>testAddPayload self<block_start>profile=self._GetValidProfile(include_payload=<false>)<line_sep>test_payload=self._GetValidPayload()<with_stmt>self.assertRaises(profiles.PayloadValidationError)<block_start>profile.AddPayload('Payloads should be dicts')<block_end>profile.AddPayload(test_payload)<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_CONTENT) [test_payload])<block_end><def_stmt>testValidateProfile self<block_start>profile=profiles.Profile()<with_stmt>self.assertRaises(profiles.ProfileValidationError)<block_start>profile._ValidateProfile()<block_end>profile=self._GetValidProfile(include_payload=<false>)<with_stmt>self.assertRaises(profiles.ProfileValidationError)<block_start>profile._ValidateProfile()<block_end>profile.AddPayload(self._GetValidPayload())<line_sep>profile._ValidateProfile()<line_sep>self.assertIsNotNone(profile.Get(profiles.PAYLOADKEYS_UUID))<line_sep>self.assertIsNotNone(profile.Get(profiles.PAYLOADKEYS_VERSION))<block_end>@mock.patch.object(profiles.plistlib 'writePlist')<def_stmt>testSaveSuccess self 
mock_writeplist<block_start>profile=self._GetValidProfile()<line_sep>profile.Save('/tmp/hello')<line_sep>mock_writeplist.assert_called_once_with(profile._profile '/tmp/hello')<block_end>@mock.patch.object(profiles.plistlib 'writePlist')<def_stmt>testSaveIOError self mock_writeplist<block_start>profile=self._GetValidProfile()<line_sep>mock_writeplist.side_effect=IOError<with_stmt>self.assertRaises(profiles.ProfileSaveError)<block_start>profile.Save('/tmp/hello')<block_end>mock_writeplist.assert_called_once_with(profile._profile '/tmp/hello')<block_end>@mock.patch.object(profiles.gmacpyutil 'RunProcess')@mock.patch.object(profiles.Profile 'Save')<def_stmt>testInstallSuccess self mock_save mock_runprocess<block_start>profile=self._GetValidProfile()<line_sep>mock_runprocess.return_value=['Output' <none> 0]<line_sep>profile.Install()<line_sep>mock_save.assert_called_once_with(mock.ANY)<line_sep>mock_runprocess.assert_called_once_with([profiles.CMD_PROFILES '-I' '-F' mock.ANY] sudo=<none> sudo_password=<none>)<block_end>@mock.patch.object(profiles.gmacpyutil 'RunProcess')@mock.patch.object(profiles.Profile 'Save')<def_stmt>testInstallSudoPassword self mock_save mock_runprocess<block_start>profile=self._GetValidProfile()<line_sep>mock_runprocess.return_value=['Output' <none> 0]<line_sep>profile.Install(sudo_password='<PASSWORD>')<line_sep>mock_save.assert_called_once_with(mock.ANY)<line_sep>mock_runprocess.assert_called_once_with([profiles.CMD_PROFILES '-I' '-F' mock.ANY] sudo='ladygagaeatss<PASSWORD>' sudo_password='<PASSWORD>')<block_end>@mock.patch.object(profiles.gmacpyutil 'RunProcess')@mock.patch.object(profiles.Profile 'Save')<def_stmt>testInstallCommandFail self mock_save mock_runprocess<block_start>profile=self._GetValidProfile()<line_sep>mock_runprocess.return_value=['Output' 'Errors' 42]<with_stmt>self.assertRaisesRegexp(profiles.ProfileInstallationError 'Profile installation failed!\n'<concat>'Output, Errors, 
42')<block_start>profile.Install(sudo_password='<PASSWORD>')<block_end>mock_save.assert_called_once_with(mock.ANY)<line_sep>mock_runprocess.assert_called_once_with([profiles.CMD_PROFILES '-I' '-F' mock.ANY] sudo='ladygagaeatssocks' sudo_password='<PASSWORD>')<block_end>@mock.patch.object(profiles.gmacpyutil 'RunProcess')@mock.patch.object(profiles.Profile 'Save')<def_stmt>testInstallCommandException self mock_save mock_runprocess<block_start>profile=self._GetValidProfile()<line_sep>mock_runprocess.side_effect=profiles.gmacpyutil.GmacpyutilException<with_stmt>self.assertRaisesRegexp(profiles.ProfileInstallationError 'Profile installation failed!\n')<block_start>profile.Install(sudo_password='<PASSWORD>')<block_end>mock_save.assert_called_once_with(mock.ANY)<line_sep>mock_runprocess.assert_called_once_with([profiles.CMD_PROFILES '-I' '-F' mock.ANY] sudo='ladygagaeatssocks' sudo_password='<PASSWORD>')<block_end><block_end><class_stmt>NetworkProfileClassTest(basetest.TestCase)<block_start>"""Tests for the NetworkProfile class."""<def_stmt>testInit self<block_start>profile=profiles.NetworkProfile('testuser')<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_DISPLAYNAME) 'Network Profile (testuser)')<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_DESCRIPTION) 'Network authentication settings')<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_IDENTIFIER) 'com.megacorp.networkprofile')<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_SCOPE) ['System' 'User'])<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_TYPE) 'Configuration')<line_sep>self.assertEqual(profile.Get(profiles.PAYLOADKEYS_CONTENT) [])<block_end><def_stmt>testGenerateID self<block_start>profile=profiles.NetworkProfile('testuser')<line_sep>self.assertEqual(profile._GenerateID('test_suffix') 'com.megacorp.networkprofile.test_suffix')<line_sep>self.assertEqual(profile._GenerateID('another_suffix') 
'com.megacorp.networkprofile.another_suffix')<block_end>@mock.patch.object(profiles.NetworkProfile 'AddPayload')@mock.patch.object(profiles.crypto 'load_privatekey')@mock.patch.object(profiles.crypto 'load_certificate')@mock.patch.object(profiles.crypto 'PKCS12Type')@mock.patch.object(profiles.certs 'Certificate')<def_stmt>testAddMachineCertificateSuccess self mock_certificate mock_pkcs12 mock_loadcert mock_loadkey mock_addpayload<block_start>mock_certobj=mock.MagicMock()<line_sep>mock_certobj.subject_cn='My Cert Subject'<line_sep>mock_certobj.osx_fingerprint='0011223344556677889900'<line_sep>mock_certificate.return_value=mock_certobj<line_sep>mock_pkcs12obj=mock.MagicMock()<line_sep>mock_pkcs12obj.export.return_value='-----PKCS12 Data-----'<line_sep>mock_pkcs12.return_value=mock_pkcs12obj<line_sep>mock_loadcert.return_value='certobj'<line_sep>mock_loadkey.return_value='keyobj'<line_sep>profile=profiles.NetworkProfile('testuser')<line_sep>profile.AddMachineCertificate('fakecert' 'fakekey')<line_sep>mock_pkcs12.assert_called_once_with()<line_sep>mock_pkcs12obj.set_certificate.assert_called_once_with('certobj')<line_sep>mock_pkcs12obj.set_privatekey.assert_called_once_with('keyobj')<line_sep>mock_pkcs12obj.export.assert_called_once_with('0011223344556677889900')<line_sep>mock_loadcert.assert_called_once_with(1 'fakecert')<line_sep>mock_loadkey.assert_called_once_with(1 'fakekey')<line_sep>mock_addpayload.assert_called_once_with({profiles.PAYLOADKEYS_IDENTIFIER:'com.megacorp.networkprofile.machine_cert' profiles.PAYLOADKEYS_TYPE:'com.apple.security.pkcs12' profiles.PAYLOADKEYS_DISPLAYNAME:'My Cert Subject' profiles.PAYLOADKEYS_ENABLED:<true> profiles.PAYLOADKEYS_VERSION:1 profiles.PAYLOADKEYS_CONTENT:profiles.plistlib.Data('-----PKCS12 Data-----') profiles.PAYLOADKEYS_UUID:mock.ANY 'Password':'<PASSWORD>'})<block_end>@mock.patch.object(profiles.crypto 'load_privatekey')@mock.patch.object(profiles.crypto 'load_certificate')@mock.patch.object(profiles.crypto 
'PKCS12Type')@mock.patch.object(profiles.certs 'Certificate')<def_stmt>testAddMachineCertificateInvalidKey self mock_certificate mock_pkcs12 mock_loadcert mock_loadkey<block_start>mock_certobj=mock.MagicMock()<line_sep>mock_certobj.subject_cn='My Cert Subject'<line_sep>mock_certobj.osx_fingerprint='<PASSWORD>'<line_sep>mock_certificate.return_value=mock_certobj<line_sep>mock_pkcs12obj=mock.MagicMock()<line_sep>mock_pkcs12obj.export.side_effect=profiles.crypto.Error<line_sep>mock_pkcs12.return_value=mock_pkcs12obj<line_sep>mock_loadcert.return_value='certobj'<line_sep>mock_loadkey.return_value='keyobj_from_different_cert'<line_sep>profile=profiles.NetworkProfile('testuser')<with_stmt>self.assertRaises(profiles.CertificateError)<block_start>profile.AddMachineCertificate('fakecert' 'otherfakekey')<block_end><block_end>@mock.patch.object(profiles.certs 'Certificate')<def_stmt>testAddMachineCertificateBadCert self mock_certificate<block_start>mock_certificate.side_effect=profiles.certs.CertError<line_sep>profile=profiles.NetworkProfile('testuser')<with_stmt>self.assertRaises(profiles.CertificateError)<block_start>profile.AddMachineCertificate('fakecert' 'fakekey')<block_end><block_end>@mock.patch.object(profiles.NetworkProfile 'AddPayload')@mock.patch.object(profiles.certs 'Certificate')<def_stmt>testAddAnchorCertificateSuccess self mock_certificate mock_addpayload<block_start>mock_certobj=mock.MagicMock()<line_sep>mock_certobj.subject_cn='My Cert Subject'<line_sep>mock_certobj.osx_fingerprint='0011223344556677889900'<line_sep>mock_certificate.return_value=mock_certobj<line_sep>profile=profiles.NetworkProfile('testuser')<line_sep>profile.AddAnchorCertificate('my_cert')<line_sep>mock_certificate.assert_called_once_with('my_cert')<line_sep>mock_addpayload.assert_called_once_with({profiles.PAYLOADKEYS_IDENTIFIER:'com.megacorp.networkprofile.0011223344556677889900' profiles.PAYLOADKEYS_TYPE:'com.apple.security.pkcs1' profiles.PAYLOADKEYS_DISPLAYNAME:'My Cert Subject' 
profiles.PAYLOADKEYS_CONTENT:profiles.plistlib.Data('my_cert') profiles.PAYLOADKEYS_ENABLED:<true> profiles.PAYLOADKEYS_VERSION:1 profiles.PAYLOADKEYS_UUID:mock.ANY})<block_end>@mock.patch.object(profiles.certs 'Certificate')<def_stmt>testAddAnchorCertificateBadCert self mock_certificate<block_start>mock_certificate.side_effect=profiles.certs.CertError<line_sep>profile=profiles.NetworkProfile('testuser')<with_stmt>self.assertRaises(profiles.CertificateError)<block_start>profile.AddAnchorCertificate('test_cert')<block_end><block_end>@mock.patch.object(profiles.NetworkProfile 'AddPayload')<def_stmt>testAddNetworkPayloadSSID self mock_addpayload<block_start>profile=profiles.NetworkProfile('test_user')<line_sep>profile._auth_cert='00000000-AUTH-CERT-UUID-00000000'<line_sep>profile._anchor_certs=['00000000-ANCH-ORCE-RTUU-ID000000']<line_sep>profile.AddTrustedServer('radius.company.com')<line_sep>profile.AddNetworkPayload('SSID')<line_sep>eap_client_data={'AcceptEAPTypes':[13] 'PayloadCertificateAnchorUUID':['00000000-ANCH-ORCE-RTUU-ID000000'] 'TLSTrustedServerNames':['radius.company.com'] 'TLSAllowTrustExceptions':<false>}<line_sep>mock_addpayload.assert_called_once_with({'AutoJoin':<true> 'SetupModes':['System' 'User'] 'PayloadCertificateUUID':'00000000-AUTH-CERT-UUID-00000000' 'EncryptionType':'WPA' 'Interface':'BuiltInWireless' profiles.PAYLOADKEYS_DISPLAYNAME:'SSID' profiles.PAYLOADKEYS_IDENTIFIER:'com.megacorp.networkprofile.ssid.SSID' profiles.PAYLOADKEYS_TYPE:'com.apple.wifi.managed' 'SSID_STR':'SSID' 'EAPClientConfiguration':eap_client_data})<block_end>@mock.patch.object(profiles.NetworkProfile 'AddPayload')<def_stmt>testAddNetworkPayloadWired self 
mock_addpayload<block_start>profile=profiles.NetworkProfile('test_user')<line_sep>profile._auth_cert='00000000-AUTH-CERT-UUID-00000000'<line_sep>profile._anchor_certs=['00000000-ANCH-ORCE-RTUU-ID000000']<line_sep>profile.AddTrustedServer('radius.company.com')<line_sep>profile.AddNetworkPayload('wired')<line_sep>eap_client_data={'AcceptEAPTypes':[13] 'PayloadCertificateAnchorUUID':['00000000-ANCH-ORCE-RTUU-ID000000'] 'TLSTrustedServerNames':['radius.company.com'] 'TLSAllowTrustExceptions':<false>}<line_sep>mock_addpayload.assert_called_once_with({'AutoJoin':<true> 'SetupModes':['System' 'User'] 'PayloadCertificateUUID':'00000000-AUTH-CERT-UUID-00000000' 'EncryptionType':'Any' 'Interface':'FirstActiveEthernet' profiles.PAYLOADKEYS_DISPLAYNAME:'Wired' profiles.PAYLOADKEYS_IDENTIFIER:'com.megacorp.networkprofile.wired' profiles.PAYLOADKEYS_TYPE:'com.apple.firstactiveethernet.managed' 'EAPClientConfiguration':eap_client_data})<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>basetest.main()<block_end>
<import_stmt>torch<import_from_stmt>glasses.nn.regularization DropBlock StochasticDepth<def_stmt>test_drop_block <block_start>drop=DropBlock()<line_sep>x=torch.ones((1 3 28 28))<line_sep>x_drop=drop(x)<assert_stmt><not>torch.equal(x x_drop)<assert_stmt>drop.training<line_sep>drop=drop.eval()<line_sep>x_drop=drop(x)<assert_stmt>torch.equal(x x_drop)<assert_stmt><not>drop.training<assert_stmt>drop.__repr__()<eq>"DropBlock(p=0.5)"<block_end><def_stmt>test_stocastic_depth <block_start>stoc=StochasticDepth()<assert_stmt>stoc.__repr__()<eq>"StochasticDepth(p=0.5)"<line_sep>x=torch.ones((2 3 28 28))<line_sep>stoc=StochasticDepth(p=1)<line_sep>out=stoc(x)<assert_stmt>out.sum()<g>0<line_sep>stoc=StochasticDepth(p=10e-6)<line_sep>out=stoc(x)<assert_stmt>out.sum()<eq>0<block_end>
<class_stmt>Node<block_start>props=()<def_stmt>__init__ self **kwargs<block_start><for_stmt>prop kwargs<block_start><if_stmt>prop<not><in>self.props<block_start><raise>Exception('Invalid property %r, allowed only: %s'%(prop self.props))<block_end>self.__dict__[prop]=kwargs[prop]<block_end><for_stmt>prop self.props<block_start><if_stmt>prop<not><in>self.__dict__<block_start>self.__dict__[prop]=<none><block_end><block_end>self.attrs={}<block_end><def_stmt>print_node self indent=0 indent_size=4 extra=0<block_start>s=self.__class__.__name__<line_sep>s<augadd>'(\n'<line_sep>i=' '<times>(indent+indent_size)<for_stmt>prop self.props<block_start>s<augadd>i+prop+' = '<line_sep>s<augadd>self._print_val(self.__dict__[prop] indent+indent_size indent_size (len(prop)+3)-indent_size)<line_sep>s<augadd>'\n'<block_end>s<augadd>(' '<times>(indent+extra))+')'<line_sep><return>s<block_end><def_stmt>_print_val self val indent indent_size extra=0<block_start><if_stmt>isinstance(val Node)<block_start><return>val.print_node(indent+indent_size indent_size extra)<block_end><elif_stmt>type(val)<eq>list<block_start>s='[\n'<line_sep>i=' '<times>(indent+indent_size)<for_stmt>e val<block_start>s<augadd>i+self._print_val(e indent indent_size)<line_sep>s<augadd>',\n'<block_end>s<augadd>(' '<times>(indent+extra))+']'<line_sep><return>s<block_end><else_stmt><block_start><return>str(val)<block_end><block_end><block_end><class_stmt>Statement(Node)<block_start><pass><block_end><class_stmt>Expression(Node)<block_start><pass><block_end><class_stmt>EmptyStatement(Statement)<block_start><pass><block_end>EmptyStatement.INSTANCE=EmptyStatement()<class_stmt>FunctionDeclaration(Statement)<block_start>props=('type' 'decl' 'body')<block_end><class_stmt>Declaration(Statement)<block_start>props=('type' 'init')<block_end><class_stmt>ParamDeclaration(Node)<block_start>props=('type' 'decl')<block_end><class_stmt>StructTypeRef(Node)<block_start>props=('name' 
)<block_end><class_stmt>DeclarationSpecifier(Node)<block_start>props=('store' 'qual' 'type')<block_end><class_stmt>InitSpec(Node)<block_start>props=('decl' 'val')<block_end><class_stmt>DeclaratorSpec(Node)<block_start>props=('pointer_depth' 'name_spec')<block_end><class_stmt>ArrayDeclSpec(Node)<block_start>props=('name' 'dim')<block_end><class_stmt>FuncDeclSpec(Node)<block_start>props=('name' 'params')<block_end><class_stmt>VarArgs(Node)<block_start><pass><block_end>VarArgs.INSTANCE=VarArgs()<class_stmt>StructSpec(Node)<block_start>props=('name' 'decl')<block_end><class_stmt>StructMemberDecl(Node)<block_start>props=('spec' 'decl')<block_end><class_stmt>MemberReference(Node)<block_start>props=('child' 'idx' 'name')<block_end><class_stmt>TypeName(Node)<block_start>props=('type' 'spec')<block_end><class_stmt>LabelledStmt(Statement)<block_start>props=('label' 'stmt')<block_end><class_stmt>WhileStmt(Statement)<block_start>props=('cond' 'body')<block_end><class_stmt>DoWhileStmt(Statement)<block_start>props=('body' 'cond')<block_end><class_stmt>ForStmt(Statement)<block_start>props=('init' 'cond' 'after' 'body')<block_end><class_stmt>IfStmt(Statement)<block_start>props=('cond' 'true' 'false')<block_end><class_stmt>SwitchStmt(Statement)<block_start>props=('expr' 'cases')<block_end><class_stmt>ContinueStmt(Statement)<block_start><pass><block_end>ContinueStmt.INSTANCE=ContinueStmt()<class_stmt>BreakStmt(Statement)<block_start><pass><block_end>BreakStmt.INSTANCE=BreakStmt()<class_stmt>ReturnStmt(Statement)<block_start>props=('expr' )<block_end><class_stmt>GotoStmt(Statement)<block_start>props=('label' )<block_end><class_stmt>CaseStmt(Statement)<block_start>props=('choice' 'body')<block_end><class_stmt>SyncStmt(Statement)<block_start><pass><block_end><class_stmt>ExpressionStmt(Statement)<block_start>props=('expr' )<block_end><class_stmt>SizeofExpr(Expression)<block_start>props=('expr' )<block_end><class_stmt>ConditionalExpr(Expression)<block_start>props=('cond' 'true' 
'false')<block_end><class_stmt>FunctionCallExpr(Expression)<block_start>props=('ref' 'args')<block_end><class_stmt>IdentifierExpr(Expression)<block_start>props=('val' )<block_end><class_stmt>AssignmentExpr(Expression)<block_start>props=('left' 'right')<block_end><class_stmt>AssignmentOperatorExpr(Expression)<block_start>props=('left' 'op' 'right')<block_end><class_stmt>UnaryExpr(Expression)<block_start>props=('op' 'expr')<block_end><class_stmt>BinaryOperatorExpr(Expression)<block_start>props=('left' 'op' 'right')<block_end><class_stmt>IncrementExpr(Expression)<block_start>props=('dir' 'post' 'expr')<block_end><class_stmt>MemberAccessExpr(Expression)<block_start>props=('expr' 'prop' 'deref')<block_end><class_stmt>ArraySubscriptExpr(Expression)<block_start>props=('expr' 'sub')<block_end><class_stmt>Literal(Expression)<block_start>props=('val' )<block_end><class_stmt>IntLiteral(Literal)<block_start><pass><block_end><class_stmt>StringLiteral(Literal)<block_start><pass><block_end><class_stmt>Pragma(Node)<block_start>props=('val' )<block_end><class_stmt>Token<block_start><class_stmt>Type<block_start>IDENTIFIER='identifier'<line_sep>OPERATOR='operator'<line_sep>NUMBER='number'<line_sep>STRING='string'<block_end><def_stmt>__init__ self val type=<none><block_start>self.val=val<line_sep>self.type=type<or>Token.Type.OPERATOR<block_end><def_stmt>__str__ self<block_start><return>'Token(%r, %s)'%(self.val self.type)<block_end><block_end><class_stmt>Keyword(Token)<block_start>REGISTRY={}<def_stmt>__init__ self val<block_start>super().__init__(val 
Token.Type.IDENTIFIER)<line_sep>Keyword.REGISTRY[val]=self<block_end><block_end>Token.EOF=Token('<eof>')<line_sep>Token.OPEN_PAREN=Token('(')<line_sep>Token.CLOSE_PAREN=Token(')')<line_sep>Token.OPEN_BRACE=Token('{')<line_sep>Token.CLOSE_BRACE=Token('}')<line_sep>Token.OPEN_SQUARE=Token('[')<line_sep>Token.CLOSE_SQUARE=Token(']')<line_sep>Token.COMMA=Token(',')<line_sep>Token.SEMICOLON=Token(';')<line_sep>Token.QUESTION=Token('?')<line_sep>Token.COLON=Token(':')<line_sep>Token.DOT=Token('.')<line_sep>Token.ARROW=Token('->')<line_sep>Token.VARARG=Token('...')<line_sep>Token.OP_ASSIGN=Token('=')<line_sep>Token.OP_MUL_ASSIGN=Token('*=')<line_sep>Token.OP_DIV_ASSIGN=Token('/=')<line_sep>Token.OP_MOD_ASSIGN=Token('%=')<line_sep>Token.OP_PLUS_ASSIGN=Token('+=')<line_sep>Token.OP_MINUS_ASSIGN=Token('-=')<line_sep>Token.OP_LSHIFT_ASSIGN=Token('<<=')<line_sep>Token.OP_RSHIFT_ASSIGN=Token('>>=')<line_sep>Token.OP_AND_ASSIGN=Token('&=')<line_sep>Token.OP_XOR_ASSIGN=Token('^=')<line_sep>Token.OP_OR_ASSIGN=Token('|=')<line_sep>Token.OP_PLUS=Token('+')<line_sep>Token.OP_PLUS_PLUS=Token('++')<line_sep>Token.OP_MINUS=Token('-')<line_sep>Token.OP_MINUS_MINUS=Token('--')<line_sep>Token.OP_STAR=Token('*')<line_sep>Token.OP_DIV=Token('/')<line_sep>Token.OP_MOD=Token('%')<line_sep>Token.OP_AND=Token('&')<line_sep>Token.OP_OR=Token('|')<line_sep>Token.OP_AND_AND=Token('&&')<line_sep>Token.OP_OR_OR=Token('||')<line_sep>Token.OP_XOR=Token('^')<line_sep>Token.OP_NOT=Token('!')<line_sep>Token.OP_BITNOT=Token('~')<line_sep>Token.OP_SHIFT_LEFT=Token('<<')<line_sep>Token.OP_SHIFT_RIGHT=Token('>>')<line_sep>Token.OP_EQUAL=Token('==')<line_sep>Token.OP_NOT_EQUAL=Token('!=')<line_sep>Token.OP_LESS_THAN=Token('<')<line_sep>Token.OP_LESS_OR_EQUAL=Token('<=')<line_sep>Token.OP_GREATER_THAN=Token('>')<line_sep>Token.OP_GREATER_OR_EQUAL=Token('>=')<line_sep>Keyword.DO=Keyword('do')<line_sep>Keyword.WHILE=Keyword('while')<line_sep>Keyword.FOR=Keyword('for')<line_sep>Keyword.IF=Keyword('if')<line_sep>Key
word.ELSE=Keyword('else')<line_sep>Keyword.SIZEOF=Keyword('sizeof')<line_sep>Keyword.SYNC=Keyword('sync')<line_sep>Keyword.SWITCH=Keyword('switch')<line_sep>Keyword.CASE=Keyword('case')<line_sep>Keyword.DEFAULT=Keyword('default')<line_sep>Keyword.GOTO=Keyword('goto')<line_sep>Keyword.CONTINUE=Keyword('continue')<line_sep>Keyword.BREAK=Keyword('break')<line_sep>Keyword.RETURN=Keyword('return')<line_sep>Keyword.CONST=Keyword('const')<line_sep>Keyword.STATIC=Keyword('static')<line_sep>Keyword.TYPEDEF=Keyword('typedef')<line_sep>Keyword.STRUCT=Keyword('struct')<line_sep>
import math
import itertools


class Vector:
    """Generic fixed-length vector with elementwise arithmetic.

    The second operand of an arithmetic operator may be another Vector
    (combined pairwise) or a scalar (broadcast across every component).
    """

    def __init__(self, *args):
        # Component values, stored as the positional-argument tuple.
        self.vals = args

    def _apply(self, op, other):
        """Return a new Vector with ``op`` applied elementwise.

        ``other`` is broadcast when it is not a Vector.
        """
        if isinstance(other, Vector):
            pairs = zip(self.vals, other.vals)
        else:
            pairs = zip(self.vals, itertools.repeat(other))
        return Vector(*itertools.starmap(op, pairs))

    def __add__(self, other):
        return self._apply(lambda a, b: a + b, other)

    def __sub__(self, other):
        return self._apply(lambda a, b: a - b, other)

    def __mul__(self, other):
        return self._apply(lambda a, b: a * b, other)

    def __truediv__(self, other):
        # Bug fix: the original defined Python 2's ``__div__``, which is
        # never invoked by ``/`` under Python 3.
        return self._apply(lambda a, b: a / b, other)

    # Backwards-compatible alias for any caller invoking __div__ directly.
    __div__ = __truediv__

    def length(self):
        """Return the Euclidean (L2) norm of the vector."""
        return math.sqrt(sum(a * a for a in self.vals))

    def normalized(self):
        """Return a unit-length copy of this vector.

        Bug fix: the original divided by a plain Python list, which
        raised TypeError inside ``_apply``; dividing by the scalar
        length is the intended behaviour.
        """
        return self / self.length()

    def __iter__(self):
        # Bug fix: the original called the undefined name ``py_iter``.
        return iter(self.vals)

    @classmethod
    def map(cls, func, *vectors):
        """Apply ``func`` elementwise across the given iterables.

        Bug fix: the original recursed into ``args[0].map``, which does
        not exist for a plain callable. NOTE(review): intended semantics
        inferred from the name -- confirm against callers.
        """
        return Vector(*map(func, *vectors))

    def __getitem__(self, item):
        # Bug fix: the original indexed ``self.values`` (an undefined
        # attribute); the storage attribute is ``vals``.
        return self.vals[item]

    def __str__(self):
        return str(self.vals)

    def __len__(self):
        return len(self.vals)

    @classmethod
    def add(cls, a, b):
        """Elementwise sum of two iterables, as a Vector."""
        return Vector(*a) + Vector(*b)

    @classmethod
    def sub(cls, a, b):
        """Elementwise difference of two iterables, as a Vector."""
        return Vector(*a) - Vector(*b)

    @classmethod
    def mul(cls, a, b):
        """Elementwise product of two iterables, as a Vector."""
        return Vector(*a) * Vector(*b)

    @classmethod
    def div(cls, a, b):
        """Elementwise quotient of two iterables, as a Vector."""
        return Vector(*a) / Vector(*b)

    @classmethod
    def dot(cls, left, right):
        """Dot product of two iterables."""
        return sum(Vector.mul(left, right))

    @classmethod
    def norm_dot(cls, left, right):
        """Dot product of the two vectors after normalising each."""
        left = Vector(*left).normalized()
        right = Vector(*right).normalized()
        return sum(Vector.mul(left, right))
# Redis key holding the global set of queued job ids.
_QUEUED_JOBS_KEY = 'projects:global:jobs:queued'
# Redis key holding the global set of archived job ids.
_ARCHIVED_JOBS_KEY = 'projects:global:jobs:archived'


def list_jobs(redis):
    """Return the ids of all globally queued jobs as a set of str."""
    members = redis.smembers(_QUEUED_JOBS_KEY)
    return {raw.decode() for raw in members}


def remove_jobs(redis, job_id_project_mapping):
    """Drop each job from the global queue and its project's queue.

    ``job_id_project_mapping`` maps job id -> owning project name.
    """
    for job_id, project in job_id_project_mapping.items():
        project_key = 'project:{}:jobs:queued'.format(project)
        redis.srem(_QUEUED_JOBS_KEY, job_id)
        redis.srem(project_key, job_id)


def job_project_names(redis, list_of_job_ids):
    """Map each given job id to its project name (None when unknown)."""
    return {
        job_id: _job_project_name(redis, job_id)
        for job_id in list_of_job_ids
    }


def _job_project_name(redis, job_id):
    """Look up one job's project name; None when absent or empty."""
    raw = redis.get('jobs:{}:project'.format(job_id))
    if raw:
        return raw.decode()
    return None


def add_jobs_to_archive(redis, list_of_job_ids):
    """Add every given job id to the global archive set."""
    for job_id in list_of_job_ids:
        redis.sadd(_ARCHIVED_JOBS_KEY, job_id)


def list_archived_jobs(redis):
    """Return the ids of all archived jobs as a set of str."""
    members = redis.smembers(_ARCHIVED_JOBS_KEY)
    return {raw.decode() for raw in members}
import functools
import operator
from collections.abc import Iterable
from typing import overload, Union, TypeVar

T = TypeVar('T')
S = TypeVar('S')  # <1>


@overload
def sum(it: Iterable[T]) -> Union[T, int]: ...  # <2>
@overload
def sum(it: Iterable[T], /, start: S) -> Union[T, S]: ...  # <3>
def sum(it, /, start=0):  # <4>
    """Fold ``it`` with ``+``, beginning from ``start`` (default 0)."""
    total = start
    for item in it:
        total = total + item
    return total
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# Make the project package (one level up from docs/) importable for autodoc.
sys.path.insert(0, os.path.abspath('..'))


# -- Project information -----------------------------------------------------

project = 'Elpis'
copyright = '2020, The University of Queensland'
author = '<NAME>, <NAME>, <NAME>'

# The full version, including alpha/beta/rc tags
release = '0.96.0'

# Document that serves as the root of the toctree.
master_doc = 'index'


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx_autodoc_typehints',
    'recommonmark'
]

# Show undocumented members in docs
autodoc_default_options = {
    'undoc-members': True,
}

# Mock to get RTD docs to compile (pytest is not installed on Read the Docs).
autodoc_mock_imports = ["pytest"]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
# We also exclude the "ugly" auto-generated elpis.rst file and replace it with our own.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'elpis/elpis.rst']


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

html_logo = '_static/img/logo.png'

# Display only the logo (no project name text) in the sidebar header.
html_theme_options = {
    'logo_only': True,
}

github_url = 'https://github.com/CoEDL/elpis'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

html_css_files = [
    'style.css',
]

# -- Extension configuration -------------------------------------------------
<import_stmt>unittest<import_stmt>warnings<import_from_stmt>unittest.mock patch <import_from_stmt>uuid UUID uuid4 <import_from_stmt>minos.common Model <import_from_stmt>minos.networks BrokerMessageV1 BrokerMessageV1Payload BrokerMessageV1Status BrokerMessageV1Strategy <import_from_stmt>tests.utils FakeModel <class_stmt>TestBrokerMessageV1(unittest.TestCase)<block_start><def_stmt>setUp self<arrow><none><block_start>self.topic="FooCreated"<line_sep>self.identifier=uuid4()<line_sep>self.reply_topic="AddOrderReply"<line_sep>self.strategy=BrokerMessageV1Strategy.MULTICAST<line_sep>self.payload=BrokerMessageV1Payload(content=[FakeModel("blue") FakeModel("red")] headers={"foo":"bar"} status=BrokerMessageV1Status.ERROR)<block_end><def_stmt>test_constructor_simple self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(self.topic message.topic)<line_sep>self.assertIsInstance(message.identifier UUID)<line_sep>self.assertEqual(<none> message.reply_topic)<line_sep>self.assertEqual(BrokerMessageV1Strategy.UNICAST message.strategy)<line_sep>self.assertEqual(self.payload message.payload)<block_end><def_stmt>test_constructor self<block_start>message=BrokerMessageV1(self.topic identifier=self.identifier reply_topic=self.reply_topic strategy=self.strategy payload=self.payload )<line_sep>self.assertEqual(self.topic message.topic)<line_sep>self.assertEqual(self.identifier message.identifier)<line_sep>self.assertEqual(self.reply_topic message.reply_topic)<line_sep>self.assertEqual(self.strategy message.strategy)<line_sep>self.assertEqual(self.payload message.payload)<block_end><def_stmt>test_version self<block_start>self.assertEqual(1 BrokerMessageV1.version)<block_end><def_stmt>test_topic self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(self.topic message.topic)<block_end><def_stmt>test_identifier self<block_start>message=BrokerMessageV1(self.topic self.payload 
identifier=self.identifier)<line_sep>self.assertEqual(self.identifier message.identifier)<block_end><def_stmt>test_reply_topic self<block_start>message=BrokerMessageV1(self.topic self.payload reply_topic=self.reply_topic)<line_sep>self.assertEqual(self.reply_topic message.reply_topic)<block_end><def_stmt>test_set_reply_topic self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertIsNone(message.reply_topic)<line_sep>message.set_reply_topic(self.reply_topic)<line_sep>self.assertEqual(self.reply_topic message.reply_topic)<block_end><def_stmt>test_ok self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(self.payload.ok message.ok)<block_end><def_stmt>test_status self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(self.payload.status message.status)<block_end><def_stmt>test_headers self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(self.payload.headers message.headers)<block_end><def_stmt>test_content self<block_start>message=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(self.payload.content message.content)<block_end><def_stmt>test_data self<block_start>message=BrokerMessageV1(self.topic self.payload)<with_stmt>warnings.catch_warnings()<block_start>warnings.simplefilter("ignore" DeprecationWarning)<line_sep># noinspection PyDeprecation self.assertEqual(self.payload.content message.data)<block_end><block_end><def_stmt>test_avro self<block_start>message=BrokerMessageV1(self.topic identifier=self.identifier reply_topic=self.reply_topic strategy=self.strategy payload=self.payload )<line_sep>observed=BrokerMessageV1.from_avro_bytes(message.avro_bytes)<line_sep>self.assertEqual(message observed)<block_end><def_stmt>test_sort self<block_start>unsorted=[BrokerMessageV1("" BrokerMessageV1Payload("foo")) BrokerMessageV1("" BrokerMessageV1Payload(4)) BrokerMessageV1("" BrokerMessageV1Payload(2)) 
BrokerMessageV1("" BrokerMessageV1Payload(3)) BrokerMessageV1("" BrokerMessageV1Payload(1)) BrokerMessageV1("" BrokerMessageV1Payload("bar")) ]<line_sep>expected=[unsorted[0] unsorted[4] unsorted[2] unsorted[3] unsorted[1] unsorted[5]]<line_sep>observed=sorted(unsorted)<line_sep>self.assertEqual(expected observed)<block_end><def_stmt>test_from_avro self<block_start>expected=BrokerMessageV1(self.topic self.payload identifier=self.identifier)<line_sep>schema={"fields":[{"name":"topic" "type":"string"} {"name":"identifier" "type":{"logicalType":"uuid" "type":"string"}} {"name":"reply_topic" "type":["string" "null"]} {"name":"strategy" "type":{"logicalType":"minos.networks.brokers.messages.models.v1.BrokerMessageV1Strategy" "type":"string" } } {"name":"payload" "type":{"fields":[{"name":"content" "type":{"items":{"fields":[{"name":"data" "type":"string"}] "name":"FakeModel" "namespace":"tests.utils.hello" "type":"record" } "type":"array" } } {"name":"status" "type":{"logicalType":"minos.networks.brokers.messages.models.v1.BrokerMessageV1Status" "type":"int" } } {"name":"headers" "type":{"type":"map" "values":"string"}} ] "name":"BrokerMessageV1Payload" "namespace":"minos.networks.brokers.messages.models.v1.hello" "type":"record" } } {"name":"version" "type":"int"} ] "name":"BrokerMessage" "namespace":"minos.networks.brokers.messages.models.abc.hello" "type":"record" }<line_sep>data={"identifier":str(self.identifier) "payload":{"content":[{"data":"blue"} {"data":"red"}] "headers":{"foo":"bar"} "status":400} "reply_topic":<none> "strategy":"unicast" "topic":"FooCreated" "version":1 }<line_sep>observed=Model.from_avro(schema data)<line_sep>self.assertEqual(expected observed)<block_end><def_stmt>test_avro_schema self<block_start>schema={"fields":[{"name":"topic" "type":"string"} {"name":"identifier" "type":{"logicalType":"uuid" "type":"string"}} {"name":"reply_topic" "type":["string" "null"]} {"name":"strategy" 
"type":{"logicalType":"minos.networks.brokers.messages.models.v1.BrokerMessageV1Strategy" "type":"string" } } {"name":"payload" "type":{"fields":[{"name":"content" "type":{"items":{"fields":[{"name":"data" "type":"string"}] "name":"FakeModel" "namespace":"tests.utils.hello" "type":"record" } "type":"array" } } {"name":"status" "type":{"logicalType":"minos.networks.brokers.messages.models.v1.BrokerMessageV1Status" "type":"int" } } {"name":"headers" "type":{"type":"map" "values":"string"}} ] "name":"BrokerMessageV1Payload" "namespace":"minos.networks.brokers.messages.models.v1.hello" "type":"record" } } {"name":"version" "type":"int"} ] "name":"BrokerMessage" "namespace":"minos.networks.brokers.messages.models.abc.hello" "type":"record" }<with_stmt>patch("minos.common.AvroSchemaEncoder.generate_random_str" return_value="hello")<block_start>observed=BrokerMessageV1(self.topic self.payload).avro_schema<block_end>self.assertEqual([schema] observed)<block_end><def_stmt>test_avro_data self<block_start>expected={"identifier":str(self.identifier) "payload":{"content":[{"data":"blue"} {"data":"red"}] "headers":{"foo":"bar"} "status":400} "reply_topic":<none> "strategy":"unicast" "topic":"FooCreated" "version":1 }<line_sep>observed=BrokerMessageV1(self.topic self.payload identifier=self.identifier).avro_data<line_sep>self.assertEqual(expected observed)<block_end><def_stmt>test_avro_bytes self<block_start>expected=BrokerMessageV1(self.topic self.payload)<line_sep>self.assertEqual(expected Model.from_avro_bytes(expected.avro_bytes))<block_end><block_end><class_stmt>TestBrokerMessagePayload(unittest.TestCase)<block_start><def_stmt>setUp self<arrow><none><block_start>self.content=[FakeModel("blue") FakeModel("red")]<block_end><def_stmt>test_ok self<block_start>self.assertTrue(BrokerMessageV1Payload(self.content status=BrokerMessageV1Status.SUCCESS).ok)<line_sep>self.assertFalse(BrokerMessageV1Payload(self.content 
status=BrokerMessageV1Status.ERROR).ok)<line_sep>self.assertFalse(BrokerMessageV1Payload(self.content status=BrokerMessageV1Status.SYSTEM_ERROR).ok)<line_sep>self.assertFalse(BrokerMessageV1Payload(self.content status=BrokerMessageV1Status.UNKNOWN).ok)<block_end><def_stmt>test_data self<block_start>payload=BrokerMessageV1Payload(self.content)<with_stmt>warnings.catch_warnings()<block_start>warnings.simplefilter("ignore" DeprecationWarning)<line_sep># noinspection PyDeprecation self.assertEqual(self.content payload.data)<block_end><block_end><block_end><class_stmt>TestBrokerMessageV1Status(unittest.TestCase)<block_start><def_stmt>test_success self<block_start>self.assertEqual(BrokerMessageV1Status.SUCCESS BrokerMessageV1Status(200))<block_end><def_stmt>test_error self<block_start>self.assertEqual(BrokerMessageV1Status.ERROR BrokerMessageV1Status(400))<block_end><def_stmt>test_system_error self<block_start>self.assertEqual(BrokerMessageV1Status.SYSTEM_ERROR BrokerMessageV1Status(500))<block_end><def_stmt>test_unknown self<block_start>self.assertEqual(BrokerMessageV1Status.UNKNOWN BrokerMessageV1Status(56))<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
<import_stmt>logging<import_from_stmt>flask Response make_response request<import_from_stmt>microraiden HTTPHeaders<as>header<import_from_stmt>flask_restful.utils unpack<import_from_stmt>microraiden.channel_manager ChannelManager <import_from_stmt>microraiden.exceptions NoOpenChannel InvalidBalanceProof InvalidBalanceAmount InsufficientConfirmations <import_stmt>microraiden.constants<as>constants<import_from_stmt>microraiden.proxy.resources.request_data RequestData<import_from_stmt>functools wraps<import_from_stmt>eth_utils is_address<line_sep>log=logging.getLogger(__name__)<class_stmt>Paywall(object)<block_start><def_stmt>__init__ self channel_manager light_client_proxy=<none><block_start>super().__init__()<assert_stmt>isinstance(channel_manager ChannelManager)<assert_stmt>is_address(channel_manager.channel_manager_contract.address)<assert_stmt>is_address(channel_manager.receiver)<line_sep>self.contract_address=channel_manager.channel_manager_contract.address<line_sep>self.receiver_address=channel_manager.receiver<line_sep>self.channel_manager=channel_manager<line_sep>self.light_client_proxy=light_client_proxy<block_end><def_stmt>access self resource method *args **kwargs<block_start><if_stmt>self.channel_manager.node_online()<is><false><block_start><return>"Ethereum node is not responding" 502<block_end><if_stmt>self.channel_manager.get_eth_balance()<l>constants.PROXY_BALANCE_LIMIT<block_start><return>"Channel manager ETH balance is below limit" 502<block_end><try_stmt><block_start>data=RequestData(request.headers request.cookies)<block_end><except_stmt>ValueError<as>e<block_start><return>str(e) 409<block_end>accepts_html=('text/html'<in>request.accept_mimetypes<and>request.accept_mimetypes.best<ne>'*/*')<line_sep>headers={}<line_sep>price=resource.price()<line_sep># payment required <if_stmt>price<g>0<block_start>paywall,headers=self.paywall_check(price 
data)<if_stmt>paywall<and>accepts_html<is><true><block_start>reply_data=resource.get_paywall(request.path)<line_sep><return>self.reply_webui(reply_data headers)<block_end><elif_stmt>paywall<block_start><return>make_response('' 402 headers)<block_end><block_end># all ok, return actual content resp=method(request.path *args **kwargs)<line_sep># merge headers, resource headers take precedence headers_lower={key.lower():value<for>key,value headers.items()}<line_sep>lower_to_case={key.lower():key<for>key headers}<if_stmt>isinstance(resp Response)<block_start>resource_headers=(key<for>key,value resp.headers)<block_end><else_stmt><block_start>data,code,resource_headers=unpack(resp)<block_end><for_stmt>key resource_headers<block_start>key_lower=key.lower()<if_stmt>key_lower<in>headers_lower<block_start>headers.pop(lower_to_case[key_lower])<block_end><block_end><if_stmt>isinstance(resp Response)<block_start>resp.headers.extend(headers)<line_sep><return>resp<block_end><else_stmt><block_start>headers.update(resource_headers)<line_sep><return>make_response(str(data) code resource_headers)<block_end><block_end><def_stmt>paywall_check self price data<block_start>"""Check if the resource can be sent to the client. 
Returns (is_paywalled: Bool, http_headers: dict) """<line_sep>headers=self.generate_headers(price)<if_stmt><not>data.balance_signature<block_start><return><true> headers<block_end># try to get an existing channel <try_stmt><block_start>channel=self.channel_manager.verify_balance_proof(data.sender_address data.open_block_number data.balance data.balance_signature)<block_end><except_stmt>InsufficientConfirmations<as>e<block_start>log.debug('Refused payment: Insufficient confirmations (sender=%s, block=%d)'%(data.sender_address data.open_block_number))<line_sep>headers.update({header.INSUF_CONFS:"1"})<line_sep><return><true> headers<block_end><except_stmt>NoOpenChannel<as>e<block_start>log.debug('Refused payment: Channel does not exist (sender=%s, block=%d)'%(data.sender_address data.open_block_number))<line_sep>headers.update({header.NONEXISTING_CHANNEL:"1"})<line_sep><return><true> headers<block_end><except_stmt>InvalidBalanceAmount<as>e<block_start>log.debug('Refused payment: Invalid balance amount: %s (sender=%s, block=%d)'%(str(e) data.sender_address data.open_block_number))<line_sep>headers.update({header.INVALID_PROOF:1})<line_sep><return><true> headers<block_end><except_stmt>InvalidBalanceProof<as>e<block_start>log.debug('Refused payment: Invalid balance proof: %s (sender=%s, block=%d)'%(str(e) data.sender_address data.open_block_number))<line_sep>headers.update({header.INVALID_PROOF:1})<line_sep><return><true> headers<block_end># set headers to reflect channel state <assert_stmt>channel.sender<is><not><none><assert_stmt>channel.balance<ge>0<line_sep>headers.update({header.SENDER_ADDRESS:channel.sender header.SENDER_BALANCE:channel.balance})<if_stmt>channel.last_signature<is><not><none><block_start>headers.update({header.BALANCE_SIGNATURE:channel.last_signature})<block_end>amount_sent=data.balance-channel.balance<if_stmt>amount_sent<ne>0<and>amount_sent<ne>price<block_start>headers[header.INVALID_AMOUNT]=1<line_sep># if difference is 0, it will be handled by 
channel manager <return><true> headers<block_end># set the headers to reflect actual state of a channel <try_stmt><block_start>self.channel_manager.register_payment(channel.sender data.open_block_number data.balance data.balance_signature)<block_end><except_stmt>(InvalidBalanceAmount InvalidBalanceProof)# balance sent to the proxy is less than in the previous proof <block_start><return><true> headers<block_end># all ok, return premium content <return><false> headers<block_end># when are these generated? <def_stmt>generate_headers self price:int<block_start><assert_stmt>price<g>0<line_sep>"""Generate basic headers that are sent back for every request"""<line_sep>headers={header.GATEWAY_PATH:constants.API_PATH header.RECEIVER_ADDRESS:self.receiver_address header.CONTRACT_ADDRESS:self.contract_address header.TOKEN_ADDRESS:self.channel_manager.get_token_address() header.PRICE:price 'Content-Type':'application/json'}<line_sep><return>headers<block_end><def_stmt>reply_webui self reply_data='' headers:dict={}<block_start>headers.update({"Content-Type":"text/html" })<line_sep>reply=make_response(reply_data 402 headers)<for_stmt>k,v headers.items()<block_start><if_stmt>k.startswith('RDN-')<block_start>reply.set_cookie(k str(v))<block_end><block_end><return>reply<block_end><block_end><def_stmt>paywall_decorator func<block_start>"""Method decorator for Flask's Resource object. It magically makes every method paywalled. Example: class MyPaywalledResource(Resource): method_decorators = [paywall_decorator] """<line_sep>@wraps(func)<def_stmt>wrapper *args **kwargs<block_start>self=func.__self__# get instance of the bound method <return>self.paywall.access(self func *args **kwargs)<block_end><return>wrapper<block_end>
from datetime import datetime, timedelta

from dimagi.utils import parsing as dateparse

from casexml.apps.stock.consumption import (
    ConsumptionConfiguration,
    compute_daily_consumption_from_transactions,
)

# Shorthand for serialising datetimes the way the stock app expects.
to_ts = dateparse.json_format_datetime

# Fixed "current time" captured at import so every window shares one anchor.
now = datetime.utcnow()


def ago(days):
    """Return the datetime ``days`` days before the module-level ``now``."""
    return now - timedelta(days=days)


# note that you must add inferred consumption transactions manually to txdata
def mock_consumption(txdata, window, params=None):
    """Compute daily consumption from ``txdata`` over the last ``window`` days.

    ``params`` may override the permissive defaults handed to
    ConsumptionConfiguration (min_window / min_periods of 0).
    """
    merged = dict({'min_window': 0, 'min_periods': 0}, **(params or {}))
    config = ConsumptionConfiguration(**merged)
    return compute_daily_consumption_from_transactions(
        txdata,
        ago(window),
        config,
    )
from models.multiple_solution.swarm_based.WOA import BaseWOA, BaoWOA
from utils.FunctionUtil import square_function

## Setting parameters
# Shared optimisation settings: a 30-dimensional search space bounded to
# [-1, 1] per dimension, minimising the sphere (sum-of-squares) objective.
root_paras = {
    "problem_size": 30,
    "domain_range": [-1, 1],
    "print_train": True,
    "objective_func": square_function
}
# Whale Optimization Algorithm hyper-parameters: iteration count and
# population (number of candidate whales).
woa_paras = {
    "epoch": 100,
    "pop_size": 250
}

## Run model
# Instantiate the Bao variant of WOA and run its training loop.
md = BaoWOA(root_algo_paras=root_paras, woa_paras=woa_paras)
md._train__()
from django.views.generic import DeleteView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib import messages
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404

from schedule.models import Calendar
from schedule.views import CreateEventView, EditEventView, EventMixin

from apps.events.forms import CustomEventForm


class CustomCreateEventView(CreateEventView):
    """Create an event on the calendar identified by ``calendar_slug``."""

    form_class = CustomEventForm
    template_name = 'event/edit.html'

    def get_context_data(self, **kwargs):
        """Expose the target calendar to the template."""
        context = super(CustomCreateEventView, self).get_context_data(**kwargs)
        calendar = get_object_or_404(
            Calendar, slug=self.kwargs.get('calendar_slug'))
        context.update({"calendar": calendar})
        return context

    def form_valid(self, form):
        """Save the event, notify the user, and return to the calendar."""
        super(CustomCreateEventView, self).form_valid(form)
        # Bug fix: this success notification was emitted via
        # messages.error, so it rendered styled as a failure.
        messages.success(self.request, 'Event created successfully.')
        return HttpResponseRedirect(reverse(
            'calendar_details',
            kwargs={'calendar_slug': self.kwargs.get('calendar_slug')}))


class CustomUpdateEventView(EditEventView):
    """Edit an existing event on the calendar identified by ``calendar_slug``."""

    form_class = CustomEventForm
    template_name = 'event/edit.html'

    def get_context_data(self, **kwargs):
        """Expose the target calendar to the template."""
        context = super(CustomUpdateEventView, self).get_context_data(**kwargs)
        calendar = get_object_or_404(
            Calendar, slug=self.kwargs.get('calendar_slug'))
        context.update({"calendar": calendar})
        return context

    def form_valid(self, form):
        """Save the changes, notify the user, and return to the calendar."""
        super(CustomUpdateEventView, self).form_valid(form)
        # Bug fix: success notification was emitted via messages.error.
        messages.success(self.request, 'Event edited successfully.')
        return HttpResponseRedirect(reverse(
            'calendar_details',
            kwargs={'calendar_slug': self.kwargs.get('calendar_slug')}))


class CustomDeleteEventView(LoginRequiredMixin, EventMixin, DeleteView):
    """Delete Event"""

    template_name = 'event/delete.html'

    def get_success_url(self):
        """Redirect back to the owning calendar after deletion."""
        return reverse(
            'calendar_details', args=[self.kwargs.get('calendar_slug')])

    def get_context_data(self, **kwargs):
        """Expose the event being deleted and its calendar to the template."""
        context = super(CustomDeleteEventView, self).get_context_data(**kwargs)
        calendar = get_object_or_404(
            Calendar, slug=self.kwargs.get('calendar_slug'))
        context.update({'event': self.object, 'calendar': calendar})
        return context
"""Defines the application configuration for the product application"""
from __future__ import unicode_literals

from django.apps import AppConfig


class ProductConfig(AppConfig):
    """Configuration for the product application"""

    # Dotted module path and short label Django uses to identify this app.
    name = 'product'
    label = 'product'
    # Human-readable name shown in the admin.
    verbose_name = 'Product'

    def ready(self):
        """Registers the product implementations with other applications."""
        # Imported here (not at module top) so Django's app registry is
        # fully populated before these modules load.
        from job.configuration.data.data_file import DATA_FILE_STORE
        from product.configuration.product_data_file import ProductDataFileStore

        # Register product files for the data file store
        DATA_FILE_STORE['DATA_FILE_STORE'] = ProductDataFileStore()
<import_stmt>os<import_stmt>shutil<import_stmt>subprocess<import_stmt>sys<import_from_stmt>pathlib Path<import_from_stmt>unittest mock<import_stmt>pytest<import_from_stmt>requests exceptions<as>requests_exceptions<import_from_stmt>briefcase.console Log<import_from_stmt>briefcase.exceptions BriefcaseCommandError MissingToolError NetworkFailure<import_from_stmt>briefcase.integrations.java JDK<import_from_stmt>tests.utils FsPathMock<line_sep>@pytest.fixture<def_stmt>test_command tmp_path<block_start>command=mock.MagicMock()<line_sep>command.logger=Log()<line_sep>command.tools_path=tmp_path/"tools"<line_sep># Mock environ.get returning no explicit JAVA_HOME command.os.environ.get=mock.MagicMock(return_value="")<line_sep><return>command<block_end><def_stmt>test_macos_tool_java_home test_command capsys<block_start>"""On macOS, the /usr/libexec/java_home utility is checked."""<line_sep># Mock being on macOS test_command.host_os="Darwin"<line_sep># Mock 2 calls to check_output. test_command.subprocess.check_output.side_effect=["/path/to/java" "javac 1.8.0_144\n" ]<line_sep># Create a JDK wrapper by verification jdk=JDK.verify(command=test_command)<line_sep># The JDK should have the path returned by the tool <assert_stmt>jdk.java_home<eq>Path("/path/to/java")<line_sep>test_command.subprocess.check_output.assert_has_calls([# First call is to /usr/lib/java_home mock.call(["/usr/libexec/java_home"] stderr=subprocess.STDOUT ) # Second is a call to verify a valid Java version mock.call([os.fsdecode(Path("/path/to/java/bin/javac")) "-version"] stderr=subprocess.STDOUT ) ])<line_sep># No console output output=capsys.readouterr()<assert_stmt>output.out<eq>""<assert_stmt>output.err<eq>""<block_end><def_stmt>test_macos_tool_failure test_command tmp_path capsys<block_start>"""On macOS, if the libexec tool fails, the Briefcase JDK is used."""<line_sep># Mock being on macOS test_command.host_os="Darwin"<line_sep># Mock a failed call on the libexec tool 
# NOTE(review): the statements below continue a test function whose ``def``
# line is above this chunk; they stay at function-body indentation.
    test_command.subprocess.check_output.side_effect = subprocess.CalledProcessError(
        returncode=1, cmd="/usr/libexec/java_home"
    )

    # Create a directory to make it look like the Briefcase Java already exists.
    (tmp_path / "tools" / "java" / "Contents" / "Home" / "bin").mkdir(parents=True)

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JDK should have the briefcase JAVA_HOME
    assert jdk.java_home == tmp_path / "tools" / "java" / "Contents" / "Home"

    test_command.subprocess.check_output.assert_has_calls(
        [
            # First call is to /usr/lib/java_home
            mock.call(
                ["/usr/libexec/java_home"],
                stderr=subprocess.STDOUT,
            ),
        ]
    )

    # No console output
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


def test_macos_provided_overrides_tool_java_home(test_command, capsys):
    """On macOS, an explicit JAVA_HOME overrides /usr/libexec/java_home."""
    # Mock being on macOS
    test_command.host_os = "Darwin"

    # Mock environ.get returning an explicit JAVA_HOME
    test_command.os.environ.get = mock.MagicMock(return_value="/path/to/java")

    # Mock return value from javac. libexec won't be invoked.
    test_command.subprocess.check_output.return_value = "javac 1.8.0_144\n"

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JDK should have the path returned by the tool
    assert jdk.java_home == Path("/path/to/java")

    # A single call to check output
    test_command.subprocess.check_output.assert_called_once_with(
        [os.fsdecode(Path("/path/to/java/bin/javac")), "-version"],
        stderr=subprocess.STDOUT,
    )

    # No console output
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


def test_valid_provided_java_home(test_command, capsys):
    """If a valid JAVA_HOME is provided, it is used."""
    # Mock environ.get returning an explicit JAVA_HOME
    test_command.os.environ.get = mock.MagicMock(return_value="/path/to/java")

    # Mock return value from javac.
    test_command.subprocess.check_output.return_value = "javac 1.8.0_144\n"

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JDK should have the path returned by the tool
    assert jdk.java_home == Path("/path/to/java")

    # A single call to check output
    test_command.subprocess.check_output.assert_called_once_with(
        [os.fsdecode(Path("/path/to/java/bin/javac")), "-version"],
        stderr=subprocess.STDOUT,
    )

    # No console output
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


def test_invalid_jdk_version(test_command, tmp_path, capsys):
    """If the JDK pointed to by JAVA_HOME isn't a Java 8 JDK, the briefcase JDK is used."""
    # Mock environ.get returning an explicit JAVA_HOME
    test_command.os.environ.get = mock.MagicMock(return_value="/path/to/java")

    # Mock return value from javac. Version 14 is not the supported Java 8.
    test_command.subprocess.check_output.return_value = "javac 14\n"

    # Create a directory to make it look like the Briefcase Java already exists.
    (tmp_path / "tools" / "java" / "bin").mkdir(parents=True)

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JDK should have the briefcase JAVA_HOME
    assert jdk.java_home == tmp_path / "tools" / "java"

    # A single call was made to check javac
    test_command.subprocess.check_output.assert_called_once_with(
        [os.fsdecode(Path("/path/to/java/bin/javac")), "-version"],
        stderr=subprocess.STDOUT,
    )

    # No console output (because Briefcase JDK exists)
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


def test_no_javac(test_command, tmp_path, capsys):
    """If the JAVA_HOME doesn't point to a location with a bin/javac, the briefcase JDK is used."""
    # Mock environ.get returning an explicit JAVA_HOME
    test_command.os.environ.get = mock.MagicMock(return_value="/path/to/nowhere")

    # Mock return value from javac failing because executable doesn't exist
    test_command.subprocess.check_output.side_effect = FileNotFoundError

    # Create a directory to make it look like the Briefcase Java already exists.
    (tmp_path / "tools" / "java" / "bin").mkdir(parents=True)

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JAVA_HOME should point at the Briefcase-provided JDK
    assert jdk.java_home == tmp_path / "tools" / "java"

    # A single call was made to check javac
    test_command.subprocess.check_output.assert_called_once_with(
        [os.fsdecode(Path("/path/to/nowhere/bin/javac")), "-version"],
        stderr=subprocess.STDOUT,
    )

    # No console output (because Briefcase JDK exists)
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


def test_javac_error(test_command, tmp_path, capsys):
    """If javac can't be executed, the briefcase JDK is used."""
    # Mock environ.get returning an explicit JAVA_HOME
    test_command.os.environ.get = mock.MagicMock(return_value="/path/to/java")

    # Mock javac failing when invoked (non-zero exit status)
    test_command.subprocess.check_output.side_effect = subprocess.CalledProcessError(
        returncode=1, cmd="/path/to/java/bin/javac"
    )

    # Create a directory to make it look like the Briefcase Java already exists.
    (tmp_path / "tools" / "java" / "bin").mkdir(parents=True)

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JDK should have the briefcase JAVA_HOME
    assert jdk.java_home == tmp_path / "tools" / "java"

    # A single call was made to check javac
    test_command.subprocess.check_output.assert_called_once_with(
        [os.fsdecode(Path("/path/to/java/bin/javac")), "-version"],
        stderr=subprocess.STDOUT,
    )

    # No console output (because Briefcase JDK exists)
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


def test_unparseable_javac_version(test_command, tmp_path, capsys):
    """If the javac version can't be parsed, the briefcase JDK is used."""
    # Mock environ.get returning an explicit JAVA_HOME
    test_command.os.environ.get = mock.MagicMock(return_value="/path/to/java")

    # Mock a javac version string that does not match "javac X.Y..."
    test_command.subprocess.check_output.return_value = "NONSENSE\n"

    # Create a directory to make it look like the Briefcase Java already exists.
    (tmp_path / "tools" / "java" / "bin").mkdir(parents=True)

    # Create a JDK wrapper by verification
    jdk = JDK.verify(command=test_command)

    # The JDK should have the briefcase JAVA_HOME
    assert jdk.java_home == tmp_path / "tools" / "java"

    # A single call was made to check javac
    test_command.subprocess.check_output.assert_called_once_with(
        [os.fsdecode(Path("/path/to/java/bin/javac")), "-version"],
        stderr=subprocess.STDOUT,
    )

    # No console output (because Briefcase JDK exists)
    output = capsys.readouterr()
    assert output.out == ""
    assert output.err == ""


@pytest.mark.parametrize(
    ("host_os, jdk_url, jhome"),
    [
        (
            "Darwin",
            "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/"
            "jdk8u242-b08/OpenJDK8U-jdk_x64_mac_hotspot_8u242b08.tar.gz",
            "java/Contents/Home",
        ),
        (
            "Linux",
            "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/"
            "jdk8u242-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u242b08.tar.gz",
            "java",
        ),
        (
            "Windows",
            "https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/"
            "jdk8u242-b08/OpenJDK8U-jdk_x64_windows_hotspot_8u242b08.zip",
            "java",
        ),
    ],
)
def test_successful_jdk_download(
    test_command, tmp_path, capsys, host_os, jdk_url, jhome
):
    """If needed, a JDK can be downloaded."""
    # Mock host OS
    test_command.host_os = host_os

    # Mock a JAVA_HOME that won't exist
    # This is only needed to make macOS *not* run /usr/libexec/java_home
    test_command.os.environ.get = mock.MagicMock(return_value="/does/not/exist")

    # Mock the cached download path
    # Consider to remove if block when we drop py3.7 support, only keep statements from else.
    # MagicMock below py3.8 doesn't has __fspath__ attribute.
    if sys.version_info < (3, 8):
        archive = FsPathMock("/path/to/download.zip")
    else:
        archive = mock.MagicMock()
        archive.__fspath__.return_value = "/path/to/download.zip"
    test_command.download_url.return_value = archive

    # Create a directory to make it look like Java was downloaded and unpacked.
    (tmp_path / "tools" / "jdk8u242-b08").mkdir(parents=True)

    # Invoke the verify call
    jdk = JDK.verify(command=test_command)

    assert jdk.java_home == tmp_path / "tools" / jhome

    # Console output contains a warning about the bad JDK location
    output = capsys.readouterr()
    assert output.err == ""
    assert "** WARNING: JAVA_HOME does not point to a Java 8 JDK" in output.out

    # Download was invoked
    test_command.download_url.assert_called_with(
        url=jdk_url,
        download_path=tmp_path / "tools",
    )

    # The archive was unpacked
    # TODO: Py3.6 compatibility; os.fsdecode not required in Py3.7
    test_command.shutil.unpack_archive.assert_called_with(
        "/path/to/download.zip", extract_dir=os.fsdecode(tmp_path / "tools")
    )

    # The original archive was deleted
    archive.unlink.assert_called_once_with()


def test_not_installed(test_command, tmp_path):
    """If the JDK isn't installed, and install isn't requested, an error is raised."""
    # Mock host OS
    test_command.host_os = "Linux"

    # Invoke the verify call. Install is not requested, so this will fail.
    with pytest.raises(MissingToolError):
        JDK.verify(command=test_command, install=False)

    # Download was not invoked
    assert test_command.download_url.call_count == 0


def test_jdk_download_failure(test_command, tmp_path):
    """If an error occurs downloading the JDK, an error is raised."""
    # Mock Linux as the host
    test_command.host_os = "Linux"

    # Mock a failure on download
    test_command.download_url.side_effect = requests_exceptions.ConnectionError

    # Invoking verify_jdk causes a network failure.
    with pytest.raises(NetworkFailure):
        JDK.verify(command=test_command)

    # That download was attempted
    test_command.download_url.assert_called_with(
        url="https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/"
        "jdk8u242-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u242b08.tar.gz",
        download_path=tmp_path / "tools",
    )

    # No attempt was made to unpack the archive
    assert test_command.shutil.unpack_archive.call_count == 0


def test_invalid_jdk_archive(test_command, tmp_path):
    """If the JDK download isn't a valid archive, raise an error."""
    # Mock Linux as the host
    test_command.host_os = "Linux"

    # Mock the cached download path
    # Consider to remove if block when we drop py3.7 support, only keep statements from else.
    # MagicMock below py3.8 doesn't has __fspath__ attribute.
    if sys.version_info < (3, 8):
        archive = FsPathMock("/path/to/download.zip")
    else:
        archive = mock.MagicMock()
        archive.__fspath__.return_value = "/path/to/download.zip"
    test_command.download_url.return_value = archive

    # Mock an unpack failure due to an invalid archive
    test_command.shutil.unpack_archive.side_effect = shutil.ReadError

    with pytest.raises(BriefcaseCommandError):
        JDK.verify(command=test_command)

    # The download occurred
    test_command.download_url.assert_called_with(
        url="https://github.com/AdoptOpenJDK/openjdk8-binaries/releases/download/"
        "jdk8u242-b08/OpenJDK8U-jdk_x64_linux_hotspot_8u242b08.tar.gz",
        download_path=tmp_path / "tools",
    )

    # An attempt was made to unpack the archive.
    # TODO: Py3.6 compatibility; os.fsdecode not required in Py3.7
    test_command.shutil.unpack_archive.assert_called_with(
        "/path/to/download.zip", extract_dir=os.fsdecode(tmp_path / "tools")
    )

    # The original archive was not deleted
    assert archive.unlink.call_count == 0
""" .. module:: Katna.config :platform: Platfrom Independent :synopsis: This module defines some helpful configuration variables """<import_stmt>os<line_sep># # Configuration parameters for Image class <class_stmt>Image# default value by which image size to be reduces for processing <block_start>down_sample_factor=8<line_sep># Debug flag DEBUG=<false><line_sep># Crop_height_reduction_factor_in_each_iterationnot found crop height # will be reduced by this multiplier/factor and search for candidate crops # is resumed. # Decreasing the height and width for crops while checking it don't get small by 1/(min_image_to_crop_factor) of image height/width min_image_to_crop_factor=4<line_sep>crop_height_reduction_factor_in_each_iteration=0.05<block_end># # Configurations for Scoring crops for crop extractor <class_stmt>CropScorer<block_start>detail_weight=0.2# default weight value for detail parameter edge_radius=0.4# default edge radius edge_weight=-20# default edge weight outside_importance=(-0.5)<line_sep># default value to set if the pixel is outside crop rectangle rule_of_thirds=<true># boolean to set rule of third condition check saliency_bias=0.2# bias color value for saliency(+- error value) saliency_weight=1.3# default edge radius face_bias=0.01# bias color value for face(+- error value) face_weight=3.4# default weight value for face parameter rects_weight=1<block_end># default weight value for crop rectangles # # Configurations for Text detection class <class_stmt>TextDetector# Min Confidence Threshold for Text detection model <block_start>min_confidence=0.9<line_sep># Threshold for merging text detection boxes merge_threshold=1<line_sep># Name of Model files to be used for text detection frozen_weights="frozen_east_text_detection.pb"<line_sep># Location where model file will be downloaded cache_subdir="models"<line_sep># Layers Name for text detection layerNames=["feature_fusion/Conv_7/Sigmoid" "feature_fusion/concat_3"]<line_sep># Download Link for Text detection 
model model_download_link="https://github.com/oyyd/frozen_east_text_detection.pb/raw/master/frozen_east_text_detection.pb"<block_end># # Configurations for Edge Feature class <class_stmt>EdgeFeature# min edge threshold value <block_start>min_val_threshold=100<line_sep># Max edge threshold value max_val_threshold=200<line_sep># aperture_size/size of Sobel kernel for canny edge detector ksize=3<block_end># # Configurations for Face detection Feature class <class_stmt>FaceFeature# Model file name to be used for face detection <block_start>model_file="res10_300x300_ssd_iter_140000_fp16.caffemodel"<line_sep># Model definition file name to be used for face detetion prototxt_file="deploy.prototxt"<line_sep># Location where model file will be downloaded cache_subdir="models"<line_sep># Min Confidence Threshold for face detection model confidence=0.5<line_sep># Download Link for face detection model defintion file prototxt_download_link="https://raw.githubusercontent.com/opencv/opencv/master/samples/dnn/face_detector/deploy.prototxt"<line_sep># Download Link for face detection model modelfile_download_link="https://raw.githubusercontent.com/opencv/opencv_3rdparty/dnn_samples_face_detector_20180205_fp16/res10_300x300_ssd_iter_140000_fp16.caffemodel"<block_end># # Configuration parameters for Video class <class_stmt>Video# Debug flag <block_start>DEBUG=<false><line_sep>min_video_duration=5.0<line_sep># consume % of memory during video keyframe extraction # 80% of available memory will be consumed memory_consumption_threshold=0.80<line_sep># assumed numbers of frames within which 1 candidate frames which might be available # seconds to reach threshold if all frames are collected, but not all are candidate frames # currently we assume 1 in 5 frame for that assumed_no_of_frames_per_candidate_frame=5<line_sep># if video duration greater than this number video will be treated as a large video video_split_threshold_in_minutes=20<line_sep># https://trac.ffmpeg.org/wiki/Encode/H.264 
# Keep this between 20 to 30 value video_compression_crf_parameter=23<line_sep>video_compression_codec="libx264"# Currently "libx264 and is supported" compression_output_file_extension="mp4"<line_sep># Supported/valid video extensions supported by ffmpeg # You can generate updated list by using following shell script on MacOSX or Linux # $ ffmpeg -demuxers -hide_banner | tail -n +5 | cut -d' ' -f4 | xargs -I{} ffmpeg -hide_banner -h demuxer={} | grep 'Common extensions' | cut -d' ' -f7 | tr ',' $'\n' | tr -d '.' video_extensions=[".str" ".aa" ".aac" ".ac3" ".acm" ".adf" ".adp" ".dtk" ".ads" ".ss2" ".adx" ".aea" ".afc" ".aix" ".al" ".ape" ".apl" ".mac" ".aptx" ".aptxhd" ".aqt" ".ast" ".avi" ".avr" ".bfstm" ".bcstm" ".bit" ".bmv" ".brstm" ".cdg" ".cdxl" ".xl" ".c2" ".302" ".daud" ".str" ".dss" ".dts" ".dtshd" ".dv" ".dif" ".cdata" ".eac3" ".paf" ".fap" ".flm" ".flac" ".flv" ".fsb" ".g722" ".722" ".tco" ".rco" ".g723_1" ".g729" ".genh" ".gsm" ".h261" ".h26l" ".h264" ".264" ".avc" ".hevc" ".h265" ".265" ".idf" ".cgi" ".sf" ".ircam" ".ivr" ".flv" ".lvf" ".m4v" ".mkv" ".mk3d" ".mka" ".mks" ".mjpg" ".mjpeg" ".mpo" ".j2k" ".mlp" ".mov" ".mp4" ".m4a" ".3gp" ".3g2" ".mj2" ".mp2" ".mp3" ".m2a" ".mpa" ".mpc" ".mjpg" ".txt" ".mpl2" ".sub" ".msf" ".mtaf" ".ul" ".musx" ".mvi" ".mxg" ".v" ".nist" ".sph" ".nsp" ".nut" ".ogg" ".oma" ".omg" ".aa3" ".pjs" ".pvf" ".yuv" ".cif" ".qcif" ".rgb" ".rt" ".rsd" ".rsd" ".rso" ".sw" ".sb" ".smi" ".sami" ".sbc" ".msbc" ".sbg" ".scc" ".sdr2" ".sds" ".sdx" ".shn" ".vb" ".son" ".sln" ".mjpg" ".stl" ".sub" ".sub" ".sup" ".svag" ".tak" ".thd" ".tta" ".ans" ".art" ".asc" ".diz" ".ice" ".nfo" ".txt" ".vt" ".ty" ".ty+" ".uw" ".ub" ".v210" ".yuv10" ".vag" ".vc1" ".viv" ".idx" ".vpk" ".txt" ".vqf" ".vql" ".vqe" ".vtt" ".wsd" ".xmv" ".xvag" ".yop" ".y4m" ]<block_end># Configuration parameters for mediapipe <class_stmt>MediaPipe<block_start><class_stmt>AutoFlip# Rerun is required due to autoflip issue mentione here: # 
https://github.com/google/mediapipe/issues/497 <block_start>RERUN_LIMIT=2<line_sep># Models folder location MODELS_FOLDER_LOCATION=os.path.join(os.getcwd() "mediapipe" "models")<line_sep># pbtxt temp folder name TMP_PBTXT_FOLDER_NAME="temp_pbtxt"<line_sep>TMP_PBTXT_FOLDER_PATH=os.path.join(os.getcwd() TMP_PBTXT_FOLDER_NAME)<line_sep># Default pbtxt and build cmd CONFIG_FILE_PBTXT=os.path.join(os.path.dirname(os.path.abspath(__file__)) "mediapipe_autoflip.pbtxt")<line_sep>BUILD_CMD="run_autoflip"<line_sep># user friendly conf keys ENFORCE_FEATURES_KEYNAME="ENFORCE_FEATURES"<line_sep>STABALIZATION_THRESHOLD_KEYNAME="STABALIZATION_THRESHOLD"<line_sep>BLUR_AREA_OPACITY_KEYNAME="BLUR_AREA_OPACITY"<line_sep># DEFAULT VALUES IN PBTXT DEFAULT_BLUR_AREA_OPACITY=0.6<line_sep>DEFAULT_MOTION_STABALIZATION_THRESHOLD=0.5<line_sep>DEFAULT_FEATURE_SIGNAL_VALUE="false"<line_sep># ENFORCE_FEATURES Keys _FACE_CORE_LANDMARKS="FACE_CORE_LANDMARKS"<line_sep>_FACE_FULL="FACE_FULL"<line_sep>_FACE_ALL_LANDMARKS="FACE_ALL_LANDMARKS"<line_sep>_HUMAN="HUMAN"<line_sep>_PET="PET"<line_sep>_CAR="CAR"<line_sep>_OBJECT="OBJECT"<line_sep># the variables names below should match the keyname for set_conf to work # smoothly # ENFORCE_FEATURES list ENFORCE_FEATURES={_FACE_CORE_LANDMARKS:<false> _FACE_ALL_LANDMARKS:<false> _FACE_FULL:<false> _HUMAN:<false> _PET:<false> _CAR:<false> _OBJECT:<false> }<line_sep># % AREA from center where most of the content is # usually applied when content is focused near center STABALIZATION_THRESHOLD=DEFAULT_MOTION_STABALIZATION_THRESHOLD<line_sep># opacity of blur area BLUR_AREA_OPACITY=DEFAULT_BLUR_AREA_OPACITY<line_sep>@classmethod<def_stmt>get_pbtxt_mapping cls<block_start><return>{cls.ENFORCE_FEATURES_KEYNAME:"signal_settings" cls.STABALIZATION_THRESHOLD_KEYNAME:"motion_stabilization_threshold_percent" cls.BLUR_AREA_OPACITY_KEYNAME:"overlay_opacity" }<block_end>@classmethod<def_stmt>get_conf cls<block_start>"""Gets the current config :return: dictionary containing 
the current config :rtype: dict """<line_sep><return>{cls.ENFORCE_FEATURES_KEYNAME:cls.ENFORCE_FEATURES cls.STABALIZATION_THRESHOLD_KEYNAME:cls.STABALIZATION_THRESHOLD cls.BLUR_AREA_OPACITY_KEYNAME:cls.BLUR_AREA_OPACITY }<block_end>@classmethod<def_stmt>set_conf cls config<block_start>"""Sets the config passed :param config: The configuration to set. :type config: dict """<for_stmt>attr config.keys()<block_start>current_conf=cls.get_conf()<if_stmt>attr<in>current_conf.keys()<block_start><if_stmt>attr<eq>cls.ENFORCE_FEATURES_KEYNAME<block_start>updated_attr_dict={**current_conf[attr] **config[attr]}<line_sep>setattr(cls attr updated_attr_dict)<block_end><else_stmt><block_start>setattr(cls attr config[attr])<block_end><block_end><else_stmt><block_start><raise>Exception(" Invalid configuration. Use get_conf method to see existing configuration or refer documentation.")<block_end><block_end><block_end><block_end><block_end><class_stmt>ImageSelector# Setting for optimum Brightness values <block_start>min_brightness_value=10.0<line_sep>max_brightness_value=90.0<line_sep>brightness_step=2.0<line_sep># Setting for optimum Contrast/Entropy values min_entropy_value=1.0<line_sep>max_entropy_value=10.0<line_sep>entropy_step=0.5<block_end><class_stmt>FrameExtractor# Setting local maxima criteria <block_start>USE_LOCAL_MAXIMA=<true><line_sep># Lenght of sliding window taking difference len_window=20<line_sep># Chunk size of Images to be processed at a time in memory max_frames_in_chunk=500<line_sep># Type of smoothening window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman' flat window will produce a moving average smoothing. window_type="hanning"<block_end>
# -*- coding: utf-8 -*- """Manages custom event formatter helpers."""<class_stmt>FormattersManager(object)<block_start>"""Custom event formatter helpers manager."""<line_sep>_custom_formatter_helpers={}<line_sep>@classmethod<def_stmt>GetEventFormatterHelper cls identifier<block_start>"""Retrieves a custom event formatter helper. Args: identifier (str): identifier. Returns: CustomEventFormatterHelper: custom event formatter or None if not available. """<line_sep>identifier=identifier.lower()<line_sep><return>cls._custom_formatter_helpers.get(identifier)<block_end>@classmethod<def_stmt>RegisterEventFormatterHelper cls formatter_helper_class<block_start>"""Registers a custom event formatter helper. The custom event formatter helpers are identified based on their lower case identifier. Args: formatter_helper_class (type): class of the custom event formatter helper. Raises: KeyError: if a custom formatter helper is already set for the corresponding identifier. """<line_sep>identifier=formatter_helper_class.IDENTIFIER.lower()<if_stmt>identifier<in>cls._custom_formatter_helpers<block_start><raise>KeyError(('Custom event formatter helper already set for identifier: '<concat>'{0:s}.').format(formatter_helper_class.IDENTIFIER))<block_end>cls._custom_formatter_helpers[identifier]=formatter_helper_class()<block_end>@classmethod<def_stmt>RegisterEventFormatterHelpers cls formatter_helper_classes<block_start>"""Registers custom event formatter helpers. The formatter classes are identified based on their lower case data type. Args: formatter_helper_classes (list[type]): classes of the custom event formatter helpers. Raises: KeyError: if a custom formatter helper is already set for the corresponding data type. """<for_stmt>formatter_helper_class formatter_helper_classes<block_start>cls.RegisterEventFormatterHelper(formatter_helper_class)<block_end><block_end><block_end>
c=get_config()<line_sep>#Export all the notebooks in the current directory to the sphinx_howto format. c.NbConvertApp.notebooks=['*.ipynb']<line_sep>c.NbConvertApp.export_format='latex'<line_sep>c.NbConvertApp.postprocessor_class='PDF'<line_sep>c.Exporter.template_file='custom_article.tplx'<line_sep>
<import_stmt>json<import_stmt>requests<import_from_stmt>rotkehlchen.assets.asset Asset<import_from_stmt>rotkehlchen.constants.timing DEFAULT_TIMEOUT_TUPLE<import_from_stmt>rotkehlchen.errors.misc RemoteError<import_from_stmt>rotkehlchen.errors.serialization DeserializationError<import_from_stmt>rotkehlchen.history.deserialization deserialize_price<import_from_stmt>rotkehlchen.types Price<line_sep>PRICE_API_URL='https://bisq.markets/api/ticker?market={symbol}_BTC'<def_stmt>get_bisq_market_price asset:Asset<arrow>Price<block_start>""" Get price for pair at bisq marketplace. Price is returned against BTC. Can raise: - RemoteError: If the market doesn't exists or request fails - DeserializationError: If the data returned is not a valid price """<line_sep>url=PRICE_API_URL.format(symbol=asset.symbol)<try_stmt><block_start>response=requests.get(url timeout=DEFAULT_TIMEOUT_TUPLE)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start><raise>RemoteError(f'bisq.markets request {url} failed due to {str(e)}')<from>e<block_end><try_stmt><block_start>data=response.json()<block_end><except_stmt>json.decoder.JSONDecodeError<as>e<block_start><raise>RemoteError(f'Failed to read json response from bisq.markets. {response.text}. {str(e)}' )<from>e<block_end><if_stmt>'error'<in>data<block_start><raise>RemoteError(f'Request data from bisq.markets {url} is not valid {data["error"]}')<block_end><try_stmt><block_start>price=data['last']<block_end><except_stmt>KeyError<as>e<block_start><raise>DeserializationError(f'Response from bisq.markets didnt contain expected key "last". {data}' )<from>e<block_end><return>deserialize_price(price)<block_end>
<import_from_stmt>pydantic BaseModel EmailStr HttpUrl ValidationError<class_stmt>User(BaseModel)<block_start>email:EmailStr<line_sep>website:HttpUrl<block_end># Invalid email <try_stmt><block_start>User(email="jdoe" website="https://www.example.com")<block_end><except_stmt>ValidationError<as>e<block_start>print(str(e))<block_end># Invalid URL <try_stmt><block_start>User(email="<EMAIL>" website="jdoe")<block_end><except_stmt>ValidationError<as>e<block_start>print(str(e))<block_end># Valid user=User(email="<EMAIL>" website="https://www.example.com")<line_sep># email='<EMAIL>' website=HttpUrl('https://www.example.com', scheme='https', host='www.example.com', tld='com', host_type='domain') print(user)<line_sep>
import zengl

from defaults import defaults
from grid import grid_pipeline
from window import Window

# Render target setup: 4x multisampled color + depth attachments sized to
# the window.
window = Window(1280, 720)
ctx = zengl.context()

image = ctx.image(window.size, 'rgba8unorm', samples=4)
depth = ctx.image(window.size, 'depth24plus', samples=4)
image.clear_value = (0.2, 0.2, 0.2, 1.0)

# Make the shared "defaults" GLSL snippet available to #include directives.
ctx.includes['defaults'] = defaults

# Background reference grid pipeline.
grid = grid_pipeline(ctx, [image, depth])

# Pipeline drawing a 12-sided cylinder. All geometry is baked into the
# vertex shader as constant arrays and indexed by gl_VertexID, so no vertex
# buffers are needed.
# NOTE(review): the shader strings were reflowed from a whitespace-collapsed
# source; line breaks inside the GLSL were reconstructed.
pipeline = ctx.pipeline(
    vertex_shader='''
        #version 330

        #include "defaults"

        vec3 vertices[24] = vec3[](
            vec3(0.000000, 1.000000, -0.500000),
            vec3(0.000000, 1.000000, 0.500000),
            vec3(0.500000, 0.866025, -0.500000),
            vec3(0.500000, 0.866025, 0.500000),
            vec3(0.866025, 0.500000, -0.500000),
            vec3(0.866025, 0.500000, 0.500000),
            vec3(1.000000, -0.000000, -0.500000),
            vec3(1.000000, -0.000000, 0.500000),
            vec3(0.866025, -0.500000, -0.500000),
            vec3(0.866025, -0.500000, 0.500000),
            vec3(0.500000, -0.866025, -0.500000),
            vec3(0.500000, -0.866025, 0.500000),
            vec3(-0.000000, -1.000000, -0.500000),
            vec3(-0.000000, -1.000000, 0.500000),
            vec3(-0.500000, -0.866025, -0.500000),
            vec3(-0.500000, -0.866025, 0.500000),
            vec3(-0.866025, -0.500000, -0.500000),
            vec3(-0.866025, -0.500000, 0.500000),
            vec3(-1.000000, 0.000000, -0.500000),
            vec3(-1.000000, 0.000000, 0.500000),
            vec3(-0.866025, 0.500000, -0.500000),
            vec3(-0.866025, 0.500000, 0.500000),
            vec3(-0.500000, 0.866025, -0.500000),
            vec3(-0.500000, 0.866025, 0.500000)
        );

        vec3 normals[14] = vec3[](
            vec3(-0.0000, 1.0000, -0.0000),
            vec3(0.5000, 0.8660, -0.0000),
            vec3(0.8660, 0.5000, -0.0000),
            vec3(1.0000, -0.0000, -0.0000),
            vec3(0.8660, -0.5000, -0.0000),
            vec3(0.5000, -0.8660, -0.0000),
            vec3(-0.0000, -1.0000, -0.0000),
            vec3(-0.5000, -0.8660, -0.0000),
            vec3(-0.8660, -0.5000, -0.0000),
            vec3(-1.0000, -0.0000, -0.0000),
            vec3(-0.8660, 0.5000, -0.0000),
            vec3(-0.0000, -0.0000, 1.0000),
            vec3(-0.5000, 0.8660, -0.0000),
            vec3(-0.0000, -0.0000, -1.0000)
        );

        vec2 texcoords[50] = vec2[](
            vec2(1.000000, 0.500000),
            vec2(0.000000, 0.500000),
            vec2(0.750000, 0.490000),
            vec2(1.000000, 1.000000),
            vec2(0.250000, 0.490000),
            vec2(0.000000, 1.000000),
            vec2(0.916667, 0.500000),
            vec2(0.870000, 0.457846),
            vec2(0.916667, 1.000000),
            vec2(0.370000, 0.457846),
            vec2(0.833333, 0.500000),
            vec2(0.957846, 0.370000),
            vec2(0.833333, 1.000000),
            vec2(0.457846, 0.370000),
            vec2(0.750000, 0.500000),
            vec2(0.990000, 0.250000),
            vec2(0.750000, 1.000000),
            vec2(0.490000, 0.250000),
            vec2(0.666667, 0.500000),
            vec2(0.957846, 0.130000),
            vec2(0.666667, 1.000000),
            vec2(0.457846, 0.130000),
            vec2(0.583333, 0.500000),
            vec2(0.870000, 0.042154),
            vec2(0.583333, 1.000000),
            vec2(0.370000, 0.042154),
            vec2(0.500000, 0.500000),
            vec2(0.750000, 0.010000),
            vec2(0.500000, 1.000000),
            vec2(0.250000, 0.010000),
            vec2(0.416667, 0.500000),
            vec2(0.630000, 0.042154),
            vec2(0.416667, 1.000000),
            vec2(0.130000, 0.042154),
            vec2(0.333333, 0.500000),
            vec2(0.542154, 0.130000),
            vec2(0.333333, 1.000000),
            vec2(0.042154, 0.130000),
            vec2(0.250000, 0.500000),
            vec2(0.510000, 0.250000),
            vec2(0.250000, 1.000000),
            vec2(0.010000, 0.250000),
            vec2(0.166667, 0.500000),
            vec2(0.542154, 0.370000),
            vec2(0.042154, 0.370000),
            vec2(0.166667, 1.000000),
            vec2(0.083333, 0.500000),
            vec2(0.630000, 0.457846),
            vec2(0.130000, 0.457846),
            vec2(0.083333, 1.000000)
        );

        int vertex_indices[132] = int[](
            1, 2, 0, 3, 4, 2, 5, 6, 4, 7, 8, 6, 9, 10, 8, 11, 12, 10, 13, 14,
            12, 15, 16, 14, 17, 18, 16, 19, 20, 18, 21, 13, 5, 21, 22, 20, 23, 0, 22, 6,
            14, 22, 1, 3, 2, 3, 5, 4, 5, 7, 6, 7, 9, 8, 9, 11, 10, 11, 13, 12,
            13, 15, 14, 15, 17, 16, 17, 19, 18, 19, 21, 20, 5, 3, 1, 1, 23, 21, 21, 19,
            17, 17, 15, 13, 13, 11, 9, 9, 7, 5, 5, 1, 21, 21, 17, 13, 13, 9, 5, 21,
            23, 22, 23, 1, 0, 22, 0, 2, 2, 4, 6, 6, 8, 10, 10, 12, 14, 14, 16, 18,
            18, 20, 22, 22, 2, 6, 6, 10, 14, 14, 18, 22
        );

        int normal_indices[132] = int[](
            0, 1, 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7,
            6, 7, 8, 7, 8, 9, 8, 9, 10, 9, 11, 11, 11, 10, 12, 10, 12, 0, 12, 13,
            13, 13, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 5, 6, 6,
            6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11,
            11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 10,
            12, 12, 12, 0, 0, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
            13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13
        );

        int texcoord_indices[132] = int[](
            3, 6, 0, 8, 10, 6, 12, 14, 10, 16, 18, 14, 20, 22, 18, 24, 26, 22, 28, 30,
            26, 32, 34, 30, 36, 38, 34, 40, 42, 38, 44, 29, 13, 45, 46, 42, 49, 1, 46, 15,
            31, 47, 3, 8, 6, 8, 12, 10, 12, 16, 14, 16, 20, 18, 20, 24, 22, 24, 28, 26,
            28, 32, 30, 32, 36, 34, 36, 40, 38, 40, 45, 42, 13, 9, 4, 4, 48, 44, 44, 41,
            37, 37, 33, 29, 29, 25, 21, 21, 17, 13, 13, 4, 44, 44, 37, 29, 29, 21, 13, 45,
            49, 46, 49, 5, 1, 47, 2, 7, 7, 11, 15, 15, 19, 23, 23, 27, 31, 31, 35, 39,
            39, 43, 47, 47, 7, 15, 15, 23, 31, 31, 39, 47
        );

        out vec3 v_vertex;
        out vec3 v_normal;
        out vec2 v_texcoord;

        void main() {
            v_vertex = vertices[vertex_indices[gl_VertexID]];
            v_normal = normals[normal_indices[gl_VertexID]];
            v_texcoord = texcoords[texcoord_indices[gl_VertexID]];
            gl_Position = mvp * vec4(v_vertex, 1.0);
        }
    ''',
    fragment_shader='''
        #version 330

        #include "defaults"

        in vec3 v_normal;

        layout (location = 0) out vec4 out_color;

        void main() {
            float lum = dot(normalize(light.xyz), normalize(v_normal)) * 0.7 + 0.3;
            out_color = vec4(lum, lum, lum, 1.0);
        }
    ''',
    framebuffer=[image, depth],
    topology='triangles',
    cull_face='back',
    vertex_count=132,
)

# Main loop: clear attachments, draw grid then the cylinder, present.
while window.update():
    image.clear()
    depth.clear()
    grid.render()
    pipeline.render()
    image.blit()
<import_from_stmt>.smpl_flow SMPLFlow<import_from_stmt>.skeleton_flow SkeletonFlow<import_from_stmt>.fc_head FCHead<line_sep>
<import_stmt>sys<line_sep>sys.path.append("../../")<import_from_stmt>appJar gui<def_stmt>press btn<block_start><if_stmt>btn<eq>"FIRST"<block_start>app.firstFrame("Pages")<block_end><elif_stmt>btn<eq>"NEXT"<block_start>app.nextFrame("Pages")<block_end><elif_stmt>btn<eq>"PREV"<block_start>app.prevFrame("Pages")<block_end><elif_stmt>btn<eq>"LAST"<block_start>app.lastFrame("Pages")<block_end><block_end><def_stmt>changed <block_start>msg="Changed from: "+str(app.getPreviousFrame("Pages"))+" to "+str(app.getCurrentFrame("Pages"))<line_sep>print(msg)<block_end># return app.okBox("Sure?", msg) <with_stmt>gui("FRAME STACK")<as>app<block_start><with_stmt>app.frameStack("Pages" change=changed)#, start=1): <block_start><with_stmt>app.frame(bg='red')<block_start><for_stmt>i range(5)<block_start>app.label("Text: "+str(i))<block_end><block_end><with_stmt>app.frame(bg='green')<block_start><for_stmt>i range(5)<block_start>app.entry("e"+str(i))<block_end><block_end><with_stmt>app.frame(bg='pink')<block_start><for_stmt>i range(5)<block_start>app.button(str(i) <none>)<block_end><block_end><block_end>app.buttons(["FIRST" "PREV" "NEXT" "LAST"] press)<line_sep>changed()<block_end>
""" Given a binary tree, return the postorder traversal of its nodes' values. For example: Given binary tree {1,#,2,3}, 1 \ 2 / 3 return [3,2,1]. """<line_sep># Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None <class_stmt>Solution(object)<block_start><def_stmt>postorderTraversal self root<block_start>""" :type root: TreeNode :rtype: List[int] """<line_sep>path=[]<if_stmt>root<is><none><block_start><return>path<block_end>stack1=[]<line_sep>stack2=[]<line_sep>stack1.append(root)<while_stmt>stack1<block_start>root=stack1.pop()<line_sep>stack2.append(root.val)<if_stmt>root.left<is><not><none><block_start>stack1.append(root.left)<block_end><if_stmt>root.right<is><not><none><block_start>stack1.append(root.right)<block_end><block_end><while_stmt>stack2<block_start>path.append(stack2.pop())<block_end><return>path<block_end><block_end>
ADMIN_THEME="admin"<line_sep>DEFAULT_THEME="core"<line_sep>
import numpy as np
import pytest
from sklearn.dummy import DummyRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline

from hcrystalball.metrics import get_scorer
from hcrystalball.model_selection import FinerTimeSplit
from hcrystalball.model_selection import get_best_not_failing_model
from hcrystalball.model_selection import select_model
from hcrystalball.wrappers import ExponentialSmoothingWrapper
from hcrystalball.wrappers import get_sklearn_wrapper


@pytest.mark.parametrize(
    "train_data, grid_search, parallel_over_dict",
    [("two_regions", "", {"Region": "region_0"}), ("two_regions", "", None)],
    indirect=["train_data", "grid_search"],
)
def test_select_model(train_data, grid_search, parallel_over_dict):
    """select_model yields one result per partition, each won by 'good_dummy'.

    Relies on the project fixtures ``train_data`` and ``grid_search``
    (resolved indirectly by pytest).
    """
    _train_data = train_data
    if parallel_over_dict:
        # Restrict the data to the single value being parallelized over and
        # drop the corresponding column.
        col, value = list(parallel_over_dict.items())[0]
        _train_data = train_data[train_data[col] == value].drop(columns="Region")

    partition_columns = ["Region", "Product"]

    results = select_model(
        _train_data,
        target_col_name="Quantity",
        partition_columns=partition_columns,
        parallel_over_dict=parallel_over_dict,
        grid_search=grid_search,
        country_code_column="Holidays_code",
    )
    # Expected partitions are the unique (Region, Product) combinations in
    # the (possibly filtered) training data.
    if parallel_over_dict:
        partitions = (
            train_data.loc[train_data[col] == value, partition_columns]
            .drop_duplicates()
            .to_dict(orient="records")
        )
    else:
        partitions = (
            train_data[partition_columns].drop_duplicates().to_dict(orient="records")
        )

    assert len(results) == len(partitions)

    for result in results:
        assert result.best_model_name == "good_dummy"
        assert result.partition in partitions


@pytest.mark.parametrize(
    "X_y_optional, negative_data, best_model_name, rank, expected_error",
    [
        ("", False, "ExponentialSmoothingWrapper", 1, None),
        ("", True, "SklearnWrapper", 2, None),
        ("", True, "", 2, ValueError),
    ],
    indirect=["X_y_optional"],
)
def test_get_best_not_failing_model(
    X_y_optional, negative_data, best_model_name, rank, expected_error
):
    """get_best_not_failing_model picks the best candidate that did not fail.

    A multiplicative ExponentialSmoothing model cannot fit negative data, so
    with ``negative_data`` the dummy model (rank 2) is expected to win; when
    the failing model is the only candidate, ``expected_error`` is raised.
    Relies on the project fixture ``X_y_optional``.
    """
    X, y = X_y_optional
    # data contains 0; clamp so the multiplicative model has valid input
    y[y < 1] = 1
    if negative_data:
        # A single negative observation makes the multiplicative model fail.
        y[-1] = -1
    models = [
        ExponentialSmoothingWrapper(freq="D", seasonal="mul"),
        get_sklearn_wrapper(DummyRegressor, strategy="constant", constant=-1000),
    ]
    # For the error case, keep only the model that will fail.
    models = models if expected_error is None else models[:1]
    grid_search = GridSearchCV(
        estimator=Pipeline([("model", "passthrough")]),
        param_grid=[{"model": models}],
        scoring=get_scorer("neg_mean_absolute_error"),
        cv=FinerTimeSplit(n_splits=1, horizon=5),
        refit=False,
        error_score=np.nan,
    )

    grid_search.fit(X, y)

    if expected_error:
        with pytest.raises(expected_error):
            get_best_not_failing_model(grid_search, X, y)
    else:
        best_param_rank = get_best_not_failing_model(grid_search, X, y)
        assert isinstance(best_param_rank, dict)
        assert best_param_rank["params"]["model"].__class__.__name__ == best_model_name
        assert best_param_rank["rank"] == rank
# SPDX-License-Identifier: MIT
# Copyright (C) 2018-present iced project and contributors

# ⚠️This file was generated by GENERATOR!🦹‍♂️

# pylint: disable=invalid-name
# pylint: disable=line-too-long
# pylint: disable=too-many-lines

"""
Mnemonic condition code selector (eg. ``JG`` / ``JNLE``)
"""

import typing

# At type-check time CC_g is the opaque enum type from the extension module;
# at runtime the constants below are plain ints.
if typing.TYPE_CHECKING:
    from ._iced_x86_py import CC_g
else:
    CC_g = int

G: CC_g = 0  # type: ignore
"""
``JG``, ``CMOVG``, ``SETG``
"""

NLE: CC_g = 1  # type: ignore
"""
``JNLE``, ``CMOVNLE``, ``SETNLE``
"""
class Solution:
    """LeetCode 1025 — Divisor Game."""

    def divisorGame(self, N: int) -> bool:
        """Return True iff the first player wins starting from N.

        By the known parity invariant of the game, the first player wins
        exactly when N is even, so no simulation is needed.
        """
        # `True if cond else False` was redundant — the comparison already
        # yields the bool we want.
        return N % 2 == 0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Gabbi specific exceptions."""


class GabbiDataLoadError(ValueError):
    """An exception to alert when data streams cannot be loaded."""


class GabbiFormatError(ValueError):
    """An exception to encapsulate poorly formed test data."""


class GabbiSyntaxWarning(SyntaxWarning):
    """A warning about syntax that is not desirable."""
# -*- coding: UTF-8 -*-
import config
import gevent
import availability.check
from persistence import persister
import time


def crawl_worker(queue_verification, queue_persistence):
    """Worker loop that checks availability of freshly crawled proxies.

    :param queue_verification: queue of proxies awaiting verification
    :param queue_persistence: queue of verified proxies awaiting persistence
    :return: never returns (infinite loop)
    """
    while True:
        jobs = []
        for _ in range(config.COROUTINE_NUM):
            proxy = queue_verification.get()
            # Each proxy is probed over both schemes.
            for scheme in ('http', 'https'):
                jobs.append(
                    gevent.spawn(availability.check.crawl_handle, scheme, proxy, queue_persistence)
                )
        gevent.joinall(jobs)


def store_worker():
    """Worker loop that periodically re-verifies availability of stored proxies."""
    while True:
        jobs = []
        for proxy in persister.list(count='all', columns='all'):
            # Anything not explicitly http is probed as https.
            scheme = 'http' if proxy['protocol'] == 'http' else 'https'
            jobs.append(gevent.spawn(availability.check.store_handle, scheme, proxy, persister))
            # Flush in batches of COROUTINE_NUM to bound concurrency.
            if len(jobs) == config.COROUTINE_NUM:
                gevent.joinall(jobs)
                jobs.clear()
        gevent.joinall(jobs)
        jobs.clear()
        time.sleep(config.PROXY_STORE_CHECK_SEC)
from typing import Optional, Any, List, Dict, Union

from .default_arg import DefaultArg, NotGiven
from .internal_utils import _to_dict_without_not_given, _is_iterable
from .types import TypeAndValue


class UserAddress:
    """A postal address attached to a SCIM user.

    Each attribute is either a concrete value, None, or the NotGiven
    sentinel (typed DefaultArg), meaning "not supplied" so that
    to_dict() can omit it.
    """

    country: Union[Optional[str], DefaultArg]
    locality: Union[Optional[str], DefaultArg]
    postal_code: Union[Optional[str], DefaultArg]
    primary: Union[Optional[bool], DefaultArg]
    region: Union[Optional[str], DefaultArg]
    street_address: Union[Optional[str], DefaultArg]
    # Any keyword arguments not modeled above are preserved here verbatim.
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        *,
        country: Union[Optional[str], DefaultArg] = NotGiven,
        locality: Union[Optional[str], DefaultArg] = NotGiven,
        postal_code: Union[Optional[str], DefaultArg] = NotGiven,
        primary: Union[Optional[bool], DefaultArg] = NotGiven,
        region: Union[Optional[str], DefaultArg] = NotGiven,
        street_address: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        """Keyword-only constructor; unrecognized keys go to unknown_fields."""
        self.country = country
        self.locality = locality
        self.postal_code = postal_code
        self.primary = primary
        self.region = region
        self.street_address = street_address
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        """Serialize to a dict, dropping attributes still set to NotGiven."""
        return _to_dict_without_not_given(self)


# Simple {type, value} pairs; behavior comes entirely from TypeAndValue.
class UserEmail(TypeAndValue):
    pass


class UserPhoneNumber(TypeAndValue):
    pass


class UserRole(TypeAndValue):
    pass


class UserGroup:
    """A group membership entry on a SCIM user."""

    display: Union[Optional[str], DefaultArg]
    value: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        *,
        display: Union[Optional[str], DefaultArg] = NotGiven,
        value: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.display = display
        self.value = value
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        """Serialize to a dict, dropping attributes still set to NotGiven."""
        return _to_dict_without_not_given(self)


class UserMeta:
    """SCIM resource metadata (creation info and location URL)."""

    created: Union[Optional[str], DefaultArg]
    location: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    # NOTE: unlike UserAddress/UserGroup, parameters here are not keyword-only.
    def __init__(
        self,
        created: Union[Optional[str], DefaultArg] = NotGiven,
        location: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.created = created
        self.location = location
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        """Serialize to a dict, dropping attributes still set to NotGiven."""
        return _to_dict_without_not_given(self)


class UserName:
    """The components of a user's real name."""

    family_name: Union[Optional[str], DefaultArg]
    given_name: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        family_name: Union[Optional[str], DefaultArg] = NotGiven,
        given_name: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.family_name = family_name
        self.given_name = given_name
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        """Serialize to a dict, dropping attributes still set to NotGiven."""
        return _to_dict_without_not_given(self)


class UserPhoto:
    """A photo entry (type label plus URL/value) on a SCIM user."""

    type: Union[Optional[str], DefaultArg]
    value: Union[Optional[str], DefaultArg]
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        type: Union[Optional[str], DefaultArg] = NotGiven,
        value: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        self.type = type
        self.value = value
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        """Serialize to a dict, dropping attributes still set to NotGiven."""
        return _to_dict_without_not_given(self)


class User:
    """A SCIM user resource.

    The constructor accepts either model instances or plain dicts for the
    nested fields (addresses, emails, groups, meta, name, phone_numbers,
    photos, roles) and normalizes dicts into the corresponding model class.
    """

    active: Union[Optional[bool], DefaultArg]
    addresses: Union[Optional[List[UserAddress]], DefaultArg]
    display_name: Union[Optional[str], DefaultArg]
    emails: Union[Optional[List[TypeAndValue]], DefaultArg]
    external_id: Union[Optional[str], DefaultArg]
    groups: Union[Optional[List[UserGroup]], DefaultArg]
    id: Union[Optional[str], DefaultArg]
    meta: Union[Optional[UserMeta], DefaultArg]
    name: Union[Optional[UserName], DefaultArg]
    nick_name: Union[Optional[str], DefaultArg]
    phone_numbers: Union[Optional[List[TypeAndValue]], DefaultArg]
    photos: Union[Optional[List[UserPhoto]], DefaultArg]
    profile_url: Union[Optional[str], DefaultArg]
    roles: Union[Optional[List[TypeAndValue]], DefaultArg]
    schemas: Union[Optional[List[str]], DefaultArg]
    timezone: Union[Optional[str], DefaultArg]
    title: Union[Optional[str], DefaultArg]
    user_name: Union[Optional[str], DefaultArg]
    # Any keyword arguments not modeled above are preserved here verbatim.
    unknown_fields: Dict[str, Any]

    def __init__(
        self,
        *,
        active: Union[Optional[bool], DefaultArg] = NotGiven,
        addresses: Union[Optional[List[Union[UserAddress, Dict[str, Any]]]], DefaultArg] = NotGiven,
        display_name: Union[Optional[str], DefaultArg] = NotGiven,
        emails: Union[Optional[List[Union[TypeAndValue, Dict[str, Any]]]], DefaultArg] = NotGiven,
        external_id: Union[Optional[str], DefaultArg] = NotGiven,
        groups: Union[Optional[List[Union[UserGroup, Dict[str, Any]]]], DefaultArg] = NotGiven,
        id: Union[Optional[str], DefaultArg] = NotGiven,
        meta: Union[Optional[Union[UserMeta, Dict[str, Any]]], DefaultArg] = NotGiven,
        name: Union[Optional[Union[UserName, Dict[str, Any]]], DefaultArg] = NotGiven,
        nick_name: Union[Optional[str], DefaultArg] = NotGiven,
        phone_numbers: Union[Optional[List[Union[TypeAndValue, Dict[str, Any]]]], DefaultArg] = NotGiven,
        photos: Union[Optional[List[Union[UserPhoto, Dict[str, Any]]]], DefaultArg] = NotGiven,
        profile_url: Union[Optional[str], DefaultArg] = NotGiven,
        roles: Union[Optional[List[Union[TypeAndValue, Dict[str, Any]]]], DefaultArg] = NotGiven,
        schemas: Union[Optional[List[str]], DefaultArg] = NotGiven,
        timezone: Union[Optional[str], DefaultArg] = NotGiven,
        title: Union[Optional[str], DefaultArg] = NotGiven,
        user_name: Union[Optional[str], DefaultArg] = NotGiven,
        **kwargs,
    ) -> None:
        """Keyword-only constructor; dict inputs for nested fields are
        coerced to model instances, NotGiven values pass through unchanged."""
        self.active = active
        # List fields: coerce each dict element to its model class; leave
        # non-iterable inputs (None / NotGiven) untouched.
        self.addresses = (  # type: ignore
            [a if isinstance(a, UserAddress) else UserAddress(**a) for a in addresses]
            if _is_iterable(addresses)
            else addresses
        )
        self.display_name = display_name
        self.emails = (  # type: ignore
            [a if isinstance(a, TypeAndValue) else TypeAndValue(**a) for a in emails]
            if _is_iterable(emails)
            else emails
        )
        self.external_id = external_id
        self.groups = (  # type: ignore
            [a if isinstance(a, UserGroup) else UserGroup(**a) for a in groups]
            if _is_iterable(groups)
            else groups
        )
        self.id = id
        # Scalar nested fields: coerce only when given as a dict.
        self.meta = (  # type: ignore
            UserMeta(**meta) if meta is not None and isinstance(meta, dict) else meta
        )
        self.name = (  # type: ignore
            UserName(**name) if name is not None and isinstance(name, dict) else name
        )
        self.nick_name = nick_name
        self.phone_numbers = (  # type: ignore
            [a if isinstance(a, TypeAndValue) else TypeAndValue(**a) for a in phone_numbers]
            if _is_iterable(phone_numbers)
            else phone_numbers
        )
        self.photos = (  # type: ignore
            [a if isinstance(a, UserPhoto) else UserPhoto(**a) for a in photos]
            if _is_iterable(photos)
            else photos
        )
        self.profile_url = profile_url
        self.roles = (  # type: ignore
            [a if isinstance(a, TypeAndValue) else TypeAndValue(**a) for a in roles]
            if _is_iterable(roles)
            else roles
        )
        self.schemas = schemas
        self.timezone = timezone
        self.title = title
        self.user_name = user_name
        self.unknown_fields = kwargs

    def to_dict(self) -> dict:
        """Serialize to a dict, dropping attributes still set to NotGiven."""
        return _to_dict_without_not_given(self)

    def __repr__(self):
        return f"<slack_sdk.scim.{self.__class__.__name__}: {self.to_dict()}>"
from rest_framework import permissions
from csp import settings
from rest_framework.exceptions import PermissionDenied


class IsWorker(permissions.BasePermission):
    """Grant access only when the requesting user's profile is a worker."""

    def has_permission(self, request, view):
        return request.user.profile.is_worker


class IsRequester(permissions.BasePermission):
    """Object-level check: requesting user's profile must be a requester."""

    def has_object_permission(self, request, view, object):
        return request.user.profile.is_requester


class CanCreateAccount(permissions.BasePermission):
    """Block account creation during closed beta unless the user is staff
    or open registration is enabled."""

    def has_permission(self, request, view):
        # Only the 'create' action is gated; everything else passes through.
        if view.action != 'create':
            return True
        if request.user.is_staff or settings.REGISTRATION_ALLOWED:
            return True
        raise PermissionDenied(
            detail='We are currently in closed beta. '
                   'If you\'d like an account, email <EMAIL> '
                   'with a short description of what you\'d like to use Daemo for.')
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This library provides a random password generator."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import random

from absl import flags
from absl import logging

# Inclusive bounds on the allowed password length.
_MIN = 8
_MAX = 100

FLAGS = flags.FLAGS

flags.DEFINE_integer(
    'password_length', _MAX,
    'The length of the password to be generated for the Grab n Go Role Account.'
    '\nNOTE: The length must be between 8 and 100 and must be compliant with '
    'the G Suite Admin password settings.\nThe Security Settings can be found '
    'in the Google Admin console: admin.google.com')

flags.register_validator(
    'password_length',
    lambda length: _MIN <= length <= _MAX,
    'Password length must be between {} and {} characters.'.format(_MIN, _MAX),
)


def generate(length):
  """Generates a new password of a given length.

  Args:
    length: int, the length of the password to generate.

  Returns:
    A random password of type string with the given length.

  Raises:
    ValueError: if the length provided is invalid.
  """
  if length < _MIN or length > _MAX:
    raise ValueError(
        'password length must be between {!r} and {!r} characters length '
        'provided was: {!r}'.format(_MIN, _MAX, length))
  logging.debug('Generating a password with length: %r.', length)
  chars = ('abcdefghijklmnopqrstuvwxyz'
           'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
           '0123456789'
           '!$%^&*()-_=+@:;~#,.<>? ')
  # Cryptographically secure source; SystemRandom draws from os.urandom.
  rand = random.SystemRandom()
  # join() over a generator replaces the original quadratic `password += c`
  # accumulation loop; identical output distribution.
  return ''.join(rand.choice(chars) for _ in range(length))