content stringlengths 0 1.55M |
|---|
# coding: utf-8
<import_stmt>chainer<class_stmt>Range(chainer.Chain)<block_start><def_stmt>forward self x<block_start><return>range(x)<block_end><block_end><class_stmt>RangeStop(chainer.Chain)<block_start><def_stmt>forward self x y<block_start><return>range(x y)<block_end><block_end><class_stmt>RangeStep(chainer.Chain)<block_start><def_stmt>forward self x y z<block_start><return>range(x y z)<block_end><block_end><class_stmt>RangeListComp(chainer.Chain)<block_start><def_stmt>forward self xs ps p<block_start>y1=[xs[x x+2]<for>x range(p)]<line_sep>y2=[xs[ps[x] ps[x]+3]<for>x range(p)]<line_sep><return>y1 y2<block_end><block_end># ======================================
<import_from_stmt>chainer_compiler ch2o<import_stmt>numpy<as>np<if_stmt>__name__<eq>'__main__'<block_start>ch2o.generate_testcase(Range [5])<line_sep>ch2o.generate_testcase(RangeStop() [5 8] subname='stop')<line_sep>ch2o.generate_testcase(RangeStep() [5 19 2] subname='step')<line_sep>wn=5<line_sep>v=np.random.rand(10 20).astype(np.float32)<line_sep>w=np.random.randint(0 5 size=wn)<line_sep>p=np.int64(wn)<line_sep>ch2o.generate_testcase(RangeListComp [v w p] subname='list_comp')<block_end> |
# -*- coding: utf-8 -*-
#
# Copyright 2019 SoloKeys Developers
#
# Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
# http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
# http://opensource.org/licenses/MIT>, at your option. This file may not be
# copied, modified, or distributed except according to those terms.
<import_from_stmt>numbers Number<import_from_stmt>threading Event Timer<def_stmt>to_websafe data<block_start>data=data.replace("+" "-")<line_sep>data=data.replace("/" "_")<line_sep>data=data.replace("=" "")<line_sep><return>data<block_end><def_stmt>from_websafe data<block_start>data=data.replace("-" "+")<line_sep>data=data.replace("_" "/")<line_sep><return>data+"=="[:(3<times>len(data))%4]<block_end><class_stmt>Timeout(object)<block_start>"""Utility class for adding a timeout to an event.
:param time_or_event: A number, in seconds, or a threading.Event object.
:ivar event: The Event associated with the Timeout.
:ivar timer: The Timer associated with the Timeout, if any.
"""<def_stmt>__init__ self time_or_event<block_start><if_stmt>isinstance(time_or_event Number)<block_start>self.event=Event()<line_sep>self.timer=Timer(time_or_event self.event.set)<block_end><else_stmt><block_start>self.event=time_or_event<line_sep>self.timer=<none><block_end><block_end><def_stmt>__enter__ self<block_start><if_stmt>self.timer<block_start>self.timer.start()<block_end><return>self.event<block_end><def_stmt>__exit__ self exc_type exc_val exc_tb<block_start><if_stmt>self.timer<block_start>self.timer.cancel()<line_sep>self.timer.join()<block_end><block_end><block_end> |
<import_stmt>sys<import_from_stmt>unittest.mock MagicMock<import_stmt>pytest<import_from_stmt>lightning_transformers.core.nlp HFBackboneConfig HFTransformerDataConfig<import_from_stmt>lightning_transformers.task.nlp.text_classification TextClassificationDataModule TextClassificationTransformer <line_sep>@pytest.mark.skipif(sys.platform<eq>"win32" reason="Currently Windows is not supported")<def_stmt>test_smoke_train_e2e script_runner<block_start>script_runner.hf_train(task="text_classification" dataset="emotion" model="prajjwal1/bert-tiny")<block_end>@pytest.mark.skipif(sys.platform<eq>"win32" reason="Currently Windows is not supported")<def_stmt>test_smoke_train_default_dataset script_runner<block_start>script_runner.hf_train(task="text_classification" model="prajjwal1/bert-tiny" cmd_args=['dataset.cfg.dataset_name="emotion"'])<block_end><def_stmt>test_smoke_predict_e2e script_runner<block_start>y=script_runner.hf_predict(['+x="Lightning rocks!"'] task="text_classification" model="prajjwal1/bert-tiny")<assert_stmt>len(y)<eq>1<assert_stmt>isinstance(y[0]["score"] float)<block_end><def_stmt>test_predict_from_ckpt_path script_runner tmpdir<block_start>script_runner.hf_train(task="text_classification" dataset="emotion" model="prajjwal1/bert-tiny" cmd_args=[f"trainer.default_root_dir={tmpdir}"] fast_dev_run=0 )<line_sep>ckpt_path=tmpdir/"checkpoints"/"epoch=0-step=0.ckpt"<assert_stmt>ckpt_path.exists()<line_sep>y=script_runner.hf_predict(['+x="Lightning rocks!"' f'+checkpoint_path="{ckpt_path}"'] task="text_classification" model="prajjwal1/bert-tiny" )<assert_stmt>len(y)<eq>1<assert_stmt>isinstance(y[0]["score"] float)<block_end><def_stmt>test_model_has_correct_cfg <block_start>model=TextClassificationTransformer(HFBackboneConfig(pretrained_model_name_or_path="bert-base-cased"))<assert_stmt>model.hparams.downstream_model_type<eq>"transformers.AutoModelForSequenceClassification"<block_end><def_stmt>test_datamodule_has_correct_cfg 
<block_start>tokenizer=MagicMock()<line_sep>dm=TextClassificationDataModule(tokenizer)<assert_stmt>type(dm.cfg)<is>HFTransformerDataConfig<assert_stmt>dm.tokenizer<is>tokenizer<block_end> |
# Time: O(n)
# Space: O(1)
<class_stmt>Solution(object)# @param {TreeNode} root
# @param {TreeNode} p
# @param {TreeNode} q
# @return {TreeNode}
<block_start><def_stmt>lowestCommonAncestor self root p q<block_start>s,b=sorted([p.val q.val])<while_stmt><not>s<le>root.val<le>b# Keep searching since root is outside of [s, b].
<block_start>root=root.left<if>s<le>root.val<else>root.right<block_end># s <= root.val <= b.
<return>root<block_end><block_end> |
<import_from_stmt>functools partial<import_from_stmt>itertools groupby<import_from_stmt>couchdbkit ResourceNotFound<import_from_stmt>corehq.apps.domain SHARED_DOMAIN UNKNOWN_DOMAIN<import_from_stmt>corehq.blobs CODES<import_from_stmt>corehq.blobs.mixin BlobHelper BlobMetaRef<import_from_stmt>corehq.blobs.models BlobMigrationState BlobMeta<import_from_stmt>corehq.form_processor.backends.sql.dbaccessors ReindexAccessor<import_from_stmt>corehq.util.doc_processor.sql SqlDocumentProvider<import_stmt>corehq.apps.accounting.models<as>acct<import_stmt>corehq.apps.app_manager.models<as>apps<import_stmt>corehq.apps.hqmedia.models<as>hqmedia<import_from_stmt>corehq.apps.builds.models CommCareBuild<import_from_stmt>corehq.apps.case_importer.tracking.models CaseUploadFileMeta CaseUploadRecord<import_from_stmt>corehq.apps.domain.models Domain<import_from_stmt>corehq.apps.export models<as>exports<import_from_stmt>corehq.apps.ota.models DemoUserRestore<import_from_stmt>corehq.apps.users.models CommCareUser<import_stmt>casexml.apps.case.models<as>cases<import_stmt>couchforms.models<as>xform<class_stmt>MultiDbMigrator(object)<block_start><def_stmt>__init__ self slug couch_types sql_reindexers<block_start>self.slug=slug<line_sep>self.couch_types=couch_types<line_sep>self.sql_reindexers=sql_reindexers<block_end><def_stmt>iter_migrators self<block_start><import_from_stmt>. 
migrate<as>mod<line_sep>NoStateMigrator,SqlMigrator,BlobMetaMigrator=make_migrators(mod)<line_sep>couch_migrator=partial(BlobMetaMigrator blob_helper=couch_blob_helper)<def_stmt>db_key doc_type<block_start><if_stmt>isinstance(doc_type tuple)<block_start>doc_type=doc_type[1]<block_end><return>doc_type.get_db().dbname<block_end><for_stmt>key,types groupby(sorted(self.couch_types key=db_key) key=db_key)<block_start>slug="%s-%s"%(self.slug key)<line_sep><yield>NoStateMigrator(slug list(types) couch_migrator)<block_end><for_stmt>rex self.sql_reindexers<block_start>slug="%s-%s"%(self.slug rex.model_class.__name__)<line_sep><yield>SqlMigrator(slug rex() BlobMetaMigrator)<block_end><block_end><def_stmt>migrate self filename *args **kw<block_start><def_stmt>filen n<block_start><return><none><if>filename<is><none><else>"{}.{}".format(filename n)<block_end>migrated=0<line_sep>skipped=0<for_stmt>n,item enumerate(self.iter_migrators())<block_start>one_migrated,one_skipped=item.migrate(filen(n) *args **kw)<line_sep>migrated<augadd>one_migrated<line_sep>skipped<augadd>one_skipped<line_sep>print("\n")<block_end><if_stmt><not>skipped<block_start>BlobMigrationState.objects.get_or_create(slug=self.slug)[0].save()<block_end><return>migrated skipped<block_end><block_end><def_stmt>make_migrators mod# defer class definitions to work around circular import
<block_start><class_stmt>BlobMetaMigrator(mod.BaseDocMigrator)<block_start>"""Migrate blob metadata to BlobMeta model"""<def_stmt>__init__ self *args **kw<block_start>super(BlobMetaMigrator self).__init__(*args **kw)<line_sep>self.total_blobs=0<block_end><def_stmt>migrate self doc<block_start><if_stmt><not>doc.get("external_blobs")<block_start><return><true><block_end>type_code=self.get_type_code(doc)<line_sep>obj=self.blob_helper(doc self.couchdb type_code)<line_sep>domain=obj.domain<if_stmt>domain<is><none><block_start>self.error(obj {"error":"unknown-domain" "doc_type":obj.doc_type "doc_id":obj._id })<line_sep>domain=UNKNOWN_DOMAIN<block_end><if_stmt>getattr(obj "_attachments" <none>)<block_start>self.error(obj {"error":"ignored-couch-attachments" "doc_type":obj.doc_type "doc_id":obj._id "domain":obj.domain "attachments":obj._attachments })<block_end><with_stmt>BlobMeta.get_cursor_for_partition_value(doc['_id'])<as>cursor<block_start><for_stmt>name,meta obj.external_blobs.items()<block_start><if_stmt>meta.blobmeta_id<is><not><none># blobmeta already saved
<block_start><continue><block_end>cursor.execute("""
INSERT INTO blobs_blobmeta (
domain,
type_code,
parent_id,
name,
key,
content_type,
content_length,
created_on
) VALUES (%s, %s, %s, %s, %s, %s, %s, CLOCK_TIMESTAMP())
ON CONFLICT (key) DO NOTHING
""" params=[domain type_code doc["_id"] name meta.key meta.content_type meta.content_length<or>0 ])<line_sep>self.total_blobs<augadd>1<block_end><block_end><return><true><block_end><def_stmt>error self obj doc<block_start>print("Error: %s %r"%(doc["error"] obj))<line_sep>super(BlobMetaMigrator self).write_backup(doc)<block_end><block_end><class_stmt>NoStateMigrator(mod.Migrator)<block_start><def_stmt>write_migration_completed_state self<block_start><pass><block_end><block_end><class_stmt>SqlMigrator(NoStateMigrator)<block_start><def_stmt>__init__ self slug reindexer doc_migrator_class<block_start>types=[reindexer.model_class]<def_stmt>doc_migrator *args **kw<block_start>kw["blob_helper"]=reindexer.blob_helper<line_sep>kw["get_type_code"]=reindexer.get_type_code<line_sep><return>doc_migrator_class(*args **kw)<block_end>super(SqlMigrator self).__init__(slug types doc_migrator)<line_sep>self.reindexer=reindexer<block_end><def_stmt>get_document_provider self<block_start><return>SqlDocumentProvider(self.iteration_key self.reindexer)<block_end><block_end><return>NoStateMigrator SqlMigrator BlobMetaMigrator<block_end><class_stmt>SqlBlobHelper(object)<block_start>"""Adapt a SQL model object to look like a BlobHelper
This is currently built on the assumtion that the SQL model only
references a single blob, and the blob name is not used.
"""<def_stmt>__init__ self obj key domain reindexer<block_start>self.obj=obj<line_sep>self.domain=domain<line_sep>self.blobs={"":BlobMetaRef(key=key **reindexer.blob_kwargs(obj))}<line_sep>self.external_blobs=self.blobs<block_end><def_stmt>__repr__ self<block_start><return>"<%s %s domain=%s id=%s>"%(type(self).__name__ self.doc_type self.domain self._id )<block_end>@property<def_stmt>_id self# NOTE unlike couch documents, this is different from `doc["_id"]`,
# the value used to set `BlobMeta.parent_id`. This value should
# only be used to identify the record in in case of error.
<block_start><return>self.obj.id<block_end>@property<def_stmt>doc_type self<block_start><return>type(self.obj).__name__<block_end><block_end><def_stmt>sql_blob_helper key_attr<block_start><def_stmt>blob_helper self doc *ignored<block_start>"""This has the same signature as BlobHelper
:returns: Object having parts of BlobHelper interface needed
for blob migrations (currently only used by BlobMetaMigrator).
"""<line_sep>obj=doc["_obj_not_json"]<line_sep>domain=self.get_domain(obj)<line_sep><return>SqlBlobHelper(obj getattr(obj key_attr) domain self)<block_end><return>blob_helper<block_end><class_stmt>PkReindexAccessor(ReindexAccessor)<block_start>@property<def_stmt>id_field self<block_start><return>'id'<block_end><def_stmt>get_doc self *args **kw# only used for retries; BlobMetaMigrator doesn't retry
<block_start><raise>NotImplementedError<block_end><def_stmt>doc_to_json self obj id<block_start><return>{"_id":str(id) "_obj_not_json":obj "external_blobs":<true>}<block_end><block_end><class_stmt>CaseUploadFileMetaReindexAccessor(PkReindexAccessor)<block_start>model_class=CaseUploadFileMeta<line_sep>blob_helper=sql_blob_helper("identifier")<def_stmt>doc_to_json self obj<block_start><return>PkReindexAccessor.doc_to_json(self obj self.get_domain(obj))<block_end>@staticmethod<def_stmt>get_type_code doc<block_start><return>CODES.data_import<block_end><def_stmt>get_domain self obj<block_start><try_stmt><block_start><return>CaseUploadRecord.objects.get(upload_file_meta_id=obj.id).domain<block_end><except_stmt>CaseUploadRecord.DoesNotExist<block_start><return><none><block_end><block_end><def_stmt>blob_kwargs self obj<block_start><return>{"content_length":obj.length}<block_end><block_end><class_stmt>DemoUserRestoreReindexAccessor(PkReindexAccessor)<block_start>model_class=DemoUserRestore<line_sep>blob_helper=sql_blob_helper("restore_blob_id")<def_stmt>doc_to_json self obj<block_start><return>PkReindexAccessor.doc_to_json(self obj obj.demo_user_id<or>"DemoUserRestore")<block_end>@staticmethod<def_stmt>get_type_code doc<block_start><return>CODES.demo_user_restore<block_end><def_stmt>get_domain self obj<block_start><try_stmt><block_start><return>CommCareUser.get(obj.demo_user_id).domain<block_end><except_stmt>ResourceNotFound<block_start><return><none><block_end><block_end><def_stmt>blob_kwargs self obj<block_start><return>{"content_length":obj.content_length "content_type":"text/xml"}<block_end><block_end><def_stmt>couch_blob_helper doc *args **kw<block_start>obj=BlobHelper(doc *args **kw)<line_sep>get_domain=DOMAIN_MAP.get(obj.doc_type)<if_stmt>get_domain<is><not><none><block_start><assert_stmt><not>hasattr(obj "domain") obj<line_sep>obj.domain=get_domain(doc)<block_end><elif_stmt><not>hasattr(obj "domain")<block_start>obj.domain=<none># will trigger "unknown-domain" error
<block_end><return>obj<block_end><def_stmt>get_shared_domain doc<block_start><return>SHARED_DOMAIN<block_end><def_stmt>get_invoice_domain doc<block_start><if_stmt>doc.get("is_wire")<block_start><try_stmt><block_start><return>acct.WireInvoice.objects.get(id=int(doc["invoice_id"])).domain<block_end><except_stmt>acct.WireInvoice.DoesNotExist<block_start><return><none># trigger "unknown-domain" error
<block_end><block_end># customer invoice has no domain
<return>UNKNOWN_DOMAIN<block_end>DOMAIN_MAP={"InvoicePdf":get_invoice_domain "CommCareBuild":get_shared_domain "CommCareAudio":get_shared_domain "CommCareImage":get_shared_domain "CommCareVideo":get_shared_domain "CommCareMultimedia":get_shared_domain }<line_sep>migrate_metadata=<lambda>:MultiDbMigrator("migrate_metadata" couch_types=[apps.Application apps.LinkedApplication apps.RemoteApp ("Application-Deleted" apps.Application) ("RemoteApp-Deleted" apps.RemoteApp) apps.SavedAppBuild CommCareBuild Domain acct.InvoicePdf hqmedia.CommCareAudio hqmedia.CommCareImage hqmedia.CommCareVideo hqmedia.CommCareMultimedia cases.CommCareCase ('CommCareCase-deleted' cases.CommCareCase) ('CommCareCase-Deleted' cases.CommCareCase) ('CommCareCase-Deleted-Deleted' cases.CommCareCase) exports.CaseExportInstance exports.FormExportInstance exports.SMSExportInstance ] sql_reindexers=[CaseUploadFileMetaReindexAccessor DemoUserRestoreReindexAccessor ] )<line_sep> |
# Copyright (c) ZenML GmbH 2021. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Orchestrator for simple AWS VM backend"""<import_stmt>os<import_stmt>time<import_from_stmt>typing Text Dict Any<import_from_stmt>zenml.backends.orchestrator.aws utils<import_from_stmt>zenml.backends.orchestrator OrchestratorBaseBackend<import_from_stmt>zenml.repo Repository<import_from_stmt>zenml.standards standard_keys<as>keys<import_from_stmt>zenml.utils path_utils<import_from_stmt>zenml.constants ZENML_BASE_IMAGE_NAME<import_from_stmt>zenml.logger get_logger<line_sep>logger=get_logger(__name__)<line_sep>EXTRACTED_TAR_DIR_NAME='zenml_working'<line_sep>STAGING_AREA='staging'<line_sep>TAR_PATH_ARG='tar_path'<class_stmt>OrchestratorAWSBackend(OrchestratorBaseBackend)<block_start>"""
Orchestrates pipeline on a AWS EC2 instance
"""<def_stmt>__init__ self iam_role:Text instance_type:Text='t2.micro' instance_image:Text='ami-02e9f4e447e4cda79' zenml_image:Text=<none> region:Text=<none> key_name:Text=<none> security_group:Text=<none> min_count:int=1 max_count:int=1 **kwargs<block_start>"""
Base class for the orchestrator backend on AWS
:param iam_role: the name of the role created in AWS IAM
:param instance_type: the type of the EC2 instance, defaults to
t2.micro
:param instance_image: the image for the EC2 instance, defaults to the
public image: Deep Learning AMI (Amazon Linux 2) Version 39.0
:param zenml_image: refers to the image with ZenML
:param region: the name of the region that AWS is working on
:param key_name: the name of the key to be used whilst creating the
instance on EC2
:param security_group: the name of a selected security group
:param min_count: the minimum number of instances, defaults to 1
:param max_count: the maximum number of instances, defaults to 1
"""<line_sep>self.session=utils.setup_session()<line_sep>self.region=utils.setup_region(region)<line_sep>self.ec2_client=self.session.client('ec2')<line_sep>self.ec2_resource=self.session.resource('ec2')<line_sep>self.instance_type=instance_type<line_sep>self.instance_image=instance_image<line_sep>self.zenml_image=zenml_image<line_sep>self.key_name=key_name<line_sep>self.min_count=min_count<line_sep>self.max_count=max_count<if_stmt>security_group<is><not><none><block_start>self.security_group=[security_group]<block_end><else_stmt><block_start>self.security_group=security_group<block_end>self.iam_role={'Name':iam_role}<if_stmt>zenml_image<is><none><block_start>self.zenml_image=ZENML_BASE_IMAGE_NAME<block_end><else_stmt><block_start>self.zenml_image=zenml_image<block_end>super(OrchestratorBaseBackend self).__init__(instance_type=self.instance_type instance_image=self.instance_image zenml_image=self.zenml_image region=self.region key_name=self.key_name min_count=self.min_count max_count=self.max_count security_group=self.security_group iam_role=self.iam_role **kwargs )<block_end>@staticmethod<def_stmt>make_unique_name name<block_start><return>f'{name}-{time.asctime()}'<block_end><def_stmt>launch_instance self config<block_start>startup=utils.get_startup_script(config self.region self.zenml_image)<line_sep>args={'ImageId':self.instance_image 'InstanceType':self.instance_type 'IamInstanceProfile':self.iam_role 'MaxCount':self.max_count 'MinCount':self.min_count 'UserData':startup}<if_stmt>self.security_group<block_start>args['SecurityGroups']=self.security_group<block_end><if_stmt>self.key_name<block_start>args['KeyName']=self.key_name<block_end><return>self.ec2_resource.create_instances(**args)<block_end><def_stmt>run self config:[Dict Any]# Extract the paths to create the tar
<block_start>logger.info('Orchestrating pipeline on AWS..')<line_sep>repo:Repository=Repository.get_instance()<line_sep>repo_path=repo.path<line_sep>config_dir=repo.zenml_config.config_dir<line_sep>tar_file_name=f'{EXTRACTED_TAR_DIR_NAME}_{str(int(time.time()))}.tar.gz'<line_sep>path_to_tar=os.path.join(config_dir tar_file_name)<line_sep># Create tarfile but exclude .zenml folder if exists
path_utils.create_tarfile(repo_path path_to_tar)<line_sep>logger.info(f'Created tar of current repository at: {path_to_tar}')<line_sep># Upload tar to artifact store
store_path=config[keys.GlobalKeys.ARTIFACT_STORE]<line_sep>store_staging_area=os.path.join(store_path STAGING_AREA)<line_sep>store_path_to_tar=os.path.join(store_staging_area tar_file_name)<line_sep>path_utils.copy(path_to_tar store_path_to_tar)<line_sep>logger.info(f'Copied tar to artifact store at: {store_path_to_tar}')<line_sep># Remove tar
path_utils.rm_dir(path_to_tar)<line_sep>logger.info(f'Removed tar at: {path_to_tar}')<line_sep># Append path of tar in config orchestrator utils
config[keys.GlobalKeys.BACKEND][keys.BackendKeys.ARGS][TAR_PATH_ARG]=store_path_to_tar<line_sep># Launch the instance
self.launch_instance(config)<block_end><block_end> |
"""This module allows adding a semantic hub layout to NDEx CX networkx. This
is useful when a network is centered around a single hub node. The
layout generated here allocates different classes of nodes into segments
around the hub and then gives them random coordinates within that segment."""<import_stmt>json<import_stmt>math<import_stmt>random<import_stmt>networkx<import_from_stmt>collections defaultdict<def_stmt>get_aspect cx aspect_name<block_start>"""Return an aspect given the name of the aspect"""<if_stmt>isinstance(cx dict)<block_start><return>cx.get(aspect_name)<block_end><for_stmt>entry cx<block_start><if_stmt>list(entry.keys())[0]<eq>aspect_name<block_start><return>entry[aspect_name]<block_end><block_end><block_end><def_stmt>edge_type_to_class edge_type<block_start>"""Return the edge class for layout purposes based on the edge type"""<line_sep>edge_type=edge_type.lower()<if_stmt>'amount'<in>edge_type<block_start><return>'amount'<block_end><if_stmt>edge_type<in>('activation' 'inhibition')<block_start><return>'activity'<block_end><if_stmt>edge_type<eq>'complex'<block_start><return>'complex'<block_end><else_stmt><block_start><return>'modification'<block_end><block_end><def_stmt>classify_nodes graph hub:int<block_start>"""Classify each node based on its type and relationship to the hub."""<line_sep>node_stats=defaultdict(<lambda>:defaultdict(list))<for_stmt>u,v,data graph.edges(data=<true>)# This means the node is downstream of the hub
<block_start><if_stmt>hub<eq>u<block_start>h,o=u v<if_stmt>data['i']<ne>'complex'<block_start>node_stats[o]['up'].append(-1)<block_end><else_stmt><block_start>node_stats[o]['up'].append(0)<block_end><block_end># This means the node is upstream of the hub
<elif_stmt>hub<eq>v<block_start>h,o=v u<if_stmt>data['i']<ne>'complex'<block_start>node_stats[o]['up'].append(1)<block_end><else_stmt><block_start>node_stats[o]['up'].append(0)<block_end><block_end><else_stmt><block_start><continue><block_end>node_stats[o]['interaction'].append(edge_type_to_class(data['i']))<block_end>node_classes={}<for_stmt>node_id,stats node_stats.items()<block_start>up=max(set(stats['up']) key=stats['up'].count)<line_sep># Special case: if up is not 0 then we should exclude complexes
# from the edge_type states so that we don't end up with
# (-1, complex, ...) or (1, complex, ...) as the node class
interactions=[i<for>i stats['interaction']<if><not>(up<ne>0<and>i<eq>'complex')]<line_sep>edge_type=max(set(interactions) key=interactions.count)<line_sep>node_type=graph.nodes[node_id]['type']<line_sep>node_classes[node_id]=(up edge_type node_type)<block_end><return>node_classes<block_end><def_stmt>get_attributes aspect id<block_start>"""Return the attributes pointing to a given ID in a given aspect."""<line_sep>attributes={}<for_stmt>entry aspect<block_start><if_stmt>entry['po']<eq>id<block_start>attributes[entry['n']]=entry['v']<block_end><block_end><return>attributes<block_end><def_stmt>cx_to_networkx cx<block_start>"""Return a MultiDiGraph representation of a CX network."""<line_sep>graph=networkx.MultiDiGraph()<for_stmt>node_entry get_aspect(cx 'nodes')<block_start>id=node_entry['@id']<line_sep>attrs=get_attributes(get_aspect(cx 'nodeAttributes') id)<line_sep>attrs['n']=node_entry['n']<line_sep>graph.add_node(id **attrs)<block_end><for_stmt>edge_entry get_aspect(cx 'edges')<block_start>id=edge_entry['@id']<line_sep>attrs=get_attributes(get_aspect(cx 'edgeAttributes') id)<line_sep>attrs['i']=edge_entry['i']<line_sep>graph.add_edge(edge_entry['s'] edge_entry['t'] key=id **attrs)<block_end><return>graph<block_end><def_stmt>get_quadrant_from_class node_class<block_start>"""Return the ID of the segment of the plane corresponding to a class."""<line_sep>up,edge_type,_=node_class<if_stmt>up<eq>0<block_start><return>0<if>random.random()<l>0.5<else>7<block_end>mappings={(-1 'modification'):1 (-1 'amount'):2 (-1 'activity'):3 (1 'activity'):4 (1 'amount'):5 (1 'modification'):6}<line_sep><return>mappings[(up edge_type)]<block_end><def_stmt>get_coordinates node_class<block_start>"""Generate coordinates for a node in a given 
class."""<line_sep>quadrant_size=(2<times>math.pi/8.0)<line_sep>quadrant=get_quadrant_from_class(node_class)<line_sep>begin_angle=quadrant_size<times>quadrant<line_sep>r=200+800<times>random.random()<line_sep>alpha=begin_angle+random.random()<times>quadrant_size<line_sep>x=r<times>math.cos(alpha)<line_sep>y=r<times>math.sin(alpha)<line_sep><return>x y<block_end><def_stmt>get_layout_aspect hub node_classes<block_start>"""Get the full layout aspect with coordinates for each node."""<line_sep>aspect=[{'node':hub 'x':0.0 'y':0.0}]<for_stmt>node,node_class node_classes.items()<block_start><if_stmt>node<eq>hub<block_start><continue><block_end>x,y=get_coordinates(node_class)<line_sep>aspect.append({'node':node 'x':x 'y':y})<block_end><return>aspect<block_end><def_stmt>get_node_by_name graph name<block_start>"""Return a node ID given its name."""<for_stmt>id,attrs graph.nodes(data=<true>)<block_start><if_stmt>attrs['n']<eq>name<block_start><return>id<block_end><block_end><block_end><def_stmt>add_semantic_hub_layout cx hub:str<block_start>"""Attach a layout aspect to a CX network given a hub node."""<line_sep>graph=cx_to_networkx(cx)<line_sep>hub_node=get_node_by_name(graph hub)<line_sep>node_classes=classify_nodes(graph hub_node)<line_sep>layout_aspect=get_layout_aspect(hub_node node_classes)<line_sep>cx['cartesianLayout']=layout_aspect<block_end><if_stmt>__name__<eq>'__main__'<block_start><with_stmt>open('CDK13.cx' 'r')<as>fh<block_start>cx=json.load(fh)<block_end>add_semantic_hub_layout(cx 'CDK13')<block_end> |
# -*- coding: utf-8 -*-
<import_stmt>ctypes<import_stmt>multiprocessing<import_stmt>os<import_stmt>subprocess<import_stmt>sys<import_from_stmt>setuptools Extension find_packages setup<import_from_stmt>setuptools.command.build_ext build_ext<class_stmt>CMakeExtension(Extension)<block_start><def_stmt>__init__ self name sourcedir=""<block_start>Extension.__init__(self name sources=[])<line_sep>self.sourcedir=os.path.abspath(sourcedir)<block_end><block_end><class_stmt>CMakeBuild(build_ext)<block_start><def_stmt>build_extension self ext<block_start>extdir=os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))<line_sep># required for auto-detection of auxiliary "native" libs
<if_stmt><not>extdir.endswith(os.path.sep)<block_start>extdir<augadd>os.path.sep<block_end>debug=int(os.environ.get("DEBUG" 0))<if>self.debug<is><none><else>self.debug<line_sep>cfg="Debug"<if>debug<else>"Release"<line_sep># CMake lets you override the generator - we need to check this.
# Can be set with Conda-Build, for example.
cmake_generator=os.environ.get("CMAKE_GENERATOR" "")<line_sep># Set Python_EXECUTABLE instead if you use PYBIND11_FINDPYTHON
# EXAMPLE_VERSION_INFO shows you how to pass a value into the C++ code
# from Python.
cmake_args=[f"-DBUILD_PYTHON_BINDINGS=ON" f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}" f"-DPYTHON_EXECUTABLE={sys.executable}" f"-DCMAKE_BUILD_TYPE={cfg}" # not used on MSVC, but no harm
]<line_sep>build_args=[]<line_sep># Adding CMake arguments set as environment variable
# (needed e.g. to build for ARM OSx on conda-forge)
<if_stmt>"CMAKE_ARGS"<in>os.environ<block_start>cmake_args<augadd>[item<for>item os.environ["CMAKE_ARGS"].split(" ")<if>item]<block_end># Single config generators are handled "normally"
single_config=any(x<in>cmake_generator<for>x {"NMake" "Ninja"})<line_sep># Multi-config generators have a different way to specify configs
<if_stmt><not>single_config<block_start>cmake_args<augadd>[f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{cfg.upper()}={extdir}"]<line_sep>build_args<augadd>["--config" cfg]<block_end># Set CMAKE_BUILD_PARALLEL_LEVEL to control the parallel build level across all generators.
<if_stmt>"CMAKE_BUILD_PARALLEL_LEVEL"<not><in>os.environ# passing --global-option="build_ext" --global-option="-j8" to pip seems not to work,
# and launches 2 time the entire build process. Therefore if nothing has specified the
# parallel jobs so far, we are going to hack it here. Not the best design, but pip just
# doesn't seem to care about this flag, CMake 3.12+ only.
<block_start>self.parallel=multiprocessing.cpu_count()<if><not>self.parallel<else>self.parallel<line_sep>build_args<augadd>[f"-j{self.parallel}"]<block_end><if_stmt><not>os.path.exists(self.build_temp)<block_start>os.makedirs(self.build_temp)<block_end>subprocess.check_call(["cmake" ext.sourcedir]+cmake_args cwd=self.build_temp)<line_sep>subprocess.check_call(["cmake" "--build" "."]+build_args cwd=self.build_temp)<block_end><block_end>setup(packages=find_packages("src") package_dir={"":"src"} ext_modules=[CMakeExtension("vdbfusion.pybind.vdbfusion_pybind")] cmdclass={"build_ext":CMakeBuild} )<line_sep> |
<import_stmt>cv2 time<line_sep>#TODO: fix ipcam
#import urllib2, base64
<import_stmt>numpy<as>np<class_stmt>ipCamera(object)<block_start><def_stmt>__init__ self url user=<none> password=<none><block_start>self.url=url<line_sep>auth_encoded=base64.encodestring('%s:%s'%(user password))[:-1]<line_sep>self.req=urllib2.Request(self.url)<line_sep>self.req.add_header('Authorization' 'Basic %s'%auth_encoded)<block_end><def_stmt>get_frame self<block_start>response=urllib2.urlopen(self.req)<line_sep>img_array=np.asarray(bytearray(response.read()) dtype=np.uint8)<line_sep>frame=cv2.imdecode(img_array 1)<line_sep><return>frame<block_end><block_end><class_stmt>Camera(object)<block_start><def_stmt>__init__ self camera=0<block_start>self.cam=cv2.VideoCapture(camera)<line_sep>self.valid=<false><try_stmt><block_start>resp=self.cam.read()<line_sep>self.shape=resp[1].shape<line_sep>self.valid=<true><block_end><except_stmt><block_start>self.shape=<none><block_end><block_end><def_stmt>get_frame self<block_start><if_stmt>self.valid<block_start>_,frame=self.cam.read()<block_end><else_stmt><block_start>frame=np.ones((480 640 3) dtype=np.uint8)<line_sep>col=(0 256 256)<line_sep>cv2.putText(frame "(Error: Camera not accessible)" (65 220) cv2.FONT_HERSHEY_PLAIN 2 col)<block_end><return>frame<block_end><def_stmt>release self<block_start>self.cam.release()<block_end><block_end> |
# These constants can be set by the external UI-layer process, don't change them manually
is_ui_process=<false><line_sep>execution_id=''<line_sep>task_id=''<line_sep>executable_name='insomniac'<line_sep>do_location_permission_dialog_checks=<true># no need in these checks if location permission is denied beforehand
<def_stmt>callback profile_name<block_start><pass><block_end>hardban_detected_callback=callback<line_sep>softban_detected_callback=callback<def_stmt>is_insomniac <block_start><return>execution_id<eq>''<block_end> |
defaults='''
const vec4 light = vec4(4.0, 3.0, 10.0, 0.0);
const vec4 eye = vec4(4.0, 3.0, 2.0, 0.0);
const mat4 mvp = mat4(
-0.8147971034049988, -0.7172931432723999, -0.7429299354553223, -0.7427813410758972,
1.0863960981369019, -0.5379698276519775, -0.5571974515914917, -0.5570859909057617,
0.0, 2.2415409088134766, -0.37146496772766113, -0.3713906705379486,
0.0, 0.0, 5.186222076416016, 5.385164737701416
);
'''<line_sep> |
# Copyright (c) 2017 OpenAI (http://openai.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
<import_stmt>numpy<as>np<import_stmt>scipy.signal<def_stmt>discount x gamma<block_start>"""
computes discounted sums along 0th dimension of x.
inputs
------
x: ndarray
gamma: float
outputs
-------
y: ndarray with same shape as x, satisfying
y[t] = x[t] + gamma*x[t+1] + gamma^2*x[t+2] + ... + gamma^k x[t+k],
where k = len(x) - t - 1
"""<assert_stmt>x.ndim<ge>1<line_sep><return>scipy.signal.lfilter([1] [1 -gamma] x[::-1] axis=0)[::-1]<block_end><def_stmt>explained_variance ypred y<block_start>"""
Computes fraction of variance that ypred explains about y.
Returns 1 - Var[y-ypred] / Var[y]
interpretation:
ev=0 => might as well have predicted zero
ev=1 => perfect prediction
ev<0 => worse than just predicting zero
"""<assert_stmt>y.ndim<eq>1<and>ypred.ndim<eq>1<line_sep>vary=np.var(y)<line_sep><return>np.nan<if>vary<eq>0<else>1-np.var(y-ypred)/vary<block_end><def_stmt>explained_variance_2d ypred y<block_start><assert_stmt>y.ndim<eq>2<and>ypred.ndim<eq>2<line_sep>vary=np.var(y axis=0)<line_sep>out=1-np.var(y-ypred)/vary<line_sep>out[vary<l>1e-10]=0<line_sep><return>out<block_end><def_stmt>ncc ypred y<block_start><return>np.corrcoef(ypred y)[1 0]<block_end><def_stmt>flatten_arrays arrs<block_start><return>np.concatenate([arr.flat<for>arr arrs])<block_end><def_stmt>unflatten_vector vec shapes<block_start>i=0<line_sep>arrs=[]<for_stmt>shape shapes<block_start>size=np.prod(shape)<line_sep>arr=vec[i:i+size].reshape(shape)<line_sep>arrs.append(arr)<line_sep>i<augadd>size<block_end><return>arrs<block_end><def_stmt>discount_with_boundaries X New gamma<block_start>"""
X: 2d array of floats, time x features
New: 2d array of bools, indicating when a new episode has started
"""<line_sep>Y=np.zeros_like(X)<line_sep>T=X.shape[0]<line_sep>Y[T-1]=X[T-1]<for_stmt>t range(T-2 -1 -1)<block_start>Y[t]=X[t]+gamma<times>Y[t+1]<times>(1-New[t+1])<block_end><return>Y<block_end><def_stmt>test_discount_with_boundaries <block_start>gamma=0.9<line_sep>x=np.array([1.0 2.0 3.0 4.0] 'float32')<line_sep>starts=[1.0 0.0 0.0 1.0]<line_sep>y=discount_with_boundaries(x starts gamma)<assert_stmt>np.allclose(y [1+gamma<times>2+gamma<power>2<times>3 2+gamma<times>3 3 4])<block_end> |
# -*- coding: utf-8 -*-
<import_stmt>json<import_stmt>re<import_stmt>scrapy<import_from_stmt>locations.items GeojsonPointItem<import_from_stmt>locations.hours OpeningHours<class_stmt>BayshoreHealthcareSpider(scrapy.Spider)<block_start>name="bayshore_healthcare"<line_sep>item_attributes={'brand':"Bayshore Healthcare"}<line_sep>allowed_domains=['bayshore.ca']<def_stmt>start_requests self<block_start>url='https://www.bayshore.ca/wp-admin/admin-ajax.php?action=location_finder&language=en'<line_sep>headers={'origin':'https://www.bayshore.ca' 'Referer':'https://www.bayshore.ca/locations/'}<line_sep>formdata={'search_type':'location' }<line_sep><yield>scrapy.http.FormRequest(url self.parse method='POST' headers=headers formdata=formdata)<block_end><def_stmt>parse self response<block_start>stores=json.loads(response.body)<for_stmt>store stores["result"]["entries"]<block_start>full_addr=store["address"]<line_sep>addr=re.search(r'^(.*?)<' full_addr).groups()[0]<line_sep>city=re.search(r'>(.*?),' full_addr).groups()[0]<line_sep>state=re.search(r',\s([A-Z]{2})\s' full_addr).groups()[0]<line_sep>postal=re.search(r',\s[A-Z]{2}\s(.*)$' full_addr).groups()[0]<line_sep>coords=store["latlng"].split(",")<line_sep>lat=coords[0]<line_sep>lng=coords[1]<line_sep>properties={'ref':store["id"] 'name':store["name"] 'addr_full':addr 'city':city 'state':state 'postcode':postal 'country':"CA" 'lat':lat 'lon':lng 'phone':store["local_telephone"] 'website':"https://www.bayshore.ca"+store["url"]}<line_sep><yield>GeojsonPointItem(**properties)<block_end><block_end><block_end> |
"""HF-SEF dataset."""<import_from_stmt>.hf_sef data_path<line_sep> |
<import_from_stmt>interact *<def_stmt>eva_model <block_start>parser=ArgumentParser()<line_sep>parser.add_argument('--gpt2' action='store_true' help="use gpt2")<line_sep>parser.add_argument("--model_checkpoint" type=str default="./models/" help="Path, url or short name of the model")<line_sep>parser.add_argument("--max_history" type=int default=2 help="Number of previous utterances to keep in history")<line_sep>parser.add_argument("--device" type=str default="cuda"<if>torch.cuda.is_available()<else>"cpu" help="Device (cuda or cpu)")<line_sep>parser.add_argument("--no_sample" action='store_true' help="Set to use greedy decoding instead of sampling")<line_sep>parser.add_argument("--max_length" type=int default=30 help="Maximum length of the output utterances")<line_sep>parser.add_argument("--min_length" type=int default=1 help="Minimum length of the output utterances")<line_sep>parser.add_argument("--seed" type=int default=42 help="Seed")<line_sep>parser.add_argument("--temperature" type=int default=0.7 help="Sampling softmax temperature")<line_sep>parser.add_argument("--top_k" type=int default=0 help="Filter top-k tokens before sampling (<=0: no filtering)")<line_sep>parser.add_argument("--top_p" type=float default=0.9 help="Nucleus filtering (top-p) before sampling (<=0.0: no filtering)")<line_sep>args=parser.parse_args()<line_sep>logging.basicConfig(level=logging.INFO)<line_sep>logger=logging.getLogger(__file__)<line_sep>logger.info(pformat(args))<if_stmt>args.model_checkpoint<eq>""<block_start>logging.error("Checkpoint needed!")<line_sep><return><block_end>random.seed(args.seed)<line_sep>torch.random.manual_seed(args.seed)<line_sep>torch.cuda.manual_seed(args.seed)<line_sep>logger.info("Get pretrained model and tokenizer")<line_sep>tokenizer_class=BertTokenizer<line_sep>model_class=OpenAIGPTLMHeadModel<if><not>args.gpt2<else>GPT2LMHeadModel<line_sep>tokenizer=tokenizer_class.from_pretrained(args.model_checkpoint 
do_lower_case=<true>)<line_sep>model=model_class.from_pretrained(args.model_checkpoint)<line_sep>model.to(args.device)<line_sep>model.eval()<line_sep><return>model tokenizer args<block_end>history=[]<line_sep>model,tokenizer,args=eva_model()<def_stmt>chat_response raw_text<block_start><global>history<def_stmt>tokenize obj<block_start><if_stmt>isinstance(obj str)<block_start><return>tokenizer.convert_tokens_to_ids(tokenizer.tokenize(obj))<block_end><if_stmt>isinstance(obj dict)<block_start><return>dict((n tokenize(o))<for>n,o obj.items())<block_end><return>list(tokenize(o)<for>o obj)<block_end>raw_text=" ".join(list(raw_text.replace(" " "")))<line_sep>history.append(tokenize(raw_text))<with_stmt>torch.no_grad()<block_start>out_ids=sample_sequence(history tokenizer model args)<block_end>history.append(out_ids)<line_sep>history=history[-(2<times>args.max_history+1):]<line_sep>out_text=tokenizer.decode(out_ids skip_special_tokens=<true>)<line_sep>#print(out_text)
<return>out_text<block_end>print(0)<line_sep> |
<import_stmt>pytest<import_stmt>tensorflow<as>tf<import_from_stmt>deepctr.estimator DeepFMEstimator<import_from_stmt>deepctr.models DeepFM<import_from_stmt>..utils check_model get_test_data SAMPLE_SIZE get_test_data_estimator check_estimator Estimator_TEST_TF1<line_sep>@pytest.mark.parametrize('hidden_size,sparse_feature_num' [((2 ) 1) #
((3 ) 2)]# (True, (32,), 3), (False, (32,), 1)
)<def_stmt>test_DeepFM hidden_size sparse_feature_num<block_start>model_name="DeepFM"<line_sep>sample_size=SAMPLE_SIZE<line_sep>x,y,feature_columns=get_test_data(sample_size sparse_feature_num=sparse_feature_num dense_feature_num=sparse_feature_num)<line_sep>model=DeepFM(feature_columns feature_columns dnn_hidden_units=hidden_size dnn_dropout=0.5)<line_sep>check_model(model model_name x y)<block_end>@pytest.mark.parametrize('hidden_size,sparse_feature_num' [((3 ) 2)]# (True, (32,), 3), (False, (32,), 1)
)<def_stmt>test_DeepFMEstimator hidden_size sparse_feature_num<block_start><if_stmt><not>Estimator_TEST_TF1<and>tf.__version__<l>"2.2.0"<block_start><return><block_end>sample_size=SAMPLE_SIZE<line_sep>linear_feature_columns,dnn_feature_columns,input_fn=get_test_data_estimator(sample_size sparse_feature_num=sparse_feature_num dense_feature_num=sparse_feature_num classification=<false>)<line_sep>model=DeepFMEstimator(linear_feature_columns dnn_feature_columns dnn_hidden_units=hidden_size dnn_dropout=0.5 task="regression")<line_sep>check_estimator(model input_fn)<block_end><if_stmt>__name__<eq>"__main__"<block_start><pass><block_end> |
# -*- coding: utf-8 -*-
<import_from_stmt>datetime timedelta<import_from_stmt>.mixins.interval WordableIntervalMixin <import_from_stmt>.constants SECONDS_PER_DAY SECONDS_PER_HOUR SECONDS_PER_MINUTE <def_stmt>_divide_and_round a b<block_start>"""divide a by b and round result to the nearest integer
When the ratio is exactly half-way between two integers,
the even integer is returned.
"""<line_sep># Based on the reference implementation for divmod_near
# in Objects/longobject.c.
q,r=divmod(a b)<line_sep># round up if either r / b > 0.5, or r / b == 0.5 and q is odd.
# The expression r / b > 0.5 is equivalent to 2 * r > b if b is
# positive, 2 * r < b if b negative.
r<augmul>2<line_sep>greater_than_half=r<g>b<if>b<g>0<else>r<l>b<if_stmt>greater_than_half<or>r<eq>b<and>q%2<eq>1<block_start>q<augadd>1<block_end><return>q<block_end><class_stmt>BaseInterval(timedelta)<block_start>"""
Base class for all inherited interval classes.
"""<line_sep>_y=<none><line_sep>_m=<none><line_sep>_w=<none><line_sep>_d=<none><line_sep>_h=<none><line_sep>_i=<none><line_sep>_s=<none><line_sep>_invert=<none><def_stmt>__new__ cls days=0 seconds=0 microseconds=0 milliseconds=0 minutes=0 hours=0 weeks=0<block_start>self=timedelta.__new__(cls days seconds microseconds milliseconds minutes hours weeks)<line_sep># Intuitive normalization
total=self.total_seconds()<line_sep>m=1<if_stmt>total<l>0<block_start>m=-1<block_end>self._microseconds=round(total%m<times>1e6)<line_sep>self._seconds=abs(int(total))%SECONDS_PER_DAY<times>m<line_sep>self._days=abs(int(total))<floordiv>SECONDS_PER_DAY<times>m<line_sep><return>self<block_end><def_stmt>total_minutes self<block_start><return>self.total_seconds()/SECONDS_PER_MINUTE<block_end><def_stmt>total_hours self<block_start><return>self.total_seconds()/SECONDS_PER_HOUR<block_end><def_stmt>total_days self<block_start><return>self.total_seconds()/SECONDS_PER_DAY<block_end><def_stmt>total_weeks self<block_start><return>self.total_days()/7<block_end>@property<def_stmt>weeks self<block_start><return>abs(self.days)<floordiv>7<times>self._sign(self._days)<block_end>@property<def_stmt>days self<block_start><return>self._days<block_end>@property<def_stmt>remaining_days self<block_start><return>abs(self._days)%7<times>self._sign(self._days)<block_end>@property<def_stmt>hours self<block_start><if_stmt>self._h<is><none><block_start>seconds=self._seconds<line_sep>self._h=0<if_stmt>abs(seconds)<ge>3600<block_start>self._h=(abs(seconds)<floordiv>3600%24)<times>self._sign(seconds)<block_end><block_end><return>self._h<block_end>@property<def_stmt>minutes self<block_start><if_stmt>self._i<is><none><block_start>seconds=self._seconds<line_sep>self._i=0<if_stmt>abs(seconds)<ge>60<block_start>self._i=(abs(seconds)<floordiv>60%60)<times>self._sign(seconds)<block_end><block_end><return>self._i<block_end>@property<def_stmt>seconds self<block_start><return>self._seconds<block_end>@property<def_stmt>remaining_seconds self<block_start><if_stmt>self._s<is><none><block_start>self._s=self._seconds<line_sep>self._s=abs(self._s)%60<times>self._sign(self._s)<block_end><return>self._s<block_end>@property<def_stmt>microseconds self<block_start><return>self._microseconds<block_end>@property<def_stmt>invert 
self<block_start><if_stmt>self._invert<is><none><block_start>self._invert=self.total_seconds()<l>0<block_end><return>self._invert<block_end><def_stmt>in_weeks self<block_start><return>int(self.total_weeks())<block_end><def_stmt>in_days self<block_start><return>int(self.total_days())<block_end><def_stmt>in_hours self<block_start><return>int(self.total_hours())<block_end><def_stmt>in_minutes self<block_start><return>int(self.total_minutes())<block_end><def_stmt>in_seconds self<block_start><return>int(self.total_seconds())<block_end><def_stmt>_sign self value<block_start><if_stmt>value<l>0<block_start><return>-1<block_end><return>1<block_end><def_stmt>as_timedelta self<block_start>"""
Return the interval as a native timedelta.
:rtype: timedelta
"""<line_sep><return>timedelta(seconds=self.total_seconds())<block_end><block_end><class_stmt>Interval(WordableIntervalMixin BaseInterval)<block_start>"""
Replacement for the standard timedelta class.
Provides several improvements over the base class.
"""<line_sep>@classmethod<def_stmt>instance cls delta<block_start>"""
Creates a Interval from a timedelta
:type delta: timedelta
:rtype: Interval
"""<line_sep><return>cls(days=delta.days seconds=delta.seconds microseconds=delta.microseconds)<block_end><def_stmt>__add__ self other<block_start><if_stmt>isinstance(other timedelta)<block_start><return>self.__class__(seconds=self.total_seconds()+other.total_seconds())<block_end><return>NotImplemented<block_end>__radd__=__add__<def_stmt>__sub__ self other<block_start><if_stmt>isinstance(other timedelta)<block_start><return>self.__class__(seconds=self.total_seconds()-other.total_seconds())<block_end><return>NotImplemented<block_end><def_stmt>__neg__ self<block_start><return>self.__class__(seconds=-self.total_seconds())<block_end><def_stmt>_to_microseconds self<block_start><return>((self._days<times>(24<times>3600)+self._seconds)<times>1000000+self._microseconds)<block_end><def_stmt>__mul__ self other<block_start><if_stmt>isinstance(other int)<block_start><return>self.__class__(seconds=self.total_seconds()<times>other)<block_end><if_stmt>isinstance(other float)<block_start>usec=self._to_microseconds()<line_sep>a,b=other.as_integer_ratio()<line_sep><return>self.__class__(0 0 _divide_and_round(usec<times>a b))<block_end><return>NotImplemented<block_end>__rmul__=__mul__<def_stmt>__floordiv__ self other<block_start><if_stmt><not>isinstance(other (int timedelta))<block_start><return>NotImplemented<block_end>usec=self._to_microseconds()<if_stmt>isinstance(other timedelta)<block_start><return>usec<floordiv>other._to_microseconds()<block_end><if_stmt>isinstance(other int)<block_start><return>self.__class__(0 0 usec<floordiv>other)<block_end><block_end><def_stmt>__truediv__ self other<block_start><if_stmt><not>isinstance(other (int float timedelta))<block_start><return>NotImplemented<block_end>usec=self._to_microseconds()<if_stmt>isinstance(other timedelta)<block_start><return>usec/other._to_microseconds()<block_end><if_stmt>isinstance(other int)<block_start><return>self.__class__(0 0 _divide_and_round(usec other))<block_end><if_stmt>isinstance(other 
float)<block_start>a,b=other.as_integer_ratio()<line_sep><return>self.__class__(0 0 _divide_and_round(b<times>usec a))<block_end><block_end>__div__=__floordiv__<def_stmt>__mod__ self other<block_start><if_stmt>isinstance(other timedelta)<block_start>r=self._to_microseconds()%other._to_microseconds()<line_sep><return>self.__class__(0 0 r)<block_end><return>NotImplemented<block_end><def_stmt>__divmod__ self other<block_start><if_stmt>isinstance(other timedelta)<block_start>q,r=divmod(self._to_microseconds() other._to_microseconds())<line_sep><return>q self.__class__(0 0 r)<block_end><return>NotImplemented<block_end><block_end>Interval.min=Interval(-999999999)<line_sep>Interval.max=Interval(days=999999999 hours=23 minutes=59 seconds=59 microseconds=999999)<line_sep>Interval.resolution=Interval(microseconds=1)<class_stmt>AbsoluteInterval(Interval)<block_start>"""
Interval that expresses a time difference in absolute values.
"""<def_stmt>__new__ cls days=0 seconds=0 microseconds=0 milliseconds=0 minutes=0 hours=0 weeks=0<block_start>self=timedelta.__new__(cls days seconds microseconds milliseconds minutes hours weeks)<line_sep># We need to compute the total_seconds() value
# on a native timedelta object
delta=timedelta(days seconds microseconds milliseconds minutes hours weeks)<line_sep># Intuitive normalization
self._total=delta.total_seconds()<line_sep>total=abs(self._total)<line_sep>self._microseconds=round(total%1<times>1e6)<line_sep>self._seconds=int(total)%SECONDS_PER_DAY<line_sep>self._days=int(total)<floordiv>SECONDS_PER_DAY<line_sep><return>self<block_end><def_stmt>total_seconds self<block_start><return>abs(self._total)<block_end>@property<def_stmt>invert self<block_start><if_stmt>self._invert<is><none><block_start>self._invert=self._total<l>0<block_end><return>self._invert<block_end><block_end> |
<import_stmt>os<import_stmt>pytest<import_stmt>taichi<as>ti<import_from_stmt>taichi approx<def_stmt>run_mpm88_test <block_start>dim=2<line_sep>N=64<line_sep>n_particles=N<times>N<line_sep>n_grid=128<line_sep>dx=1/n_grid<line_sep>inv_dx=1/dx<line_sep>dt=2.0e-4<line_sep>p_vol=(dx<times>0.5)<power>2<line_sep>p_rho=1<line_sep>p_mass=p_vol<times>p_rho<line_sep>E=400<line_sep>x=ti.Vector.field(dim dtype=ti.f32 shape=n_particles)<line_sep>v=ti.Vector.field(dim dtype=ti.f32 shape=n_particles)<line_sep>C=ti.Matrix.field(dim dim dtype=ti.f32 shape=n_particles)<line_sep>J=ti.field(dtype=ti.f32 shape=n_particles)<line_sep>grid_v=ti.Vector.field(dim dtype=ti.f32 shape=(n_grid n_grid))<line_sep>grid_m=ti.field(dtype=ti.f32 shape=(n_grid n_grid))<line_sep>@ti.kernel<def_stmt>substep <block_start><for_stmt>p x<block_start>base=(x[p]<times>inv_dx-0.5).cast(int)<line_sep>fx=x[p]<times>inv_dx-base.cast(float)<line_sep>w=[0.5<times>(1.5-fx)<power>2 0.75-(fx-1)<power>2 0.5<times>(fx-0.5)<power>2]<line_sep>stress=-dt<times>p_vol<times>(J[p]-1)<times>4<times>inv_dx<times>inv_dx<times>E<line_sep>affine=ti.Matrix([[stress 0] [0 stress]])+p_mass<times>C[p]<for_stmt>i ti.static(range(3))<block_start><for_stmt>j ti.static(range(3))<block_start>offset=ti.Vector([i j])<line_sep>dpos=(offset.cast(float)-fx)<times>dx<line_sep>weight=w[i][0]<times>w[j][1]<line_sep>grid_v[base+offset].atomic_add(weight<times>(p_mass<times>v[p]+affine@dpos))<line_sep>grid_m[base+offset].atomic_add(weight<times>p_mass)<block_end><block_end><block_end><for_stmt>i,j grid_m<block_start><if_stmt>grid_m[i j]<g>0<block_start>bound=3<line_sep>inv_m=1/grid_m[i j]<line_sep>grid_v[i j]=inv_m<times>grid_v[i j]<line_sep>grid_v[i j][1]<augsub>dt<times>9.8<if_stmt>i<l>bound<and>grid_v[i j][0]<l>0<block_start>grid_v[i j][0]=0<block_end><if_stmt>i<g>n_grid-bound<and>grid_v[i j][0]<g>0<block_start>grid_v[i j][0]=0<block_end><if_stmt>j<l>bound<and>grid_v[i j][1]<l>0<block_start>grid_v[i 
j][1]=0<block_end><if_stmt>j<g>n_grid-bound<and>grid_v[i j][1]<g>0<block_start>grid_v[i j][1]=0<block_end><block_end><block_end><for_stmt>p x<block_start>base=(x[p]<times>inv_dx-0.5).cast(int)<line_sep>fx=x[p]<times>inv_dx-base.cast(float)<line_sep>w=[0.5<times>(1.5-fx)<power>2 0.75-(fx-1.0)<power>2 0.5<times>(fx-0.5)<power>2]<line_sep>new_v=ti.Vector.zero(ti.f32 2)<line_sep>new_C=ti.Matrix.zero(ti.f32 2 2)<for_stmt>i ti.static(range(3))<block_start><for_stmt>j ti.static(range(3))<block_start>dpos=ti.Vector([i j]).cast(float)-fx<line_sep>g_v=grid_v[base+ti.Vector([i j])]<line_sep>weight=w[i][0]<times>w[j][1]<line_sep>new_v<augadd>weight<times>g_v<line_sep>new_C<augadd>4<times>weight<times>g_v.outer_product(dpos)<times>inv_dx<block_end><block_end>v[p]=new_v<line_sep>x[p]<augadd>dt<times>v[p]<line_sep>J[p]<augmul>1+dt<times>new_C.trace()<line_sep>C[p]=new_C<block_end><block_end># gui = ti._lib.core.GUI("MPM88", ti.core_veci(512, 512))
# canvas = gui.get_canvas()
<for_stmt>i range(n_particles)<block_start>x[i]=[i%N/N<times>0.4+0.2 i/N/N<times>0.4+0.05]<line_sep>v[i]=[0 -3]<line_sep>J[i]=1<block_end><for_stmt>frame range(10)<block_start><for_stmt>s range(50)<block_start>grid_v.fill([0 0])<line_sep>grid_m.fill(0)<line_sep>substep()<block_end><block_end>pos=x.to_numpy()<line_sep>pos[: 1]<augmul>2<line_sep>regression=[0.31722742 0.15826741 0.10224003 0.07810827 ]<for_stmt>i range(4)<block_start><assert_stmt>(pos<power>(i+1)).mean()<eq>approx(regression[i] rel=1e-2)<block_end><block_end>@ti.test()<def_stmt>test_mpm88 <block_start>run_mpm88_test()<block_end><def_stmt>_is_appveyor # AppVeyor adds `APPVEYOR=True` ('true' on Ubuntu)
# https://www.appveyor.com/docs/environment-variables/
<block_start><return>os.getenv('APPVEYOR' '').lower()<eq>'true'<block_end>#TODO: Remove exclude of ti.metal
@pytest.mark.skipif(_is_appveyor() reason='Stuck on Appveyor.')@ti.test(require=ti.extension.async_mode exclude=[ti.metal] async_mode=<true>)<def_stmt>test_mpm88_async # It seems that all async tests on Appveyor run super slow. For example,
# on Appveyor, 10+ tests have passed during the execution of
# test_fuse_dense_x2y2z. Maybe thread synchronizations are expensive?
<block_start>run_mpm88_test()<block_end>@ti.test(arch=[ti.cpu ti.cuda ti.opengl])<def_stmt>test_mpm88_numpy_and_ndarray <block_start><import_stmt>numpy<as>np<line_sep>dim=2<line_sep>N=64<line_sep>n_particles=N<times>N<line_sep>n_grid=128<line_sep>dx=1/n_grid<line_sep>inv_dx=1/dx<line_sep>dt=2.0e-4<line_sep>p_vol=(dx<times>0.5)<power>2<line_sep>p_rho=1<line_sep>p_mass=p_vol<times>p_rho<line_sep>E=400<line_sep>@ti.kernel<def_stmt>substep x:ti.any_arr(element_dim=1) v:ti.any_arr(element_dim=1) C:ti.any_arr(element_dim=2) J:ti.any_arr() grid_v:ti.any_arr(element_dim=1) grid_m:ti.any_arr()<block_start><for_stmt>p x<block_start>base=(x[p]<times>inv_dx-0.5).cast(int)<line_sep>fx=x[p]<times>inv_dx-base.cast(float)<line_sep>w=[0.5<times>(1.5-fx)<power>2 0.75-(fx-1)<power>2 0.5<times>(fx-0.5)<power>2]<line_sep>stress=-dt<times>p_vol<times>(J[p]-1)<times>4<times>inv_dx<times>inv_dx<times>E<line_sep>affine=ti.Matrix([[stress 0] [0 stress]])+p_mass<times>C[p]<for_stmt>i ti.static(range(3))<block_start><for_stmt>j ti.static(range(3))<block_start>offset=ti.Vector([i j])<line_sep>dpos=(offset.cast(float)-fx)<times>dx<line_sep>weight=w[i][0]<times>w[j][1]<line_sep>grid_v[base+offset].atomic_add(weight<times>(p_mass<times>v[p]+affine@dpos))<line_sep>grid_m[base+offset].atomic_add(weight<times>p_mass)<block_end><block_end><block_end><for_stmt>i,j grid_m<block_start><if_stmt>grid_m[i j]<g>0<block_start>bound=3<line_sep>inv_m=1/grid_m[i j]<line_sep>grid_v[i j]=inv_m<times>grid_v[i j]<line_sep>grid_v[i j][1]<augsub>dt<times>9.8<if_stmt>i<l>bound<and>grid_v[i j][0]<l>0<block_start>grid_v[i j][0]=0<block_end><if_stmt>i<g>n_grid-bound<and>grid_v[i j][0]<g>0<block_start>grid_v[i j][0]=0<block_end><if_stmt>j<l>bound<and>grid_v[i j][1]<l>0<block_start>grid_v[i j][1]=0<block_end><if_stmt>j<g>n_grid-bound<and>grid_v[i j][1]<g>0<block_start>grid_v[i j][1]=0<block_end><block_end><block_end><for_stmt>p 
x<block_start>base=(x[p]<times>inv_dx-0.5).cast(int)<line_sep>fx=x[p]<times>inv_dx-base.cast(float)<line_sep>w=[0.5<times>(1.5-fx)<power>2 0.75-(fx-1.0)<power>2 0.5<times>(fx-0.5)<power>2]<line_sep>new_v=ti.Vector.zero(ti.f32 2)<line_sep>new_C=ti.Matrix.zero(ti.f32 2 2)<for_stmt>i ti.static(range(3))<block_start><for_stmt>j ti.static(range(3))<block_start>dpos=ti.Vector([i j]).cast(float)-fx<line_sep>g_v=grid_v[base+ti.Vector([i j])]<line_sep>weight=w[i][0]<times>w[j][1]<line_sep>new_v<augadd>weight<times>g_v<line_sep>new_C<augadd>4<times>weight<times>g_v.outer_product(dpos)<times>inv_dx<block_end><block_end>v[p]=new_v<line_sep>x[p]<augadd>dt<times>v[p]<line_sep>J[p]<augmul>1+dt<times>new_C.trace()<line_sep>C[p]=new_C<block_end><block_end><def_stmt>run_test x v C J grid_v grid_m<block_start><for_stmt>i range(n_particles)<block_start>x[i]=[i%N/N<times>0.4+0.2 i/N/N<times>0.4+0.05]<line_sep>v[i]=[0 -3]<line_sep>J[i]=1<block_end><for_stmt>frame range(10)<block_start><for_stmt>s range(50)<block_start>grid_v.fill(0)<line_sep>grid_m.fill(0)<line_sep>substep(x v C J grid_v grid_m)<block_end><block_end>pos=x<if>isinstance(x np.ndarray)<else>x.to_numpy()<line_sep>pos[: 1]<augmul>2<line_sep>regression=[0.31722742 0.15826741 0.10224003 0.07810827 ]<for_stmt>i range(4)<block_start><assert_stmt>(pos<power>(i+1)).mean()<eq>approx(regression[i] rel=1e-2)<block_end><block_end><def_stmt>test_numpy <block_start>x=np.zeros((n_particles dim) dtype=np.float32)<line_sep>v=np.zeros((n_particles dim) dtype=np.float32)<line_sep>C=np.zeros((n_particles dim dim) dtype=np.float32)<line_sep>J=np.zeros(n_particles dtype=np.float32)<line_sep>grid_v=np.zeros((n_grid n_grid dim) dtype=np.float32)<line_sep>grid_m=np.zeros((n_grid n_grid) dtype=np.float32)<line_sep>run_test(x v C J grid_v grid_m)<block_end><def_stmt>test_ndarray <block_start>x=ti.Vector.ndarray(dim ti.f32 n_particles)<line_sep>v=ti.Vector.ndarray(dim ti.f32 n_particles)<line_sep>C=ti.Matrix.ndarray(dim dim ti.f32 
n_particles)<line_sep>J=ti.ndarray(ti.f32 n_particles)<line_sep>grid_v=ti.Vector.ndarray(dim ti.f32 (n_grid n_grid))<line_sep>grid_m=ti.ndarray(ti.f32 (n_grid n_grid))<line_sep>run_test(x v C J grid_v grid_m)<block_end>test_numpy()<line_sep>test_ndarray()<block_end> |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the debug events writer Python class."""<import_stmt>glob<import_stmt>json<as>json_lib<import_stmt>os<import_stmt>re<import_stmt>threading<import_stmt>time<import_from_stmt>absl.testing parameterized<import_from_stmt>tensorflow.core.protobuf debug_event_pb2<import_from_stmt>tensorflow.python.debug.lib debug_events_reader<import_from_stmt>tensorflow.python.debug.lib debug_events_writer<import_from_stmt>tensorflow.python.debug.lib dumping_callback_test_lib<import_from_stmt>tensorflow.python.framework ops<import_from_stmt>tensorflow.python.framework test_util<import_from_stmt>tensorflow.python.framework versions<import_from_stmt>tensorflow.python.platform googletest<class_stmt>DebugEventsWriterTest(dumping_callback_test_lib.DumpingCallbackTestBase parameterized.TestCase)<block_start><def_stmt>testMultiThreadedConstructorCallWorks self<block_start><def_stmt>init_writer <block_start>debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<block_end>num_threads=4<line_sep>threads=[]<for_stmt>_ range(num_threads)<block_start>thread=threading.Thread(target=init_writer)<line_sep>thread.start()<line_sep>threads.append(thread)<block_end><for_stmt>thread threads<block_start>thread.join()<block_end># Verify that there is only one debug event file of each type.
metadata_paths=glob.glob(os.path.join(self.dump_root "*.metadata"))<line_sep>self.assertLen(metadata_paths 1)<line_sep>source_files_paths=glob.glob(os.path.join(self.dump_root "*.source_files"))<line_sep>self.assertLen(source_files_paths 1)<line_sep>stack_frames_paths=glob.glob(os.path.join(self.dump_root "*.stack_frames"))<line_sep>self.assertLen(stack_frames_paths 1)<line_sep>graphs_paths=glob.glob(os.path.join(self.dump_root "*.graphs"))<line_sep>self.assertLen(graphs_paths 1)<line_sep>self._readAndCheckMetadataFile()<block_end><def_stmt>testWriteSourceFilesAndStackFrames self<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<line_sep>num_protos=10<for_stmt>i range(num_protos)<block_start>source_file=debug_event_pb2.SourceFile()<line_sep>source_file.file_path="/home/tf2user/main.py"<line_sep>source_file.host_name="machine.cluster"<line_sep>source_file.lines.append("print(%d)"%i)<line_sep>writer.WriteSourceFile(source_file)<line_sep>stack_frame=debug_event_pb2.StackFrameWithId()<line_sep>stack_frame.id="stack_%d"%i<line_sep>stack_frame.file_line_col.file_index=i<times>10<line_sep>writer.WriteStackFrameWithId(stack_frame)<block_end>writer.FlushNonExecutionFiles()<with_stmt>debug_events_reader.DebugEventsReader(self.dump_root)<as>reader<block_start>actuals=list(item.debug_event.source_file<for>item reader.source_files_iterator())<line_sep>self.assertLen(actuals num_protos)<for_stmt>i range(num_protos)<block_start>self.assertEqual(actuals[i].file_path "/home/tf2user/main.py")<line_sep>self.assertEqual(actuals[i].host_name "machine.cluster")<line_sep>self.assertEqual(actuals[i].lines ["print(%d)"%i])<block_end>actuals=list(item.debug_event.stack_frame_with_id<for>item reader.stack_frames_iterator())<line_sep>self.assertLen(actuals num_protos)<for_stmt>i range(num_protos)<block_start>self.assertEqual(actuals[i].id "stack_%d"%i)<line_sep>self.assertEqual(actuals[i].file_line_col.file_index 
i<times>10)<block_end><block_end><block_end><def_stmt>testWriteGraphOpCreationAndDebuggedGraphs self<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<line_sep>num_op_creations=10<for_stmt>i range(num_op_creations)<block_start>graph_op_creation=debug_event_pb2.GraphOpCreation()<line_sep>graph_op_creation.op_type="Conv2D"<line_sep>graph_op_creation.op_name="Conv2D_%d"%i<line_sep>writer.WriteGraphOpCreation(graph_op_creation)<block_end>debugged_graph=debug_event_pb2.DebuggedGraph()<line_sep>debugged_graph.graph_id="deadbeaf"<line_sep>debugged_graph.graph_name="MyGraph1"<line_sep>writer.WriteDebuggedGraph(debugged_graph)<line_sep>writer.FlushNonExecutionFiles()<line_sep>reader=debug_events_reader.DebugEventsReader(self.dump_root)<line_sep>actuals=list(item.debug_event<for>item reader.graphs_iterator())<line_sep>self.assertLen(actuals num_op_creations+1)<for_stmt>i range(num_op_creations)<block_start>self.assertEqual(actuals[i].graph_op_creation.op_type "Conv2D")<line_sep>self.assertEqual(actuals[i].graph_op_creation.op_name "Conv2D_%d"%i)<block_end>self.assertEqual(actuals[num_op_creations].debugged_graph.graph_id "deadbeaf")<block_end><def_stmt>testConcurrentWritesToNonExecutionFilesWorks self<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<line_sep>source_file_state={"counter":0 "lock":threading.Lock()}<def_stmt>writer_source_file <block_start>source_file=debug_event_pb2.SourceFile()<with_stmt>source_file_state["lock"]<block_start>source_file.file_path="/home/tf2user/file_%d.py"%source_file_state["counter"]<line_sep>source_file_state["counter"]<augadd>1<block_end>writer.WriteSourceFile(source_file)<line_sep># More-frequent-than-necessary concurrent flushing is not recommended,
# but tolerated.
writer.FlushNonExecutionFiles()<block_end>stack_frame_state={"counter":0 "lock":threading.Lock()}<def_stmt>write_stack_frame <block_start>stack_frame=debug_event_pb2.StackFrameWithId()<with_stmt>stack_frame_state["lock"]<block_start>stack_frame.id="stack_frame_%d"%stack_frame_state["counter"]<line_sep>stack_frame_state["counter"]<augadd>1<block_end>writer.WriteStackFrameWithId(stack_frame)<line_sep># More-frequent-than-necessary concurrent flushing is not recommended,
# but tolerated.
writer.FlushNonExecutionFiles()<block_end>graph_op_state={"counter":0 "lock":threading.Lock()}<def_stmt>write_graph_op_creation <block_start>graph_op_creation=debug_event_pb2.GraphOpCreation()<with_stmt>graph_op_state["lock"]<block_start>graph_op_creation.op_name="Op%d"%graph_op_state["counter"]<line_sep>graph_op_state["counter"]<augadd>1<block_end>writer.WriteGraphOpCreation(graph_op_creation)<line_sep># More-frequent-than-necessary concurrent flushing is not recommended,
# but tolerated.
writer.FlushNonExecutionFiles()<block_end>num_threads=9<line_sep>threads=[]<for_stmt>i range(num_threads)<block_start><if_stmt>i%3<eq>0<block_start>target=writer_source_file<block_end><elif_stmt>i%3<eq>1<block_start>target=write_stack_frame<block_end><else_stmt><block_start>target=write_graph_op_creation<block_end>thread=threading.Thread(target=target)<line_sep>thread.start()<line_sep>threads.append(thread)<block_end><for_stmt>thread threads<block_start>thread.join()<block_end># Verify the content of the .source_files file.
<with_stmt>debug_events_reader.DebugEventsReader(self.dump_root)<as>reader<block_start>source_files_iter=reader.source_files_iterator()<line_sep>actuals=list(item.debug_event.source_file<for>item source_files_iter)<line_sep>file_paths=sorted([actual.file_path<for>actual actuals])<line_sep>self.assertEqual(file_paths ["/home/tf2user/file_0.py" "/home/tf2user/file_1.py" "/home/tf2user/file_2.py"])<block_end># Verify the content of the .stack_frames file.
actuals=list(item.debug_event.stack_frame_with_id<for>item reader.stack_frames_iterator())<line_sep>stack_frame_ids=sorted([actual.id<for>actual actuals])<line_sep>self.assertEqual(stack_frame_ids ["stack_frame_0" "stack_frame_1" "stack_frame_2"])<line_sep># Verify the content of the .graphs file.
actuals=list(item.debug_event.graph_op_creation<for>item reader.graphs_iterator())<line_sep>graph_op_names=sorted([actual.op_name<for>actual actuals])<line_sep>self.assertEqual(graph_op_names ["Op0" "Op1" "Op2"])<block_end><def_stmt>testWriteAndReadMetadata self<block_start>t0=time.time()<line_sep>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<line_sep>writer.Close()<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start>self.assertIsInstance(reader.starting_wall_time() float)<line_sep>self.assertGreaterEqual(reader.starting_wall_time() t0)<line_sep>self.assertEqual(reader.tensorflow_version() versions.__version__)<line_sep>self.assertTrue(reader.tfdbg_run_id())<block_end><block_end><def_stmt>testWriteExecutionEventsWithCircularBuffer self<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<line_sep>num_execution_events=debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE<times>2<for_stmt>i range(num_execution_events)<block_start>execution=debug_event_pb2.Execution()<line_sep>execution.op_type="OpType%d"%i<line_sep>writer.WriteExecution(execution)<block_end><with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader# Before FlushExecutionFiles() is called. No data should have been written
# to the file.
<block_start>reader.update()<line_sep>self.assertFalse(reader.executions())<line_sep>writer.FlushExecutionFiles()<line_sep>reader.update()<line_sep>executions=reader.executions()<for_stmt>i,execution enumerate(executions)<block_start>self.assertEqual(execution.op_type "OpType%d"%(i+debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE))<block_end><block_end><block_end><def_stmt>testWriteExecutionEventsWithoutCircularBufferBehavior self# A circular buffer size of 0 abolishes the circular buffer behavior.
<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id 0)<line_sep>num_execution_events=debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE<times>2<for_stmt>i range(num_execution_events)<block_start>execution=debug_event_pb2.Execution()<line_sep>execution.op_type="OpType%d"%i<line_sep>writer.WriteExecution(execution)<block_end>writer.FlushExecutionFiles()<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start>reader.update()<line_sep>executions=reader.executions()<line_sep>self.assertLen(executions num_execution_events)<for_stmt>i,execution enumerate(executions)<block_start>self.assertEqual(execution.op_type "OpType%d"%i)<block_end><block_end><block_end><def_stmt>testWriteGraphExecutionTraceEventsWithCircularBuffer self<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<line_sep>num_execution_events=debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE<times>2<for_stmt>i range(num_execution_events)<block_start>trace=debug_event_pb2.GraphExecutionTrace()<line_sep>trace.op_name="Op%d"%i<line_sep>writer.WriteGraphExecutionTrace(trace)<block_end><with_stmt>debug_events_reader.DebugEventsReader(self.dump_root)<as>reader<block_start>actuals=list(reader.graph_execution_traces_iterators()[0])<line_sep># Before FlushExecutionFiles() is called. No data should have been written
# to the file.
self.assertEmpty(actuals)<line_sep>writer.FlushExecutionFiles()<line_sep>actuals=list(item.debug_event.graph_execution_trace<for>item reader.graph_execution_traces_iterators()[0])<line_sep>self.assertLen(actuals debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE)<for_stmt>i range(debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE)<block_start>self.assertEqual(actuals[i].op_name "Op%d"%(i+debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE))<block_end><block_end><block_end><def_stmt>testWriteGraphExecutionTraceEventsWithoutCircularBufferBehavior self# A circular buffer size of 0 abolishes the circular buffer behavior.
<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id 0)<line_sep>num_execution_events=debug_events_writer.DEFAULT_CIRCULAR_BUFFER_SIZE<times>2<for_stmt>i range(num_execution_events)<block_start>trace=debug_event_pb2.GraphExecutionTrace()<line_sep>trace.op_name="Op%d"%i<line_sep>writer.WriteGraphExecutionTrace(trace)<block_end>writer.FlushExecutionFiles()<with_stmt>debug_events_reader.DebugEventsReader(self.dump_root)<as>reader<block_start>actuals=list(item.debug_event.graph_execution_trace<for>item reader.graph_execution_traces_iterators()[0])<block_end>self.assertLen(actuals num_execution_events)<for_stmt>i range(num_execution_events)<block_start>self.assertEqual(actuals[i].op_name "Op%d"%i)<block_end><block_end><def_stmt>testConcurrentWritesToExecutionFiles self<block_start>circular_buffer_size=5<line_sep>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size)<line_sep>debugged_graph=debug_event_pb2.DebuggedGraph(graph_id="graph1" graph_name="graph1")<line_sep>writer.WriteDebuggedGraph(debugged_graph)<line_sep>execution_state={"counter":0 "lock":threading.Lock()}<def_stmt>write_execution <block_start>execution=debug_event_pb2.Execution()<with_stmt>execution_state["lock"]<block_start>execution.op_type="OpType%d"%execution_state["counter"]<line_sep>execution_state["counter"]<augadd>1<block_end>writer.WriteExecution(execution)<block_end>graph_execution_trace_state={"counter":0 "lock":threading.Lock()}<def_stmt>write_graph_execution_trace <block_start><with_stmt>graph_execution_trace_state["lock"]<block_start>op_name="Op%d"%graph_execution_trace_state["counter"]<line_sep>graph_op_creation=debug_event_pb2.GraphOpCreation(op_type="FooOp" op_name=op_name graph_id="graph1")<line_sep>trace=debug_event_pb2.GraphExecutionTrace(op_name=op_name 
tfdbg_context_id="graph1")<line_sep>graph_execution_trace_state["counter"]<augadd>1<block_end>writer.WriteGraphOpCreation(graph_op_creation)<line_sep>writer.WriteGraphExecutionTrace(trace)<block_end>threads=[]<for_stmt>i range(circular_buffer_size<times>4)<block_start><if_stmt>i%2<eq>0<block_start>target=write_execution<block_end><else_stmt><block_start>target=write_graph_execution_trace<block_end>thread=threading.Thread(target=target)<line_sep>thread.start()<line_sep>threads.append(thread)<block_end><for_stmt>thread threads<block_start>thread.join()<block_end>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start>reader.update()<line_sep># Verify the content of the .execution file.
executions=reader.executions()<line_sep>executed_op_types=[execution.op_type<for>execution executions]<line_sep>self.assertLen(executed_op_types circular_buffer_size)<line_sep>self.assertLen(executed_op_types len(set(executed_op_types)))<line_sep># Verify the content of the .graph_execution_traces file.
op_names=[trace.op_name<for>trace reader.graph_execution_traces()]<line_sep>self.assertLen(op_names circular_buffer_size)<line_sep>self.assertLen(op_names len(set(op_names)))<block_end><block_end><def_stmt>testConcurrentSourceFileRandomReads self<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id)<for_stmt>i range(100)<block_start>source_file=debug_event_pb2.SourceFile(host_name="localhost" file_path="/tmp/file_%d.py"%i)<line_sep>source_file.lines.append("# File %d"%i)<line_sep>writer.WriteSourceFile(source_file)<block_end>writer.FlushNonExecutionFiles()<line_sep>reader=debug_events_reader.DebugDataReader(self.dump_root)<line_sep>reader.update()<line_sep>lines=[<none>]<times>100<def_stmt>read_job_1 # Read in the reverse order to enhance randomness of the read access.
<block_start><for_stmt>i range(49 -1 -1)<block_start>lines[i]=reader.source_lines("localhost" "/tmp/file_%d.py"%i)<block_end><block_end><def_stmt>read_job_2 <block_start><for_stmt>i range(99 49 -1)<block_start>lines[i]=reader.source_lines("localhost" "/tmp/file_%d.py"%i)<block_end><block_end>thread_1=threading.Thread(target=read_job_1)<line_sep>thread_2=threading.Thread(target=read_job_2)<line_sep>thread_1.start()<line_sep>thread_2.start()<line_sep>thread_1.join()<line_sep>thread_2.join()<for_stmt>i range(100)<block_start>self.assertEqual(lines[i] ["# File %d"%i])<block_end><block_end><def_stmt>testConcurrentExecutionUpdateAndRandomRead self<block_start>circular_buffer_size=-1<line_sep>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size)<line_sep>writer_state={"counter":0 "done":<false>}<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start><def_stmt>write_and_update_job <block_start><while_stmt><true><block_start><if_stmt>writer_state["done"]<block_start><break><block_end>execution=debug_event_pb2.Execution()<line_sep>execution.op_type="OpType%d"%writer_state["counter"]<line_sep>writer_state["counter"]<augadd>1<line_sep>writer.WriteExecution(execution)<line_sep>writer.FlushExecutionFiles()<line_sep>reader.update()<block_end><block_end># On the sub-thread, keep writing and reading new Execution protos.
write_and_update_thread=threading.Thread(target=write_and_update_job)<line_sep>write_and_update_thread.start()<line_sep># On the main thread, do concurrent random read.
<while_stmt><true><block_start>exec_digests=reader.executions(digest=<true>)<if_stmt>exec_digests<block_start>exec_0=reader.read_execution(exec_digests[0])<line_sep>self.assertEqual(exec_0.op_type "OpType0")<line_sep>writer_state["done"]=<true><line_sep><break><block_end><else_stmt><block_start>time.sleep(0.1)<line_sep><continue><block_end><block_end>write_and_update_thread.join()<block_end><block_end><def_stmt>testConcurrentExecutionRandomReads self<block_start>circular_buffer_size=-1<line_sep>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size)<for_stmt>i range(100)<block_start>execution=debug_event_pb2.Execution()<line_sep>execution.op_type="OpType%d"%i<line_sep>writer.WriteExecution(execution)<block_end>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<line_sep>reader=debug_events_reader.DebugDataReader(self.dump_root)<line_sep>reader.update()<line_sep>executions=[<none>]<times>100<def_stmt>read_job_1 <block_start>execution_digests=reader.executions(digest=<true>)<line_sep># Read in the reverse order to enhance randomness of the read access.
<for_stmt>i range(49 -1 -1)<block_start>execution=reader.read_execution(execution_digests[i])<line_sep>executions[i]=execution<block_end><block_end><def_stmt>read_job_2 <block_start>execution_digests=reader.executions(digest=<true>)<for_stmt>i range(99 49 -1)<block_start>execution=reader.read_execution(execution_digests[i])<line_sep>executions[i]=execution<block_end><block_end>thread_1=threading.Thread(target=read_job_1)<line_sep>thread_2=threading.Thread(target=read_job_2)<line_sep>thread_1.start()<line_sep>thread_2.start()<line_sep>thread_1.join()<line_sep>thread_2.join()<for_stmt>i range(100)<block_start>self.assertEqual(executions[i].op_type "OpType%d"%i)<block_end><block_end><def_stmt>testConcurrentGraphExecutionTraceUpdateAndRandomRead self<block_start>circular_buffer_size=-1<line_sep>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size)<line_sep>debugged_graph=debug_event_pb2.DebuggedGraph(graph_id="graph1" graph_name="graph1")<line_sep>writer.WriteDebuggedGraph(debugged_graph)<line_sep>writer_state={"counter":0 "done":<false>}<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start><def_stmt>write_and_update_job <block_start><while_stmt><true><block_start><if_stmt>writer_state["done"]<block_start><break><block_end>op_name="Op%d"%writer_state["counter"]<line_sep>graph_op_creation=debug_event_pb2.GraphOpCreation(op_type="FooOp" op_name=op_name graph_id="graph1")<line_sep>writer.WriteGraphOpCreation(graph_op_creation)<line_sep>trace=debug_event_pb2.GraphExecutionTrace(op_name=op_name tfdbg_context_id="graph1")<line_sep>writer.WriteGraphExecutionTrace(trace)<line_sep>writer_state["counter"]<augadd>1<line_sep>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<line_sep>reader.update()<block_end><block_end># On the sub-thread, keep writing and reading new GraphExecutionTraces.
write_and_update_thread=threading.Thread(target=write_and_update_job)<line_sep>write_and_update_thread.start()<line_sep># On the main thread, do concurrent random read.
<while_stmt><true><block_start>digests=reader.graph_execution_traces(digest=<true>)<if_stmt>digests<block_start>trace_0=reader.read_graph_execution_trace(digests[0])<line_sep>self.assertEqual(trace_0.op_name "Op0")<line_sep>writer_state["done"]=<true><line_sep><break><block_end><else_stmt><block_start>time.sleep(0.1)<line_sep><continue><block_end><block_end>write_and_update_thread.join()<block_end><block_end><def_stmt>testConcurrentGraphExecutionTraceRandomReads self<block_start>circular_buffer_size=-1<line_sep>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size)<line_sep>debugged_graph=debug_event_pb2.DebuggedGraph(graph_id="graph1" graph_name="graph1")<line_sep>writer.WriteDebuggedGraph(debugged_graph)<for_stmt>i range(100)<block_start>op_name="Op%d"%i<line_sep>graph_op_creation=debug_event_pb2.GraphOpCreation(op_type="FooOp" op_name=op_name graph_id="graph1")<line_sep>writer.WriteGraphOpCreation(graph_op_creation)<line_sep>trace=debug_event_pb2.GraphExecutionTrace(op_name=op_name tfdbg_context_id="graph1")<line_sep>writer.WriteGraphExecutionTrace(trace)<block_end>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<line_sep>reader=debug_events_reader.DebugDataReader(self.dump_root)<line_sep>reader.update()<line_sep>traces=[<none>]<times>100<def_stmt>read_job_1 <block_start>digests=reader.graph_execution_traces(digest=<true>)<for_stmt>i range(49 -1 -1)<block_start>traces[i]=reader.read_graph_execution_trace(digests[i])<block_end><block_end><def_stmt>read_job_2 <block_start>digests=reader.graph_execution_traces(digest=<true>)<for_stmt>i range(99 49 -1)<block_start>traces[i]=reader.read_graph_execution_trace(digests[i])<block_end><block_end>thread_1=threading.Thread(target=read_job_1)<line_sep>thread_2=threading.Thread(target=read_job_2)<line_sep>thread_1.start()<line_sep>thread_2.start()<line_sep>thread_1.join()<line_sep>thread_2.join()<for_stmt>i 
range(100)<block_start>self.assertEqual(traces[i].op_name "Op%d"%i)<block_end><block_end>@parameterized.named_parameters(("Begin1End3" 1 3 1 3) ("Begin0End3" 0 3 0 3) ("Begin0EndNeg1" 0 -1 0 4) ("BeginNoneEnd3" <none> 3 0 3) ("Begin2EndNone" 2 <none> 2 5) ("BeginNoneEndNone" <none> <none> 0 5) )<def_stmt>testRangeReadingExecutions self begin end expected_begin expected_end<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size=-1)<for_stmt>i range(5)<block_start>execution=debug_event_pb2.Execution(op_type="OpType%d"%i)<line_sep>writer.WriteExecution(execution)<block_end>writer.FlushExecutionFiles()<line_sep>writer.Close()<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start>reader.update()<line_sep>executions=reader.executions(begin=begin end=end)<block_end>self.assertLen(executions expected_end-expected_begin)<line_sep>self.assertEqual(executions[0].op_type "OpType%d"%expected_begin)<line_sep>self.assertEqual(executions[-1].op_type "OpType%d"%(expected_end-1))<block_end>@parameterized.named_parameters(("Begin1End3" 1 3 1 3) ("Begin0End3" 0 3 0 3) ("Begin0EndNeg1" 0 -1 0 4) ("BeginNoneEnd3" <none> 3 0 3) ("Begin2EndNone" 2 <none> 2 5) ("BeginNoneEndNone" <none> <none> 0 5) )<def_stmt>testRangeReadingGraphExecutionTraces self begin end expected_begin expected_end<block_start>writer=debug_events_writer.DebugEventsWriter(self.dump_root self.tfdbg_run_id circular_buffer_size=-1)<line_sep>debugged_graph=debug_event_pb2.DebuggedGraph(graph_id="graph1" graph_name="graph1")<line_sep>writer.WriteDebuggedGraph(debugged_graph)<for_stmt>i range(5)<block_start>op_name="Op_%d"%i<line_sep>graph_op_creation=debug_event_pb2.GraphOpCreation(op_name=op_name graph_id="graph1")<line_sep>writer.WriteGraphOpCreation(graph_op_creation)<line_sep>trace=debug_event_pb2.GraphExecutionTrace(op_name=op_name 
tfdbg_context_id="graph1")<line_sep>writer.WriteGraphExecutionTrace(trace)<block_end>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<line_sep>writer.Close()<with_stmt>debug_events_reader.DebugDataReader(self.dump_root)<as>reader<block_start>reader.update()<line_sep>traces=reader.graph_execution_traces(begin=begin end=end)<block_end>self.assertLen(traces expected_end-expected_begin)<line_sep>self.assertEqual(traces[0].op_name "Op_%d"%expected_begin)<line_sep>self.assertEqual(traces[-1].op_name "Op_%d"%(expected_end-1))<block_end><block_end><class_stmt>MultiSetReaderTest(dumping_callback_test_lib.DumpingCallbackTestBase)<block_start>"""Test for DebugDataReader for multiple file sets under a dump root."""<def_stmt>testReadingTwoFileSetsWithTheSameDumpRootSucceeds self# To simulate a multi-host data dump, we first generate file sets in two
# different directories, with the same tfdbg_run_id, and then combine them.
<block_start>tfdbg_run_id="foo"<for_stmt>i range(2)<block_start>writer=debug_events_writer.DebugEventsWriter(os.path.join(self.dump_root str(i)) tfdbg_run_id circular_buffer_size=-1)<if_stmt>i<eq>0<block_start>debugged_graph=debug_event_pb2.DebuggedGraph(graph_id="graph1" graph_name="graph1")<line_sep>writer.WriteDebuggedGraph(debugged_graph)<line_sep>op_name="Op_0"<line_sep>graph_op_creation=debug_event_pb2.GraphOpCreation(op_type="FooOp" op_name=op_name graph_id="graph1")<line_sep>writer.WriteGraphOpCreation(graph_op_creation)<line_sep>op_name="Op_1"<line_sep>graph_op_creation=debug_event_pb2.GraphOpCreation(op_type="FooOp" op_name=op_name graph_id="graph1")<line_sep>writer.WriteGraphOpCreation(graph_op_creation)<block_end><for_stmt>_ range(10)<block_start>trace=debug_event_pb2.GraphExecutionTrace(op_name="Op_%d"%i tfdbg_context_id="graph1")<line_sep>writer.WriteGraphExecutionTrace(trace)<line_sep>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<block_end><block_end># Move all files from the subdirectory /1 to subdirectory /0.
dump_root_0=os.path.join(self.dump_root "0")<line_sep>src_paths=glob.glob(os.path.join(self.dump_root "1" "*"))<for_stmt>src_path src_paths<block_start>dst_path=os.path.join(dump_root_0 # Rename the file set to avoid file name collision.
re.sub(r"(tfdbg_events\.\d+)" r"\g<1>1" os.path.basename(src_path)))<line_sep>os.rename(src_path dst_path)<block_end><with_stmt>debug_events_reader.DebugDataReader(dump_root_0)<as>reader<block_start>reader.update()<line_sep># Verify the content of the .graph_execution_traces file.
trace_digests=reader.graph_execution_traces(digest=<true>)<line_sep>self.assertLen(trace_digests 20)<for_stmt>_ range(10)<block_start>trace=reader.read_graph_execution_trace(trace_digests[i])<line_sep>self.assertEqual(trace.op_name "Op_0")<block_end><for_stmt>_ range(10)<block_start>trace=reader.read_graph_execution_trace(trace_digests[i+10])<line_sep>self.assertEqual(trace.op_name "Op_1")<block_end><block_end><block_end><def_stmt>testReadingTwoFileSetsWithTheDifferentRootsLeadsToError self# To simulate a multi-host data dump, we first generate file sets in two
# different directories, with different tfdbg_run_ids, and then combine
# them.
<block_start><for_stmt>i range(2)<block_start>writer=debug_events_writer.DebugEventsWriter(os.path.join(self.dump_root str(i)) "run_id_%d"%i circular_buffer_size=-1)<line_sep>writer.FlushNonExecutionFiles()<line_sep>writer.FlushExecutionFiles()<block_end># Move all files from the subdirectory /1 to subdirectory /0.
dump_root_0=os.path.join(self.dump_root "0")<line_sep>src_paths=glob.glob(os.path.join(self.dump_root "1" "*"))<for_stmt>src_path src_paths<block_start>dst_path=os.path.join(dump_root_0 # Rename the file set to avoid file name collision.
re.sub(r"(tfdbg_events\.\d+)" r"\g<1>1" os.path.basename(src_path)))<line_sep>os.rename(src_path dst_path)<block_end><with_stmt>self.assertRaisesRegex(ValueError r"Found multiple \(2\) tfdbg2 runs")<block_start>debug_events_reader.DebugDataReader(dump_root_0)<block_end><block_end><block_end><class_stmt>DataObjectsTest(test_util.TensorFlowTestCase parameterized.TestCase)<block_start><def_stmt>jsonRoundTripCheck self obj<block_start>self.assertEqual(json_lib.dumps(json_lib.loads(json_lib.dumps(obj)) sort_keys=<true>) json_lib.dumps(obj sort_keys=<true>))<block_end><def_stmt>testExecutionDigestWithNoOutputToJson self<block_start>execution_digest=debug_events_reader.ExecutionDigest(1234 5678 "FooOp" output_tensor_device_ids=<none>)<line_sep>json=execution_digest.to_json()<line_sep>self.jsonRoundTripCheck(json)<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["output_tensor_device_ids"] <none>)<block_end><def_stmt>testExecutionDigestWithTwoOutputsToJson self<block_start>execution_digest=debug_events_reader.ExecutionDigest(1234 5678 "FooOp" output_tensor_device_ids=[1357 2468])<line_sep>json=execution_digest.to_json()<line_sep>self.jsonRoundTripCheck(json)<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["output_tensor_device_ids"] (1357 2468))<block_end><def_stmt>testExecutionNoGraphNoInputToJson self<block_start>execution_digest=debug_events_reader.ExecutionDigest(1234 5678 "FooOp" output_tensor_device_ids=[1357])<line_sep>execution=debug_events_reader.Execution(execution_digest "localhost" ("a1" "b2") debug_event_pb2.TensorDebugMode.CURT_HEALTH graph_id=<none> input_tensor_ids=<none> output_tensor_ids=[2468] debug_tensor_values=([1 0] ))<line_sep>json=execution.to_json()<line_sep>self.jsonRoundTripCheck(json)<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] 
"FooOp")<line_sep>self.assertEqual(json["output_tensor_device_ids"] (1357 ))<line_sep>self.assertEqual(json["host_name"] "localhost")<line_sep>self.assertEqual(json["stack_frame_ids"] ("a1" "b2"))<line_sep>self.assertEqual(json["tensor_debug_mode"] debug_event_pb2.TensorDebugMode.CURT_HEALTH)<line_sep>self.assertIsNone(json["graph_id"])<line_sep>self.assertIsNone(json["input_tensor_ids"])<line_sep>self.assertEqual(json["output_tensor_ids"] (2468 ))<line_sep>self.assertEqual(json["debug_tensor_values"] ([1 0] ))<block_end><def_stmt>testExecutionNoGraphNoInputButWithOutputToJson self<block_start>execution_digest=debug_events_reader.ExecutionDigest(1234 5678 "FooOp" output_tensor_device_ids=[1357])<line_sep>execution=debug_events_reader.Execution(execution_digest "localhost" ("a1" "b2") debug_event_pb2.TensorDebugMode.FULL_HEALTH graph_id="abcd" input_tensor_ids=[13 37] output_tensor_ids=<none> debug_tensor_values=<none>)<line_sep>json=execution.to_json()<line_sep>self.jsonRoundTripCheck(json)<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["output_tensor_device_ids"] (1357 ))<line_sep>self.assertEqual(json["host_name"] "localhost")<line_sep>self.assertEqual(json["stack_frame_ids"] ("a1" "b2"))<line_sep>self.assertEqual(json["tensor_debug_mode"] debug_event_pb2.TensorDebugMode.FULL_HEALTH)<line_sep>self.assertEqual(json["graph_id"] "abcd")<line_sep>self.assertEqual(json["input_tensor_ids"] (13 37))<line_sep>self.assertIsNone(json["output_tensor_ids"])<line_sep>self.assertIsNone(json["debug_tensor_values"])<block_end>@parameterized.named_parameters(("EmptyList" []) ("None" <none>) )<def_stmt>testExecutionWithNoOutputTensorsReturnsZeroForNumOutputs self output_tensor_ids<block_start>execution=debug_events_reader.Execution(debug_events_reader.ExecutionDigest(1234 5678 "FooOp") "localhost" ("a1" "b2") debug_event_pb2.TensorDebugMode.FULL_HEALTH graph_id="abcd" input_tensor_ids=[13 37] 
output_tensor_ids=output_tensor_ids debug_tensor_values=<none>)<line_sep>self.assertEqual(execution.num_outputs 0)<block_end><def_stmt>testDebuggedDeviceToJons self<block_start>debugged_device=debug_events_reader.DebuggedDevice("/TPU:3" 4)<line_sep>self.assertEqual(debugged_device.to_json() {"device_name":"/TPU:3" "device_id":4 })<block_end><def_stmt>testDebuggedGraphToJonsWitouthNameInnerOuterGraphIds self<block_start>debugged_graph=debug_events_reader.DebuggedGraph(<none> "b1c2" outer_graph_id=<none> )<line_sep>self.assertEqual(debugged_graph.to_json() {"name":<none> "graph_id":"b1c2" "outer_graph_id":<none> "inner_graph_ids":[] })<block_end><def_stmt>testDebuggedGraphToJonsWithNameAndInnerOuterGraphIds self<block_start>debugged_graph=debug_events_reader.DebuggedGraph("loss_function" "b1c2" outer_graph_id="a0b1" )<line_sep>debugged_graph.add_inner_graph_id("c2d3")<line_sep>debugged_graph.add_inner_graph_id("c2d3e4")<line_sep>self.assertEqual(debugged_graph.to_json() {"name":"loss_function" "graph_id":"b1c2" "outer_graph_id":"a0b1" "inner_graph_ids":["c2d3" "c2d3e4"] })<block_end>@parameterized.named_parameters(("EmptyList" []) ("None" <none>) )<def_stmt>testGraphOpDigestWithNoOutpusReturnsNumOutputsZero self output_tensor_ids<block_start>op_creation_digest=debug_events_reader.GraphOpCreationDigest(1234 5678 "deadbeef" "FooOp" "Model_1/Foo_2" output_tensor_ids "machine.cluster" ("a1" "a2") input_names=<none> device_name=<none>)<line_sep>self.assertEqual(op_creation_digest.num_outputs 0)<block_end><def_stmt>testGraphOpCreationDigestNoInputNoDeviceNameToJson self<block_start>op_creation_digest=debug_events_reader.GraphOpCreationDigest(1234 5678 "deadbeef" "FooOp" "Model_1/Foo_2" [135] "machine.cluster" ("a1" "a2") input_names=<none> device_name=<none>)<line_sep>json=op_creation_digest.to_json()<line_sep>self.jsonRoundTripCheck(json)<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["graph_id"] 
"deadbeef")<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["op_name"] "Model_1/Foo_2")<line_sep>self.assertEqual(json["output_tensor_ids"] (135 ))<line_sep>self.assertEqual(json["host_name"] "machine.cluster")<line_sep>self.assertEqual(json["stack_frame_ids"] ("a1" "a2"))<line_sep>self.assertIsNone(json["input_names"])<line_sep>self.assertIsNone(json["device_name"])<block_end><def_stmt>testGraphOpCreationDigestWithInputsAndDeviceNameToJson self<block_start>op_creation_digest=debug_events_reader.GraphOpCreationDigest(1234 5678 "deadbeef" "FooOp" "Model_1/Foo_2" [135] "machine.cluster" ("a1" "a2") input_names=["Bar_1" "Qux_2"] device_name="/device:GPU:0")<line_sep>json=op_creation_digest.to_json()<line_sep>self.jsonRoundTripCheck(json)<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["graph_id"] "deadbeef")<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["op_name"] "Model_1/Foo_2")<line_sep>self.assertEqual(json["output_tensor_ids"] (135 ))<line_sep>self.assertEqual(json["host_name"] "machine.cluster")<line_sep>self.assertEqual(json["stack_frame_ids"] ("a1" "a2"))<line_sep>self.assertEqual(json["input_names"] ("Bar_1" "Qux_2"))<line_sep>self.assertEqual(json["device_name"] "/device:GPU:0")<block_end><def_stmt>testGraphExecutionTraceDigestToJson self<block_start>trace_digest=debug_events_reader.GraphExecutionTraceDigest(1234 5678 "FooOp" "Model_1/Foo_2" 1 "deadbeef")<line_sep>json=trace_digest.to_json()<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["op_name"] "Model_1/Foo_2")<line_sep>self.assertEqual(json["output_slot"] 1)<line_sep>self.assertEqual(json["graph_id"] "deadbeef")<block_end><def_stmt>testGraphExecutionTraceWithTensorDebugValueAndDeviceNameToJson self<block_start>trace_digest=debug_events_reader.GraphExecutionTraceDigest(1234 5678 "FooOp" "Model_1/Foo_2" 1 
"deadbeef")<line_sep>trace=debug_events_reader.GraphExecutionTrace(trace_digest ["g1" "g2" "deadbeef"] debug_event_pb2.TensorDebugMode.CURT_HEALTH debug_tensor_value=[3 1] device_name="/device:GPU:0")<line_sep>json=trace.to_json()<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["op_name"] "Model_1/Foo_2")<line_sep>self.assertEqual(json["output_slot"] 1)<line_sep>self.assertEqual(json["graph_id"] "deadbeef")<line_sep>self.assertEqual(json["graph_ids"] ("g1" "g2" "deadbeef"))<line_sep>self.assertEqual(json["tensor_debug_mode"] debug_event_pb2.TensorDebugMode.CURT_HEALTH)<line_sep>self.assertEqual(json["debug_tensor_value"] (3 1))<line_sep>self.assertEqual(json["device_name"] "/device:GPU:0")<block_end><def_stmt>testGraphExecutionTraceNoTensorDebugValueNoDeviceNameToJson self<block_start>trace_digest=debug_events_reader.GraphExecutionTraceDigest(1234 5678 "FooOp" "Model_1/Foo_2" 1 "deadbeef")<line_sep>trace=debug_events_reader.GraphExecutionTrace(trace_digest ["g1" "g2" "deadbeef"] debug_event_pb2.TensorDebugMode.NO_TENSOR debug_tensor_value=<none> device_name=<none>)<line_sep>json=trace.to_json()<line_sep>self.assertEqual(json["wall_time"] 1234)<line_sep>self.assertEqual(json["op_type"] "FooOp")<line_sep>self.assertEqual(json["op_name"] "Model_1/Foo_2")<line_sep>self.assertEqual(json["output_slot"] 1)<line_sep>self.assertEqual(json["graph_id"] "deadbeef")<line_sep>self.assertEqual(json["graph_ids"] ("g1" "g2" "deadbeef"))<line_sep>self.assertEqual(json["tensor_debug_mode"] debug_event_pb2.TensorDebugMode.NO_TENSOR)<line_sep>self.assertIsNone(json["debug_tensor_value"])<line_sep>self.assertIsNone(json["device_name"])<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>ops.enable_eager_execution()<line_sep>googletest.main()<block_end> |
<import_stmt>unittest<import_stmt>torch<import_from_stmt>pytorch_adapt.hooks ISTLossHook<import_from_stmt>pytorch_adapt.layers ISTLoss<import_from_stmt>.utils assertRequiresGrad get_models_and_data<class_stmt>TestITL(unittest.TestCase)<block_start><def_stmt>test_ist_loss_hook self<block_start>torch.manual_seed(334)<line_sep>h=ISTLossHook()<line_sep>(G _ _ src_imgs _ target_imgs src_domain target_domain )=get_models_and_data()<line_sep>outputs,losses=h(locals())<line_sep>self.assertTrue(G.count<eq>2)<line_sep>assertRequiresGrad(self outputs)<line_sep>outputs,losses2=h({**locals() **outputs})<line_sep>assertRequiresGrad(self outputs)<line_sep>self.assertTrue(G.count<eq>2)<line_sep>self.assertTrue(losses<eq>losses2)<line_sep>src_features=G(src_imgs)<line_sep>target_features=G(target_imgs)<line_sep>loss_fn=ISTLoss()<line_sep>self.assertTrue(losses["ist_loss"]<eq>loss_fn(torch.cat([src_features target_features] dim=0) torch.cat([src_domain target_domain] dim=0) ))<block_end><block_end> |
<import_from_stmt>brownie *<import_stmt>json<def_stmt>main <block_start>thisNetwork=network.show_active()<if_stmt>thisNetwork<eq>"development"<block_start>acct=accounts[0]<line_sep># configFile = open('./scripts/contractInteraction/testnet_contracts.json')
<block_end><elif_stmt>thisNetwork<eq>"testnet"<or>thisNetwork<eq>"rsk-mainnet"<block_start>acct=accounts.load("rskdeployer")<block_end><else_stmt><block_start><raise>Exception("network not supported")<block_end><if_stmt>thisNetwork<eq>"rsk-mainnet"<block_start>configFile=open('./scripts/contractInteraction/mainnet_contracts.json')<block_end><elif_stmt>thisNetwork<eq>"testnet"<block_start>configFile=open('./scripts/contractInteraction/testnet_contracts.json')<block_end>contracts=json.load(configFile)<line_sep>timelockOwnerAddress=contracts['timelockOwner']<line_sep>multiSigKeyHolders=acct.deploy(MultiSigKeyHolders)<line_sep>multiSigKeyHolders.transferOwnership(timelockOwnerAddress)<block_end> |
# Copyright (C) 2011-2012 Yaco Sistemas (http://www.yaco.es)
# Copyright (C) 2010 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_from_stmt>saml2.cache Cache<class_stmt>DjangoSessionCacheAdapter(dict)<block_start>"""A cache of things that are stored in the Django Session"""<line_sep>key_prefix='_saml2'<def_stmt>__init__ self django_session key_suffix<block_start>self.session=django_session<line_sep>self.key=self.key_prefix+key_suffix<line_sep>super(DjangoSessionCacheAdapter self).__init__(self._get_objects())<block_end><def_stmt>_get_objects self<block_start><return>self.session.get(self.key {})<block_end><def_stmt>_set_objects self objects<block_start>self.session[self.key]=objects<block_end><def_stmt>sync self# Changes in inner objects do not cause session invalidation
# https://docs.djangoproject.com/en/1.9/topics/http/sessions/#when-sessions-are-saved
#add objects to session
<block_start>self._set_objects(dict(self))<line_sep>#invalidate session
self.session.modified=<true><block_end><block_end><class_stmt>OutstandingQueriesCache(object)<block_start>"""Handles the queries that have been sent to the IdP and have not
been replied yet.
"""<def_stmt>__init__ self django_session<block_start>self._db=DjangoSessionCacheAdapter(django_session '_outstanding_queries')<block_end><def_stmt>outstanding_queries self<block_start><return>self._db._get_objects()<block_end><def_stmt>set self saml2_session_id came_from<block_start>self._db[saml2_session_id]=came_from<line_sep>self._db.sync()<block_end><def_stmt>delete self saml2_session_id<block_start><if_stmt>saml2_session_id<in>self._db<block_start><del_stmt>self._db[saml2_session_id]<line_sep>self._db.sync()<block_end><block_end><block_end><class_stmt>IdentityCache(Cache)<block_start>"""Handles information about the users that have been succesfully
logged in.
This information is useful because when the user logs out we must
know where does he come from in order to notify such IdP/AA.
The current implementation stores this information in the Django session.
"""<def_stmt>__init__ self django_session<block_start>self._db=DjangoSessionCacheAdapter(django_session '_identities')<line_sep>self._sync=<true><block_end><block_end><class_stmt>StateCache(DjangoSessionCacheAdapter)<block_start>"""Store state information that is needed to associate a logout
request with its response.
"""<def_stmt>__init__ self django_session<block_start>super(StateCache self).__init__(django_session '_state')<block_end><block_end> |
# coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Recognizing the flow of time in a story is a crucial aspect of understanding it. Prior work related to time has primarily focused on identifying temporal expressions or relative sequencing of events, but here we propose computationally annotating each line of a book with wall clock times, even in the absence of explicit time-descriptive phrases. To do so, we construct a data set of hourly time phrases from 52,183 fictional books."""<import_stmt>csv<import_stmt>os<import_stmt>datasets<line_sep>_CITATION="""\
@misc{kim2020time,
title={What time is it? Temporal Analysis of Novels},
author={<NAME> and <NAME> and <NAME>},
year={2020},
eprint={2011.04124},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
"""<line_sep>_DESCRIPTION="""\
A clean data resource containing all explicit time references in a dataset of 52,183 novels whose full text is available via Project Gutenberg.
"""<line_sep>_HOMEPAGE="https://github.com/allenkim/what-time-is-it"<line_sep>_LICENSE="[More Information needed]"<line_sep># The HuggingFace dataset library don't host the datasets but only point to the original files
# This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
_URLs={"gutenberg":"https://github.com/TevenLeScao/what-time-is-it/blob/master/gutenberg_time_phrases.zip?raw=true" }<class_stmt>GutenbergTime(datasets.GeneratorBasedBuilder)<block_start>"""Novel extracts with time-of-the-day information"""<line_sep>VERSION=datasets.Version("1.1.3")<line_sep>BUILDER_CONFIGS=[datasets.BuilderConfig(name="gutenberg" description="Data pulled from the Gutenberg project") ]<def_stmt>_info self<block_start>features=datasets.Features({"guten_id":datasets.Value("string") "hour_reference":datasets.Value("string") "time_phrase":datasets.Value("string") "is_ambiguous":datasets.Value("bool_") "time_pos_start":datasets.Value("int64") "time_pos_end":datasets.Value("int64") "tok_context":datasets.Value("string") })<line_sep><return>datasets.DatasetInfo(description=_DESCRIPTION features=features supervised_keys=<none> homepage=_HOMEPAGE license=_LICENSE citation=_CITATION )<block_end><def_stmt>_split_generators self dl_manager<block_start>"""Returns SplitGenerators."""<line_sep>my_urls=_URLs[self.config.name]<line_sep>data=dl_manager.download_and_extract(my_urls)<line_sep><return>[datasets.SplitGenerator(name=datasets.Split.TRAIN # These kwargs will be passed to _generate_examples
gen_kwargs={"filepath":os.path.join(data "gutenberg_time_phrases.csv") "split":"train" } )]<block_end><def_stmt>_generate_examples self filepath split<block_start><with_stmt>open(filepath encoding="utf8")<as>f<block_start>data=csv.reader(f)<line_sep>next(data)<for_stmt>id_,row enumerate(data)<block_start><yield>id_ {"guten_id":row[0] "hour_reference":row[1] "time_phrase":row[2] "is_ambiguous":row[3] "time_pos_start":row[4] "time_pos_end":row[5] "tok_context":row[6] }<block_end><block_end><block_end><block_end> |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""NumpyIODataset"""<import_stmt>numpy<as>np<import_stmt>tensorflow<as>tf<import_from_stmt>tensorflow_io.python.ops core_ops<class_stmt>NumpyIODataset(tf.data.Dataset)<block_start>"""NumpyIODataset"""<def_stmt>__init__ self a internal=<true><block_start>"""NumpyIODataset."""<with_stmt>tf.name_scope("NumpyIODataset")<block_start><assert_stmt>internal<line_sep>entries=a<def_stmt>p entry<block_start>address,_=entry.__array_interface__["data"]<line_sep>shape=entry.shape<line_sep>dtype=tf.as_dtype(entry.dtype)<line_sep><return>address "" "" shape dtype<block_end>flatten=tf.nest.flatten(entries)<assert_stmt>all([entry.shape[0]<eq>flatten[0].shape[0]<for>entry flatten])<line_sep>params=[p(entry)<for>entry flatten]<def_stmt>f start stop<block_start><return>tf.nest.pack_sequence_as(entries [core_ops.io_numpy_read(address=address filename=filename array=array shape=shape start=start stop=stop dtype=dtype )<for>address,filename,array,shape,dtype params] )<block_end>step=1024<line_sep>total=tf.constant(flatten[0].shape[0] tf.int64)<line_sep>indices_start=tf.data.Dataset.range(0 total step)<line_sep>indices_stop=indices_start.skip(1).concatenate(tf.data.Dataset.from_tensor_slices([total]))<line_sep>dataset=tf.data.Dataset.zip((indices_start indices_stop))<line_sep>dataset=dataset.map(f)<line_sep>dataset=dataset.unbatch()<line_sep>self._dataset=dataset<line_sep>self._holder=[np.array(entry copy=<false>)<for>entry flatten]<line_sep>super().__init__(self._dataset._variant_tensor)<block_end><block_end># pylint: disable=protected-access
<def_stmt>_inputs self<block_start><return>[]<block_end>@property<def_stmt>element_spec self<block_start><return>self._dataset.element_spec<block_end><block_end><class_stmt>NumpyFileIODataset(tf.data.Dataset)<block_start>"""NumpyFileIODataset"""<def_stmt>__init__ self filename spec=<none> internal=<true><block_start>"""NumpyFileIODataset."""<with_stmt>tf.name_scope("NumpyFileIODataset")<block_start><assert_stmt>internal<if_stmt>tf.executing_eagerly()<block_start>arrays,shapes,dtypes=core_ops.io_numpy_info(filename=filename)<line_sep>arrays=tf.unstack(arrays)<line_sep>shapes=tf.unstack(shapes)<line_sep>dtypes=tf.unstack(dtypes)<line_sep>dtypes=[tf.as_dtype(dtype.numpy())<for>dtype dtypes]<line_sep>entries=list(zip(shapes dtypes arrays))<line_sep>entries=[tf.TensorSpec(shape dtype array)<for>(shape dtype array) entries]<line_sep>indices=<none><if_stmt>all([e.numpy().decode().startswith("arr_")<for>e arrays])<block_start><try_stmt><block_start>indices=[int(e.numpy()[4:])<for>e arrays]<block_end><except_stmt>ValueError<block_start><pass><block_end><block_end><if_stmt>indices<is><not><none><block_start>values=list(indices)<line_sep>values.sort()<if_stmt><not>all([k<eq>v<for>k,v enumerate(values)])<block_start>indices=<none><block_end><block_end># if indices is continuously, then construct a tuple, otherwise a dict.
<if_stmt>indices<is><not><none><block_start>entries=dict(zip(indices entries))<line_sep>entries=tuple([entries[index]<for>index sorted(indices)])<block_end><else_stmt><block_start>indices=[index.numpy().decode()<for>index tf.unstack(arrays)]<line_sep>entries=dict(zip(indices entries))<block_end>flatten=tf.nest.flatten(entries)<line_sep>shapes=[entry.shape<for>entry flatten]<assert_stmt>all([shape[0]<eq>shapes[0][0]<for>shape shapes])<block_end><else_stmt><block_start><assert_stmt>spec<is><not><none><if_stmt>isinstance(spec tuple)<block_start>entries=tuple([tf.TensorSpec(<none> (v<if>isinstance(v tf.dtypes.DType)<else>v.dtype) "arr_{}".format(i) )<for>i,v enumerate(spec)])<block_end><else_stmt><block_start>entries={k:tf.TensorSpec(<none> (v<if>isinstance(v tf.dtypes.DType)<else>v.dtype) k)<for>k,v spec.items()}<block_end>flatten=tf.nest.flatten(entries)<def_stmt>shape_f entry<block_start>shape,_=core_ops.io_numpy_spec(filename=filename array=entry.name)<line_sep><return>shape<block_end>shapes=[shape_f(entry)<for>entry flatten]<block_end><def_stmt>p entry shape<block_start><return>0 filename entry.name shape entry.dtype<block_end>params=[p(entry shape)<for>entry,shape zip(flatten shapes)]<def_stmt>f start stop<block_start><return>tf.nest.pack_sequence_as(entries [core_ops.io_numpy_read(address=address filename=filename array=array shape=shape start=start stop=stop dtype=dtype )<for>address,filename,array,shape,dtype params] )<block_end>step=1024<line_sep>total=tf.cast(shapes[0][0] tf.int64)<line_sep>indices_start=tf.data.Dataset.range(0 total step)<line_sep>indices_stop=indices_start.skip(1).concatenate(tf.data.Dataset.from_tensor_slices([total]))<line_sep>dataset=tf.data.Dataset.zip((indices_start indices_stop))<line_sep>dataset=dataset.map(f)<line_sep>dataset=dataset.unbatch()<line_sep>self._dataset=dataset<line_sep>super().__init__(self._dataset._variant_tensor)<block_end><block_end># pylint: disable=protected-access
<def_stmt>_inputs self<block_start><return>[]<block_end>@property<def_stmt>element_spec self<block_start><return>self._dataset.element_spec<block_end><block_end> |
'''Autogenerated by xml_generate script, do not edit!'''<import_from_stmt>OpenGL platform<as>_p arrays<line_sep># Code generation uses this
<import_from_stmt>OpenGL.raw.GL _types<as>_cs<line_sep># End users want this...
<import_from_stmt>OpenGL.raw.GL._types *<import_from_stmt>OpenGL.raw.GL _errors<import_from_stmt>OpenGL.constant Constant<as>_C<import_stmt>ctypes<line_sep>_EXTENSION_NAME='GL_ARB_viewport_array'<def_stmt>_f function<block_start><return>_p.createFunction(function _p.PLATFORM.GL 'GL_ARB_viewport_array' error_checker=_errors._error_checker)<block_end>GL_DEPTH_RANGE=_C('GL_DEPTH_RANGE' 0x0B70)<line_sep>GL_FIRST_VERTEX_CONVENTION=_C('GL_FIRST_VERTEX_CONVENTION' 0x8E4D)<line_sep>GL_LAST_VERTEX_CONVENTION=_C('GL_LAST_VERTEX_CONVENTION' 0x8E4E)<line_sep>GL_LAYER_PROVOKING_VERTEX=_C('GL_LAYER_PROVOKING_VERTEX' 0x825E)<line_sep>GL_MAX_VIEWPORTS=_C('GL_MAX_VIEWPORTS' 0x825B)<line_sep>GL_PROVOKING_VERTEX=_C('GL_PROVOKING_VERTEX' 0x8E4F)<line_sep>GL_SCISSOR_BOX=_C('GL_SCISSOR_BOX' 0x0C10)<line_sep>GL_SCISSOR_TEST=_C('GL_SCISSOR_TEST' 0x0C11)<line_sep>GL_UNDEFINED_VERTEX=_C('GL_UNDEFINED_VERTEX' 0x8260)<line_sep>GL_VIEWPORT=_C('GL_VIEWPORT' 0x0BA2)<line_sep>GL_VIEWPORT_BOUNDS_RANGE=_C('GL_VIEWPORT_BOUNDS_RANGE' 0x825D)<line_sep>GL_VIEWPORT_INDEX_PROVOKING_VERTEX=_C('GL_VIEWPORT_INDEX_PROVOKING_VERTEX' 0x825F)<line_sep>GL_VIEWPORT_SUBPIXEL_BITS=_C('GL_VIEWPORT_SUBPIXEL_BITS' 0x825C)<line_sep>@_f@_p.types(<none> _cs.GLuint _cs.GLsizei arrays.GLdoubleArray)<def_stmt>glDepthRangeArrayv first count v<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint _cs.GLdouble _cs.GLdouble)<def_stmt>glDepthRangeIndexed index n f<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLenum _cs.GLuint arrays.GLdoubleArray)<def_stmt>glGetDoublei_v target index data<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLenum _cs.GLuint arrays.GLfloatArray)<def_stmt>glGetFloati_v target index data<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint _cs.GLsizei arrays.GLintArray)<def_stmt>glScissorArrayv first count v<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint _cs.GLint _cs.GLint _cs.GLsizei _cs.GLsizei)<def_stmt>glScissorIndexed index left bottom width 
height<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint arrays.GLintArray)<def_stmt>glScissorIndexedv index v<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint _cs.GLsizei arrays.GLfloatArray)<def_stmt>glViewportArrayv first count v<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint _cs.GLfloat _cs.GLfloat _cs.GLfloat _cs.GLfloat)<def_stmt>glViewportIndexedf index x y w h<block_start><pass><block_end>@_f@_p.types(<none> _cs.GLuint arrays.GLfloatArray)<def_stmt>glViewportIndexedfv index v<block_start><pass><block_end> |
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>siTrackerMultiRecHitUpdator=cms.ESProducer("SiTrackerMultiRecHitUpdatorESProducer" ComponentName=cms.string('SiTrackerMultiRecHitUpdator') TTRHBuilder=cms.string('WithAngleAndTemplate') HitPropagator=cms.string('trackingRecHitPropagator') #AnnealingProgram = cms.vdouble(80.0, 9.0, 4.0, 1.0, 1.0, 1.0),
AnnealingProgram=cms.vdouble(30.0 18.0 14.0 11.0 6.0 4.0 2.0 1.0) ChiSquareCut1D=cms.double(10.8276) ChiSquareCut2D=cms.double(13.8155) Debug=cms.bool(<false>))<line_sep> |
<import_stmt>numpy<as>np<import_from_stmt>typing Callable<import_from_stmt>.base_score BaseScore<class_stmt>BleiLaffertyScore(BaseScore)<block_start>"""
This score implements method described in 2009 paper
Blei, <NAME>., and <NAME>erty. "Topic models." Text Mining.
Chapman and Hall/CRC, 2009. 101-124.
At the core this score helps to discover tokens that are most likely
to describe given topic. Summing up that score helps to estimate how
well the model distinguishes between topics. The higher this score - better
"""<def_stmt>__init__ self name:str=<none> num_top_tokens:int=30 should_compute:Callable[[int] bool]=<none><block_start>"""
Parameters
----------
name:
name of the score
num_top_tokens : int
now many tokens we consider to be
"""<line_sep>super().__init__(name=name should_compute=should_compute)<line_sep>self.num_top_tokens=num_top_tokens<block_end><def_stmt>__repr__ self<block_start><return>f'{self.__class__.__name__}(num_top_tokens={self.num_top_tokens})'<block_end><def_stmt>_compute_blei_scores self phi<block_start>"""
Computes Blei score
phi[wt] * [log(phi[wt]) - 1/T sum_k log(phi[wk])]
Parameters
----------
phi : pd.Dataframe
phi matrix of the model
Returns
-------
score : pd.Dataframe
wheighted phi matrix
"""<line_sep># noqa: W291
topic_number=phi.shape[1]<line_sep>blei_eps=1e-42<line_sep>log_phi=np.log(phi+blei_eps)<line_sep>numerator=np.sum(log_phi axis=1)<line_sep>numerator=numerator[: np.newaxis]<if_stmt>hasattr(log_phi "values")<block_start>multiplier=log_phi.values-numerator/topic_number<block_end><else_stmt><block_start>multiplier=log_phi-numerator/topic_number<block_end>scores=phi<times>multiplier<line_sep><return>scores<block_end><def_stmt>call self model **kwargs<block_start>modalities=list(model.class_ids.keys())<line_sep>score=0<for_stmt>modality modalities<block_start>phi=model.get_phi(class_ids=modality)<line_sep>modality_scores=np.sort(self._compute_blei_scores(phi).values)<line_sep>score<augadd>np.sum(modality_scores[-self.num_top_tokens: :])<block_end><if_stmt>modalities<is><none><block_start>phi=model.get_phi()<line_sep>modality_scores=np.sort(self._compute_blei_scores(phi).values)<line_sep>score=np.sum(modality_scores[-self.num_top_tokens: :])<block_end><return>score<block_end><block_end> |
# File: cortadora.py
# Del capítulo 15 de _Algoritmos Genéticos con Python_
#
# Author: <NAME> <<EMAIL>>
# Copyright (c) 2017 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
<import_from_stmt>enum Enum<class_stmt>ContenidoDelCampo(Enum)<block_start>Hierba=' #'<line_sep>Cortado=' .'<line_sep>Cortador='C'<def_stmt>__str__ self<block_start><return>self.value<block_end><block_end><class_stmt>Dirección<block_start><def_stmt>__init__ self índice xOffset yOffset símbolo<block_start>self.Índice=índice<line_sep>self.XOffset=xOffset<line_sep>self.YOffset=yOffset<line_sep>self.Símbolo=símbolo<block_end><def_stmt>mover_de self ubicación distancia=1<block_start><return>Ubicación(ubicación.X+distancia<times>self.XOffset ubicación.Y+distancia<times>self.YOffset)<block_end><block_end><class_stmt>Direcciones(Enum)<block_start>Norte=Dirección(0 0 -1 '^')<line_sep>Este=Dirección(1 1 0 '>')<line_sep>Sur=Dirección(2 0 1 'v')<line_sep>Oeste=Dirección(3 -1 0 '<')<line_sep>@staticmethod<def_stmt>obtener_dirección_después_de_girar_a_la_izquierda_90_grados dirección<block_start>nuevoÍndice=dirección.Índice-1<if>dirección.Índice<g>0<else>len(Direcciones)-1<line_sep>nuevaDirección=next(i<for>i Direcciones<if>i.value.Índice<eq>nuevoÍndice)<line_sep><return>nuevaDirección.value<block_end>@staticmethod<def_stmt>obtener_dirección_después_de_girar_a_la_derecha_90_grados dirección<block_start>nuevoÍndice=dirección.Índice+1<if>dirección.Índice<l>len(Direcciones)-1<else>0<line_sep>nuevaDirección=next(i<for>i Direcciones<if>i.value.Índice<eq>nuevoÍndice)<line_sep><return>nuevaDirección.value<block_end><block_end><class_stmt>Ubicación<block_start><def_stmt>__init__ self x y<block_start>self.X,self.Y=x y<block_end><def_stmt>mover self xOffset yOffset<block_start><return>Ubicación(self.X+xOffset self.Y+yOffset)<block_end><block_end><class_stmt>Cortadora<block_start><def_stmt>__init__ self ubicación dirección<block_start>self.Ubicación=ubicación<line_sep>self.Dirección=dirección<line_sep>self.CuentaDePasos=0<block_end><def_stmt>girar_a_la_izquierda 
self<block_start>self.CuentaDePasos<augadd>1<line_sep>self.Dirección=Direcciones.obtener_dirección_después_de_girar_a_la_izquierda_90_grados(self.Dirección)<block_end><def_stmt>corta self campo<block_start>nuevaUbicación=self.Dirección.mover_de(self.Ubicación)<line_sep>nuevaUbicación,esVálida=campo.arreglar_ubicación(nuevaUbicación)<if_stmt>esVálida<block_start>self.Ubicación=nuevaUbicación<line_sep>self.CuentaDePasos<augadd>1<line_sep>campo.ajuste(self.Ubicación self.CuentaDePasos<if>self.CuentaDePasos<g>9<else>" {}".format(self.CuentaDePasos))<block_end><block_end><def_stmt>salta self campo adelante derecha<block_start>nuevaUbicación=self.Dirección.mover_de(self.Ubicación adelante)<line_sep>derechaDirección=Direcciones.obtener_dirección_después_de_girar_a_la_derecha_90_grados(self.Dirección)<line_sep>nuevaUbicación=derechaDirección.mover_de(nuevaUbicación derecha)<line_sep>nuevaUbicación,esVálida=campo.arreglar_ubicación(nuevaUbicación)<if_stmt>esVálida<block_start>self.Ubicación=nuevaUbicación<line_sep>self.CuentaDePasos<augadd>1<line_sep>campo.ajuste(self.Ubicación self.CuentaDePasos<if>self.CuentaDePasos<g>9<else>" {}".format(self.CuentaDePasos))<block_end><block_end><block_end><class_stmt>Campo<block_start><def_stmt>__init__ self anchura altura contenidoInicial<block_start>self.Campo=[[contenidoInicial]<times>anchura<for>_ range(altura)]<line_sep>self.Anchura=anchura<line_sep>self.Altura=altura<block_end><def_stmt>ajuste self ubicación símbolo<block_start>self.Campo[ubicación.Y][ubicación.X]=símbolo<block_end><def_stmt>cuente_cortada self<block_start><return>sum(1<for>fila range(self.Altura)<for>columna range(self.Anchura)<if>self.Campo[fila][columna]<ne>ContenidoDelCampo.Hierba)<block_end><def_stmt>mostrar self cortadora<block_start><for_stmt>índiceDeFilas range(self.Altura)<block_start><if_stmt>índiceDeFilas<ne>cortadora.Ubicación.Y<block_start>fila=' '.join(map(str 
self.Campo[índiceDeFilas]))<block_end><else_stmt><block_start>r=self.Campo[índiceDeFilas][:]<line_sep>r[cortadora.Ubicación.X]="{}{}".format(ContenidoDelCampo.Cortador cortadora.Dirección.Símbolo)<line_sep>fila=' '.join(map(str r))<block_end>print(fila)<block_end><block_end><block_end><class_stmt>CampoValidando(Campo)<block_start><def_stmt>__init__ self anchura altura contenidoInicial<block_start>super().__init__(anchura altura contenidoInicial)<block_end><def_stmt>arreglar_ubicación self ubicación<block_start><if_stmt>ubicación.X<ge>self.Anchura<or>ubicación.X<l>0<or>ubicación.Y<ge>self.Altura<or>ubicación.Y<l>0<block_start><return><none> <false><block_end><return>ubicación <true><block_end><block_end><class_stmt>CampoToroidal(Campo)<block_start><def_stmt>__init__ self anchura altura contenidoInicial<block_start>super().__init__(anchura altura contenidoInicial)<block_end><def_stmt>arreglar_ubicación self ubicación<block_start>nuevaUbicación=Ubicación(ubicación.X ubicación.Y)<if_stmt>nuevaUbicación.X<l>0<block_start>nuevaUbicación.X<augadd>self.Anchura<block_end><elif_stmt>nuevaUbicación.X<ge>self.Anchura<block_start>nuevaUbicación.X<augmod>self.Anchura<block_end><if_stmt>nuevaUbicación.Y<l>0<block_start>nuevaUbicación.Y<augadd>self.Altura<block_end><elif_stmt>nuevaUbicación.Y<ge>self.Altura<block_start>nuevaUbicación.Y<augmod>self.Altura<block_end><return>nuevaUbicación <true><block_end><block_end> |
<import_from_stmt>indra.databases efo_client<import_from_stmt>indra.databases.efo_client _client<as>client<def_stmt>test_efo_client_loaded <block_start><assert_stmt>'efo'<eq>client.prefix<assert_stmt>client.entries<assert_stmt>client.name_to_id<block_end><def_stmt>test_efo_id_to_name <block_start><assert_stmt>'muscle measurement'<eq>efo_client.get_efo_name_from_efo_id('0004515')<block_end><def_stmt>test_efo_name_to_id <block_start><assert_stmt>'0004515'<eq>efo_client.get_efo_id_from_efo_name('muscle measurement')<block_end> |
# This contribution was made by: <NAME>
# Date: 12/15/2020
<import_stmt>logging<import_stmt>re<import_stmt>aiohttp<import_stmt>discord<import_stmt>discord.ext.commands<as>commands<import_stmt>bot.bot_secrets<as>bot_secrets<import_stmt>bot.extensions<as>ext<import_from_stmt>bot.consts Colors<import_from_stmt>bot.messaging.events Events<line_sep>log=logging.getLogger(__name__)<line_sep>API_URL='https://www.dictionaryapi.com/api/v3/references/collegiate/json/'<class_stmt>defineCog(commands.Cog)<block_start><def_stmt>__init__ self bot<block_start>self.bot=bot<block_end><def_stmt>getPageData self jsonData word<block_start>pages=[]<line_sep># If the word is found, the JSON will return a dictionary of information.
<if_stmt>(isinstance(jsonData[0] dict))# For words with several definitions, it will return several dictionaries.
<block_start><for_stmt>wordData jsonData# Stems of the given word (Past Tense, Future Tense, Perfect Tense, etc.)
<block_start>wordStems=wordData.get('meta' {}).get('stems' [])<line_sep># Syllables of the given word
syllableData=wordData.get('hwi' {}).get('hw' '')<line_sep># Pronunciation of the given word (With those weird letters)
pronunc=[]<line_sep>prsData=wordData.get('hwi' {}).get('prs' [])<for_stmt>soundData prsData<block_start>pronunc.append(soundData.get('mw' ''))<block_end># Type of the given word (Noun, Verb, Adjective, etc.)
wordType=wordData.get('fl' '')<line_sep># Definitions of the given word
definitions=[]<line_sep>defData=wordData.get('shortdef' [])<for_stmt>defin defData<block_start>definitions.append(defin)<block_end># Turn data into one long string (represents a page)
template='Tenses: '<for_stmt>s enumerate(wordStems)<block_start>template<augadd>s[1]<if_stmt>s[0]<ne>len(wordStems)-1<block_start>template<augadd>', '<block_end><block_end>template<augadd>'\n'<line_sep>template<augadd>f'Syllables: {syllableData}\n'<line_sep>template<augadd>'Pronunciation: '<for_stmt>s enumerate(pronunc)<block_start>template<augadd>s[1]<if_stmt>s[0]<ne>len(pronunc)-1<block_start>template<augadd>', '<block_end><block_end>template<augadd>'\n'<line_sep>template<augadd>f'Word Type: {wordType}\n'<line_sep>template<augadd>'\n'<for_stmt>s enumerate(definitions)<block_start>page=f'{template}Definition: {s[1]}'<line_sep>page=page.replace('*' ' | ')<line_sep>pages.append(page)<block_end><block_end><block_end># If the word cannot be found, the JSON returns a list of other possible suggestions.
<elif_stmt>isinstance(jsonData[0] str)<block_start>template=f'Word not found, see also: '<for_stmt>s enumerate(jsonData)<block_start>template=f'{template} {s[1]}'<if_stmt>s[0]<ne>len(jsonData)-1<block_start>template=f'{template}, '<block_end><block_end>pages=[template]<block_end><return>pages<block_end>@ext.command()@ext.long_help('Gets the dictionary defintion of any given word')@ext.short_help('Gets a words definition')@ext.example('define hello')<async_keyword><def_stmt>define self ctx word<block_start>"""
Given a word, find its definition and any other relevant information
USE: define <word>
EXAMPLE: define schadenfreude
For phrases, use underscores
EXAMPLE: define computer_science
Letters, numbers, and special characters (_, &, and -) are supported
"""<line_sep>self.api_key=bot_secrets.secrets.merriam_key<line_sep># Remove any characters besides &, _, or - that are not in ranges a-z, A-Z, or 0-9
# per the ASCII Table https://www.asciitable.com
word=re.sub("[^a-zA-Z0-9 &_-]+" "" word)<line_sep>actualWord=word.replace('_' ' ')<line_sep>word=word.replace('_' '%20').lower()<line_sep>url=f'{API_URL}{word}?key={self.api_key}'<line_sep>wordPages=[]<line_sep># Try Except for catching errors that could give away the API key
<try_stmt><block_start><async_keyword><with_stmt>aiohttp.request('get' url)<as>response<block_start><if_stmt>response.status<eq>200<block_start>jsonData=<await>response.json()<line_sep>wordPages=self.getPageData(jsonData word)<block_end><else_stmt><block_start>embed=discord.Embed(title='Merriam_Webster Dictionary' color=Colors.Error)<line_sep>ErrMsg=f'Oh No! There appears to be an issue! Yell at one of the developers with the following code.\nError Code: {response.status}'<line_sep>embed.add_field(name='Error with API' value=ErrMsg inline=<false>)<line_sep><await>ctx.send(embed=embed)<line_sep><return><block_end><await>self.bot.messenger.publish(Events.on_set_pageable_text embed_name='Merriam-Webster Dictionary' field_title=f'Word: {actualWord}' pages=wordPages author=ctx.author channel=ctx.channel)<block_end><block_end><except_stmt>Exception<as>err<block_start>err_str=str(err)<line_sep>err_str=re.sub(self.api_key "CLASSIFIED" err_str)<line_sep><raise>Exception(err_str).with_traceback(err.__traceback__)<block_end><block_end><block_end><def_stmt>setup bot<block_start>bot.add_cog(defineCog(bot))<block_end> |
<import_stmt>requests<class_stmt>WeiboSession(requests.Session)<block_start><def_stmt>__init__ self username password<block_start>super(WeiboSession self).__init__()<line_sep>self.__username=username<line_sep>self.__password=password<block_end><def_stmt>__del__ self<block_start>self.close()<block_end><def_stmt>login self<block_start>loginURL="http://passport.weibo.cn/sso/login"<line_sep>data={"username":self.__username "password":self.__password "savestate":"1" "r":"http://m.weibo.cn/" "ec":"0" "entry":"mweibo" "mainpageflag":"1" }<line_sep>self.headers.update({"Referer":"http://passport.weibo.cn/signin/login?entry=mweibo&res=wel&wm=3349&r=http%3A%2F%2Fm.weibo.cn%2F&sudaref=passport.weibo.cn&retcode=6102" })<line_sep>retJson=self.post(loginURL data=data).json()<if_stmt>retJson["retcode"]<eq>20000000<block_start><for_stmt>tmpURL retJson["data"]["crossdomainlist"].values()<block_start>self.get(tmpURL)<block_end>myURL="http://weibo.cn/"<line_sep>self.get(myURL)<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>weibo=WeiboSession("" "")<block_end> |
# coding: utf-8
<import_stmt>smtplib<import_from_stmt>vilya.config SMTP_SERVER<def_stmt>send_mail msg<block_start>fromaddr=msg["From"]<line_sep>toaddrs=[]<if_stmt>msg['To']<block_start>toaddrs<augadd>[addr.strip()<for>addr msg["To"].split(',')]<block_end><if_stmt>msg["Cc"]<block_start>toaddrs<augadd>[addr.strip()<for>addr msg["Cc"].split(',')]<block_end>smtp=smtplib.SMTP(SMTP_SERVER)<line_sep>smtp.sendmail(fromaddr toaddrs msg.as_string())<line_sep>smtp.quit()<block_end> |
<import_from_stmt>flask Flask jsonify request<import_from_stmt>flask.views MethodView<line_sep>app=Flask(__name__)<line_sep>languages=[{'name':'JavaScript'} {'name':'Python'} {'name':'Ruby'}]<def_stmt>get_language name<block_start><return>[language<for>language languages<if>language['name']<eq>name][0]<block_end><class_stmt>Language(MethodView)<block_start><def_stmt>get self language_name<block_start><if_stmt>language_name<block_start><return>jsonify({'language':get_language(language_name)})<block_end><else_stmt><block_start><return>jsonify({'languages':languages})<block_end><block_end><def_stmt>post self<block_start>new_language_name=request.json['name']<line_sep>language={'name':new_language_name}<line_sep>languages.append(language)<line_sep><return>jsonify({'language':get_language(new_language_name)}) 201<block_end><def_stmt>put self language_name<block_start>language=get_language(language_name)<line_sep>new_language_name=request.json['name']<line_sep>language['name']=new_language_name<line_sep><return>jsonify({'language':get_language(new_language_name)})<block_end><def_stmt>delete self language_name<block_start>language=get_language(language_name)<line_sep>languages.remove(language)<line_sep><return>'' 204<block_end><block_end>language_view=Language.as_view('language_api')<line_sep>app.add_url_rule('/language' methods=['POST'] view_func=language_view)<line_sep>app.add_url_rule('/language' methods=['GET'] defaults={'language_name':<none>} view_func=language_view)<line_sep>app.add_url_rule('/language/<language_name>' methods=['GET' 'PUT' 'DELETE'] view_func=language_view)<line_sep> |
<import_stmt>cPickle<as>pkl<import_stmt>gzip<import_stmt>os<import_stmt>re<import_stmt>sys<import_stmt>numpy<import_stmt>math<import_stmt>random<import_from_stmt>binary_tree BinaryTree<def_stmt>convert_ptb_to_tree line<block_start>index=0<line_sep>tree=<none><line_sep>line=line.rstrip()<line_sep>stack=[]<line_sep>parts=line.split()<for_stmt>p_i,p enumerate(parts)# opening of a bracket, create a new node, take parent from top of stack
<block_start><if_stmt>p<eq>'('<block_start><if_stmt>tree<is><none><block_start>tree=BinaryTree(index)<block_end><else_stmt><block_start>add_descendant(tree index stack[-1])<block_end># add the newly created node to the stack and increment the index
stack.append(index)<line_sep>index<augadd>1<block_end># close of a bracket, pop node on top of the stack
<elif_stmt>p<eq>')'<block_start>stack.pop(-1)<block_end># otherwise, create a new node, take parent from top of stack, and set word
<else_stmt><block_start>add_descendant(tree index stack[-1])<line_sep>tree.set_word(index p)<line_sep>index<augadd>1<block_end><block_end><return>tree<block_end><def_stmt>add_descendant tree index parent_index# add to the left first if possible, then to the right
<block_start><if_stmt>tree.has_left_descendant_at_node(parent_index)<block_start><if_stmt>tree.has_right_descendant_at_node(parent_index)<block_start>sys.exit("Node "+str(parent_index)+" already has two children")<block_end><else_stmt><block_start>tree.add_right_descendant(index parent_index)<block_end><block_end><else_stmt><block_start>tree.add_left_descendant(index parent_index)<block_end><block_end><def_stmt>fopen filename mode='r'<block_start><if_stmt>filename.endswith('.gz')<block_start><return>gzip.open(filename mode)<block_end><return>open(filename mode)<block_end><class_stmt>TextIterator<block_start>"""Simple Bitext iterator."""<def_stmt>__init__ self source target label dict batch_size=128 n_words=-1 maxlen=500 shuffle=<true><block_start>self.source=fopen(source 'r')<line_sep>self.target=fopen(target 'r')<line_sep>self.label=fopen(label 'r')<with_stmt>open(dict 'rb')<as>f<block_start>self.dict=pkl.load(f)<block_end>self.batch_size=batch_size<line_sep>self.n_words=n_words<line_sep>self.maxlen=maxlen<line_sep>self.shuffle=shuffle<line_sep>self.end_of_data=<false><line_sep>self.source_buffer=[]<line_sep>self.target_buffer=[]<line_sep>self.label_buffer=[]<line_sep>self.k=batch_size<times>20<block_end><def_stmt>__iter__ self<block_start><return>self<block_end><def_stmt>reset self<block_start>self.source.seek(0)<line_sep>self.target.seek(0)<line_sep>self.label.seek(0)<block_end><def_stmt>next self<block_start><if_stmt>self.end_of_data<block_start>self.end_of_data=<false><line_sep>self.reset()<line_sep><raise>StopIteration<block_end>source=[]<line_sep>target=[]<line_sep>label=[]<line_sep># fill buffer, if it's empty
<assert_stmt>len(self.source_buffer)<eq>len(self.target_buffer) 'Buffer size mismatch!'<assert_stmt>len(self.source_buffer)<eq>len(self.label_buffer) 'Buffer size mismatch!'<if_stmt>len(self.source_buffer)<eq>0<block_start><for_stmt>k_ xrange(self.k)<block_start>ss=self.source.readline()<if_stmt>ss<eq>""<block_start><break><block_end>tt=self.target.readline()<if_stmt>tt<eq>""<block_start><break><block_end>ll=self.label.readline()<if_stmt>ll<eq>""<block_start><break><block_end>ss=convert_ptb_to_tree(ss)<line_sep>words_ss,left_mask_ss,right_mask_ss=ss.convert_to_sequence_and_masks(ss.root)<line_sep>words_ss=[self.dict[w]<if>w<in>self.dict<else>1<for>w words_ss]<if_stmt>self.n_words<g>0<block_start>words_ss=[w<if>w<l>self.n_words<else>1<for>w words_ss]<block_end>ss=(words_ss left_mask_ss right_mask_ss)<line_sep>tt=convert_ptb_to_tree(tt)<line_sep>words_tt,left_mask_tt,right_mask_tt=tt.convert_to_sequence_and_masks(tt.root)<line_sep>words_tt=[self.dict[w]<if>w<in>self.dict<else>1<for>w words_tt]<if_stmt>self.n_words<g>0<block_start>words_tt=[w<if>w<l>self.n_words<else>1<for>w words_tt]<block_end>tt=(words_tt left_mask_tt right_mask_tt)<if_stmt>len(words_ss)<g>self.maxlen<or>len(words_tt)<g>self.maxlen<block_start><continue><block_end>self.source_buffer.append(ss)<line_sep>self.target_buffer.append(tt)<line_sep>self.label_buffer.append(ll.strip())<block_end><if_stmt>self.shuffle# sort by target buffer
<block_start>tlen=numpy.array([len(t[0])<for>t self.target_buffer])<line_sep>tidx=tlen.argsort()<line_sep># shuffle mini-batch
tindex=[]<line_sep>small_index=range(int(math.ceil(len(tidx)<times>1./self.batch_size)))<line_sep>random.shuffle(small_index)<for_stmt>i small_index<block_start><if_stmt>(i+1)<times>self.batch_size<g>len(tidx)<block_start>tindex.extend(tidx[i<times>self.batch_size:])<block_end><else_stmt><block_start>tindex.extend(tidx[i<times>self.batch_size:(i+1)<times>self.batch_size])<block_end><block_end>tidx=tindex<line_sep>_sbuf=[self.source_buffer[i]<for>i tidx]<line_sep>_tbuf=[self.target_buffer[i]<for>i tidx]<line_sep>_lbuf=[self.label_buffer[i]<for>i tidx]<line_sep>self.source_buffer=_sbuf<line_sep>self.target_buffer=_tbuf<line_sep>self.label_buffer=_lbuf<block_end><block_end><if_stmt>len(self.source_buffer)<eq>0<or>len(self.target_buffer)<eq>0<or>len(self.label_buffer)<eq>0<block_start>self.end_of_data=<false><line_sep>self.reset()<line_sep><raise>StopIteration<block_end><try_stmt># actual work here
<block_start><while_stmt><true># read from source file and map to word index
<block_start><try_stmt><block_start>ss=self.source_buffer.pop(0)<line_sep>tt=self.target_buffer.pop(0)<line_sep>ll=self.label_buffer.pop(0)<block_end><except_stmt>IndexError<block_start><break><block_end>source.append(ss)<line_sep>target.append(tt)<line_sep>label.append(ll)<if_stmt>len(source)<ge>self.batch_size<or>len(target)<ge>self.batch_size<or>len(label)<ge>self.batch_size<block_start><break><block_end><block_end><block_end><except_stmt>IOError<block_start>self.end_of_data=<true><block_end><if_stmt>len(source)<le>0<or>len(target)<le>0<or>len(label)<le>0<block_start>self.end_of_data=<false><line_sep>self.reset()<line_sep><raise>StopIteration<block_end><return>source target label<block_end><block_end> |
"""
viewaction
"""<import_from_future_stmt> absolute_import division print_function<import_stmt>logging<import_from_stmt>PySide QtGui QtCore<import_from_stmt>PySide.QtCore Qt<import_from_stmt>mceditlib.util.lazyprop weakrefprop<import_from_stmt>mcedit2.util.settings Settings<line_sep>log=logging.getLogger(__name__)<class_stmt>ViewAction(QtCore.QObject)<block_start>button=Qt.NoButton<line_sep>modifiers=Qt.NoModifier<line_sep>key=0<line_sep>labelText="Unknown Action"<line_sep>hidden=<false># Hide from configuration
settingsKey=NotImplemented<line_sep>acceptsMouseWheel=<false><line_sep>WHEEL_UP=0x100<line_sep>WHEEL_DOWN=0x200<line_sep>_buttonNames=<none><def_stmt>__init__ self<block_start>"""
An action that can be bound to a keypress or mouse button click, drag, or
movement with the bound key or button held.
"""<line_sep>super(ViewAction self).__init__()<if_stmt>self.settingsKey<is><not><none><block_start>settings=Settings()<line_sep>prefix="keybindings/"<try_stmt><block_start>modifiers=int(settings.value(prefix+self.settingsKey+"/modifiers" self.modifiers))<line_sep>button=int(settings.value(prefix+self.settingsKey+"/button" self.button))<line_sep>key=int(settings.value(prefix+self.settingsKey+"/key" self.key))<block_end><except_stmt>Exception<as>e<block_start>log.error("Error while reading key binding:")<block_end><else_stmt><block_start>self.modifiers=modifiers<line_sep>self.button=button<line_sep>self.key=key<block_end><block_end><block_end><def_stmt>__repr__ self<block_start><return>"%s(button=%s, key=%s, modifiers=%s)"%(self.__class__.__name__ self.button self.key self.modifiers)<block_end><def_stmt>setBinding self button key modifiers<block_start>self.button=button<line_sep>self.key=key<line_sep>self.modifiers=modifiers<if_stmt>self.settingsKey<is><not><none><block_start>settings=Settings()<line_sep>prefix="keybindings/"<line_sep>settings.setValue(prefix+self.settingsKey+"/button" self.button)<line_sep>settings.setValue(prefix+self.settingsKey+"/key" self.key)<line_sep>settings.setValue(prefix+self.settingsKey+"/modifiers" int(self.modifiers))<block_end><block_end><def_stmt>matchKeyEvent self event<block_start>key=event.key()<line_sep>modifiers=event.modifiers()<if_stmt>key<in>(Qt.Key_Shift Qt.Key_Control Qt.Key_Alt Qt.Key_Meta)<block_start>modifiers=self.modifiers<block_end># pressing modifier key by itself has modifiers set, but releasing modifiers does not
matched=self.key<eq>key<if_stmt>event.type<eq>QtCore.QEvent.KeyPress# Only match modifiers on key press, ignore modifiers on release to handle
# input sequences like: S down, Shift down, S up, Shift up
<block_start>matched<augand>(self.modifiers<eq>modifiers)<block_end><return>matched<block_end><def_stmt>matchModifiers self event<block_start><return>(self.modifiers<is><none><or>self.modifiers<eq>event.modifiers())<block_end><def_stmt>mouseMoveEvent self event<block_start>"""
Called when the mouse moves while the bound keys or buttons are pressed.
:type event: QtGui.QMouseEvent
"""<block_end><def_stmt>mousePressEvent self event<block_start>"""
Called when the bound mouse button is pressed. By default, calls buttonPressEvent.
:type event: QtGui.QMouseEvent
"""<line_sep>self.buttonPressEvent(event)<block_end><def_stmt>mouseReleaseEvent self event<block_start>"""
Called when the bound mouse button is released. By default, calls buttonReleaseEvent
:type event: QtGui.QMouseEvent
"""<line_sep>self.buttonReleaseEvent(event)<block_end><def_stmt>keyPressEvent self event<block_start>"""
Called when the bound key is pressed. By default, calls buttonPressEvent.
:type event: QtGui.QKeyEvent
"""<line_sep>self.buttonPressEvent(event)<block_end><def_stmt>keyReleaseEvent self event<block_start>"""
Called when the bound key is released. By default, calls buttonReleaseEvent
:type event: QtGui.QKeyEvent
"""<line_sep>self.buttonReleaseEvent(event)<block_end><def_stmt>buttonPressEvent self event<block_start>"""
Called by mousePressEvent and keyPressEvent.
Implement this to handle button-press events if it doesn't matter whether the action is bound to a key or
mouse button.
:type event: QtGui.QEvent
"""<block_end><def_stmt>buttonReleaseEvent self event<block_start>"""
Called by mouseReleaseEvent and keyReleaseEvent.
Implement this to handle button-release events if it doesn't matter whether the action is bound to a key or
mouse button.
:type event: QtGui.QEvent
"""<block_end><def_stmt>buttonName self buttons<block_start><if_stmt>ViewAction._buttonNames<is><none><block_start>ViewAction._buttonNames=[(Qt.LeftButton self.tr("Left Button")) (Qt.RightButton self.tr("Right Button")) (Qt.MiddleButton self.tr("Middle Button")) (ViewAction.WHEEL_UP self.tr("Mousewheel Up")) (ViewAction.WHEEL_DOWN self.tr("Mousewheel Down")) ]<block_end>parts=[name<for>mask,name self._buttonNames<if>buttons&mask]<line_sep><return>"+".join(parts)<block_end><def_stmt>describeKeys self<block_start>modifierKeyNames={Qt.Key_Shift:self.tr("Shift") Qt.Key_Control:self.tr("Control") Qt.Key_Alt:self.tr("Alt") Qt.Key_Meta:self.tr("Meta") }<line_sep>s=modifierKeyNames.get(self.key)# QKeySequence returns weird strings when only a modifier is pressed
<if_stmt>s<is><none><block_start><try_stmt><block_start>s=QtGui.QKeySequence(self.key|self.modifiers).toString()<block_end><except_stmt>TypeError<block_start>log.error("KEY: %r MOD: %r" self.key self.modifiers)<line_sep><raise><block_end><block_end><if_stmt>self.key<eq>0<block_start>s=s[:-2]<block_end><if_stmt>self.button<ne>Qt.NoButton<block_start><if_stmt>len(s)<block_start>s<augadd>"+"<block_end>s<augadd>self.buttonName(self.button)<block_end><return>s<block_end><block_end><class_stmt>UseToolMouseAction(ViewAction)<block_start>button=Qt.LeftButton<line_sep>labelText="Use Tool (Don't change!)"<line_sep>hidden=<true><line_sep>settingsKey=<none><line_sep>modifiers=<none># really?
editorTab=weakrefprop()<def_stmt>__init__ self editorTab<block_start>super(UseToolMouseAction self).__init__()<line_sep>self.editorTab=editorTab<block_end><def_stmt>mousePressEvent self event<block_start>self.editorTab.editorSession.viewMousePress(event)<line_sep>event.view.update()<block_end><def_stmt>mouseMoveEvent self event<block_start>self.editorTab.editorSession.viewMouseDrag(event)<line_sep>event.view.update()<block_end><def_stmt>mouseReleaseEvent self event<block_start>self.editorTab.editorSession.viewMouseRelease(event)<line_sep>event.view.update()<block_end><block_end><class_stmt>TrackingMouseAction(ViewAction)<block_start>button=Qt.NoButton<line_sep>hidden=<true><line_sep>modifiers=<none><line_sep>labelText="Mouse Tracking (Don't change!)"<line_sep>settingsKey=<none><line_sep>editorTab=weakrefprop()<def_stmt>__init__ self editorTab<block_start>super(TrackingMouseAction self).__init__()<line_sep>self.editorTab=editorTab<block_end><def_stmt>mouseMoveEvent self event<block_start>self.editorTab.editorSession.viewMouseMove(event)<block_end><block_end><class_stmt>MoveViewMouseAction(ViewAction)<block_start>button=Qt.RightButton<line_sep>labelText="Pan View"<line_sep>settingsKey="worldview/general/holdToMove"<def_stmt>buttonPressEvent self event<block_start>x,y=event.x() event.y()<line_sep>self.dragStart=event.view.unprojectAtHeight(x y 0)<line_sep>self.startOffset=event.view.centerPoint<line_sep>log.debug("Drag start %s" self.dragStart)<line_sep>event.view.update()<block_end><def_stmt>mouseMoveEvent self event<block_start>x=event.x()<line_sep>y=event.y()<line_sep>log.debug("mouseMoveEvent %s" (x y))<if_stmt>self.dragStart<block_start>d=event.view.unprojectAtHeight(x y 0)-self.dragStart<line_sep>event.view.centerPoint<augsub>d<line_sep>log.debug("Drag continue delta %s" d)<line_sep>event.view.update()<block_end><block_end><def_stmt>buttonReleaseEvent self event<block_start>x,y=event.x() event.y()<line_sep>self.dragStart=<none><line_sep>log.debug("Drag 
end")<line_sep>event.view.update()<block_end><block_end><def_stmt>ZoomWheelActions <block_start>maxScale=16.<line_sep>minScale=1./64<line_sep>zooms=[]<line_sep>_i=minScale<while_stmt>_i<l>maxScale<block_start>zooms.append(_i)<line_sep>_i<augmul>2.0<block_end><def_stmt>zoom view scale (mx my)# Get mouse position in world coordinates
<block_start>worldPos=view.unprojectAtHeight(mx my 0)<if_stmt>scale<ne>view.scale<block_start>view.scale=scale<line_sep># Get the new position under the mouse, find its distance from the old position,
# and shift the centerPoint by that amount.
newWorldPos=view.unprojectAtHeight(mx my 0)<line_sep>delta=newWorldPos-worldPos<line_sep>view.centerPoint=view.centerPoint-delta<line_sep>log.debug("zoom offset %s, pos %s, delta %s, scale %s" view.centerPoint (mx my) delta view.scale)<block_end><block_end><class_stmt>ZoomInAction(ViewAction)<block_start>settingsKey="worldview.general.zoom_in"<line_sep>button=ViewAction.WHEEL_UP<line_sep>acceptsMouseWheel=<true><line_sep>labelText="Zoom In"<def_stmt>buttonPressEvent self event<block_start>log.debug(self.labelText)<line_sep>mousePos=(event.x() event.y())<line_sep>i=zooms.index(event.view.scale)<if_stmt>i<g>0<block_start>zoom(event.view zooms[i-1] mousePos)<block_end><block_end><block_end><class_stmt>ZoomOutAction(ViewAction)<block_start>settingsKey="worldview.general.zoom_out"<line_sep>button=ViewAction.WHEEL_DOWN<line_sep>acceptsMouseWheel=<true><line_sep>labelText="Zoom Out"<def_stmt>buttonPressEvent self event<block_start>log.debug(self.labelText)<line_sep>mousePos=(event.x() event.y())<line_sep>i=zooms.index(event.view.scale)<if_stmt>i<l>len(zooms)-1<block_start>zoom(event.view zooms[i+1] mousePos)<block_end><block_end><block_end><return>[ZoomInAction() ZoomOutAction()]<block_end> |
<import_stmt>argparse<import_stmt>baselineUtils<import_stmt>torch<import_stmt>torch.utils.data<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_stmt>os<import_stmt>time<import_from_stmt>transformer.batch subsequent_mask<import_from_stmt>torch.optim Adam SGD RMSprop Adagrad<import_from_stmt>transformer.noam_opt NoamOpt<import_stmt>numpy<as>np<import_stmt>scipy.io<import_stmt>json<import_stmt>pickle<import_from_stmt>torch.utils.tensorboard SummaryWriter<def_stmt>main <block_start>parser=argparse.ArgumentParser(description='Train the individual Transformer model')<line_sep>parser.add_argument('--dataset_folder' type=str default='datasets')<line_sep>parser.add_argument('--dataset_name' type=str default='zara1')<line_sep>parser.add_argument('--obs' type=int default=8)<line_sep>parser.add_argument('--preds' type=int default=12)<line_sep>parser.add_argument('--emb_size' type=int default=512)<line_sep>parser.add_argument('--heads' type=int default=8)<line_sep>parser.add_argument('--layers' type=int default=6)<line_sep>parser.add_argument('--dropout' type=float default=0.1)<line_sep>parser.add_argument('--cpu' action='store_true')<line_sep>parser.add_argument('--output_folder' type=str default='Output')<line_sep>parser.add_argument('--val_size' type=int default=0)<line_sep>parser.add_argument('--gpu_device' type=str default="0")<line_sep>parser.add_argument('--verbose' action='store_true')<line_sep>parser.add_argument('--max_epoch' type=int default=100)<line_sep>parser.add_argument('--batch_size' type=int default=100)<line_sep>parser.add_argument('--validation_epoch_start' type=int default=30)<line_sep>parser.add_argument('--resume_train' action='store_true')<line_sep>parser.add_argument('--delim' type=str default='\t')<line_sep>parser.add_argument('--name' type=str default="zara1")<line_sep>parser.add_argument('--factor' type=float default=1.)<line_sep>parser.add_argument('--evaluate' type=bool 
default=<true>)<line_sep>parser.add_argument('--save_step' type=int default=1)<line_sep>args=parser.parse_args()<line_sep>model_name=args.name<try_stmt><block_start>os.mkdir('models')<block_end><except_stmt><block_start><pass><block_end><try_stmt><block_start>os.mkdir('output')<block_end><except_stmt><block_start><pass><block_end><try_stmt><block_start>os.mkdir('output/QuantizedTF')<block_end><except_stmt><block_start><pass><block_end><try_stmt><block_start>os.mkdir(f'models/QuantizedTF')<block_end><except_stmt><block_start><pass><block_end><try_stmt><block_start>os.mkdir(f'output/QuantizedTF/{args.name}')<block_end><except_stmt><block_start><pass><block_end><try_stmt><block_start>os.mkdir(f'models/QuantizedTF/{args.name}')<block_end><except_stmt><block_start><pass><block_end>log=SummaryWriter('logs/%s'%model_name)<line_sep>#os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_device
device=torch.device("cuda")<if_stmt>args.cpu<or><not>torch.cuda.is_available()<block_start>device=torch.device("cpu")<block_end>args.verbose=<true><line_sep>## creation of the dataloaders for train and validation
<if_stmt>args.val_size<eq>0<block_start>train_dataset,_=baselineUtils.create_dataset(args.dataset_folder args.dataset_name 0 args.obs args.preds delim=args.delim train=<true> verbose=args.verbose)<line_sep>val_dataset,_=baselineUtils.create_dataset(args.dataset_folder args.dataset_name 0 args.obs args.preds delim=args.delim train=<false> verbose=args.verbose)<block_end><else_stmt><block_start>train_dataset,val_dataset=baselineUtils.create_dataset(args.dataset_folder args.dataset_name args.val_size args.obs args.preds delim=args.delim train=<true> verbose=args.verbose)<block_end>test_dataset,_=baselineUtils.create_dataset(args.dataset_folder args.dataset_name 0 args.obs args.preds delim=args.delim train=<false> eval=<true> verbose=args.verbose)<line_sep>mat=scipy.io.loadmat(os.path.join(args.dataset_folder args.dataset_name "clusters.mat"))<line_sep>clusters=mat['centroids']<import_stmt>quantized_TF<line_sep>model=quantized_TF.QuantizedTF(clusters.shape[0] clusters.shape[0]+1 clusters.shape[0] N=args.layers d_model=args.emb_size d_ff=1024 h=args.heads dropout=args.dropout).to(device)<line_sep>tr_dl=torch.utils.data.DataLoader(train_dataset batch_size=args.batch_size shuffle=<true> num_workers=0)<line_sep>val_dl=torch.utils.data.DataLoader(val_dataset batch_size=args.batch_size shuffle=<true> num_workers=0)<line_sep>test_dl=torch.utils.data.DataLoader(test_dataset batch_size=args.batch_size shuffle=<false> num_workers=0)<line_sep>#optim = SGD(list(a.parameters())+list(model.parameters())+list(generator.parameters()),lr=0.01)
#sched=torch.optim.lr_scheduler.StepLR(optim,0.0005)
optim=NoamOpt(args.emb_size args.factor len(tr_dl)<times>5 torch.optim.Adam(model.parameters() lr=0 betas=(0.9 0.98) eps=1e-9))<line_sep>#optim=Adagrad(list(a.parameters())+list(model.parameters())+list(generator.parameters()),lr=0.01,lr_decay=0.001)
epoch=0<while_stmt>epoch<l>args.max_epoch<block_start>epoch_loss=0<line_sep>model.train()<for_stmt>id_b,batch enumerate(tr_dl)<block_start>optim.optimizer.zero_grad()<line_sep>scale=np.random.uniform(0.5 4)<line_sep>#rot_mat = np.array([[np.cos(r), np.sin(r)], [-np.sin(r), np.cos(r)]])
n_in_batch=batch['src'].shape[0]<line_sep>speeds_inp=batch['src'][: 1: 2:4]<times>scale<line_sep>inp=torch.tensor(scipy.spatial.distance.cdist(speeds_inp.reshape(-1 2) clusters).argmin(axis=1).reshape(n_in_batch -1)).to(device)<line_sep>speeds_trg=batch['trg'][: : 2:4]<times>scale<line_sep>target=torch.tensor(scipy.spatial.distance.cdist(speeds_trg.reshape(-1 2) clusters).argmin(axis=1).reshape(n_in_batch -1)).to(device)<line_sep>src_att=torch.ones((inp.shape[0] 1 inp.shape[1])).to(device)<line_sep>trg_att=subsequent_mask(target.shape[1]).repeat(n_in_batch 1 1).to(device)<line_sep>start_of_seq=torch.tensor([clusters.shape[0]]).repeat(n_in_batch).unsqueeze(1).to(device)<line_sep>dec_inp=torch.cat((start_of_seq target[: :-1]) 1)<line_sep>out=model(inp dec_inp src_att trg_att)<line_sep>loss=F.cross_entropy(out.view(-1 out.shape[-1]) target.view(-1) reduction='mean')<line_sep>loss.backward()<line_sep>optim.step()<line_sep>print("epoch %03i/%03i frame %04i / %04i loss: %7.4f"%(epoch args.max_epoch id_b len(tr_dl) loss.item()))<line_sep>epoch_loss<augadd>loss.item()<block_end>#sched.step()
log.add_scalar('Loss/train' epoch_loss/len(tr_dl) epoch)<with_stmt>torch.no_grad()<block_start>model.eval()<line_sep>gt=[]<line_sep>pr=[]<line_sep>val_loss=0<line_sep>step=0<for_stmt>batch val_dl# rot_mat = np.array([[np.cos(r), np.sin(r)], [-np.sin(r), np.cos(r)]])
<block_start>n_in_batch=batch['src'].shape[0]<line_sep>speeds_inp=batch['src'][: 1: 2:4]<line_sep>inp=torch.tensor(scipy.spatial.distance.cdist(speeds_inp.contiguous().reshape(-1 2) clusters).argmin(axis=1).reshape(n_in_batch -1)).to(device)<line_sep>speeds_trg=batch['trg'][: : 2:4]<line_sep>target=torch.tensor(scipy.spatial.distance.cdist(speeds_trg.contiguous().reshape(-1 2) clusters).argmin(axis=1).reshape(n_in_batch -1)).to(device)<line_sep>src_att=torch.ones((inp.shape[0] 1 inp.shape[1])).to(device)<line_sep>trg_att=subsequent_mask(target.shape[1]).repeat(n_in_batch 1 1).to(device)<line_sep>start_of_seq=torch.tensor([clusters.shape[0]]).repeat(n_in_batch).unsqueeze(1).to(device)<line_sep>dec_inp=torch.cat((start_of_seq target[: :-1]) 1)<line_sep>out=model(inp dec_inp src_att trg_att)<line_sep>loss=F.cross_entropy(out.contiguous().view(-1 out.shape[-1]) target.contiguous().view(-1) reduction='mean')<line_sep>print("val epoch %03i/%03i frame %04i / %04i loss: %7.4f"%(epoch args.max_epoch step len(val_dl) loss.item()))<line_sep>val_loss<augadd>loss.item()<line_sep>step<augadd>1<block_end>log.add_scalar('validation/loss' val_loss/len(val_dl) epoch)<if_stmt>args.evaluate# DETERMINISTIC MODE
<block_start>model.eval()<line_sep>model.eval()<line_sep>gt=[]<line_sep>pr=[]<line_sep>inp_=[]<line_sep>peds=[]<line_sep>frames=[]<line_sep>dt=[]<for_stmt>batch test_dl<block_start>inp_.append(batch['src'][: : 0:2])<line_sep>gt.append(batch['trg'][: : 0:2])<line_sep>frames.append(batch['frames'])<line_sep>peds.append(batch['peds'])<line_sep>dt.append(batch['dataset'])<line_sep>n_in_batch=batch['src'].shape[0]<line_sep>speeds_inp=batch['src'][: 1: 2:4]<line_sep>gt_b=batch['trg'][: : 0:2]<line_sep>inp=torch.tensor(scipy.spatial.distance.cdist(speeds_inp.reshape(-1 2) clusters).argmin(axis=1).reshape(n_in_batch -1)).to(device)<line_sep>src_att=torch.ones((inp.shape[0] 1 inp.shape[1])).to(device)<line_sep>trg_att=subsequent_mask(target.shape[1]).repeat(n_in_batch 1 1).to(device)<line_sep>start_of_seq=torch.tensor([clusters.shape[0]]).repeat(n_in_batch).unsqueeze(1).to(device)<line_sep>dec_inp=start_of_seq<for_stmt>i range(args.preds)<block_start>trg_att=subsequent_mask(dec_inp.shape[1]).repeat(n_in_batch 1 1).to(device)<line_sep>out=model(inp dec_inp src_att trg_att)<line_sep>dec_inp=torch.cat((dec_inp out[: -1:].argmax(dim=2)) 1)<block_end>preds_tr_b=clusters[dec_inp[: 1:].cpu().numpy()].cumsum(1)+batch['src'][: -1: 0:2].cpu().numpy()<line_sep>pr.append(preds_tr_b)<block_end>peds=np.concatenate(peds 0)<line_sep>frames=np.concatenate(frames 0)<line_sep>dt=np.concatenate(dt 0)<line_sep>gt=np.concatenate(gt 0)<line_sep>dt_names=test_dataset.data['dataset_name']<line_sep>pr=np.concatenate(pr 0)<line_sep>mad,fad,errs=baselineUtils.distance_metrics(gt pr)<line_sep>log.add_scalar('eval/DET_mad' mad epoch)<line_sep>log.add_scalar('eval/DET_fad' fad epoch)<line_sep>scipy.io.savemat(f"output/QuantizedTF/{args.name}/{epoch:05d}.mat" {'input':inp 'gt':gt 'pr':pr 'peds':peds 'frames':frames 'dt':dt 'dt_names':dt_names})<line_sep># MULTI MODALITY
<if_stmt><false><block_start>num_samples=20<line_sep>model.eval()<line_sep>gt=[]<line_sep>pr_all={}<for_stmt>sam range(num_samples)<block_start>pr_all[sam]=[]<block_end><for_stmt>batch test_dl# rot_mat = np.array([[np.cos(r), np.sin(r)], [-np.sin(r), np.cos(r)]])
<block_start>n_in_batch=batch['src'].shape[0]<line_sep>speeds_inp=batch['src'][: 1: 2:4]<line_sep>gt_b=batch['trg'][: : 0:2]<line_sep>gt.append(gt_b)<line_sep>inp=torch.tensor(scipy.spatial.distance.cdist(speeds_inp.reshape(-1 2) clusters).argmin(axis=1).reshape(n_in_batch -1)).to(device)<line_sep>src_att=torch.ones((inp.shape[0] 1 inp.shape[1])).to(device)<line_sep>trg_att=subsequent_mask(target.shape[1]).repeat(n_in_batch 1 1).to(device)<line_sep>start_of_seq=torch.tensor([clusters.shape[0]]).repeat(n_in_batch).unsqueeze(1).to(device)<for_stmt>sam range(num_samples)<block_start>dec_inp=start_of_seq<for_stmt>i range(args.preds)<block_start>trg_att=subsequent_mask(dec_inp.shape[1]).repeat(n_in_batch 1 1).to(device)<line_sep>out=model.predict(inp dec_inp src_att trg_att)<line_sep>h=out[: -1]<line_sep>dec_inp=torch.cat((dec_inp torch.multinomial(h 1)) 1)<block_end>preds_tr_b=clusters[dec_inp[: 1:].cpu().numpy()].cumsum(1)+batch['src'][: -1: 0:2].cpu().numpy()<line_sep>pr_all[sam].append(preds_tr_b)<block_end><block_end>gt=np.concatenate(gt 0)<line_sep>#pr=np.concatenate(pr,0)
samp={}<for_stmt>k pr_all.keys()<block_start>samp[k]={}<line_sep>samp[k]['pr']=np.concatenate(pr_all[k] 0)<line_sep>samp[k]['mad'],samp[k]['fad'],samp[k]['err']=baselineUtils.distance_metrics(gt samp[k]['pr'])<block_end>ev=[samp[i]['err']<for>i range(num_samples)]<line_sep>e20=np.stack(ev -1)<line_sep>mad_samp=e20.mean(1).min(-1).mean()<line_sep>fad_samp=e20[: -1].min(-1).mean()<line_sep>#mad,fad,errs=baselineUtils.distance_metrics(gt,pr)
log.add_scalar('eval/MM_mad' mad_samp epoch)<line_sep>log.add_scalar('eval/MM_fad' fad_samp epoch)<block_end><block_end><if_stmt>epoch%args.save_step<eq>0<block_start>torch.save(model.state_dict() f'models/QuantizedTF/{args.name}/{epoch:05d}.pth')<block_end><block_end>epoch<augadd>1<block_end>ab=1<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end> |
<import_stmt>pytest<import_from_stmt>yahooquery Screener<def_stmt>test_screener <block_start>s=Screener()<assert_stmt>s.get_screeners("most_actives")<is><not><none><block_end><def_stmt>test_available_screeners <block_start>s=Screener()<assert_stmt>s.available_screeners<is><not><none><block_end><def_stmt>test_bad_screener <block_start><with_stmt>pytest.raises(ValueError)<block_start>s=Screener()<assert_stmt>s.get_screeners("most_active")<block_end><block_end> |
"""Approximate information operators."""<import_stmt>abc<import_from_stmt>probnum.diffeq.odefilter information_operators<class_stmt>ApproximationStrategy(abc.ABC)<block_start>"""Interface for approximation strategies.
Turn an information operator into an approximate information operator that converts
into a :class:`ODEFilter` compatible :class:`Transition`.
"""<def_stmt>__call__ self information_operator:information_operators.InformationOperator<arrow>information_operators.ApproximateInformationOperator<block_start>"""Derive a tractable approximation of an information operator."""<line_sep><raise>NotImplementedError<block_end><block_end> |
"""Tileset and font related functions.
Tilesets can be loaded as a whole from tile-sheets or True-Type fonts, or they
can be put together from multiple tile images by loading them separately
using :any:`Tileset.set_tile`.
A major restriction with libtcod is that all tiles must be the same size and
tiles can't overlap when rendered. For sprite-based rendering it can be
useful to use `an alternative library for graphics rendering
<https://wiki.python.org/moin/PythonGameLibraries>`_ while continuing to use
python-tcod's pathfinding and field-of-view algorithms.
"""<import_from_future_stmt> annotations<import_stmt>itertools<import_stmt>os<import_from_stmt>pathlib Path<import_from_stmt>typing Any Iterable Optional Tuple Union<import_stmt>numpy<as>np<import_from_stmt>numpy.typing ArrayLike NDArray<import_stmt>tcod.console<import_from_stmt>tcod._internal _check _console _raise_tcod_error deprecate<import_from_stmt>tcod.loader ffi lib<class_stmt>Tileset<block_start>"""A collection of graphical tiles.
This class is provisional, the API may change in the future.
"""<def_stmt>__init__ self tile_width:int tile_height:int<arrow><none><block_start>self._tileset_p=ffi.gc(lib.TCOD_tileset_new(tile_width tile_height) lib.TCOD_tileset_delete )<block_end>@classmethod<def_stmt>_claim cls cdata:Any<arrow>Tileset<block_start>"""Return a new Tileset that owns the provided TCOD_Tileset* object."""<line_sep>self:Tileset=object.__new__(cls)<if_stmt>cdata<eq>ffi.NULL<block_start><raise>RuntimeError("Tileset initialized with nullptr.")<block_end>self._tileset_p=ffi.gc(cdata lib.TCOD_tileset_delete)<line_sep><return>self<block_end>@property<def_stmt>tile_width self<arrow>int<block_start>"""The width of the tile in pixels."""<line_sep><return>int(lib.TCOD_tileset_get_tile_width_(self._tileset_p))<block_end>@property<def_stmt>tile_height self<arrow>int<block_start>"""The height of the tile in pixels."""<line_sep><return>int(lib.TCOD_tileset_get_tile_height_(self._tileset_p))<block_end>@property<def_stmt>tile_shape self<arrow>Tuple[int int]<block_start>"""The shape (height, width) of the tile in pixels."""<line_sep><return>self.tile_height self.tile_width<block_end><def_stmt>__contains__ self codepoint:int<arrow>bool<block_start>"""Test if a tileset has a codepoint with ``n in tileset``."""<line_sep><return>bool(lib.TCOD_tileset_get_tile_(self._tileset_p codepoint ffi.NULL)<eq>0)<block_end><def_stmt>get_tile self codepoint:int<arrow>NDArray[np.uint8]<block_start>"""Return a copy of a tile for the given codepoint.
If the tile does not exist yet then a blank array will be returned.
The tile will have a shape of (height, width, rgba) and a dtype of
uint8. Note that most grey-scale tiles will only use the alpha
channel and will usually have a solid white color channel.
"""<line_sep>tile=np.zeros(self.tile_shape+(4 ) dtype=np.uint8)<line_sep>lib.TCOD_tileset_get_tile_(self._tileset_p codepoint ffi.from_buffer("struct TCOD_ColorRGBA*" tile) )<line_sep><return>tile<block_end><def_stmt>set_tile self codepoint:int tile:Union[ArrayLike NDArray[np.uint8]]<arrow><none><block_start>"""Upload a tile into this array.
Args:
codepoint (int): The Unicode codepoint you are assigning to.
If the tile is a sprite rather than a common glyph then consider assigning it to a
`Private Use Area <https://en.wikipedia.org/wiki/Private_Use_Areas>`_.
tile (Union[ArrayLike, NDArray[np.uint8]]):
The pixels to use for this tile in row-major order and must be in the same shape as :any:`tile_shape`.
`tile` can be an RGBA array with the shape of ``(height, width, rgba)``, or a grey-scale array with the
shape ``(height, width)``.
The `tile` array will be converted to a dtype of ``np.uint8``.
An RGB array as an input is too ambiguous and an alpha channel must be added, for example if an image has a key
color than the key color pixels must have their alpha channel set to zero.
This data may be immediately sent to VRAM, which can be a slow operation.
Example::
# Examples use imageio for image loading, see https://imageio.readthedocs.io
tileset: tcod.tileset.Tileset # This example assumes you are modifying an existing tileset.
# Normal usage when a tile already has its own alpha channel.
# The loaded tile must be the correct shape for the tileset you assign it to.
# The tile is assigned to a private use area and will not conflict with any exiting codepoint.
tileset.set_tile(0x100000, imageio.load("rgba_tile.png"))
# Load a greyscale tile.
tileset.set_tile(0x100001, imageio.load("greyscale_tile.png"), pilmode="L")
# If you are stuck with an RGB array then you can use the red channel as the input: `rgb[:, :, 0]`
# Loads an RGB sprite without a background.
tileset.set_tile(0x100002, imageio.load("rgb_no_background.png", pilmode="RGBA"))
# If you're stuck with an RGB array then you can pad the channel axis with an alpha of 255:
# rgba = np.pad(rgb, pad_width=((0, 0), (0, 0), (0, 1)), constant_values=255)
# Loads an RGB sprite with a key color background.
KEY_COLOR = np.asarray((255, 0, 255), dtype=np.uint8)
sprite_rgb = imageio.load("rgb_tile.png")
# Compare the RGB colors to KEY_COLOR, compress full matches to a 2D mask.
sprite_mask = (sprite_rgb != KEY_COLOR).all(axis=2)
# Generate the alpha array, with 255 as the foreground and 0 as the background.
sprite_alpha = sprite_mask.astype(np.uint8) * 255
# Combine the RGB and alpha arrays into an RGBA array.
sprite_rgba = np.append(sprite_rgb, sprite_alpha, axis=2)
tileset.set_tile(0x100003, sprite_rgba)
"""<line_sep>tile=np.ascontiguousarray(tile dtype=np.uint8)<if_stmt>tile.shape<eq>self.tile_shape<block_start>full_tile=np.empty(self.tile_shape+(4 ) dtype=np.uint8)<line_sep>full_tile[: : :3]=255<line_sep>full_tile[: : 3]=tile<line_sep><return>self.set_tile(codepoint full_tile)<block_end>required=self.tile_shape+(4 )<if_stmt>tile.shape<ne>required<block_start>note=""<if_stmt>len(tile.shape)<eq>3<and>tile.shape[2]<eq>3<block_start>note=("\nNote: An RGB array is too ambiguous,"<concat>" an alpha channel must be added to this array to divide the background/foreground areas.")<block_end><raise>ValueError(f"Tile shape must be {required} or {self.tile_shape}, got {tile.shape}.{note}")<block_end>lib.TCOD_tileset_set_tile_(self._tileset_p codepoint ffi.from_buffer("struct TCOD_ColorRGBA*" tile) )<block_end><def_stmt>render self console:tcod.console.Console<arrow>NDArray[np.uint8]<block_start>"""Render an RGBA array, using console with this tileset.
`console` is the Console object to render, this can not be the root
console.
The output array will be a np.uint8 array with the shape of:
``(con_height * tile_height, con_width * tile_width, 4)``.
.. versionadded:: 11.9
"""<if_stmt><not>console<block_start><raise>ValueError("'console' must not be the root console.")<block_end>width=console.width<times>self.tile_width<line_sep>height=console.height<times>self.tile_height<line_sep>out=np.empty((height width 4) np.uint8)<line_sep>out[:]=9<line_sep>surface_p=ffi.gc(lib.SDL_CreateRGBSurfaceWithFormatFrom(ffi.from_buffer("void*" out) width height 32 out.strides[0] lib.SDL_PIXELFORMAT_RGBA32 ) lib.SDL_FreeSurface )<with_stmt>surface_p<block_start><with_stmt>ffi.new("SDL_Surface**" surface_p)<as>surface_p_p<block_start>_check(lib.TCOD_tileset_render_to_surface(self._tileset_p _console(console) ffi.NULL surface_p_p ))<block_end><block_end><return>out<block_end><def_stmt>remap self codepoint:int x:int y:int=0<arrow><none><block_start>"""Reassign a codepoint to a character in this tileset.
`codepoint` is the Unicode codepoint to assign.
`x` and `y` is the position of the tilesheet to assign to `codepoint`.
This is the tile position itself, not the pixel position of the tile.
Large values of `x` will wrap to the next row, so using `x` by itself
is equivalent to `Tile Index` in the :any:`charmap-reference`.
This is normally used on loaded tilesheets. Other methods of Tileset
creation won't have reliable tile indexes.
.. versionadded:: 11.12
"""<line_sep>tile_i=x+y<times>self._tileset_p.virtual_columns<if_stmt><not>(0<le>tile_i<l>self._tileset_p.tiles_count)<block_start><raise>IndexError("Tile %i is non-existent and can't be assigned."<concat>" (Tileset has %i tiles.)"%(tile_i self._tileset_p.tiles_count))<block_end>_check(lib.TCOD_tileset_assign_tile(self._tileset_p tile_i codepoint ))<block_end><block_end>@deprecate("Using the default tileset is deprecated.")<def_stmt>get_default <arrow>Tileset<block_start>"""Return a reference to the default Tileset.
.. versionadded:: 11.10
.. deprecated:: 11.13
The default tileset is deprecated.
With contexts this is no longer needed.
"""<line_sep><return>Tileset._claim(lib.TCOD_get_default_tileset())<block_end>@deprecate("Using the default tileset is deprecated.")<def_stmt>set_default tileset:Tileset<arrow><none><block_start>"""Set the default tileset.
The display will use this new tileset immediately.
.. versionadded:: 11.10
.. deprecated:: 11.13
The default tileset is deprecated.
With contexts this is no longer needed.
"""<line_sep>lib.TCOD_set_default_tileset(tileset._tileset_p)<block_end><def_stmt>load_truetype_font path:Union[str Path] tile_width:int tile_height:int<arrow>Tileset<block_start>"""Return a new Tileset from a `.ttf` or `.otf` file.
Same as :any:`set_truetype_font`, but returns a :any:`Tileset` instead.
You can send this Tileset to :any:`set_default`.
This function is provisional. The API may change.
"""<if_stmt><not>os.path.exists(path)<block_start><raise>RuntimeError("File not found:\n\t%s"%(os.path.realpath(path) ))<block_end>cdata=lib.TCOD_load_truetype_font_(str(path).encode() tile_width tile_height)<if_stmt><not>cdata<block_start><raise>RuntimeError(ffi.string(lib.TCOD_get_error()))<block_end><return>Tileset._claim(cdata)<block_end>@deprecate("Accessing the default tileset is deprecated.")<def_stmt>set_truetype_font path:Union[str Path] tile_width:int tile_height:int<arrow><none><block_start>"""Set the default tileset from a `.ttf` or `.otf` file.
`path` is the file path for the font file.
`tile_width` and `tile_height` are the desired size of the tiles in the new
tileset. The font will be scaled to fit the given `tile_height` and
`tile_width`.
This function must be called before :any:`tcod.console_init_root`. Once
the root console is setup you may call this function again to change the
font. The tileset can be changed but the window will not be resized
automatically.
.. versionadded:: 9.2
.. deprecated:: 11.13
This function does not support contexts.
Use :any:`load_truetype_font` instead.
"""<if_stmt><not>os.path.exists(path)<block_start><raise>RuntimeError("File not found:\n\t%s"%(os.path.realpath(path) ))<block_end><if_stmt>lib.TCOD_tileset_load_truetype_(str(path).encode() tile_width tile_height)<block_start><raise>RuntimeError(ffi.string(lib.TCOD_get_error()))<block_end><block_end><def_stmt>load_bdf path:Union[str Path]<arrow>Tileset<block_start>"""Return a new Tileset from a `.bdf` file.
For the best results the font should be monospace, cell-based, and
single-width. As an example, a good set of fonts would be the
`Unicode fonts and tools for X11 <https://www.cl.cam.ac.uk/~mgk25/ucs-fonts.html>`_
package.
Pass the returned Tileset to :any:`tcod.tileset.set_default` and it will
take effect when `tcod.console_init_root` is called.
.. versionadded:: 11.10
"""<line_sep># noqa: E501
<if_stmt><not>os.path.exists(path)<block_start><raise>RuntimeError("File not found:\n\t%s"%(os.path.realpath(path) ))<block_end>cdata=lib.TCOD_load_bdf(str(path).encode())<if_stmt><not>cdata<block_start><raise>RuntimeError(ffi.string(lib.TCOD_get_error()).decode())<block_end><return>Tileset._claim(cdata)<block_end><def_stmt>load_tilesheet path:Union[str Path] columns:int rows:int charmap:Optional[Iterable[int]]<arrow>Tileset<block_start>"""Return a new Tileset from a simple tilesheet image.
`path` is the file path to a PNG file with the tileset.
`columns` and `rows` is the shape of the tileset. Tiles are assumed to
take up the entire space of the image.
`charmap` is a sequence of codepoints to map the tilesheet to in row-major order.
This is a list or generator of codepoints which map the tiles like this: ``charmap[tile_index] = codepoint``.
For common tilesets `charmap` should be :any:`tcod.tileset.CHARMAP_CP437`.
Generators will be sliced so :any:`itertools.count` can be used which will
give all tiles the same codepoint as their index, but this will not map
tiles onto proper Unicode.
If `None` is used then no tiles will be mapped, you will need to use
:any:`Tileset.remap` to assign codepoints to this Tileset.
.. versionadded:: 11.12
"""<if_stmt><not>os.path.exists(path)<block_start><raise>RuntimeError("File not found:\n\t%s"%(os.path.realpath(path) ))<block_end>mapping=[]<if_stmt>charmap<is><not><none><block_start>mapping=list(itertools.islice(charmap columns<times>rows))<block_end>cdata=lib.TCOD_tileset_load(str(path).encode() columns rows len(mapping) mapping)<if_stmt><not>cdata<block_start>_raise_tcod_error()<block_end><return>Tileset._claim(cdata)<block_end><def_stmt>procedural_block_elements * tileset:Tileset<arrow><none><block_start>"""Overwrites the block element codepoints in `tileset` with prodecually generated glyphs.
Args:
tileset (Tileset): A :any:`Tileset` with tiles of any shape.
This will overwrite all of the codepoints `listed here <https://en.wikipedia.org/wiki/Block_Elements>`_
except for the shade glyphs.
This function is useful for other functions such as :any:`Console.draw_semigraphics` which use more types of block
elements than are found in Code Page 437.
.. versionadded:: 13.1
Example::
>>> tileset = tcod.tileset.Tileset(8, 8)
>>> tcod.tileset.procedural_block_elements(tileset=tileset)
>>> tileset.get_tile(0x259E)[:, :, 3] # "▞" Quadrant upper right and lower left.
array([[ 0, 0, 0, 0, 255, 255, 255, 255],
[ 0, 0, 0, 0, 255, 255, 255, 255],
[ 0, 0, 0, 0, 255, 255, 255, 255],
[ 0, 0, 0, 0, 255, 255, 255, 255],
[255, 255, 255, 255, 0, 0, 0, 0],
[255, 255, 255, 255, 0, 0, 0, 0],
[255, 255, 255, 255, 0, 0, 0, 0],
[255, 255, 255, 255, 0, 0, 0, 0]], dtype=uint8)
>>> tileset.get_tile(0x2581)[:, :, 3] # "▁" Lower one eighth block.
array([[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0],
[255, 255, 255, 255, 255, 255, 255, 255]], dtype=uint8)
>>> tileset.get_tile(0x258D)[:, :, 3] # "▍" Left three eighths block.
array([[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0],
[255, 255, 255, 0, 0, 0, 0, 0]], dtype=uint8)
"""<line_sep>quadrants:NDArray[np.uint8]=np.zeros(tileset.tile_shape dtype=np.uint8)<line_sep>half_height=tileset.tile_height<floordiv>2<line_sep>half_width=tileset.tile_width<floordiv>2<line_sep>quadrants[:half_height :half_width]=0b1000# Top-left.
quadrants[:half_height half_width:]=0b0100# Top-right.
quadrants[half_height: :half_width]=0b0010# Bottom-left.
quadrants[half_height: half_width:]=0b0001# Bottom-right.
<for_stmt>codepoint,quad_mask ((0x2580 0b1100) # "▀" Upper half block.
(0x2584 0b0011) # "▄" Lower half block.
(0x2588 0b1111) # "█" Full block.
(0x258C 0b1010) # "▌" Left half block.
(0x2590 0b0101) # "▐" Right half block.
(0x2596 0b0010) # "▖" Quadrant lower left.
(0x2597 0b0001) # "▗" Quadrant lower right.
(0x2598 0b1000) # "▘" Quadrant upper left.
(0x2599 0b1011) # "▙" Quadrant upper left and lower left and lower right.
(0x259A 0b1001) # "▚" Quadrant upper left and lower right.
(0x259B 0b1110) # "▛" Quadrant upper left and upper right and lower left.
(0x259C 0b1101) # "▜" Quadrant upper left and upper right and lower right.
(0x259D 0b0100) # "▝" Quadrant upper right.
(0x259E 0b0110) # "▞" Quadrant upper right and lower left.
(0x259F 0b0111) # "▟" Quadrant upper right and lower left and lower right.
)<block_start>alpha:NDArray[np.uint8]=np.asarray((quadrants&quad_mask)<ne>0 dtype=np.uint8)<times>255<line_sep>tileset.set_tile(codepoint alpha)<block_end><for_stmt>codepoint,axis,fraction,negative ((0x2581 0 7 <true>) # "▁" Lower one eighth block.
(0x2582 0 6 <true>) # "▂" Lower one quarter block.
(0x2583 0 5 <true>) # "▃" Lower three eighths block.
(0x2585 0 3 <true>) # "▅" Lower five eighths block.
(0x2586 0 2 <true>) # "▆" Lower three quarters block.
(0x2587 0 1 <true>) # "▇" Lower seven eighths block.
(0x2589 1 7 <false>) # "▉" Left seven eighths block.
(0x258A 1 6 <false>) # "▊" Left three quarters block.
(0x258B 1 5 <false>) # "▋" Left five eighths block.
(0x258D 1 3 <false>) # "▍" Left three eighths block.
(0x258E 1 2 <false>) # "▎" Left one quarter block.
(0x258F 1 1 <false>) # "▏" Left one eighth block.
(0x2594 0 1 <false>) # "▔" Upper one eighth block.
(0x2595 1 7 <true>) # "▕" Right one eighth block .
)<block_start>indexes=[slice(<none>) slice(<none>)]<line_sep>divide=tileset.tile_shape[axis]<times>fraction<floordiv>8<line_sep># If negative then shade from the far corner, otherwise shade from the near corner.
indexes[axis]=slice(divide <none>)<if>negative<else>slice(<none> divide)<line_sep>alpha=np.zeros(tileset.tile_shape dtype=np.uint8)<line_sep>alpha[tuple(indexes)]=255<line_sep>tileset.set_tile(codepoint alpha)<block_end><block_end>CHARMAP_CP437=[0x0000 0x263A 0x263B 0x2665 0x2666 0x2663 0x2660 0x2022 0x25D8 0x25CB 0x25D9 0x2642 0x2640 0x266A 0x266B 0x263C 0x25BA 0x25C4 0x2195 0x203C 0x00B6 0x00A7 0x25AC 0x21A8 0x2191 0x2193 0x2192 0x2190 0x221F 0x2194 0x25B2 0x25BC 0x0020 0x0021 0x0022 0x0023 0x0024 0x0025 0x0026 0x0027 0x0028 0x0029 0x002A 0x002B 0x002C 0x002D 0x002E 0x002F 0x0030 0x0031 0x0032 0x0033 0x0034 0x0035 0x0036 0x0037 0x0038 0x0039 0x003A 0x003B 0x003C 0x003D 0x003E 0x003F 0x0040 0x0041 0x0042 0x0043 0x0044 0x0045 0x0046 0x0047 0x0048 0x0049 0x004A 0x004B 0x004C 0x004D 0x004E 0x004F 0x0050 0x0051 0x0052 0x0053 0x0054 0x0055 0x0056 0x0057 0x0058 0x0059 0x005A 0x005B 0x005C 0x005D 0x005E 0x005F 0x0060 0x0061 0x0062 0x0063 0x0064 0x0065 0x0066 0x0067 0x0068 0x0069 0x006A 0x006B 0x006C 0x006D 0x006E 0x006F 0x0070 0x0071 0x0072 0x0073 0x0074 0x0075 0x0076 0x0077 0x0078 0x0079 0x007A 0x007B 0x007C 0x007D 0x007E 0x007F 0x00C7 0x00FC 0x00E9 0x00E2 0x00E4 0x00E0 0x00E5 0x00E7 0x00EA 0x00EB 0x00E8 0x00EF 0x00EE 0x00EC 0x00C4 0x00C5 0x00C9 0x00E6 0x00C6 0x00F4 0x00F6 0x00F2 0x00FB 0x00F9 0x00FF 0x00D6 0x00DC 0x00A2 0x00A3 0x00A5 0x20A7 0x0192 0x00E1 0x00ED 0x00F3 0x00FA 0x00F1 0x00D1 0x00AA 0x00BA 0x00BF 0x2310 0x00AC 0x00BD 0x00BC 0x00A1 0x00AB 0x00BB 0x2591 0x2592 0x2593 0x2502 0x2524 0x2561 0x2562 0x2556 0x2555 0x2563 0x2551 0x2557 0x255D 0x255C 0x255B 0x2510 0x2514 0x2534 0x252C 0x251C 0x2500 0x253C 0x255E 0x255F 0x255A 0x2554 0x2569 0x2566 0x2560 0x2550 0x256C 0x2567 0x2568 0x2564 0x2565 0x2559 0x2558 0x2552 0x2553 0x256B 0x256A 0x2518 0x250C 0x2588 0x2584 0x258C 0x2590 0x2580 0x03B1 0x00DF 0x0393 0x03C0 0x03A3 0x03C3 0x00B5 0x03C4 0x03A6 0x0398 0x03A9 0x03B4 0x221E 0x03C6 0x03B5 0x2229 0x2261 0x00B1 0x2265 0x2264 0x2320 0x2321 0x00F7 0x2248 0x00B0 0x2219 
0x00B7 0x221A 0x207F 0x00B2 0x25A0 0x00A0 ]<line_sep>"""A code page 437 character mapping.
See :ref:`code-page-437` for more info and a table of glyphs.
.. versionadded:: 11.12
"""<line_sep>CHARMAP_TCOD=[0x20 0x21 0x22 0x23 0x24 0x25 0x26 0x27 0x28 0x29 0x2A 0x2B 0x2C 0x2D 0x2E 0x2F 0x30 0x31 0x32 0x33 0x34 0x35 0x36 0x37 0x38 0x39 0x3A 0x3B 0x3C 0x3D 0x3E 0x3F 0x40 0x5B 0x5C 0x5D 0x5E 0x5F 0x60 0x7B 0x7C 0x7D 0x7E 0x2591 0x2592 0x2593 0x2502 0x2500 0x253C 0x2524 0x2534 0x251C 0x252C 0x2514 0x250C 0x2510 0x2518 0x2598 0x259D 0x2580 0x2596 0x259A 0x2590 0x2597 0x2191 0x2193 0x2190 0x2192 0x25B2 0x25BC 0x25C4 0x25BA 0x2195 0x2194 0x2610 0x2611 0x25CB 0x25C9 0x2551 0x2550 0x256C 0x2563 0x2569 0x2560 0x2566 0x255A 0x2554 0x2557 0x255D 0x00 0x00 0x00 0x00 0x00 0x00 0x00 0x41 0x42 0x43 0x44 0x45 0x46 0x47 0x48 0x49 0x4A 0x4B 0x4C 0x4D 0x4E 0x4F 0x50 0x51 0x52 0x53 0x54 0x55 0x56 0x57 0x58 0x59 0x5A 0x00 0x00 0x00 0x00 0x00 0x00 0x61 0x62 0x63 0x64 0x65 0x66 0x67 0x68 0x69 0x6A 0x6B 0x6C 0x6D 0x6E 0x6F 0x70 0x71 0x72 0x73 0x74 0x75 0x76 0x77 0x78 0x79 0x7A 0x00 0x00 0x00 0x00 0x00 0x00 ]<line_sep>"""The layout used by older libtcod fonts, in Unicode.
This layout is non-standard, and it's not recommend to make a font for it, but
you might need it to load an existing font made for libtcod.
This character map is in Unicode, so old code using the non-Unicode
`tcod.CHAR_*` constants will need to be updated.
See :ref:`deprecated-tcod-layout` for a table of glyphs used in this character
map.
.. versionadded:: 11.12
"""<line_sep> |
<import_stmt>numpy<as>np<import_stmt>unittest<import_from_stmt>chainer testing<import_from_stmt>chainercv.experimental.links.model.pspnet convolution_crop<class_stmt>TestConvolutionCrop(unittest.TestCase)<block_start><def_stmt>test_convolution_crop self<block_start>size=(8 6)<line_sep>stride=(8 6)<line_sep>n_channel=3<line_sep>img=np.random.uniform(size=(n_channel 16 12)).astype(np.float32)<line_sep>crop_imgs,param=convolution_crop(img size stride return_param=<true>)<line_sep>self.assertEqual(crop_imgs.shape (4 n_channel)+size)<line_sep>self.assertEqual(crop_imgs.dtype np.float32)<for_stmt>y range(2)<block_start><for_stmt>x range(2)<block_start>self.assertEqual(param['y_slices'][2<times>y+x].start 8<times>y)<line_sep>self.assertEqual(param['y_slices'][2<times>y+x].stop 8<times>(y+1))<line_sep>self.assertEqual(param['x_slices'][2<times>y+x].start 6<times>x)<line_sep>self.assertEqual(param['x_slices'][2<times>y+x].stop 6<times>(x+1))<block_end><block_end><for_stmt>i range(4)<block_start>self.assertEqual(param['crop_y_slices'][i].start 0)<line_sep>self.assertEqual(param['crop_y_slices'][i].stop 8)<line_sep>self.assertEqual(param['crop_x_slices'][i].start 0)<line_sep>self.assertEqual(param['crop_x_slices'][i].stop 6)<block_end><block_end><block_end>testing.run_module(__name__ __file__)<line_sep> |
# -*- coding: utf-8 -*-
"""Tools for diff'ing two xonsh history files in a meaningful fashion."""<import_stmt>difflib<import_stmt>datetime<import_stmt>itertools<import_stmt>argparse<import_from_stmt>xonsh.lazyjson LazyJSON<import_from_stmt>xonsh.tools print_color<line_sep>NO_COLOR_S="{NO_COLOR}"<line_sep>RED_S="{RED}"<line_sep>GREEN_S="{GREEN}"<line_sep>BOLD_RED_S="{BOLD_RED}"<line_sep>BOLD_GREEN_S="{BOLD_GREEN}"<line_sep># intern some strings
REPLACE_S="replace"<line_sep>DELETE_S="delete"<line_sep>INSERT_S="insert"<line_sep>EQUAL_S="equal"<def_stmt>bold_str_diff a b sm=<none><block_start><if_stmt>sm<is><none><block_start>sm=difflib.SequenceMatcher()<block_end>aline=RED_S+"- "<line_sep>bline=GREEN_S+"+ "<line_sep>sm.set_seqs(a b)<for_stmt>tag,i1,i2,j1,j2 sm.get_opcodes()<block_start><if_stmt>tag<eq>REPLACE_S<block_start>aline<augadd>BOLD_RED_S+a[i1:i2]+RED_S<line_sep>bline<augadd>BOLD_GREEN_S+b[j1:j2]+GREEN_S<block_end><elif_stmt>tag<eq>DELETE_S<block_start>aline<augadd>BOLD_RED_S+a[i1:i2]+RED_S<block_end><elif_stmt>tag<eq>INSERT_S<block_start>bline<augadd>BOLD_GREEN_S+b[j1:j2]+GREEN_S<block_end><elif_stmt>tag<eq>EQUAL_S<block_start>aline<augadd>a[i1:i2]<line_sep>bline<augadd>b[j1:j2]<block_end><else_stmt><block_start><raise>RuntimeError("tag not understood")<block_end><block_end><return>aline+NO_COLOR_S+"\n"+bline+NO_COLOR_S+"\n"<block_end><def_stmt>redline line<block_start><return>"{red}- {line}{no_color}\n".format(red=RED_S line=line no_color=NO_COLOR_S)<block_end><def_stmt>greenline line<block_start><return>"{green}+ {line}{no_color}\n".format(green=GREEN_S line=line no_color=NO_COLOR_S)<block_end><def_stmt>highlighted_ndiff a b<block_start>"""Returns a highlighted string, with bold characters where different."""<line_sep>s=""<line_sep>sm=difflib.SequenceMatcher()<line_sep>sm.set_seqs(a b)<line_sep>linesm=difflib.SequenceMatcher()<for_stmt>tag,i1,i2,j1,j2 sm.get_opcodes()<block_start><if_stmt>tag<eq>REPLACE_S<block_start><for_stmt>aline,bline itertools.zip_longest(a[i1:i2] b[j1:j2])<block_start><if_stmt>bline<is><none><block_start>s<augadd>redline(aline)<block_end><elif_stmt>aline<is><none><block_start>s<augadd>greenline(bline)<block_end><else_stmt><block_start>s<augadd>bold_str_diff(aline bline sm=linesm)<block_end><block_end><block_end><elif_stmt>tag<eq>DELETE_S<block_start><for_stmt>aline 
a[i1:i2]<block_start>s<augadd>redline(aline)<block_end><block_end><elif_stmt>tag<eq>INSERT_S<block_start><for_stmt>bline b[j1:j2]<block_start>s<augadd>greenline(bline)<block_end><block_end><elif_stmt>tag<eq>EQUAL_S<block_start><for_stmt>aline a[i1:i2]<block_start>s<augadd>" "+aline+"\n"<block_end><block_end><else_stmt><block_start><raise>RuntimeError("tag not understood")<block_end><block_end><return>s<block_end><class_stmt>HistoryDiffer(object)<block_start>"""This class helps diff two xonsh history files."""<def_stmt>__init__ self afile bfile reopen=<false> verbose=<false><block_start>"""
Parameters
----------
afile : file handle or str
The first file to diff
bfile : file handle or str
The second file to diff
reopen : bool, optional
Whether or not to reopen the file handles each time. The default here is
opposite from the LazyJSON default because we know that we will be doing
a lot of reading so it is best to keep the handles open.
verbose : bool, optional
Whether to print a verbose amount of information.
"""<line_sep>self.a=LazyJSON(afile reopen=reopen)<line_sep>self.b=LazyJSON(bfile reopen=reopen)<line_sep>self.verbose=verbose<line_sep>self.sm=difflib.SequenceMatcher(autojunk=<false>)<block_end><def_stmt>__del__ self<block_start>self.a.close()<line_sep>self.b.close()<block_end><def_stmt>__str__ self<block_start><return>self.format()<block_end><def_stmt>_header_line self lj<block_start>s=lj._f.name<if>hasattr(lj._f "name")<else>""<line_sep>s<augadd>" ("+lj["sessionid"]+")"<line_sep>s<augadd>" [locked]"<if>lj["locked"]<else>" [unlocked]"<line_sep>ts=lj["ts"].load()<line_sep>ts0=datetime.datetime.fromtimestamp(ts[0])<line_sep>s<augadd>" started: "+ts0.isoformat(" ")<if_stmt>ts[1]<is><not><none><block_start>ts1=datetime.datetime.fromtimestamp(ts[1])<line_sep>s<augadd>" stopped: "+ts1.isoformat(" ")+" runtime: "+str(ts1-ts0)<block_end><return>s<block_end><def_stmt>header self<block_start>"""Computes a header string difference."""<line_sep>s="{red}--- {aline}{no_color}\n"<concat>"{green}+++ {bline}{no_color}"<line_sep>s=s.format(aline=self._header_line(self.a) bline=self._header_line(self.b) red=RED_S green=GREEN_S no_color=NO_COLOR_S )<line_sep><return>s<block_end><def_stmt>_env_both_diff self in_both aenv benv<block_start>sm=self.sm<line_sep>s=""<for_stmt>key sorted(in_both)<block_start>aval=aenv[key]<line_sep>bval=benv[key]<if_stmt>aval<eq>bval<block_start><continue><block_end>s<augadd>"{0!r} is in both, but differs\n".format(key)<line_sep>s<augadd>bold_str_diff(aval bval sm=sm)+"\n"<block_end><return>s<block_end><def_stmt>_env_in_one_diff self x y color xid xenv<block_start>only_x=sorted(x-y)<if_stmt>len(only_x)<eq>0<block_start><return>""<block_end><if_stmt>self.verbose<block_start>xstr=",\n".join([" {0!r}: {1!r}".format(key xenv[key])<for>key only_x])<line_sep>xstr="\n"+xstr<block_end><else_stmt><block_start>xstr=", ".join(["{0!r}".format(key)<for>key only_x])<block_end>in_x="These vars are only in {color}{xid}{no_color}: 
{{{xstr}}}\n\n"<line_sep><return>in_x.format(xid=xid color=color no_color=NO_COLOR_S xstr=xstr)<block_end><def_stmt>envdiff self<block_start>"""Computes the difference between the environments."""<line_sep>aenv=self.a["env"].load()<line_sep>benv=self.b["env"].load()<line_sep>akeys=frozenset(aenv)<line_sep>bkeys=frozenset(benv)<line_sep>in_both=akeys&bkeys<if_stmt>len(in_both)<eq>len(akeys)<eq>len(bkeys)<block_start>keydiff=self._env_both_diff(in_both aenv benv)<if_stmt>len(keydiff)<eq>0<block_start><return>""<block_end>in_a=in_b=""<block_end><else_stmt><block_start>keydiff=self._env_both_diff(in_both aenv benv)<line_sep>in_a=self._env_in_one_diff(akeys bkeys RED_S self.a["sessionid"] aenv)<line_sep>in_b=self._env_in_one_diff(bkeys akeys GREEN_S self.b["sessionid"] benv)<block_end>s="Environment\n-----------\n"+in_a+keydiff+in_b<line_sep><return>s<block_end><def_stmt>_cmd_in_one_diff self inp i xlj xid color<block_start>s="cmd #{i} only in {color}{xid}{no_color}:\n"<line_sep>s=s.format(i=i color=color xid=xid no_color=NO_COLOR_S)<line_sep>lines=inp.splitlines()<line_sep>lt="{color}{pre}{no_color} {line}\n"<line_sep>s<augadd>lt.format(color=color no_color=NO_COLOR_S line=lines[0] pre=">>>")<for_stmt>line lines[1:]<block_start>s<augadd>lt.format(color=color no_color=NO_COLOR_S line=line pre="...")<block_end><if_stmt><not>self.verbose<block_start><return>s+"\n"<block_end>out=xlj["cmds"][0].get("out" "Note: no output stored")<line_sep>s<augadd>out.rstrip()+"\n\n"<line_sep><return>s<block_end><def_stmt>_cmd_out_and_rtn_diff self i j<block_start>s=""<line_sep>aout=self.a["cmds"][i].get("out" <none>)<line_sep>bout=self.b["cmds"][j].get("out" <none>)<if_stmt>aout<is><none><and>bout<is><none># s += 'Note: neither output stored\n'
<block_start><pass><block_end><elif_stmt>bout<is><none><block_start>aid=self.a["sessionid"]<line_sep>s<augadd>"Note: only {red}{aid}{no_color} output stored\n".format(red=RED_S aid=aid no_color=NO_COLOR_S)<block_end><elif_stmt>aout<is><none><block_start>bid=self.b["sessionid"]<line_sep>s<augadd>"Note: only {green}{bid}{no_color} output stored\n".format(green=GREEN_S bid=bid no_color=NO_COLOR_S)<block_end><elif_stmt>aout<ne>bout<block_start>s<augadd>"Outputs differ\n"<line_sep>s<augadd>highlighted_ndiff(aout.splitlines() bout.splitlines())<block_end><else_stmt><block_start><pass><block_end>artn=self.a["cmds"][i]["rtn"]<line_sep>brtn=self.b["cmds"][j]["rtn"]<if_stmt>artn<ne>brtn<block_start>s<augadd>("Return vals {red}{artn}{no_color} & {green}{brtn}{no_color} differ\n").format(red=RED_S green=GREEN_S no_color=NO_COLOR_S artn=artn brtn=brtn)<block_end><return>s<block_end><def_stmt>_cmd_replace_diff self i ainp aid j binp bid<block_start>s=("cmd #{i} in {red}{aid}{no_color} is replaced by \n"<concat>"cmd #{j} in {green}{bid}{no_color}:\n")<line_sep>s=s.format(i=i aid=aid j=j bid=bid red=RED_S green=GREEN_S no_color=NO_COLOR_S)<line_sep>s<augadd>highlighted_ndiff(ainp.splitlines() binp.splitlines())<if_stmt><not>self.verbose<block_start><return>s+"\n"<block_end>s<augadd>self._cmd_out_and_rtn_diff(i j)<line_sep><return>s+"\n"<block_end><def_stmt>cmdsdiff self<block_start>"""Computes the difference of the commands themselves."""<line_sep>aid=self.a["sessionid"]<line_sep>bid=self.b["sessionid"]<line_sep>ainps=[c["inp"]<for>c self.a["cmds"]]<line_sep>binps=[c["inp"]<for>c self.b["cmds"]]<line_sep>sm=self.sm<line_sep>sm.set_seqs(ainps binps)<line_sep>s=""<for_stmt>tag,i1,i2,j1,j2 sm.get_opcodes()<block_start><if_stmt>tag<eq>REPLACE_S<block_start>zipper=itertools.zip_longest<for_stmt>i,ainp,j,binp zipper(range(i1 i2) ainps[i1:i2] range(j1 j2) binps[j1:j2])<block_start><if_stmt>j<is><none><block_start>s<augadd>self._cmd_in_one_diff(ainp i self.a aid 
RED_S)<block_end><elif_stmt>i<is><none><block_start>s<augadd>self._cmd_in_one_diff(binp j self.b bid GREEN_S)<block_end><else_stmt><block_start>self._cmd_replace_diff(i ainp aid j binp bid)<block_end><block_end><block_end><elif_stmt>tag<eq>DELETE_S<block_start><for_stmt>i,inp enumerate(ainps[i1:i2] i1)<block_start>s<augadd>self._cmd_in_one_diff(inp i self.a aid RED_S)<block_end><block_end><elif_stmt>tag<eq>INSERT_S<block_start><for_stmt>j,inp enumerate(binps[j1:j2] j1)<block_start>s<augadd>self._cmd_in_one_diff(inp j self.b bid GREEN_S)<block_end><block_end><elif_stmt>tag<eq>EQUAL_S<block_start><for_stmt>i,j zip(range(i1 i2) range(j1 j2))<block_start>odiff=self._cmd_out_and_rtn_diff(i j)<if_stmt>len(odiff)<g>0<block_start>h=("cmd #{i} in {red}{aid}{no_color} input is the same as \n"<concat>"cmd #{j} in {green}{bid}{no_color}, but output differs:\n")<line_sep>s<augadd>h.format(i=i aid=aid j=j bid=bid red=RED_S green=GREEN_S no_color=NO_COLOR_S )<line_sep>s<augadd>odiff+"\n"<block_end><block_end><block_end><else_stmt><block_start><raise>RuntimeError("tag not understood")<block_end><block_end><if_stmt>len(s)<eq>0<block_start><return>s<block_end><return>"Commands\n--------\n"+s<block_end><def_stmt>format self<block_start>"""Formats the difference between the two history files."""<line_sep>s=self.header()<line_sep>ed=self.envdiff()<if_stmt>len(ed)<g>0<block_start>s<augadd>"\n\n"+ed<block_end>cd=self.cmdsdiff()<if_stmt>len(cd)<g>0<block_start>s<augadd>"\n\n"+cd<block_end><return>s.rstrip()<block_end><block_end>_HD_PARSER=<none><def_stmt>dh_create_parser p=<none><block_start><global>_HD_PARSER<line_sep>p_was_none=p<is><none><if_stmt>_HD_PARSER<is><not><none><and>p_was_none<block_start><return>_HD_PARSER<block_end><if_stmt>p_was_none<block_start>p=argparse.ArgumentParser("diff-history" description="diffs two xonsh history files")<block_end>p.add_argument("--reopen" dest="reopen" default=<false> action="store_true" help="make lazy file loading reopen files each time" 
)<line_sep>p.add_argument("-v" "--verbose" dest="verbose" default=<false> action="store_true" help="whether to print even more information" )<line_sep>p.add_argument("a" help="first file in diff")<line_sep>p.add_argument("b" help="second file in diff")<if_stmt>p_was_none<block_start>_HD_PARSER=p<block_end><return>p<block_end><def_stmt>dh_main_action ns hist=<none> stdout=<none> stderr=<none><block_start>hd=HistoryDiffer(ns.a ns.b reopen=ns.reopen verbose=ns.verbose)<line_sep>print_color(hd.format() file=stdout)<block_end> |
<import_stmt>itertools<import_stmt>numpy<as>np<import_stmt>networkx<as>nx<import_stmt>vocab<def_stmt>coref_score instance property_id<block_start><return>[instance.subject_entity["coref_score"] instance.object_entity["coref_score"]]<block_end><def_stmt>el_score instance property_id<block_start><return>[instance.subject_entity["el_score"] instance.object_entity["el_score"]]<block_end><def_stmt>_entity_linker_types_from_mention entity<block_start>arr=np.zeros(len(vocab.types) np.float32)<for_stmt>i,t enumerate(vocab.types)<block_start><if_stmt>t<in>entity["types"]<block_start>arr[i]=1.0<block_end><block_end><return>arr<block_end><def_stmt>entity_linker_types instance property_id<block_start><return>np.concatenate([_entity_linker_types_from_mention(instance.subject_entity) _entity_linker_types_from_mention(instance.object_entity)])<block_end><def_stmt>wikidata_predicates instance property_id<block_start><return><none><block_end><def_stmt>text_score instance property_id<block_start><return>[instance.text_instance.scores[property_id]]<block_end> |
<import_stmt>json<import_stmt>os<import_from_stmt>typing List Tuple<import_stmt>pytest<line_sep>@pytest.fixture(scope='module')<def_stmt>here <block_start><return>os.path.abspath(os.path.dirname(__file__))<block_end>@pytest.fixture(scope='module')<def_stmt>accounts here<arrow>List[Tuple]<or><none><block_start>"""Return account list"""<line_sep>accounts_path=os.path.join(here 'config')<if_stmt><not>os.path.exists(accounts_path)<block_start><return><none><block_end><with_stmt>open(accounts_path 'r')<as>f<block_start>raw=f.read()<block_end><return>json.loads(raw)<block_end> |
<import_stmt>datetime<import_from_stmt>typing TYPE_CHECKING Generator<import_stmt>Evtx.Evtx<as>evtx<import_from_stmt>lxml etree<import_from_stmt>beagle.common.logging logger<import_from_stmt>beagle.datasources.base_datasource DataSource<import_from_stmt>beagle.transformers.evtx_transformer WinEVTXTransformer<if_stmt>TYPE_CHECKING<block_start><import_from_stmt>beagle.transformer.base_transformer Transformer<import_from_stmt>typing List<block_end><class_stmt>WinEVTX(DataSource)<block_start>"""Parses Windows .evtx files. Yields events one by one using the `python-evtx` library.
Parameters
----------
evtx_log_file : str
The path to the windows evtx file to parse.
"""<line_sep>name="Windows EVTX File"<line_sep>transformers=[WinEVTXTransformer]# type: List[Transformer]
category="Windows Event Logs"<def_stmt>__init__ self evtx_log_file:str<arrow><none><block_start>self.file_path=evtx_log_file<line_sep>logger.info(f"Setting up WinEVTX for {self.file_path}")<block_end><def_stmt>events self<arrow>Generator[dict <none> <none>]<block_start><with_stmt>evtx.Evtx(self.file_path)<as>log<block_start><for_stmt>record log.records()# Get the lxml object
<block_start><yield>self.parse_record(record.lxml())<block_end><block_end><block_end><def_stmt>metadata self<arrow>dict<block_start>"""Get the hostname by inspecting the first record.
Returns
-------
dict
>>> {"hostname": str}
"""<with_stmt>evtx.Evtx(self.file_path)<as>log<block_start><for_stmt>record log.records()# Get the lxml object
<block_start>event=self.parse_record(record.lxml())<line_sep><break><block_end><block_end><return>{"hostname":event["computer"]}<block_end><def_stmt>parse_record self record:etree.ElementTree name=""<arrow>dict<block_start>"""Recursivly converts a etree.ElementTree record to a JSON dictionary
with one level.
Parameters
----------
record : etree.ElementTree
Current record to parse
name : str, optional
Name of the current key we are at.
Returns
-------
dict
JSON represntation of the event
"""<line_sep>data={}<for_stmt>node record<block_start>next_name=node.tag.split("}")[-1]<line_sep># Recurse
data.update(self.parse_record(node next_name))<block_end><if_stmt>record.attrib<and>record.text<block_start>key=f"{name}_{record.keys()[0]}".lower()<line_sep># Use attributes if we're in EventData
<if_stmt>"EventData"<in>record.getparent().tag<block_start>key<augadd>f"_{record.values()[0]}".lower()<block_end>data[key]=record.text<block_end><elif_stmt>record.attrib<block_start><for_stmt>k,val record.attrib.items()<block_start>key=f"{name}_{k}".lower()<line_sep>data[key]=val<block_end><block_end><else_stmt><block_start>curr_name=record.tag.split("}")[-1]<line_sep>key=f"{curr_name}".lower()<line_sep>data[key]=record.text<block_end><if_stmt>key<eq>"timecreated_systemtime"<block_start>time=datetime.datetime.strptime(data["timecreated_systemtime"] "%Y-%m-%d %H:%M:%S.%f")<line_sep>epoch=int(time.strftime("%s"))<line_sep>data["timecreated_systemtime"]=epoch<block_end><return>data<block_end><block_end> |
"""The ``dgl.dataloading`` package contains:
* Data loader classes for iterating over a set of nodes or edges in a graph and generates
computation dependency via neighborhood sampling methods.
* Various sampler classes that perform neighborhood sampling for multi-layer GNNs.
* Negative samplers for link prediction.
For a holistic explanation on how different components work together.
Read the user guide :ref:`guide-minibatch`.
.. note::
This package is experimental and the interfaces may be subject
to changes in future releases. It currently only has implementations in PyTorch.
"""<import_from_stmt>.neighbor *<import_from_stmt>.dataloader *<import_from_stmt>.cluster_gcn *<import_from_stmt>.shadow *<import_from_stmt>. negative_sampler<import_from_stmt>.async_transferer AsyncTransferer<import_from_stmt>.. backend<as>F<if_stmt>F.get_preferred_backend()<eq>'pytorch'<block_start><import_from_stmt>.pytorch *<block_end> |
<import_stmt>os<import_from_stmt>argparse ArgumentTypeError<import_stmt>pytest<import_from_stmt>common.serializers.json_serializer JsonSerializer<import_from_stmt>ledger.genesis_txn.genesis_txn_file_util genesis_txn_file<import_from_stmt>plenum.bls.bls_key_manager_file BlsKeyManagerFile<import_from_stmt>plenum.common.constants NYM VERKEY ROLE TARGET_NYM ALIAS NODE DATA CLIENT_IP CLIENT_PORT NODE_IP NODE_PORT SERVICES BLS_KEY VALIDATOR TRUSTEE STEWARD BLS_KEY_PROOF CURRENT_TXN_PAYLOAD_VERSIONS<import_from_stmt>plenum.common.test_network_setup TestNetworkSetup<import_from_stmt>plenum.common.txn_util getTxnOrderedFields get_seq_no get_txn_id get_payload_data get_type get_version get_protocol_version<import_from_stmt>plenum.common.util randomString<import_from_stmt>storage store_utils<import_from_stmt>stp_zmq.zstack ZStack<line_sep>portsStart=9600<line_sep>NODE_COUNT=4<line_sep>CLIENT_COUNT=8<line_sep>TRUSTEE_COUNT=1<line_sep>@pytest.fixture()<def_stmt>params tconf<block_start>steward_defs,node_defs=TestNetworkSetup.gen_defs(ips=<none> nodeCount=NODE_COUNT starting_port=portsStart)<line_sep>client_defs=TestNetworkSetup.gen_client_defs(clientCount=CLIENT_COUNT)<line_sep>trustee_def=TestNetworkSetup.gen_trustee_def(1)<line_sep>nodeParamsFile=randomString()<line_sep><return>steward_defs node_defs client_defs trustee_def nodeParamsFile<block_end>@pytest.fixture()<def_stmt>bootstrap params tdir tconf<block_start>steward_defs,node_defs,client_defs,trustee_def,nodeParamsFile=params<line_sep>TestNetworkSetup.bootstrapTestNodesCore(config=tconf network="test" appendToLedgers=<false> domainTxnFieldOrder=getTxnOrderedFields() trustee_def=trustee_def steward_defs=steward_defs node_defs=node_defs client_defs=client_defs localNodes=1 nodeParamsFileName=nodeParamsFile chroot=tdir)<block_end>@pytest.fixture()<def_stmt>config_helper config_helper_class tdir tconf<block_start><return>config_helper_class(tconf chroot=tdir)<block_end>@pytest.fixture()<def_stmt>genesis_dir 
config_helper<block_start><return>config_helper.genesis_dir<block_end>@pytest.fixture()<def_stmt>keys_dir config_helper<block_start><return>config_helper.keys_dir<block_end>@pytest.fixture()<def_stmt>domain_genesis_file genesis_dir config_helper<block_start><return>os.path.join(genesis_dir genesis_txn_file(TestNetworkSetup.domain_ledger_file_name(config_helper.config)))<block_end>@pytest.fixture()<def_stmt>pool_genesis_file genesis_dir config_helper<block_start><return>os.path.join(genesis_dir genesis_txn_file(TestNetworkSetup.pool_ledger_file_name(config_helper.config)))<block_end><def_stmt>test_bootstrap_test_node_creates_genesis_files bootstrap genesis_dir domain_genesis_file pool_genesis_file<block_start><assert_stmt>os.path.exists(genesis_dir)<assert_stmt>os.path.exists(domain_genesis_file)<assert_stmt>os.path.exists(pool_genesis_file)<block_end><def_stmt>test_bootstrap_test_node_creates_keys bootstrap keys_dir params<block_start><assert_stmt>os.path.exists(keys_dir)<line_sep>_,node_defs,_,_,_=params<line_sep># only Node1 is local, that is has keys generated
node_name=node_defs[0].name<line_sep>node_keys_folder=os.path.join(keys_dir node_name)<assert_stmt>os.path.exists(node_keys_folder)<assert_stmt>os.path.exists(os.path.join(node_keys_folder ZStack.PublicKeyDirName))<assert_stmt>os.path.exists(os.path.join(node_keys_folder ZStack.PrivateKeyDirName))<assert_stmt>os.path.exists(os.path.join(node_keys_folder ZStack.VerifKeyDirName))<assert_stmt>os.path.exists(os.path.join(node_keys_folder ZStack.SigKeyDirName))<assert_stmt>os.path.exists(os.path.join(node_keys_folder BlsKeyManagerFile.BLS_KEYS_DIR_NAME))<block_end><def_stmt>test_domain_genesis_txns bootstrap domain_genesis_file<block_start>serializer=JsonSerializer()<with_stmt>open(domain_genesis_file)<as>f<block_start>i=0<for_stmt>line store_utils.cleanLines(f.readlines())<block_start>txn=serializer.deserialize(line)<assert_stmt>get_seq_no(txn)<assert_stmt>get_payload_data(txn)<assert_stmt>get_type(txn)<eq>NYM<assert_stmt>get_version(txn)<eq>"1"<assert_stmt>get_protocol_version(txn)<is><none><assert_stmt>get_payload_data(txn)[VERKEY]<assert_stmt>get_payload_data(txn)[TARGET_NYM]<assert_stmt>ALIAS<not><in>get_payload_data(txn)<line_sep># expect Trustees, then Stewards, then Clients
<if_stmt>0<le>i<l>TRUSTEE_COUNT<block_start>expected_role=TRUSTEE<block_end><elif_stmt>TRUSTEE_COUNT<le>i<l>TRUSTEE_COUNT+NODE_COUNT<block_start>expected_role=STEWARD<block_end><else_stmt><block_start>expected_role=<none><block_end><assert_stmt>get_payload_data(txn).get(ROLE)<eq>expected_role<line_sep>i<augadd>1<block_end><block_end><block_end><def_stmt>test_pool_genesis_txns bootstrap pool_genesis_file<block_start>serializer=JsonSerializer()<with_stmt>open(pool_genesis_file)<as>f<block_start><for_stmt>line store_utils.cleanLines(f.readlines())<block_start>txn=serializer.deserialize(line)<assert_stmt>get_seq_no(txn)<assert_stmt>get_txn_id(txn)<assert_stmt>get_payload_data(txn)<assert_stmt>get_type(txn)<eq>NODE<assert_stmt>get_version(txn)<eq>"1"<assert_stmt>get_protocol_version(txn)<is><none><assert_stmt>get_payload_data(txn)[TARGET_NYM]<line_sep>data=get_payload_data(txn).get(DATA)<assert_stmt>data<assert_stmt>data[ALIAS]<assert_stmt>data[CLIENT_IP]<assert_stmt>data[CLIENT_PORT]<assert_stmt>data[NODE_IP]<assert_stmt>data[NODE_PORT]<assert_stmt>data[SERVICES]<eq>[VALIDATOR]<assert_stmt>data[BLS_KEY]<assert_stmt>data[BLS_KEY_PROOF]<block_end><block_end><block_end><def_stmt>test_check_valid_ip_host params tdir tconf<block_start>_,_,client_defs,trustee_def,nodeParamsFile=params<line_sep>valid=['192.168.3.11,172.16.17.32' 'ec2-54-173-9-185.compute-1.amazonaws.com,ec2-52-38-24-189.compute-1.amazonaws.com' 'ec2-54-173-9-185.compute-1.amazonaws.com,172.16.17.32,192.168.3.11' '172.16.17.32,ec2-54-173-9-185.compute-1.amazonaws.com,192.168.3.11' 'ledger.net,ledger.net']<line_sep>invalid=['34.200.79()3.65,172.16.17.32' '172.16.17.32,ec2-54-173$-9-185.compute-1.amazonaws.com,192.168.3.11' '172.16.17.32,ec2-54-173-9-185.com$pute-1.amazonaws.com,192.168.3.11' '172.16.17.32,ec2-54-173-9-185.com&pute-1.amazonaws.com,192.168.3.11' '172.16.17.32,ec2-54-173-9-185.com*pute-1.amazonaws.com,192.168.3.11' ]<for_stmt>v 
valid<block_start><assert_stmt>v.split(',')<eq>TestNetworkSetup._bootstrap_args_type_ips_hosts(v)<line_sep>steward_defs,node_defs=TestNetworkSetup.gen_defs(ips=<none> nodeCount=2 starting_port=portsStart)<line_sep>TestNetworkSetup.bootstrapTestNodesCore(config=tconf network="test" appendToLedgers=<false> domainTxnFieldOrder=getTxnOrderedFields() trustee_def=trustee_def steward_defs=steward_defs node_defs=node_defs client_defs=client_defs localNodes=1 nodeParamsFileName=nodeParamsFile chroot=tdir)<block_end><for_stmt>v invalid<block_start><with_stmt>pytest.raises(ArgumentTypeError)<block_start>TestNetworkSetup._bootstrap_args_type_ips_hosts(v)<block_end><block_end><block_end> |
# Copyright 2018 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Adversarial losses.
"""<import_stmt>tensorflow<as>tf<def_stmt>binary_adversarial_losses real_data fake_data discriminator_fn mode="max_real"<block_start>"""Computes adversarial losses of real/fake binary discrimination game.
.. role:: python(code)
:language: python
Args:
real_data (Tensor or array): Real data of shape
`[num_real_examples, ...]`.
fake_data (Tensor or array): Fake data of shape
`[num_fake_examples, ...]`. `num_real_examples` does not
necessarily equal `num_fake_examples`.
discriminator_fn: A callable takes data (e.g., :attr:`real_data` and
:attr:`fake_data`) and returns the logits of being real. The
signature of `discriminator_fn` must be:
:python:`logits, ... = discriminator_fn(data)`.
The return value of `discriminator_fn` can be the logits, or
a tuple where the logits are the first element.
mode (str): Mode of the generator loss. Either "max_real" or "min_fake".
- **"max_real"** (default): minimizing the generator loss is to\
maximize the probability of fake data being classified as real.
- **"min_fake"**: minimizing the generator loss is to minimize the\
probability of fake data being classified as fake.
Returns:
A tuple `(generator_loss, discriminator_loss)` each of which is
a scalar Tensor, loss to be minimized.
"""<line_sep>real_logits=discriminator_fn(real_data)<if_stmt>isinstance(real_logits (list tuple))<block_start>real_logits=real_logits[0]<block_end>real_loss=tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=real_logits labels=tf.ones_like(real_logits)))<line_sep>fake_logits=discriminator_fn(fake_data)<if_stmt>isinstance(fake_logits (list tuple))<block_start>fake_logits=fake_logits[0]<block_end>fake_loss=tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_logits labels=tf.zeros_like(fake_logits)))<line_sep>d_loss=real_loss+fake_loss<if_stmt>mode<eq>"min_fake"<block_start>g_loss=-fake_loss<block_end><elif_stmt>mode<eq>"max_real"<block_start>g_loss=tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=fake_logits labels=tf.ones_like(fake_logits)))<block_end><else_stmt><block_start><raise>ValueError("Unknown mode: %s. Only 'min_fake' and 'max_real' "<concat>"are allowed.")<block_end><return>g_loss d_loss<block_end> |
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
#
# This sample shows a DEPTH deep pipeline, where each stage adds 1 to the
# integer input stream.
#
# It is intended for running hardware_emulation or hardware xilinx targets.
<import_stmt>dace<import_stmt>numpy<as>np<line_sep># add symbols
N=dace.symbol('N')<line_sep># add sdfg
sdfg=dace.SDFG('pipeline')<line_sep># add state
state=sdfg.add_state('device_state')<line_sep># add constants
depth=10<line_sep>sdfg.add_constant('DEPTH' depth)<line_sep># add arrays
sdfg.add_array('A' [N] dtype=dace.int32 storage=dace.StorageType.CPU_Heap)<line_sep>sdfg.add_array('B' [N] dtype=dace.int32 storage=dace.StorageType.CPU_Heap)<line_sep>sdfg.add_array('fpga_A' [N] dtype=dace.int32 transient=<true> storage=dace.StorageType.FPGA_Global)<line_sep>sdfg.add_array('fpga_B' [N] dtype=dace.int32 transient=<true> storage=dace.StorageType.FPGA_Global)<line_sep># add streams
sdfg.add_stream('A_stream' dtype=dace.int32 transient=<true> storage=dace.StorageType.FPGA_Local)<line_sep>sdfg.add_stream('B_stream' dtype=dace.int32 transient=<true> storage=dace.StorageType.FPGA_Local)<line_sep># add custom rtl tasklet
rtl_tasklet=state.add_tasklet(name='rtl_tasklet' inputs={'a'} outputs={'b'} code='''
/*
Convention:
|--------------------------------------------------------|
| |
-->| ap_aclk (clock input) |
-->| ap_areset (reset input, rst on high) |
-->| ap_start (start pulse from host) |
<--| ap_done (tells the host that the kernel is done) |
| |
| For each input: For each output: |
| |
-->| s_axis_{input}_tvalid reg m_axis_{output}_tvalid |-->
-->| s_axis_{input}_tdata reg m_axis_{output}_tdata |-->
<--| reg s_axis_{input}_tready m_axis_{output}_tready |<--
-->| s_axis_{input}_tkeep reg m_axis_{output}_tkeep |-->
-->| s_axis_{input}_tlast reg m_axis_{output}_tlast |-->
| |
|--------------------------------------------------------|
*/
assign ap_done = 1; // free-running kernel
reg [DEPTH-1:0] tvalids;
reg [31:0] tdatas [DEPTH-1:0];
reg [DEPTH-1:0] treadys;
integer i;
always @(posedge ap_aclk) begin
if (ap_areset) begin
for (i = 0; i < DEPTH; i = i + 1) begin
tvalids[i] = 0;
tdatas[i] = 0;
treadys[i] = 1;
end
s_axis_a_tready = 1;
m_axis_b_tvalid = 0;
m_axis_b_tdata = 0;
end else begin
// Handle m_axis
if (!m_axis_b_tvalid || (m_axis_b_tvalid && m_axis_b_tready)) begin
m_axis_b_tvalid = tvalids[DEPTH-1];
m_axis_b_tdata = tdatas[DEPTH-1];
tvalids[DEPTH-1] = 0;
tvalids[DEPTH-1] = 0;
end
treadys[DEPTH-1] = !m_axis_b_tvalid;
// Handle intermediates
for (i = DEPTH-1; i > 0; i = i - 1) begin
if (tvalids[i-1] && treadys[i-1]) begin
tvalids[i] = tvalids[i-1];
tdatas[i] = tdatas[i-1] + 1;
tvalids[i-1] = 0;
tdatas[i-1] = 0;
end
treadys[i-1] = !tvalids[i];
end
// Handle s_axis
if (s_axis_a_tvalid && s_axis_a_tready) begin
tvalids[0] = s_axis_a_tvalid;
tdatas[0] = s_axis_a_tdata + 1;
end
s_axis_a_tready = !tvalids[0];
end
end
''' language=dace.Language.SystemVerilog)<line_sep># add read and write tasklets
read_a=state.add_tasklet('read_a' {'inp'} {'out'} 'out = inp')<line_sep>write_b=state.add_tasklet('write_b' {'inp'} {'out'} 'out = inp')<line_sep># add read and write maps
read_a_entry,read_a_exit=state.add_map('read_a_map' dict(i='0:N') schedule=dace.ScheduleType.FPGA_Device)<line_sep>write_b_entry,write_b_exit=state.add_map('write_b_map' dict(i='0:N') schedule=dace.ScheduleType.FPGA_Device)<line_sep># add read_a memlets and access nodes
read_a_inp=state.add_read('fpga_A')<line_sep>read_a_out=state.add_write('A_stream')<line_sep>state.add_memlet_path(read_a_inp read_a_entry read_a dst_conn='inp' memlet=dace.Memlet('fpga_A[i]'))<line_sep>state.add_memlet_path(read_a read_a_exit read_a_out src_conn='out' memlet=dace.Memlet('A_stream[0]'))<line_sep># add tasklet memlets
A=state.add_read('A_stream')<line_sep>B=state.add_write('B_stream')<line_sep>state.add_memlet_path(A rtl_tasklet dst_conn='a' memlet=dace.Memlet('A_stream[0]'))<line_sep>state.add_memlet_path(rtl_tasklet B src_conn='b' memlet=dace.Memlet('B_stream[0]'))<line_sep># add write_b memlets and access nodes
write_b_inp=state.add_read('B_stream')<line_sep>write_b_out=state.add_write('fpga_B')<line_sep>state.add_memlet_path(write_b_inp write_b_entry write_b dst_conn='inp' memlet=dace.Memlet('B_stream[0]'))<line_sep>state.add_memlet_path(write_b write_b_exit write_b_out src_conn='out' memlet=dace.Memlet('fpga_B[i]'))<line_sep># add copy to device state
copy_to_device=sdfg.add_state('copy_to_device')<line_sep>cpu_a=copy_to_device.add_read('A')<line_sep>dev_a=copy_to_device.add_write('fpga_A')<line_sep>copy_to_device.add_memlet_path(cpu_a dev_a memlet=dace.Memlet('A[0:N]'))<line_sep>sdfg.add_edge(copy_to_device state dace.InterstateEdge())<line_sep># add copy to host state
copy_to_host=sdfg.add_state('copy_to_host')<line_sep>dev_b=copy_to_host.add_read('fpga_B')<line_sep>cpu_b=copy_to_host.add_write('B')<line_sep>copy_to_host.add_memlet_path(dev_b cpu_b memlet=dace.Memlet('B[0:N]'))<line_sep>sdfg.add_edge(state copy_to_host dace.InterstateEdge())<line_sep># validate sdfg
sdfg.validate()<line_sep>######################################################################
<if_stmt>__name__<eq>'__main__'# init data structures
<block_start>N.set(8192)<line_sep>a=np.random.randint(0 100 N.get()).astype(np.int32)<line_sep>b=np.zeros((N.get() )).astype(np.int32)<line_sep># show initial values
print("a={}, b={}".format(a b))<line_sep># call program
sdfg(A=a B=b N=N)<line_sep># show result
print("a={}, b={}".format(a b))<line_sep># check result
<for_stmt>i range(N.get())<block_start><assert_stmt>b[i]<eq>a[i]+depth<block_end><block_end> |
<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<line_sep>__license__='MIT'<line_sep>__maintainer__=['<NAME>']<line_sep>__email__=['<EMAIL>']<line_sep> |
<import_stmt>json<import_stmt>os<import_from_stmt>dataclasses dataclass field<import_stmt>blobfile<as>bf<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>summarize_from_feedback.datasets jsonl_encoding<import_from_stmt>summarize_from_feedback.query_response_model ModelSpec<import_from_stmt>summarize_from_feedback.reward_model RewardModel<import_from_stmt>summarize_from_feedback.task_data make_jsonl_samples_iter<import_from_stmt>summarize_from_feedback.tasks TaskHParams<import_from_stmt>summarize_from_feedback.utils Timer hyperparams<import_from_stmt>summarize_from_feedback.utils.assertions assert_shape_eq assert_eq<import_from_stmt>summarize_from_feedback.utils.logging_utils setup_logging_with_pacific_tz<import_from_stmt>summarize_from_feedback.utils.torch_utils to_numpy<line_sep>"""
Evaluates a reward model on a set of query-responses examples. The output will contain the same
json data as the input along with an extra key containing the predicted reward.
"""<line_sep>@dataclass<class_stmt>HParams(hyperparams.HParams)<block_start>reward_model_spec:ModelSpec=field(default_factory=ModelSpec)<line_sep>task:TaskHParams=field(default_factory=TaskHParams)<line_sep>input_path:str=<none># Should contain files samples.0.jsonl, samples.1.jsonl, ...
fp16_activations:bool=<true><line_sep>output_key:str="predicted_reward"<block_end><def_stmt>main H:HParams<block_start>layout=H.reward_model_spec.run_params.all_gpu_layout()<line_sep>reward_model=RewardModel(task_hparams=H.task spec=H.reward_model_spec layout=layout)<line_sep>setup_logging_with_pacific_tz()<line_sep>act_dtype=torch.float16<if>H.fp16_activations<else>torch.float32<line_sep>results_dir=bf.join(os.environ.get("OUTPUT_DIR" os.path.join("/tmp/jobs" os.getenv("JOB_NAME"))) "results")<line_sep>bf.makedirs(results_dir)<if_stmt>layout.is_logging_rank<block_start><with_stmt>open(bf.join(results_dir "task_hparams.json") "w")<as>f<block_start>json.dump(H.task.to_json() f)<block_end><with_stmt>open(bf.join(results_dir "hparams.json") "w")<as>f<block_start>json.dump(H.to_json() f)<block_end><block_end># Creates files for printing. Only the replica root prints the files
output_file_name=os.devnull<if_stmt>layout.is_replica_root<block_start>fname=f"samples.{layout.replica_idx}.jsonl"<line_sep>output_file_name=bf.join(results_dir fname)<line_sep>print(f"Outputs will be written to {output_file_name}")<block_end>input_iter=make_jsonl_samples_iter(H.input_path layout=layout)<line_sep>replica_rewards=[]<with_stmt>open(output_file_name "a")<as>out_f<block_start>input_idx=0<for_stmt>input input_iter<block_start><with_stmt>Timer()<as>timer<block_start>query_tokens=torch.tensor(input["context_tokens"])<line_sep>assert_shape_eq(query_tokens (H.task.query.length ) "Context tokens shape mismatch")<line_sep>response_tokens=torch.tensor(input["sample_tokens"])<line_sep>assert_eq(response_tokens.dim() 2)<line_sep>n_responses=response_tokens.size(0)<line_sep>results=reward_model.reward(query_tokens=query_tokens.unsqueeze(0) response_tokens=response_tokens.unsqueeze(0) act_dtype=act_dtype )<line_sep>rewards=to_numpy(results["reward"].reshape((n_responses )))<if_stmt>layout.is_replica_root<block_start>replica_rewards.append(rewards)<line_sep>output={**input H.output_key:rewards}<line_sep>out_f.write((json.dumps(jsonl_encoding.encode_example(output))+"\n"))<block_end><block_end>input_idx<augadd>1<if_stmt>layout.is_replica_root<block_start>print(f"Batch {input_idx}. 
Took {timer.interval} seconds")<block_end><block_end><if_stmt>layout.is_replica_root<block_start>print(f"Wrote {input_idx} batches to {output_file_name}")<line_sep>replica_rewards=np.stack(replica_rewards axis=0)<line_sep>all_rewards=reward_model.dp_comm.mpi_all_gather(replica_rewards "rewards")<if_stmt>layout.replica_idx<eq>0<block_start>all_rewards=np.concatenate(all_rewards axis=0)<line_sep>print(f"Mean reward: {all_rewards.mean():.3f}")<if_stmt>all_rewards.shape[1]<g>1<block_start>print(f"Stddev within a query: {all_rewards.std(axis=1 ddof=1).mean():.3}")<block_end>print(f"Stddev across queries: {all_rewards.std(axis=0 ddof=1).mean():.3}")<block_end><block_end><block_end><return>dict(output_path=results_dir)<block_end> |
<import_stmt>argparse<import_stmt>imagesize<import_stmt>os<import_stmt>subprocess<line_sep>parser=argparse.ArgumentParser(description='MegaDepth Undistortion')<line_sep>parser.add_argument('--colmap_path' type=str required=<true> help='path to colmap executable')<line_sep>parser.add_argument('--base_path' type=str required=<true> help='path to MegaDepth')<line_sep>args=parser.parse_args()<line_sep>sfm_path=os.path.join(args.base_path 'MegaDepth_v1_SfM')<line_sep>base_depth_path=os.path.join(args.base_path 'MegaDepth_v1')<line_sep>output_path=os.path.join(args.base_path 'Undistorted_SfM')<line_sep>os.mkdir(output_path)<for_stmt>scene_name os.listdir(base_depth_path)<block_start>current_output_path=os.path.join(output_path scene_name)<line_sep>os.mkdir(current_output_path)<line_sep>image_path=os.path.join(base_depth_path scene_name 'dense0' 'imgs')<if_stmt><not>os.path.exists(image_path)<block_start><continue><block_end># Find the maximum image size in scene.
max_image_size=0<for_stmt>image_name os.listdir(image_path)<block_start>max_image_size=max(max_image_size max(imagesize.get(os.path.join(image_path image_name))))<block_end># Undistort the images and update the reconstruction.
subprocess.call([os.path.join(args.colmap_path 'colmap') 'image_undistorter' '--image_path' os.path.join(sfm_path scene_name 'images') '--input_path' os.path.join(sfm_path scene_name 'sparse' 'manhattan' '0') '--output_path' current_output_path '--max_image_size' str(max_image_size)])<line_sep># Transform the reconstruction to raw text format.
sparse_txt_path=os.path.join(current_output_path 'sparse-txt')<line_sep>os.mkdir(sparse_txt_path)<line_sep>subprocess.call([os.path.join(args.colmap_path 'colmap') 'model_converter' '--input_path' os.path.join(current_output_path 'sparse') '--output_path' sparse_txt_path '--output_type' 'TXT'])<block_end> |
<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>shutil<import_stmt>sys<import_stmt>tempfile<import_from_stmt>observations.r.cholera cholera<def_stmt>test_cholera <block_start>"""Test module cholera.py by downloading
cholera.csv and testing shape of
extracted data has 38 rows and 15 columns
"""<line_sep>test_path=tempfile.mkdtemp()<line_sep>x_train,metadata=cholera(test_path)<try_stmt><block_start><assert_stmt>x_train.shape<eq>(38 15)<block_end><except_stmt><block_start>shutil.rmtree(test_path)<line_sep><raise>()<block_end><block_end> |
<import_from_stmt>lightning Lightning<import_from_stmt>lightning.types.base Base<import_from_stmt>functools wraps<import_stmt>inspect<def_stmt>viztype VizType# wrapper that passes inputs to cleaning function and creates viz
<block_start>@wraps(VizType.clean)<def_stmt>plotter self *args **kwargs<block_start><if_stmt>kwargs['height']<is><none><and>kwargs['width']<is><none><block_start><if_stmt>self.size<ne>'full'<block_start>kwargs['width']=SIZES[self.size]<block_end><block_end><if_stmt>self.local_enabled<block_start><if_stmt>hasattr(VizType '_local')<and>VizType._local<eq><false><block_start>name=VizType._func<if>hasattr(VizType 'func')<else>VizType._name<line_sep>print("Plots of type '%s' not yet supported in local mode"%name)<block_end><else_stmt><block_start>viz=VizType._baseplot_local(VizType._name *args **kwargs)<line_sep><return>viz<block_end><block_end><else_stmt><block_start><if_stmt><not>hasattr(self 'session')<block_start>self.create_session()<block_end><if_stmt>VizType._name<eq>'plot'<block_start><if_stmt>'type'<not><in>kwargs<block_start><raise>ValueError("Must specify a type for custom plots")<block_end><else_stmt><block_start>type=kwargs['type']<del_stmt>kwargs['type']<block_end>viz=VizType._baseplot(self.session type *args **kwargs)<block_end><else_stmt><block_start>viz=VizType._baseplot(self.session VizType._name *args **kwargs)<block_end>self.session.visualizations.append(viz)<line_sep><return>viz<block_end><block_end># get desired function name if different than plot type
<if_stmt>hasattr(VizType '_func')<block_start>func=VizType._func<block_end><else_stmt><block_start>func=VizType._name<block_end># crazy hack to give the dynamically generated function the correct signature
# based on: http://emptysqua.re/blog/copying-a-python-functions-signature/
# NOTE currently only handles functions with keyword arguments with defaults of None
options={}<if_stmt>hasattr(VizType '_options')<block_start>options=VizType._options<block_end><def_stmt>parse val<block_start><if_stmt>isinstance(val str)<block_start><return>"'"+val+"'"<block_end><else_stmt><block_start><return>val<block_end><block_end>formatted_options=', '.join(['%s=%s'%(key parse(value.get('default')))<for>(key value) options.items()])<line_sep>argspec=inspect.getargspec(VizType.clean)<line_sep>formatted_args=inspect.formatargspec(*argspec)<line_sep>fndef='lambda self, %s, %s: plotter(self,%s, %s)'%(formatted_args.lstrip('(').rstrip(')') formatted_options formatted_args[1:].replace('=None' '').rstrip(')') ', '.join('%s=%s'%(key key)<for>key options.keys()))<line_sep>fake_fn=eval(fndef {'plotter':plotter})<line_sep>plotter=wraps(VizType.clean)(fake_fn)<line_sep># manually assign a plot-specific name (instead of 'clean')
plotter.__name__=func<if_stmt>plotter.__doc__<block_start>plotter.__doc__<augadd>Base._doc<block_end># add plotter to class
setattr(Lightning func plotter)<line_sep><return>VizType<block_end>SIZES={'small':400 'medium':600 'large':800 }<line_sep> |
<import_stmt>json<import_from_stmt>typing Dict Optional<import_stmt>logging<import_from_stmt>rich.logging RichHandler<import_from_stmt>ciphey.iface Checker Config ParamSpec T registry<line_sep>@registry.register<class_stmt>JsonChecker(Checker[str])<block_start>"""
This object is effectively a prebuilt quorum (with requirement 1) of common patterns
"""<def_stmt>check self text:T<arrow>Optional[str]<block_start>logging.debug("Trying json checker")<line_sep># https://github.com/Ciphey/Ciphey/issues/389
<if_stmt>text.isdigit()<block_start><return><none><block_end><try_stmt><block_start>json.loads(text)<line_sep><return>""<block_end><except_stmt>ValueError<block_start><return><none><block_end><block_end><def_stmt>getExpectedRuntime self text:T<arrow>float# TODO: actually bench this
<block_start><return>1e-7<times>len(text)<block_end># From benchmarks I found online
<def_stmt>__init__ self config:Config<block_start>super().__init__(config)<block_end>@staticmethod<def_stmt>getParams <arrow>Optional[Dict[str ParamSpec]]<block_start><pass><block_end><block_end> |
"""
Helpers for dealing with HTML input.
"""<import_stmt>re<import_from_stmt>django.utils.datastructures MultiValueDict<def_stmt>is_html_input dictionary# MultiDict type datastructures are used to represent HTML form input,
# which may have more than one value for each key.
<block_start><return>hasattr(dictionary 'getlist')<block_end><def_stmt>parse_html_list dictionary prefix='' default=<none><block_start>"""
Used to support list values in HTML forms.
Supports lists of primitives and/or dictionaries.
* List of primitives.
{
'[0]': 'abc',
'[1]': 'def',
'[2]': 'hij'
}
-->
[
'abc',
'def',
'hij'
]
* List of dictionaries.
{
'[0]foo': 'abc',
'[0]bar': 'def',
'[1]foo': 'hij',
'[1]bar': 'klm',
}
-->
[
{'foo': 'abc', 'bar': 'def'},
{'foo': 'hij', 'bar': 'klm'}
]
:returns a list of objects, or the value specified in ``default`` if the list is empty
"""<line_sep>ret={}<line_sep>regex=re.compile(r'^%s\[([0-9]+)\](.*)$'%re.escape(prefix))<for_stmt>field,value dictionary.items()<block_start>match=regex.match(field)<if_stmt><not>match<block_start><continue><block_end>index,key=match.groups()<line_sep>index=int(index)<if_stmt><not>key<block_start>ret[index]=value<block_end><elif_stmt>isinstance(ret.get(index) dict)<block_start>ret[index][key]=value<block_end><else_stmt><block_start>ret[index]=MultiValueDict({key:[value]})<block_end><block_end># return the items of the ``ret`` dict, sorted by key, or ``default`` if the dict is empty
<return>[ret[item]<for>item sorted(ret)]<if>ret<else>default<block_end><def_stmt>parse_html_dict dictionary prefix=''<block_start>"""
Used to support dictionary values in HTML forms.
{
'profile.username': 'example',
'profile.email': '<EMAIL>',
}
-->
{
'profile': {
'username': 'example',
'email': '<EMAIL>'
}
}
"""<line_sep>ret=MultiValueDict()<line_sep>regex=re.compile(r'^%s\.(.+)$'%re.escape(prefix))<for_stmt>field dictionary<block_start>match=regex.match(field)<if_stmt><not>match<block_start><continue><block_end>key=match.groups()[0]<line_sep>value=dictionary.getlist(field)<line_sep>ret.setlist(key value)<block_end><return>ret<block_end> |
<import_from_stmt>topaz.module ClassDef<import_from_stmt>topaz.objects.objectobject W_Object<import_from_stmt>topaz.modules.ffi.function W_FFIFunctionObject<import_from_stmt>rpython.rlib jit<class_stmt>W_VariadicInvokerObject(W_Object)<block_start>classdef=ClassDef('VariadicInvoker' W_Object.classdef)<def_stmt>__init__ self space<block_start>W_Object.__init__(self space)<line_sep>self.w_info=<none><line_sep>self.w_handle=<none><block_end>@classdef.singleton_method('allocate')<def_stmt>singleton_method_allocate self space args_w<block_start><return>W_VariadicInvokerObject(space)<block_end>@classdef.method('initialize')<def_stmt>method_initialize self space w_handle w_arg_types w_ret_type w_options=<none><block_start>self.w_ret_type=w_ret_type<line_sep>self.w_options=w_options<line_sep>self.w_handle=w_handle<if_stmt>w_options<is><none><block_start>w_type_map=space.newhash()<block_end><else_stmt><block_start>w_key=space.newsymbol('type_map')<line_sep>w_type_map=space.send(w_options '[]' [w_key])<block_end>space.send(self 'init' [w_arg_types w_type_map])<block_end>@classdef.method('invoke' arg_values_w='array')<def_stmt>method_invoke self space w_arg_types arg_values_w<block_start>w_func_cls=space.getclassfor(W_FFIFunctionObject)<line_sep>w_func=space.send(w_func_cls 'new' [self.w_ret_type w_arg_types self.w_handle self.w_options])<line_sep><return>self._dli_call(space w_func arg_values_w)<block_end>@jit.dont_look_inside<def_stmt>_dli_call self space w_func arg_values_w# XXX we are missing argument promotion for the variadic arguments here
# see
# http://stackoverflow.com/questions/1255775/default-argument-promotions-in-c-function-calls
<block_start><return>space.send(w_func 'call' arg_values_w)<block_end><block_end> |
"""
Tests for __models__
"""<import_stmt>re<import_from_stmt>asynctest.mock CoroutineMock patch<import_from_stmt>tortoise Tortoise<import_from_stmt>tortoise.contrib test<import_from_stmt>tortoise.exceptions ConfigurationError<import_from_stmt>tortoise.utils get_schema_sql<class_stmt>TestGenerateSchema(test.SimpleTestCase)<block_start><async_keyword><def_stmt>setUp self<block_start><try_stmt><block_start>Tortoise.apps={}<line_sep>Tortoise._connections={}<line_sep>Tortoise._inited=<false><block_end><except_stmt>ConfigurationError<block_start><pass><block_end>Tortoise._inited=<false><line_sep>self.sqls=""<line_sep>self.post_sqls=""<line_sep>self.engine=test.getDBConfig(app_label="models" modules=[])["connections"]["models"]["engine"]<block_end><async_keyword><def_stmt>tearDown self<block_start>Tortoise._connections={}<line_sep><await>Tortoise._reset_apps()<block_end><async_keyword><def_stmt>init_for self module:str safe=<false><arrow><none><block_start><if_stmt>self.engine<ne>"tortoise.backends.sqlite"<block_start><raise>test.SkipTest("sqlite only")<block_end><with_stmt>patch("tortoise.backends.sqlite.client.SqliteClient.create_connection" new=CoroutineMock())<block_start><await>Tortoise.init({"connections":{"default":{"engine":"tortoise.backends.sqlite" "credentials":{"file_path":":memory:"} }} "apps":{"models":{"models":[module] "default_connection":"default"}} })<line_sep>self.sqls=get_schema_sql(Tortoise._connections["default"] safe).split(";\n")<block_end><block_end><def_stmt>get_sql self text:str<arrow>str<block_start><return>str(re.sub(r"[ \t\n\r]+" " " [sql<for>sql self.sqls<if>text<in>sql][0]))<block_end><async_keyword><def_stmt>test_good self<block_start><await>self.init_for("tests.model_setup.models__models__good")<line_sep>self.assertIn("goodtournament" "; ".join(self.sqls))<line_sep>self.assertIn("inaclasstournament" "; ".join(self.sqls))<line_sep>self.assertNotIn("badtournament" "; ".join(self.sqls))<block_end><async_keyword><def_stmt>test_bad 
self<block_start><await>self.init_for("tests.model_setup.models__models__bad")<line_sep>self.assertNotIn("goodtournament" "; ".join(self.sqls))<line_sep>self.assertNotIn("inaclasstournament" "; ".join(self.sqls))<line_sep>self.assertIn("badtournament" "; ".join(self.sqls))<block_end><block_end> |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
<import_from_future_stmt> absolute_import<import_stmt>json<import_from_stmt>ast literal_eval<import_from_stmt>string Template<import_from_stmt>stepfunctions.workflow.widgets.utils format_time get_elapsed_ms AWS_TABLE_CSS sagemaker_console_link <line_sep>LAMBDA_SERVICE_NAME="lambda"<line_sep>LAMBDA_FUNCTION_RESOURCE_TYPE="function"<line_sep>LAMBDA_ARN_SEGMENT_LENGTH=7<line_sep>SAGEMAKER_JOB_NAME_MAP={'createTrainingJob':'Sagemaker training job' 'createTrainingJob.sync':'Sagemaker training job' 'createTransformJob':'Sagemaker transform job' 'createTransformJob.sync':'Sagemaker transform job' 'createModel':'Sagemaker model' 'createModel.sync':'Sagemaker model' 'createEndpointConfig':'Sagemaker endpoint configuration' 'createEndpointConfig.sync':'Sagemaker endpoint configuration' 'createEndpoint':'Sagemaker endpoint' 'createEndpoint.sync':'Sagemaker endpoint'}<line_sep>TABLE_TEMPLATE="""
<style>
$aws_table_css
$custom_css
</style>
<table class="table-widget">
<thead>
<tr>
<th style="width: 60px">ID</th>
<th>Type</th>
<th>Step</th>
<th>Resource</th>
<th>Elapsed Time (ms)</th>
<th>Timestamp</th>
</tr>
</thead>
<tbody>
{table_rows}
</tbody>
</table>
<script type="text/javascript">
$js
</script>
"""<line_sep>TABLE_ROW_TEMPLATE="""
<tr class="awsui-table-row">
<td class="awsui-util-pl-xs clickable-cell">
<div class="toggle-icon"></div>
<span>$event_id</span>
</td>
<td>$event_type</td>
<td>$step</td>
<td><a $resource_url target="_blank">$resource</a></td>
<td>$elapsed_time</td>
<td>$timestamp</td>
</tr>
<tr class="hide">
<td class="execution-event-detail" colspan="6">
<pre>$event_detail</pre>
</td>
</tr>
"""<line_sep>JS_TEMPLATE="""
var clickableCells = document.getElementsByClassName("clickable-cell");
for (var cell of clickableCells) {
cell.addEventListener("click", function(e) {
var currentRow = e.srcElement.closest("tr");
var toggleRow = currentRow.nextElementSibling;
var toggleArrow = currentRow.getElementsByClassName("toggle-icon")[0];
toggleRow.classList.toggle("hide");
toggleArrow.classList.toggle("open");
});
}
"""<line_sep>CSS_TEMPLATE="""
.table-widget .clickable-cell {
padding-left: 0.1em;
cursor: pointer;
}
.toggle-icon {
display: inline-block;
width: 0;
height: 0;
border-top: 5px solid transparent;
border-left: 8px solid #545b64;
border-bottom: 5px solid transparent;
margin-right: 5px;
}
.toggle-icon.open {
-webkit-transform: rotate(90deg);
-ms-transform: rotate(90deg);
transform: rotate(90deg);
}
"""<class_stmt>EventsTableWidget(object)<block_start><def_stmt>__init__ self events<block_start>self.eventIdToLambdaArnMap={}<line_sep>self.previous_step_name=""<line_sep>self.previous_job_name=""<line_sep>start_datetime=<none><if_stmt>len(events)<g>0<block_start>start_datetime=events[0].get("timestamp")<block_end>table_rows=[Template(TABLE_ROW_TEMPLATE).substitute(event_id=str(event.get("id")) event_type=event.get("type") step=self._get_step(event) resource=self._get_resource(event <true>) resource_url=self._get_resource_url(event) elapsed_time=get_elapsed_ms(start_datetime event.get("timestamp")) timestamp=format_time(event.get("timestamp")) event_detail=self._format_event_detail(event))<for>event events]<line_sep>self.template=Template(TABLE_TEMPLATE.format(table_rows='\n'.join(table_rows)))<block_end><def_stmt>show self<block_start><return>self.template.safe_substitute({'aws_table_css':AWS_TABLE_CSS 'custom_css':CSS_TEMPLATE 'js':JS_TEMPLATE})<block_end><def_stmt>_get_step_detail self event<block_start>switcher={"ChoiceStateEntered":event.get("stateEnteredEventDetails" {}) "ChoiceStateExited":event.get("stateExitedEventDetails" {}) "FailStateEntered":event.get("stateEnteredEventDetails" {}) "MapStateEntered":event.get("stateEnteredEventDetails" {}) "MapStateExited":event.get("stateExitedEventDetails" {}) "ParallelStateEntered":event.get("stateEnteredEventDetails" {}) "ParallelStateExited":event.get("stateExitedEventDetails" {}) "PassStateEntered":event.get("stateEnteredEventDetails" {}) "PassStateExited":event.get("stateExitedEventDetails" {}) "SucceedStateEntered":event.get("stateEnteredEventDetails" {}) "SucceedStateExited":event.get("stateExitedEventDetails" {}) "TaskStateEntered":event.get("stateEnteredEventDetails" {}) "TaskStateExited":event.get("stateExitedEventDetails" {}) "WaitStateEntered":event.get("stateEnteredEventDetails" {}) "WaitStateExited":event.get("stateExitedEventDetails" {}) "MapIterationAborted":event.get("mapIterationAbortedEventDetails" 
{}) "MapIterationFailed":event.get("mapIterationFailedEventDetails" {}) "MapIterationStarted":event.get("mapIterationStartedEventDetails" {}) "MapIterationSucceeded":event.get("mapIterationSucceededEventDetails" {}) "ExecutionFailed":event.get("executionFailedEventDetails" {}) "ExecutionStarted":event.get("executionStartedEventDetails" {}) "ExecutionSucceeded":event.get("executionSucceededEventDetails" {}) "ExecutionAborted":event.get("executionAbortedEventDetails" {}) "ExecutionTimedOut":event.get("executionTimedOutEventDetails" {}) "LambdaFunctionScheduled":event.get("lambdaFunctionScheduledEventDetails" {}) "LambdaFunctionScheduleFailed":event.get("lambdaFunctionScheduleFailedEventDetails" {}) "LambdaFunctionStartFailed":event.get("lambdaFunctionStartFailedEventDetails" {}) "LambdaFunctionSucceeded":event.get("lambdaFunctionSucceededEventDetails" {}) "LambdaFunctionFailed":event.get("lambdaFunctionFailedEventDetails" {}) "LambdaFunctionTimedOut":event.get("lambdaFunctionTimedOutEventDetails" {}) "TaskStarted":event.get("taskStartedEventDetails" {}) "TaskSubmitted":event.get("taskSubmittedEventDetails" {}) "TaskScheduled":event.get("taskScheduledEventDetails" {}) "TaskSucceeded":event.get("taskSucceededEventDetails" {}) "TaskFailed":event.get("taskFailedEventDetails" {})}<line_sep><return>switcher.get(event.get("type") {})<block_end># Tries to get step name, if it can not find, return the previous step's name
<def_stmt>_get_step self event<block_start><if_stmt>event.get("type")<in>("ExecutionFailed" "ExecutionStarted" "ExecutionSucceeded" "ExecutionAborted" "ExecutionTimedOut")<block_start>step_name=""<line_sep>self.previous_step_name=""<block_end><else_stmt><block_start>step_name=self._get_step_detail(event).get("name")<if_stmt><not>step_name<block_start>step_name=self.previous_step_name<block_end><else_stmt><block_start>self.previous_step_name=step_name<block_end><block_end><return>step_name<block_end><def_stmt>_get_resource self event mapped_value=<false># check that it is a lambda, sagemaker or just a regular execution
<block_start><if_stmt>self._is_correct_lambda_arn_sequence(self._get_lambda_arn(event))<block_start><return>"Lambda"<block_end># check if it has a resource
<elif_stmt>self._has_resource(event)# check if it is a sagemaker resource
<block_start>step_details=self._get_step_detail(event)<if_stmt>step_details.get("resourceType")<eq>"sagemaker"<block_start>sagemaker_resource=step_details.get("resource")<if_stmt>mapped_value<block_start><return>SAGEMAKER_JOB_NAME_MAP[sagemaker_resource]<block_end><return>sagemaker_resource<block_end><return>"Step Functions execution"<block_end># if not a resource, return -
<return>"-"<block_end><def_stmt>_get_resource_url self event<block_start>resource=self._get_resource(event)<if_stmt>"createTrainingJob"<in>resource<block_start>job_name=self._get_sagemaker_resource_job_name(event "TrainingJobName")<line_sep><return>'href="{}"'.format(sagemaker_console_link('jobs' job_name))<block_end><if_stmt>"createTransformJob"<in>resource<block_start>job_name=self._get_sagemaker_resource_job_name(event "TransformJobName")<line_sep><return>'href="{}"'.format(sagemaker_console_link('transformJobs' job_name))<block_end><if_stmt>"createModel"<in>resource<block_start>job_name=self._get_sagemaker_resource_job_name(event "ModelName")<line_sep><return>'href="{}"'.format(sagemaker_console_link('models' job_name))<block_end><if_stmt>"createEndpointConfig"<in>resource<block_start>job_name=self._get_sagemaker_resource_job_name(event "EndpointConfigName")<line_sep><return>'href="{}"'.format(sagemaker_console_link('endpointConfig' job_name))<block_end><if_stmt>"createEndpoint"<in>resource<block_start>job_name=self._get_sagemaker_resource_job_name(event "EndpointName")<line_sep><return>'href="{}"'.format(sagemaker_console_link('endpoints' job_name))<block_end>self.previous_job_name=""<line_sep><return>"class='disabled'"<block_end><def_stmt>_get_sagemaker_resource_job_name self event job_name_key<block_start>step_details=self._get_step_detail(event)<line_sep>job_name=literal_eval(step_details.get("parameters" "{}")).get(job_name_key "")<if_stmt>job_name<eq>""<block_start>job_name=self.previous_job_name<block_end><else_stmt><block_start>self.previous_job_name=job_name<block_end><return>job_name<block_end><def_stmt>_has_resource self event<block_start><return>event.get("type")<in>("TaskSucceeded" "TaskSubmitted" "TaskScheduled" "TaskStarted")<block_end><def_stmt>_get_lambda_arn self 
event<block_start>resource_arn="-"<line_sep>event_type=event.get("type")<if_stmt>event_type<eq>"LambdaFunctionScheduled"<block_start>resource_arn=event.get("lambdaFunctionScheduledEventDetails").get("resource")<block_end><elif_stmt>event_type<in>{"LambdaFunctionScheduleFailed" "LambdaFunctionFailed" "LambdaFunctionStartFailed" "LambdaFunctionStarted" "LambdaFunctionSucceeded" "LambdaFunctionTimedOut"}<block_start>resource_arn=self.eventIdToLambdaArnMap[event.get("previousEventId")]<block_end>self.eventIdToLambdaArnMap[event.get("id")]=resource_arn<line_sep><return>resource_arn<block_end><def_stmt>_is_correct_lambda_arn_sequence self lambda_arn<block_start>lambda_arn_segments=lambda_arn.split(":")<line_sep><return>(len(lambda_arn_segments)<eq>LAMBDA_ARN_SEGMENT_LENGTH<and>lambda_arn_segments[2]<eq>LAMBDA_SERVICE_NAME<and>lambda_arn_segments[5]<eq>LAMBDA_FUNCTION_RESOURCE_TYPE)<block_end><def_stmt>_format_event_detail self event<block_start>event_details=self._get_step_detail(event)<line_sep>self._unpack_to_proper_dict(event_details)<line_sep><return>json.dumps(event_details indent=4)<block_end><def_stmt>_unpack_to_proper_dict self dictionary<block_start><for_stmt>k,v dictionary.items()<block_start><if_stmt>isinstance(v dict)<block_start>self._unpack_to_proper_dict(v)<block_end><else_stmt><block_start>dictionary[k]=self._load_json(v)<block_end><block_end><block_end><def_stmt>_load_json self value<block_start><try_stmt><block_start><return>json.loads(value)<block_end><except_stmt>ValueError<as>e<block_start><return>value<block_end><block_end><block_end> |
<import_stmt>logging<import_stmt>abc<import_stmt>datetime<import_stmt>traceback<import_stmt>urllib.parse<import_stmt>sqlalchemy.exc<import_stmt>common.database<as>db<line_sep># import RawArchiver.TimedTriggers.RawRollingRewalkTrigger
# def exposed_raw_rewalk_old():
# '''
# Trigger the rewalking system on the rawarchiver
# '''
# run = RawArchiver.TimedTriggers.RawRollingRewalkTrigger.RollingRawRewalkTrigger()
# run.go()
|
<import_from_future_stmt> print_function absolute_import division<import_stmt>KratosMultiphysics<import_stmt>KratosMultiphysics.ParticleMechanicsApplication<as>KratosParticle<import_stmt>KratosMultiphysics.KratosUnittest<as>KratosUnittest<class_stmt>TestGenerateMPMParticleCondition(KratosUnittest.TestCase)<block_start><def_stmt>_generate_particle_condition_and_check self current_model dimension geometry_element num_particle expected_num_particle<block_start>KratosMultiphysics.Logger.GetDefaultOutput().SetSeverity(KratosMultiphysics.Logger.Severity.WARNING)<line_sep># Initialize model part
## Material model part definition
material_point_model_part=current_model.CreateModelPart("dummy_name")<line_sep>material_point_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE dimension)<line_sep>## Initial material model part definition
initial_mesh_model_part=current_model.CreateModelPart("Initial_dummy_name")<line_sep>initial_mesh_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE dimension)<line_sep>## Grid model part definition
grid_model_part=current_model.CreateModelPart("Background_Grid")<line_sep>grid_model_part.ProcessInfo.SetValue(KratosMultiphysics.DOMAIN_SIZE dimension)<line_sep># Create element and nodes for background grids
sub_background=grid_model_part.CreateSubModelPart("test_background")<line_sep>self._create_nodes(sub_background dimension geometry_element)<line_sep>self._create_elements(sub_background dimension geometry_element)<line_sep>self._create_condition(sub_background dimension geometry_element)<for_stmt>condition grid_model_part.Conditions<block_start>condition.SetValue(KratosParticle.PARTICLES_PER_CONDITION num_particle)<line_sep>condition.SetValue(KratosParticle.MPC_BOUNDARY_CONDITION_TYPE 1)<block_end># Create element and nodes for initial meshes
sub_mp=initial_mesh_model_part.CreateSubModelPart("test")<line_sep>sub_mp.GetProperties()[1].SetValue(KratosParticle.PARTICLES_PER_ELEMENT 4)<line_sep># Generate MP Conditions
KratosParticle.GenerateMaterialPointCondition(grid_model_part initial_mesh_model_part material_point_model_part)<line_sep># Check total number of element
particle_counter=material_point_model_part.NumberOfConditions()<line_sep>self.assertEqual(expected_num_particle particle_counter)<block_end><def_stmt>_create_nodes self initial_mp dimension geometry_element<block_start>initial_mp.CreateNewNode(1 -0.5 -0.5 0.0)<line_sep>initial_mp.CreateNewNode(2 0.5 -0.5 0.0)<line_sep>initial_mp.CreateNewNode(3 0.5 0.5 0.0)<line_sep>initial_mp.CreateNewNode(4 -0.5 0.5 0.0)<if_stmt>(dimension<eq>3)<block_start>initial_mp.CreateNewNode(5 -0.5 -0.5 1.0)<line_sep>initial_mp.CreateNewNode(6 0.5 -0.5 1.0)<line_sep>initial_mp.CreateNewNode(7 0.5 0.5 1.0)<line_sep>initial_mp.CreateNewNode(8 -0.5 0.5 1.0)<block_end><block_end><def_stmt>_create_elements self initial_mp dimension geometry_element<block_start><if_stmt>(dimension<eq>2)<block_start>initial_mp.CreateNewElement("UpdatedLagrangian2D4N" 1 [1 2 3 4] initial_mp.GetProperties()[1])<block_end><else_stmt><block_start>initial_mp.CreateNewElement("UpdatedLagrangian3D8N" 1 [1 2 3 4 5 6 7 8] initial_mp.GetProperties()[1])<block_end>KratosMultiphysics.VariableUtils().SetFlag(KratosMultiphysics.ACTIVE <true> initial_mp.Elements)<block_end><def_stmt>_create_condition self initial_mp dimension geometry_element<block_start><if_stmt>(dimension<eq>2)<block_start><if_stmt>(geometry_element<eq>"Point")<block_start>initial_mp.CreateNewCondition("PointCondition2D1N" 1 [1] initial_mp.GetProperties()[1])<block_end><elif_stmt>(geometry_element<eq>"Line")<block_start>initial_mp.CreateNewCondition("LineCondition2D2N" 1 [1 2] initial_mp.GetProperties()[1])<block_end><block_end><else_stmt><block_start><if_stmt>(geometry_element<eq>"Point")<block_start>initial_mp.CreateNewCondition("PointCondition3D1N" 1 [1] initial_mp.GetProperties()[1])<block_end><elif_stmt>(geometry_element<eq>"Line")<block_start>initial_mp.CreateNewCondition("LineCondition3D2N" 1 [1 2] initial_mp.GetProperties()[1])<block_end><elif_stmt>(geometry_element<eq>"Triangle")<block_start>initial_mp.CreateNewCondition("SurfaceCondition3D3N" 1 [1 6 
8] initial_mp.GetProperties()[1])<block_end><elif_stmt>(geometry_element<eq>"Quadrilateral")<block_start>initial_mp.CreateNewCondition("SurfaceCondition3D4N" 1 [2 4 8 6] initial_mp.GetProperties()[1])<block_end><block_end>KratosMultiphysics.VariableUtils().SetFlag(KratosMultiphysics.BOUNDARY <true> initial_mp.Conditions)<block_end>## Point2D - automatic, 1, and default
<def_stmt>test_GenerateMPMParticleConditionPoint2DAutomatic self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Point" num_particle=0 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionPoint2D1P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Point" num_particle=1 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionPoint2DDefault self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Point" num_particle=50 expected_num_particle=1)<block_end>## Line2D - automatic and 2, 3, 4, 5, and default
<def_stmt>test_GenerateMPMParticleConditionLine2DAutomatic self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=0 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionLine2D1P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=1 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionLine2D2P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=2 expected_num_particle=2)<block_end><def_stmt>test_GenerateMPMParticleConditionLine2D3P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=3 expected_num_particle=3)<block_end><def_stmt>test_GenerateMPMParticleConditionLine2D4P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=4 expected_num_particle=4)<block_end><def_stmt>test_GenerateMPMParticleConditionLine2D5P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=5 expected_num_particle=5)<block_end><def_stmt>test_GenerateMPMParticleConditionLine2DDefault self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=2 geometry_element="Line" num_particle=50 expected_num_particle=1)<block_end>## Point3D - automatic, 1, and default
<def_stmt>test_GenerateMPMParticleConditionPoint3DAutomatic self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Point" num_particle=0 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionPoint3D1P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Point" num_particle=1 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionPoint3DDefault self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Point" num_particle=50 expected_num_particle=1)<block_end>## Line3D - automatic and 2, 3, 4, 5, and default
<def_stmt>test_GenerateMPMParticleConditionLine3DAutomatic self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=0 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionLine3D1P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=1 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionLine3D2P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=2 expected_num_particle=2)<block_end><def_stmt>test_GenerateMPMParticleConditionLine3D3P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=3 expected_num_particle=3)<block_end><def_stmt>test_GenerateMPMParticleConditionLine3D4P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=4 expected_num_particle=4)<block_end><def_stmt>test_GenerateMPMParticleConditionLine3D5P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=5 expected_num_particle=5)<block_end><def_stmt>test_GenerateMPMParticleConditionLine3DDefault self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Line" num_particle=50 expected_num_particle=1)<block_end>## Triangle3D - automatic, 1, 3, 6, 12, and default
<def_stmt>test_GenerateMPMParticleConditionTriangle3DAutomatic self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Triangle" num_particle=0 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionTriangle3D1P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Triangle" num_particle=1 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionTriangle3D3P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Triangle" num_particle=3 expected_num_particle=3)<block_end><def_stmt>test_GenerateMPMParticleConditionTriangle3D6P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Triangle" num_particle=6 expected_num_particle=6)<block_end><def_stmt>test_GenerateMPMParticleConditionTriangle3D12P self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Triangle" num_particle=12 expected_num_particle=12)<block_end><def_stmt>test_GenerateMPMParticleConditionTriangle3DDefault self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Triangle" num_particle=50 expected_num_particle=1)<block_end>## Quadrilateral3D - automatic, 1 ,4, 9, 16 and default
<def_stmt>test_GenerateMPMParticleConditionQuadrilateral3DAutomatic self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Quadrilateral" num_particle=0 expected_num_particle=1)<block_end><def_stmt>test_GenerateMPMParticleConditionQuadrilateral3D4N self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Quadrilateral" num_particle=4 expected_num_particle=4)<block_end><def_stmt>test_GenerateMPMParticleConditionQuadrilateral3D9N self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Quadrilateral" num_particle=9 expected_num_particle=9)<block_end><def_stmt>test_GenerateMPMParticleConditionQuadrilateral3D16N self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Quadrilateral" num_particle=16 expected_num_particle=16)<block_end><def_stmt>test_GenerateMPMParticleConditionQuadrilateral3DDefault self<block_start>current_model=KratosMultiphysics.Model()<line_sep>self._generate_particle_condition_and_check(current_model dimension=3 geometry_element="Quadrilateral" num_particle=50 expected_num_particle=1)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>KratosUnittest.main()<block_end> |
# -*- coding: utf-8 -*-
"""
overholt.users
~~~~~~~~~~~~~~
overholt users package
"""<import_from_stmt>..core Service<import_from_stmt>.models User<class_stmt>UsersService(Service)<block_start>__model__=User<block_end> |
<import_stmt>plotly.graph_objs<as>go<import_from_stmt>plotly.offline plot<def_stmt>time_series_cpu_per_task_plot df_resources resource_type label<block_start><if_stmt>resource_type<eq>"psutil_process_cpu_percent"<block_start>yaxis=dict(title="CPU utilization")<block_end><else_stmt><block_start>yaxis=dict(title='Accumulated CPU user time (seconds)')<block_end>fig=go.Figure(data=[go.Scatter(x=df_resources['timestamp'] y=df_resources[resource_type])] layout=go.Layout(xaxis=dict(tickformat='%m-%d\n%H:%M:%S' autorange=<true> title='Time') yaxis=yaxis title=label))<line_sep><return>plot(fig show_link=<false> output_type="div" include_plotlyjs=<false>)<block_end><def_stmt>time_series_memory_per_task_plot df_resources resource_type label<block_start><if_stmt>resource_type<eq>"psutil_process_memory_percent"<block_start>yaxis=dict(title="Memory utilization")<line_sep>data=[go.Scatter(x=df_resources['timestamp'] y=df_resources[resource_type])]<block_end><else_stmt><block_start>yaxis=dict(title='Memory usage (GB)')<line_sep>data=[go.Scatter(x=df_resources['timestamp'] y=[num/1000000000<for>num df_resources[resource_type].astype(float)])]<block_end>fig=go.Figure(data=data layout=go.Layout(xaxis=dict(tickformat='%m-%d\n%H:%M:%S' autorange=<true> title='Time') yaxis=yaxis title=label))<line_sep><return>plot(fig show_link=<false> output_type="div" include_plotlyjs=<false>)<block_end> |
#-*- coding:utf-8 -*-
<import_stmt>unittest<import_stmt>sys<import_stmt>simple_db_migrate.core<import_from_stmt>mock patch Mock MagicMock call sentinel<import_from_stmt>simple_db_migrate.oracle Oracle<import_from_stmt>tests BaseTest<class_stmt>OracleTest(BaseTest)<block_start><def_stmt>setUp self<block_start>super(OracleTest self).setUp()<line_sep>self.execute_returns={}<line_sep>self.fetchone_returns={'select count(*) from db_version':[0]}<line_sep>self.close_returns={}<line_sep>self.last_execute_command=''<line_sep>self.last_execute_commands=[]<line_sep>self.config_dict={'database_script_encoding':'utf8' 'database_encoding':'American_America.UTF8' 'database_host':'somehost' 'database_user':'root' 'database_password':'<PASSWORD>' 'database_name':'SID' 'database_version_table':'db_version' 'drop_db_first':<false>}<line_sep>self.config_mock=MagicMock(spec_set=dict wraps=self.config_dict)<line_sep>self.cursor_mock=Mock(**{"execute":Mock(side_effect=self.execute_side_effect) "close":Mock(side_effect=self.close_side_effect) "fetchone":Mock(side_effect=self.fetchone_side_effect) "setinputsizes":Mock(return_value=<none>) "rowcount":0})<line_sep>self.db_mock=Mock(**{"cursor.return_value":self.cursor_mock})<line_sep>self.db_driver_mock=Mock(**{"connect.return_value":self.db_mock "CLOB":"CLOB"})<line_sep>self.stdin_mock=Mock(**{"readline.return_value":"dba_user"})<line_sep>self.getpass_mock=Mock(return_value="dba_password")<block_end>@patch.dict('sys.modules' cx_Oracle=MagicMock())<def_stmt>test_it_should_use_cx_Oracle_as_driver self<block_start>sys.modules['cx_Oracle'].connect.return_value=self.db_mock<line_sep>Oracle(self.config_mock)<line_sep>self.assertNotEqual(0 sys.modules['cx_Oracle'].connect.call_count)<block_end>@patch.dict('sys.modules' cx_Oracle=MagicMock())<def_stmt>test_it_should_use_default_port 
self<block_start>sys.modules['cx_Oracle'].connect.return_value=self.db_mock<line_sep>sys.modules['cx_Oracle'].makedsn.side_effect=self.makedsn_side_effect<line_sep>Oracle(self.config_mock)<line_sep>self.assertEqual(call(dsn="(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST=somehost)(PORT=1521)))(CONNECT_DATA=(SID=SID)))" password='<PASSWORD>' user='root') sys.modules['cx_Oracle'].connect.call_args)<block_end>@patch.dict('sys.modules' cx_Oracle=MagicMock())<def_stmt>test_it_should_use_given_configuration self<block_start>sys.modules['cx_Oracle'].connect.return_value=self.db_mock<line_sep>sys.modules['cx_Oracle'].makedsn.side_effect=self.makedsn_side_effect<line_sep>self.config_dict['database_port']=9876<line_sep>Oracle(self.config_mock)<line_sep>self.assertEqual(call(dsn="(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST=somehost)(PORT=9876)))(CONNECT_DATA=(SID=SID)))" password='<PASSWORD>' user='root') sys.modules['cx_Oracle'].connect.call_args)<block_end>@patch.dict('sys.modules' cx_Oracle=MagicMock())<def_stmt>test_it_should_use_database_name_as_dsn_when_database_host_is_not_set self<block_start>sys.modules['cx_Oracle'].connect.return_value=self.db_mock<line_sep>self.config_dict['database_host']=<none><line_sep>Oracle(self.config_mock)<line_sep>self.assertEqual(call(dsn='SID' password='<PASSWORD>' user='root') sys.modules['cx_Oracle'].connect.call_args)<block_end><def_stmt>test_it_should_stop_process_when_an_error_occur_during_connect_database self<block_start>self.db_driver_mock.connect.side_effect=Exception("error when connecting")<try_stmt><block_start>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.fail("it should not get here")<block_end><except_stmt>Exception<as>e<block_start>self.assertEqual("could not connect to database: error when connecting" str(e))<block_end>self.assertEqual(0 self.db_mock.commit.call_count)<line_sep>self.assertEqual(0 self.db_mock.close.call_count)<line_sep>self.assertEqual(0 
self.cursor_mock.execute.call_count)<line_sep>self.assertEqual(0 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_create_database_and_version_table_on_init_if_not_exists self<block_start>self.first_return=Exception("could not connect to database: ORA-01017 invalid user/password")<def_stmt>connect_side_effect *args **kwargs<block_start>ret=sentinel.DEFAULT<if_stmt>(kwargs['user']<eq>'root')<and>self.first_return<block_start>ret=self.first_return<line_sep>self.first_return=<none><line_sep><raise>ret<block_end><return>ret<block_end>self.db_driver_mock.connect.side_effect=connect_side_effect<line_sep>self.execute_returns["select version from db_version"]=Exception("Table doesn't exist")<line_sep>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual(1 self.db_mock.rollback.call_count)<line_sep>self.assertEqual(8 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(4 self.db_mock.commit.call_count)<line_sep>self.assertEqual(7 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('create user root identified by migration_test') call('grant connect, resource to root') call('grant create public synonym to root') call('grant drop public synonym to root') call('select version from db_version') call("create table db_version ( id number(11) not null, version varchar2(20) default '0' NOT NULL, label varchar2(255), name varchar2(255), sql_up clob, sql_down clob, CONSTRAINT db_version_pk PRIMARY KEY (id) ENABLE)") call('drop sequence db_version_seq') call('create sequence db_version_seq start with 1 increment by 1 nomaxvalue') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(7 
self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_ignore_errors_while_dropping_the_sequence_duringthe_create_database_process self<block_start>self.first_return=Exception("could not connect to database: ORA-01017 invalid user/password")<def_stmt>connect_side_effect *args **kwargs<block_start>ret=sentinel.DEFAULT<if_stmt>(kwargs['user']<eq>'root')<and>self.first_return<block_start>ret=self.first_return<line_sep>self.first_return=<none><line_sep><raise>ret<block_end><return>ret<block_end>self.db_driver_mock.connect.side_effect=connect_side_effect<line_sep>self.execute_returns["select version from db_version"]=Exception("Table doesn't exist")<line_sep>self.execute_returns["drop sequence db_version_seq"]=Exception("Sequence doesn't exist")<line_sep>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual(2 self.db_mock.rollback.call_count)<line_sep>self.assertEqual(8 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(3 self.db_mock.commit.call_count)<line_sep>self.assertEqual(7 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('create user root identified by migration_test') call('grant connect, resource to root') call('grant create public synonym to root') call('grant drop public synonym to root') call('select version from db_version') call("create table db_version ( id number(11) not null, version varchar2(20) default '0' NOT NULL, label varchar2(255), name varchar2(255), sql_up clob, sql_down clob, CONSTRAINT db_version_pk PRIMARY KEY (id) ENABLE)") call('drop sequence db_version_seq') call('create sequence db_version_seq start with 1 increment by 1 nomaxvalue') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(7 
self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_create_version_table_on_init_if_not_exists self<block_start>self.execute_returns["select version from db_version"]=Exception("Table doesn't exist")<line_sep>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual(7 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(4 self.db_mock.commit.call_count)<line_sep>self.assertEqual(7 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call("create table db_version ( id number(11) not null, version varchar2(20) default '0' NOT NULL, label varchar2(255), name varchar2(255), sql_up clob, sql_down clob, CONSTRAINT db_version_pk PRIMARY KEY (id) ENABLE)") call('drop sequence db_version_seq') call('create sequence db_version_seq start with 1 increment by 1 nomaxvalue') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(6 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_drop_database_on_init_if_its_asked self<block_start>select_elements_to_drop_sql="""\
SELECT 'DROP PUBLIC SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = 'PUBLIC' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = '%s' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||';' FROM USER_OBJECTS \
WHERE OBJECT_TYPE <> 'TABLE' AND OBJECT_TYPE <> 'INDEX' AND \
OBJECT_TYPE<>'TRIGGER' AND OBJECT_TYPE<>'LOB' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||' CASCADE CONSTRAINTS;' FROM USER_OBJECTS \
WHERE OBJECT_TYPE = 'TABLE' AND OBJECT_NAME NOT LIKE 'BIN$%%'"""%('ROOT' 'ROOT' 'ROOT')<line_sep>self.config_dict["drop_db_first"]=<true><line_sep>self.fetchone_returns[select_elements_to_drop_sql]=[("DELETE TABLE DB_VERSION CASCADE CONSTRAINTS;" )]<line_sep>self.execute_returns["select version from db_version"]=Exception("Table doesn't exist")<line_sep>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual(9 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(5 self.db_mock.commit.call_count)<line_sep>self.assertEqual(9 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call(select_elements_to_drop_sql) call('DELETE TABLE DB_VERSION CASCADE CONSTRAINTS') call('select version from db_version') call("create table db_version ( id number(11) not null, version varchar2(20) default '0' NOT NULL, label varchar2(255), name varchar2(255), sql_up clob, sql_down clob, CONSTRAINT db_version_pk PRIMARY KEY (id) ENABLE)") call('drop sequence db_version_seq') call('create sequence db_version_seq start with 1 increment by 1 nomaxvalue') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(8 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_create_user_when_it_does_not_exists_during_drop_database_selecting_elements_to_drop self<block_start>select_elements_to_drop_sql="""\
SELECT 'DROP PUBLIC SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = 'PUBLIC' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = '%s' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||';' FROM USER_OBJECTS \
WHERE OBJECT_TYPE <> 'TABLE' AND OBJECT_TYPE <> 'INDEX' AND \
OBJECT_TYPE<>'TRIGGER' AND OBJECT_TYPE<>'LOB' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||' CASCADE CONSTRAINTS;' FROM USER_OBJECTS \
WHERE OBJECT_TYPE = 'TABLE' AND OBJECT_NAME NOT LIKE 'BIN$%%'"""%('ROOT' 'ROOT' 'ROOT')<line_sep>self.config_dict["drop_db_first"]=<true><line_sep>self.execute_returns[select_elements_to_drop_sql]=Exception("could not connect to database: ORA-01017 invalid user/password")<line_sep>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual(6 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(6 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call(select_elements_to_drop_sql) call('create user root identified by migration_test') call('grant connect, resource to root') call('grant create public synonym to root') call('grant drop public synonym to root') call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(5 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_stop_process_when_an_error_occur_during_create_user self<block_start>select_elements_to_drop_sql="""\
SELECT 'DROP PUBLIC SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = 'PUBLIC' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = '%s' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||';' FROM USER_OBJECTS \
WHERE OBJECT_TYPE <> 'TABLE' AND OBJECT_TYPE <> 'INDEX' AND \
OBJECT_TYPE<>'TRIGGER' AND OBJECT_TYPE<>'LOB' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||' CASCADE CONSTRAINTS;' FROM USER_OBJECTS \
WHERE OBJECT_TYPE = 'TABLE' AND OBJECT_NAME NOT LIKE 'BIN$%%'"""%('ROOT' 'ROOT' 'ROOT')<line_sep>self.config_dict["drop_db_first"]=<true><line_sep>self.execute_returns[select_elements_to_drop_sql]=Exception("could not connect to database: ORA-01017 invalid user/password")<line_sep>self.execute_returns['grant create public synonym to root']=Exception("error when granting")<try_stmt><block_start>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.fail("it should not get here")<block_end><except_stmt>Exception<as>e<block_start>self.assertEqual("check error: error when granting" str(e))<block_end>self.assertEqual(2 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(0 self.db_mock.commit.call_count)<line_sep>self.assertEqual(2 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call(select_elements_to_drop_sql) call('create user root identified by migration_test') call('grant connect, resource to root') call('grant create public synonym to root')]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(2 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_stop_process_when_an_error_occur_during_drop_database_selecting_elements_to_drop self<block_start>select_elements_to_drop_sql="""\
SELECT 'DROP PUBLIC SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = 'PUBLIC' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = '%s' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||';' FROM USER_OBJECTS \
WHERE OBJECT_TYPE <> 'TABLE' AND OBJECT_TYPE <> 'INDEX' AND \
OBJECT_TYPE<>'TRIGGER' AND OBJECT_TYPE<>'LOB' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||' CASCADE CONSTRAINTS;' FROM USER_OBJECTS \
WHERE OBJECT_TYPE = 'TABLE' AND OBJECT_NAME NOT LIKE 'BIN$%%'"""%('ROOT' 'ROOT' 'ROOT')<line_sep>self.config_dict["drop_db_first"]=<true><line_sep>self.execute_returns[select_elements_to_drop_sql]=Exception("error when dropping")<try_stmt><block_start>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.fail("it should not get here")<block_end><except_stmt>Exception<as>e<block_start>self.assertEqual("error when dropping" str(e))<block_end>self.assertEqual(0 self.db_mock.commit.call_count)<line_sep>self.assertEqual(1 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call(select_elements_to_drop_sql)]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(1 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_stop_process_when_an_error_occur_during_drop_elements_from_database_and_user_asked_to_stop self<block_start>select_elements_to_drop_sql="""\
SELECT 'DROP PUBLIC SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = 'PUBLIC' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = '%s' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||';' FROM USER_OBJECTS \
WHERE OBJECT_TYPE <> 'TABLE' AND OBJECT_TYPE <> 'INDEX' AND \
OBJECT_TYPE<>'TRIGGER' AND OBJECT_TYPE<>'LOB' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||' CASCADE CONSTRAINTS;' FROM USER_OBJECTS \
WHERE OBJECT_TYPE = 'TABLE' AND OBJECT_NAME NOT LIKE 'BIN$%%'"""%('ROOT' 'ROOT' 'ROOT')<line_sep>self.config_dict["drop_db_first"]=<true><line_sep>self.fetchone_returns[select_elements_to_drop_sql]=[("DELETE TABLE DB_VERSION CASCADE CONSTRAINTS;" ) ("DELETE TABLE AUX CASCADE CONSTRAINTS;" )]<line_sep>self.execute_returns["DELETE TABLE DB_VERSION CASCADE CONSTRAINTS"]=Exception("error dropping table")<line_sep>self.stdin_mock.readline.return_value="n"<try_stmt><block_start>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.fail("it should not get here")<block_end><except_stmt>Exception<as>e<block_start>self.assertEqual("can't drop database objects for user 'root'" str(e))<block_end>self.assertEqual(1 self.db_mock.rollback.call_count)<line_sep>self.assertEqual(1 self.db_mock.commit.call_count)<line_sep>self.assertEqual(3 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call(select_elements_to_drop_sql) call('DELETE TABLE DB_VERSION CASCADE CONSTRAINTS') call('DELETE TABLE AUX CASCADE CONSTRAINTS')]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(3 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_not_stop_process_when_an_error_occur_during_drop_elements_from_database_and_user_asked_to_continue self<block_start>select_elements_to_drop_sql="""\
SELECT 'DROP PUBLIC SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = 'PUBLIC' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP SYNONYM ' || SYNONYM_NAME ||';' FROM ALL_SYNONYMS \
WHERE OWNER = '%s' AND TABLE_OWNER = '%s' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||';' FROM USER_OBJECTS \
WHERE OBJECT_TYPE <> 'TABLE' AND OBJECT_TYPE <> 'INDEX' AND \
OBJECT_TYPE<>'TRIGGER' AND OBJECT_TYPE<>'LOB' \
UNION ALL \
SELECT 'DROP ' || OBJECT_TYPE || ' ' || OBJECT_NAME ||' CASCADE CONSTRAINTS;' FROM USER_OBJECTS \
WHERE OBJECT_TYPE = 'TABLE' AND OBJECT_NAME NOT LIKE 'BIN$%%'"""%('ROOT' 'ROOT' 'ROOT')<line_sep>self.config_dict["drop_db_first"]=<true><line_sep>self.fetchone_returns[select_elements_to_drop_sql]=[("DELETE TABLE DB_VERSION CASCADE CONSTRAINTS;" ) ("DELETE TABLE AUX CASCADE CONSTRAINTS;" )]<line_sep>self.execute_returns["DELETE TABLE DB_VERSION CASCADE CONSTRAINTS"]=Exception("error dropping table")<line_sep>self.stdin_mock.readline.return_value="y"<line_sep>Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual(1 self.db_mock.rollback.call_count)<line_sep>self.assertEqual(3 self.db_mock.commit.call_count)<line_sep>self.assertEqual(7 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call(select_elements_to_drop_sql) call('DELETE TABLE DB_VERSION CASCADE CONSTRAINTS') call('DELETE TABLE AUX CASCADE CONSTRAINTS') call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(6 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_execute_migration_up_and_update_schema_version self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("create table spam();" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;")<line_sep>self.assertEqual(6 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(4 self.db_mock.commit.call_count)<line_sep>self.assertEqual(6 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('create table spam()') 
call('insert into db_version (id, version, label, name, sql_up, sql_down) values (db_version_seq.nextval, :version, :label, :migration_file_name, :sql_up, :sql_down)' {'label':<none> 'sql_up':'create table spam();' 'version':'20090212112104' 'sql_down':'drop table spam;' 'migration_file_name':'20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration'})]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(5 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_execute_migration_down_and_update_schema_version self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("drop table spam;" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;" <false>)<line_sep>self.assertEqual(6 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(4 self.db_mock.commit.call_count)<line_sep>self.assertEqual(6 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('drop table spam') call('delete from db_version where version = :version' {'version':'20090212112104'})]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(5 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_use_label_version_when_updating_schema_version self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("create table spam();" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;" label_version="label")<line_sep>self.assertEqual(6 
self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(4 self.db_mock.commit.call_count)<line_sep>self.assertEqual(6 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('create table spam()') call('insert into db_version (id, version, label, name, sql_up, sql_down) values (db_version_seq.nextval, :version, :label, :migration_file_name, :sql_up, :sql_down)' {'label':"label" 'sql_up':'create table spam();' 'version':'20090212112104' 'sql_down':'drop table spam;' 'migration_file_name':'20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration'})]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(5 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_enforce_sql_up_and_sql_down_type_size_when_updating_schema_version self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("create table spam();" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;" label_version="label")<line_sep>self.assertEqual([call(sql_down='CLOB' sql_up='CLOB')] self.cursor_mock.setinputsizes.mock_calls)<block_end><def_stmt>test_it_should_raise_whem_migration_sql_has_a_syntax_error self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertRaisesWithMessage(Exception "error executing migration: invalid sql syntax 'create table foo(); create table spam());'" oracle.change "create table foo(); create table spam());" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam());" "drop table spam;" 
label_version="label")<block_end><def_stmt>test_it_should_raise_whem_migration_sql_has_a_syntax_error_sql_with_codec_error self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>expected_raised_message=u"error executing migration: invalid sql syntax 'create table foo(); create table spam()); -- ônibus'"<if_stmt>(sys.version_info<l>(3 0))<block_start>expected_raised_message=expected_raised_message.encode("utf-8")<block_end>self.assertRaisesWithMessage(Exception expected_raised_message oracle.change u"create table foo(); create table spam()); -- ônibus" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table foo(); create table spam());" "drop table spam;" label_version="label")<block_end><def_stmt>test_it_should_stop_process_when_an_error_occur_during_database_change self<block_start>self.execute_returns["insert into spam"]=Exception("invalid sql")<try_stmt><block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("create table spam(); insert into spam" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;" label_version="label")<block_end><except_stmt>Exception<as>e<block_start>self.assertEqual("error executing migration: invalid sql\n\n[ERROR DETAILS] SQL command was:\ninsert into spam" str(e))<line_sep>self.assertTrue(isinstance(e simple_db_migrate.core.exceptions.MigrationException))<block_end>self.assertEqual(1 self.db_mock.rollback.call_count)<line_sep>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) 
values (db_version_seq.nextval, '0')") call('create table spam()') call('insert into spam')]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_stop_process_when_an_error_occur_during_log_schema_version self<block_start>self.execute_returns['insert into db_version (id, version, label, name, sql_up, sql_down) values (db_version_seq.nextval, :version, :label, :migration_file_name, :sql_up, :sql_down)']=Exception("invalid sql")<try_stmt><block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("create table spam();" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;" label_version="label")<block_end><except_stmt>Exception<as>e<block_start>self.assertEqual('error logging migration: invalid sql\n\n[ERROR DETAILS] SQL command was:\n20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration' str(e))<line_sep>self.assertTrue(isinstance(e simple_db_migrate.core.exceptions.MigrationException))<block_end>self.assertEqual(6 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(1 self.db_mock.rollback.call_count)<line_sep>self.assertEqual(3 self.db_mock.commit.call_count)<line_sep>self.assertEqual(6 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('create table spam()') call('insert into db_version (id, version, label, name, sql_up, sql_down) values (db_version_seq.nextval, :version, :label, :migration_file_name, :sql_up, :sql_down)' {'label':'label' 'sql_up':'create table spam();' 'version':'20090212112104' 'sql_down':'drop table spam;' 
'migration_file_name':'20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration'})]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_log_execution_when_a_function_is_given_when_updating_schema_version self<block_start>execution_log_mock=Mock()<line_sep>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>oracle.change("create table spam();" "20090212112104" "20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration" "create table spam();" "drop table spam;" execution_log=execution_log_mock)<line_sep>expected_execution_log_calls=[call('create table spam()\n-- 0 row(s) affected\n') call('migration 20090212112104_test_it_should_execute_migration_down_and_update_schema_version.migration registered\n')]<line_sep>self.assertEqual(expected_execution_log_calls execution_log_mock.mock_calls)<block_end><def_stmt>test_it_should_get_current_schema_version self<block_start>self.fetchone_returns={'select count(*) from db_version':[0] 'select version from db_version order by id desc':["0"]}<line_sep>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>self.assertEqual("0" oracle.get_current_schema_version())<line_sep>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('select version from db_version order by id desc')]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 
self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_get_all_schema_versions self<block_start>expected_versions=[]<line_sep>expected_versions.append("0")<line_sep>expected_versions.append("20090211120001")<line_sep>expected_versions.append("20090211120002")<line_sep>expected_versions.append("20090211120003")<line_sep>self.fetchone_returns["select version from db_version order by id"]=list(zip(expected_versions))<line_sep>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>schema_versions=oracle.get_all_schema_versions()<line_sep>self.assertEqual(len(expected_versions) len(schema_versions))<for_stmt>version schema_versions<block_start>self.assertTrue(version<in>expected_versions)<block_end>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('select version from db_version order by id')]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_get_all_schema_migrations self<block_start>expected_versions=[]<line_sep>expected_versions.append([1 "0" <none> <none> <none> <none>])<line_sep>expected_versions.append([2 "20090211120001" "label" "20090211120001_name" Mock(**{"read.return_value":"sql_up"}) Mock(**{"read.return_value":"sql_down"})])<line_sep>self.fetchone_returns["select id, version, label, name, sql_up, sql_down from db_version order by id"]=list(expected_versions)<line_sep>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock 
self.stdin_mock)<line_sep>schema_migrations=oracle.get_all_schema_migrations()<line_sep>self.assertEqual(len(expected_versions) len(schema_migrations))<for_stmt>index,migration enumerate(schema_migrations)<block_start>self.assertEqual(migration.id expected_versions[index][0])<line_sep>self.assertEqual(migration.version expected_versions[index][1])<line_sep>self.assertEqual(migration.label expected_versions[index][2])<line_sep>self.assertEqual(migration.file_name expected_versions[index][3])<line_sep>self.assertEqual(migration.sql_up expected_versions[index][4]<and>expected_versions[index][4].read()<or>"")<line_sep>self.assertEqual(migration.sql_down expected_versions[index][5]<and>expected_versions[index][5].read()<or>"")<block_end>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call('select id, version, label, name, sql_up, sql_down from db_version order by id')]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_parse_sql_statements self#TODO include other types of sql
<block_start>sql="create table eggs; drop table spam; ; ;\
CREATE OR REPLACE FUNCTION simple \n\
RETURN VARCHAR2 IS \n\
BEGIN \n\
RETURN 'Simple Function'; \n\
END simple; \n\
/ \n\
drop table eggs; \n\
create or replace procedure proc_db_migrate(dias_fim_mes out number) \n\
as v number; \n\
begin \n\
SELECT LAST_DAY(SYSDATE) - SYSDATE \"Days Left\" \n\
into v \n\
FROM DUAL; \n\
dias_fim_mes := v; \n\
end; \n\
\t/ \n\
create OR RePLaCe TRIGGER \"FOLDER_TR\" \n\
BEFORE INSERT ON \"FOLDER\" \n\
FOR EACH ROW WHEN \n\
(\n\
new.\"FOLDER_ID\" IS NULL \n\
)\n\
BEGIN\n\
SELECT \"FOLDER_SQ\".nextval\n\
INTO :new.\"FOLDER_ID\"\n\
FROM dual;\n\
EnD;\n\
/\n\
CREATE OR REPLACE\t PACKAGE pkg_dbm \n\
AS \n\
FUNCTION getArea (i_rad NUMBER) \n\
RETURN NUMBER;\n\
PROCEDURE p_print (i_str1 VARCHAR2 := 'hello',\n\
i_str2 VARCHAR2 := 'world', \n\
i_end VARCHAR2 := '!');\n\
END;\n\
/ \n\
CREATE OR REPLACE\n PACKAGE BODY pkg_dbm \n\
AS \n\
FUNCTION getArea (i_rad NUMBER) \n\
RETURN NUMBER \n\
IS \n\
v_pi NUMBER := 3.14; \n\
BEGIN \n\
RETURN v_pi * (i_rad ** 2); \n\
END; \n\
PROCEDURE p_print (i_str1 VARCHAR2 := 'hello', i_str2 VARCHAR2 := 'world', i_end VARCHAR2 := '!') \n\
IS \n\
BEGIN \n\
DBMS_OUTPUT.put_line (i_str1 || ',' || i_str2 || i_end); \n\
END; \n\
END; \n\
/ \n\
DECLARE\n\
counter NUMBER(10,8) := 2; \r\n\
pi NUMBER(8,7) := 3.1415926; \n\
test NUMBER(10,8) NOT NULL := 10;\n\
BEGIN \n\
counter := pi/counter; \n\
pi := pi/3; \n\
dbms_output.put_line(counter); \n\
dbms_output.put_line(pi); \n\
END; \n\
/ \n\
BEGIN \n\
dbms_output.put_line('teste de bloco anonimo'); \n\
dbms_output.put_line(select 1 from dual); \n\
END; \n\
/ "<line_sep>statements=Oracle._parse_sql_statements(sql)<line_sep>self.assertEqual(10 len(statements))<line_sep>self.assertEqual('create table eggs' statements[0])<line_sep>self.assertEqual('drop table spam' statements[1])<line_sep>self.assertEqual("CREATE OR REPLACE FUNCTION simple \n\
RETURN VARCHAR2 IS \n\
BEGIN \n\
RETURN 'Simple Function'; \n\
END simple;" statements[2])<line_sep>self.assertEqual('drop table eggs' statements[3])<line_sep>self.assertEqual('create or replace procedure proc_db_migrate(dias_fim_mes out number) \n\
as v number; \n\
begin \n\
SELECT LAST_DAY(SYSDATE) - SYSDATE \"Days Left\" \n\
into v \n\
FROM DUAL; \n\
dias_fim_mes := v; \n\
end;' statements[4])<line_sep>self.assertEqual('create OR RePLaCe TRIGGER \"FOLDER_TR\" \n\
BEFORE INSERT ON \"FOLDER\" \n\
FOR EACH ROW WHEN \n\
(\n\
new.\"FOLDER_ID\" IS NULL \n\
)\n\
BEGIN\n\
SELECT \"FOLDER_SQ\".nextval\n\
INTO :new.\"FOLDER_ID\"\n\
FROM dual;\n\
EnD;' statements[5])<line_sep>self.assertEqual("CREATE OR REPLACE\t PACKAGE pkg_dbm \n\
AS \n\
FUNCTION getArea (i_rad NUMBER) \n\
RETURN NUMBER;\n\
PROCEDURE p_print (i_str1 VARCHAR2 := 'hello',\n\
i_str2 VARCHAR2 := 'world', \n\
i_end VARCHAR2 := '!');\n\
END;" statements[6])<line_sep>self.assertEqual("CREATE OR REPLACE\n PACKAGE BODY pkg_dbm \n\
AS \n\
FUNCTION getArea (i_rad NUMBER) \n\
RETURN NUMBER \n\
IS \n\
v_pi NUMBER := 3.14; \n\
BEGIN \n\
RETURN v_pi * (i_rad ** 2); \n\
END; \n\
PROCEDURE p_print (i_str1 VARCHAR2 := 'hello', i_str2 VARCHAR2 := 'world', i_end VARCHAR2 := '!') \n\
IS \n\
BEGIN \n\
DBMS_OUTPUT.put_line (i_str1 || ',' || i_str2 || i_end); \n\
END; \n\
END;" statements[7])<line_sep>self.assertEqual("DECLARE\n\
counter NUMBER(10,8) := 2; \r\n\
pi NUMBER(8,7) := 3.1415926; \n\
test NUMBER(10,8) NOT NULL := 10;\n\
BEGIN \n\
counter := pi/counter; \n\
pi := pi/3; \n\
dbms_output.put_line(counter); \n\
dbms_output.put_line(pi); \n\
END;" statements[8])<line_sep>self.assertEqual("BEGIN \n\
dbms_output.put_line('teste de bloco anonimo'); \n\
dbms_output.put_line(select 1 from dual); \n\
END;" statements[9])<block_end><def_stmt>test_it_should_parse_sql_statements_with_html_inside self<block_start>sql=u"""
create table eggs;
INSERT INTO widget_parameter_domain (widget_parameter_id, label, value)
VALUES ((SELECT MAX(widget_parameter_id)
FROM widget_parameter), "Carros", '<div class="box-zap-geral">
<div class="box-zap box-zap-autos">
<a class="logo" target="_blank" title="ZAP" href="http://www.zap.com.br/Parceiros/g1/RedirG1.aspx?CodParceriaLink=42&URL=http://www.zap.com.br">');
drop table spam;
"""<line_sep>statements=Oracle._parse_sql_statements(sql)<line_sep>expected_sql_with_html="""INSERT INTO widget_parameter_domain (widget_parameter_id, label, value)
VALUES ((SELECT MAX(widget_parameter_id)
FROM widget_parameter), "Carros", '<div class="box-zap-geral">
<div class="box-zap box-zap-autos">
<a class="logo" target="_blank" title="ZAP" href="http://www.zap.com.br/Parceiros/g1/RedirG1.aspx?CodParceriaLink=42&URL=http://www.zap.com.br">')"""<line_sep>self.assertEqual(3 len(statements))<line_sep>self.assertEqual('create table eggs' statements[0])<line_sep>self.assertEqual(expected_sql_with_html statements[1])<line_sep>self.assertEqual('drop table spam' statements[2])<block_end><def_stmt>test_it_should_get_none_for_a_non_existent_version_in_database self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>ret=oracle.get_version_id_from_version_number('xxx')<line_sep>self.assertEqual(<none> ret)<line_sep>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call("select id from db_version where version = 'xxx' order by id desc")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_get_most_recent_version_for_a_existent_label_in_database self<block_start>self.fetchone_returns["select version from db_version where label = 'xxx' order by id desc"]=["vesion" "version2" "version3"]<line_sep>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>ret=oracle.get_version_number_from_label('xxx')<line_sep>self.assertEqual("vesion" ret)<line_sep>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') 
call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call("select version from db_version where label = 'xxx' order by id desc")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>test_it_should_get_none_for_a_non_existent_label_in_database self<block_start>oracle=Oracle(self.config_mock self.db_driver_mock self.getpass_mock self.stdin_mock)<line_sep>ret=oracle.get_version_number_from_label('xxx')<line_sep>self.assertEqual(<none> ret)<line_sep>self.assertEqual(5 self.db_driver_mock.connect.call_count)<line_sep>self.assertEqual(2 self.db_mock.commit.call_count)<line_sep>self.assertEqual(5 self.db_mock.close.call_count)<line_sep>expected_execute_calls=[call('select version from db_version') call('select count(*) from db_version') call("insert into db_version (id, version) values (db_version_seq.nextval, '0')") call("select version from db_version where label = 'xxx' order by id desc")]<line_sep>self.assertEqual(expected_execute_calls self.cursor_mock.execute.mock_calls)<line_sep>self.assertEqual(4 self.cursor_mock.close.call_count)<block_end><def_stmt>side_effect self returns default_value<block_start>commands=len(self.last_execute_commands)<if_stmt>commands<g>0<block_start>self.last_execute_command=self.last_execute_commands[commands-1]<block_end>value=result=returns.pop(self.last_execute_command default_value)<if_stmt>isinstance(result Exception)<block_start><if_stmt>commands<g>0<block_start>self.last_execute_commands.pop()<block_end><raise>result<block_end><if_stmt>isinstance(result list)<and>len(result)<g>0<and>(isinstance(result[0] tuple)<or>isinstance(result[0] list))<block_start>returns[self.last_execute_command]=result<line_sep>value=result.pop(0)<block_end><elif_stmt>isinstance(result 
list)<and>len(result)<eq>0<block_start>value=<none><block_end><if_stmt>commands<g>0<and>self.execute_returns.get(self.last_execute_command <none>)<is><none><and>self.fetchone_returns.get(self.last_execute_command <none>)<is><none><and>self.close_returns.get(self.last_execute_command <none>)<is><none><block_start>self.last_execute_commands.pop()<block_end><return>value<block_end><def_stmt>execute_side_effect self *args<block_start>self.last_execute_commands.append(args[0])<line_sep><return>self.side_effect(self.execute_returns 0)<block_end><def_stmt>fetchone_side_effect self *args<block_start><return>self.side_effect(self.fetchone_returns <none>)<block_end><def_stmt>close_side_effect self *args<block_start><return>self.side_effect(self.close_returns <none>)<block_end><def_stmt>makedsn_side_effect self host port sid<block_start><return>"(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST=%s)(PORT=%s)))(CONNECT_DATA=(SID=%s)))"%(host port sid)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end> |
# coding=utf-8
#
# Copyright 2014 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""BIG-IP® system dns module
REST URI
``http://localhost/mgmt/tm/transaction``
REST Kind
``tm:transaction*``
"""<import_from_stmt>f5.bigip.resource Collection<import_from_stmt>f5.bigip.resource Resource<class_stmt>Transactions(Collection)<block_start>"""This class is a context manager for iControl transactions.
Upon successful exit of the with statement, the transaction will be
submitted, otherwise it will be rolled back.
NOTE: This feature was added to BIGIP in version 11.0.0.
Example:
> bigip = BigIP(<args>)
> tx = bigip.transactions.transaction
> with TransactionContextManager(tx) as api:
> api.net.pools.pool.create(name="foo")
> api.sys.dbs.db.update(name="setup.run", value="false")
> <perform actions inside a transaction>
>
> # transaction is committed when you exit the "with" statement.
"""<def_stmt>__init__ self api<block_start>super(Transactions self).__init__(api)<line_sep>self._meta_data['allowed_lazy_attributes']=[Transaction]<line_sep>self._meta_data['attribute_registry']={'tm:transactionstate':Transaction}<block_end><block_end><class_stmt>Transaction(Resource)<block_start><def_stmt>__init__ self transactions<block_start>super(Transaction self).__init__(transactions)<line_sep>self._meta_data['required_json_kind']='tm:transactionstate'<line_sep>self._meta_data['required_creation_parameters']=set()<block_end><block_end> |
# -*- coding: utf-8 -*-
"""genetic-algorithm-python-tutorial.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/161ijkvn8wG_seVtQexm-p3fW3r5p8s_x
# Genetic Algorithm Implementation with Python
* Tutorial: https://towardsai.net/p/computer-science/genetic-algorithm-ga-introduction-with-example-code-e59f9bc58eaf
* Github: https://github.com/towardsai/tutorials/tree/master/genetic-algorithm-tutorial
The Genetic Algorithm is a class of evolutionary algorithm that is broadly inspired by biological evolution. We all know evolution, it is a selection of parents, reproduction, and mutation of offsprings. The main aim of evolution is to reproduce offsprings that are biologically better than their parents. Genetic algorithm is mainly based on natural selection and it tries to simulate the theory of evolution.
"""<import_stmt>numpy<as>np<import_stmt>matplotlib.pyplot<as>plt<import_stmt>copy<line_sep># cost function
<def_stmt>sphere x<block_start>''' This is the problem we will be
optimizing, each chromosome of parent has a cost
which is calculated from this cost function'''<line_sep><return>sum(x<power>2)<block_end><def_stmt>roulette_wheel_selection p<block_start>''' Roulette Wheel Selection is a method of parent
selection for breeding. We take the cummulative sum of probabilities
and select the first parent whose cummulative sum is greater than
random number'''<line_sep>c=np.cumsum(p)<line_sep>r=sum(p)<times>np.random.rand()<line_sep>ind=np.argwhere(r<le>c)<line_sep><return>ind[0][0]<block_end><def_stmt>crossover p1 p2<block_start>''' Performing uniform crossover. Alpha is the flag
that determines which gene of each chromosome is choosen
to be inherited by the offspring. Maultiply the alpha value
with each gene of every chromosome of both the parents and
then add the resultant value to get child chromosome'''<line_sep>c1=copy.deepcopy(p1)<line_sep>c2=copy.deepcopy(p2)<line_sep># Uniform crossover
alpha=np.random.uniform(0 1 *(c1['position'].shape))<line_sep>c1['position']=alpha<times>p1['position']+(1-alpha)<times>p2['position']<line_sep>c2['position']=alpha<times>p2['position']+(1-alpha)<times>p1['position']<line_sep><return>c1 c2<block_end><def_stmt>mutate c mu sigma<block_start>'''
c: child chromosome
mu: mutation rate. % of gene to be modified
sigma: step size of mutation'''<line_sep>y=copy.deepcopy(c)<line_sep>flag=np.random.rand(*(c['position'].shape))<le>mu# array of True and Flase, indicating at which position to perform mutation
ind=np.argwhere(flag)<line_sep>y['position'][ind]<augadd>sigma<times>np.random.randn(*ind.shape)<line_sep><return>y<block_end><def_stmt>bounds c varmin varmax<block_start>''' Defines the upper and lower bound of gene value'''<line_sep>c['position']=np.maximum(c['position'] varmin)<line_sep>c['position']=np.minimum(c['position'] varmax)<block_end><def_stmt>sort arr<block_start>''' Bubble sorting the population + offsoring
in every iteration to get best fit individuals at top'''<line_sep>n=len(arr)<for_stmt>i range(n-1)<block_start><for_stmt>j range(0 n-i-1)<block_start><if_stmt>arr[j]['cost']<g>arr[j+1]['cost']<block_start>arr[j],arr[j+1]=arr[j+1] arr[j]<block_end><block_end><return>arr<block_end><block_end><def_stmt>ga costfunc num_var varmin varmax maxit npop num_children mu sigma beta# Placeholder for each individual
<block_start>population={}<for_stmt>i range(npop)# each inidivdual has position(chromosomes) and cost,
<block_start>population[i]={'position':<none> 'cost':<none>}# create individual as many as population size(npop)
<block_end># Best solution found
bestsol=copy.deepcopy(population)<line_sep>bestsol_cost=np.inf# initial best cost is infinity
# Initialize population - 1st Gen
<for_stmt>i range(npop)<block_start>population[i]['position']=np.random.uniform(varmin varmax num_var)# randomly initialize the chromosomes and cost
population[i]['cost']=costfunc(population[i]['position'])<if_stmt>population[i]['cost']<l>bestsol_cost# if cost of an individual is less(best) than best cost,
<block_start>bestsol=copy.deepcopy(population[i])# replace the best solution with that individual
<block_end><block_end># Best cost of each generation/iteration
bestcost=np.empty(maxit)<line_sep># Main loop
<for_stmt>it range(maxit)# Calculating probability for roulette wheel selection
<block_start>costs=[]<for_stmt>i range(len(population))<block_start>costs.append(population[i]['cost'])# list of all the population cost
<block_end>costs=np.array(costs)<line_sep>avg_cost=np.mean(costs)# taking average of the costs
<if_stmt>avg_cost<ne>0<block_start>costs=costs/avg_cost<block_end>probs=np.exp(-beta<times>costs)# probability is exponensial of -ve beta times costs
<for_stmt>_ range(num_children<floordiv>2)# we will be having two off springs for each crossover
# hence divide number of children by 2
<block_start>'''
-> choosing two parents randomly for mating
-> we are shuffling all the 20 parent individuals and
-> choosing first two of the shuffled array as our parents for mating
Randomly selecting parents by shiffling them.
But we will be using roulette wheel slection
for our algorithm
q = np.random.permutation(npop)
p1 = population[q[0]]
p2 = population[q[1]]
'''<line_sep># Roulette wheel selection
p1=population[roulette_wheel_selection(probs)]<line_sep>p2=population[roulette_wheel_selection(probs)]<line_sep># crossover two parents
c1,c2=crossover(p1 p2)<line_sep># Perform mutation
c1=mutate(c1 mu sigma)<line_sep>c2=mutate(c2 mu sigma)<line_sep># Apply bounds
bounds(c1 varmin varmax)<line_sep>bounds(c2 varmin varmax)<line_sep># Evaluate first off spring
c1['cost']=costfunc(c1['position'])# calculate cost function of child 1
<if_stmt>type(bestsol_cost)<eq>float<block_start><if_stmt>c1['cost']<l>bestsol_cost# replacing best solution in every generation/iteration
<block_start>bestsol_cost=copy.deepcopy(c1)<block_end><block_end><else_stmt><block_start><if_stmt>c1['cost']<l>bestsol_cost['cost']# replacing best solution in every generation/iteration
<block_start>bestsol_cost=copy.deepcopy(c1)<block_end><block_end># Evaluate second off spring
<if_stmt>c2['cost']<l>bestsol_cost['cost']# replacing best solution in every generation/iteration
<block_start>bestsol_cost=copy.deepcopy(c2)<block_end><block_end># Merge, Sort and Select
population[len(population)]=c1<line_sep>population[len(population)]=c2<line_sep>population=sort(population)<line_sep># Store best cost
bestcost[it]=bestsol_cost['cost']<line_sep># Show generation information
print('Iteration {}: Best Cost = {}'.format(it bestcost[it]))<block_end>out=population<line_sep>Bestsol=bestsol<line_sep>bestcost=bestcost<line_sep><return>(out Bestsol bestcost)<block_end># Problem definition
costfunc=sphere<line_sep>num_var=5# number of decicion variables
varmin=-10# lower bound
varmax=10# upper bound
# GA Parameters
maxit=501# number of iterations
npop=20# initial population size
beta=1<line_sep>prop_children=1# proportion of children to population
num_children=int(np.round(prop_children<times>npop/2)<times>2)# making sure it always an even number
mu=0.2# mutation rate 20%, 205 of 5 is 1, mutating 1 gene
sigma=0.1# step size of mutation
# Run GA
out=ga(costfunc num_var varmin varmax maxit npop num_children mu sigma beta)<line_sep># Results
#(out, Bestsol, bestcost)
plt.plot(out[2])<line_sep>plt.xlim(0 maxit)<line_sep>plt.xlabel('Generations')<line_sep>plt.ylabel('Best Cost')<line_sep>plt.title('Genetic Algorithm')<line_sep>plt.grid(<true>)<line_sep>plt.show<line_sep> |
###############################################################################
##
## Copyright (c) Crossbar.io Technologies GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
__all__=['startClient' 'startServer']<import_from_stmt>autobahn.twisted.websocket connectWS listenWS WebSocketClientFactory WebSocketClientProtocol WebSocketServerFactory WebSocketServerProtocol<class_stmt>EchoServerProtocol(WebSocketServerProtocol)<block_start><def_stmt>onMessage self payload isBinary<block_start>self.sendMessage(payload isBinary)<block_end><block_end><class_stmt>EchoServerFactory(WebSocketServerFactory)<block_start>protocol=EchoServerProtocol<def_stmt>__init__ self url debug=<false><block_start>WebSocketServerFactory.__init__(self url debug=debug debugCodePaths=debug)<block_end><block_end><class_stmt>EchoClientProtocol(WebSocketClientProtocol)<block_start><def_stmt>onMessage self payload isBinary<block_start>self.sendMessage(payload isBinary)<block_end><block_end><class_stmt>EchoClientFactory(WebSocketClientFactory)<block_start>protocol=EchoClientProtocol<def_stmt>__init__ self url debug=<false><block_start>WebSocketClientFactory.__init__(self url debug=debug debugCodePaths=debug)<block_end><block_end><def_stmt>startClient wsuri debug=<false><block_start>factory=EchoClientFactory(wsuri debug)<line_sep>connectWS(factory)<line_sep><return><true><block_end><def_stmt>startServer wsuri sslKey=<none> sslCert=<none> debug=<false><block_start>factory=EchoServerFactory(wsuri debug)<if_stmt>sslKey<and>sslCert<block_start>sslContext=ssl.DefaultOpenSSLContextFactory(sslKey sslCert)<block_end><else_stmt><block_start>sslContext=<none><block_end>listenWS(factory sslContext)<line_sep><return><true><block_end> |
# http://pastie.org/pastes/10943132/text
# Copyright (c) 2016 1wd
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
<import_stmt>random<import_stmt>sys<class_stmt>Expr(object)<block_start><def_stmt>__init__ self<block_start><pass><block_end><block_end><class_stmt>ConstExpr(Expr)<block_start><def_stmt>__init__ self val<block_start>self.val=val<block_end><block_end><class_stmt>VarExpr(Expr)<block_start><def_stmt>__init__ self name<block_start>self.name=name<block_end><block_end><class_stmt>BinOp(Expr)<block_start><def_stmt>__init__ self op left right<block_start>self.op=op<line_sep>self.left=left<line_sep>self.right=right<block_end><block_end><class_stmt>FunCallExpr(Expr)<block_start><def_stmt>__init__ self name<block_start>self.name=name<block_end><block_end><class_stmt>Statement(object)<block_start><def_stmt>__init__ self<block_start><pass><block_end><block_end><class_stmt>Assignment(Statement)<block_start><def_stmt>__init__ self lval expr<block_start>self.lval=lval<line_sep>self.expr=expr<block_end><block_end><class_stmt>VarDecl(Statement)<block_start><def_stmt>__init__ self name expr<block_start>self.name=name<line_sep>self.expr=expr<line_sep>self.mut=<false><line_sep>self.used=<false><block_end><block_end><class_stmt>Return(Statement)<block_start><def_stmt>__init__ self expr context<block_start>self.expr=expr<line_sep>self.context=context<block_end><block_end><class_stmt>Print(Statement)<block_start><def_stmt>__init__ self expr<block_start>self.expr=expr<block_end><block_end><class_stmt>FunDecl(object)<block_start><def_stmt>__init__ self name statements return_type<block_start>self.name=name<line_sep>self.statements=statements<line_sep>self.return_type=return_type<line_sep>self.used=<false><block_end><block_end><class_stmt>Program(object)<block_start><def_stmt>__init__ self main functions<block_start>self.main=main<line_sep>self.functions=functions<block_end><block_end>#----------------------------------------------------------
<class_stmt>Context(object)<block_start><def_stmt>__init__ self parent=<none> decl_name=<none><block_start>self.env={}<line_sep>self.id=0<line_sep>self.parent=parent<line_sep>self.decl_name=decl_name<block_end><def_stmt>name self base i<block_start><return>"%s%s"%(base i)<block_end><def_stmt>new_name self base<block_start>self.id<augadd>1<line_sep><return>self.name(base self.id)<block_end><def_stmt>random_name self base<block_start>biased_min=random.randint(1 self.id)<line_sep>i=random.randint(biased_min self.id)<line_sep><return>self.name(base i)<block_end><def_stmt>random_expr self<block_start><return>random.choice([self.random_const_expr self.random_var_expr self.random_binary_op self.random_fun_call ])()<block_end><def_stmt>find_unused self<block_start><for_stmt>decl self.env.values()<block_start><if_stmt><not>decl.used<block_start><return>decl<block_end><block_end><return><none><block_end><def_stmt>force_use_expr self<block_start>expr=self.random_const_expr()<line_sep>decl=self.find_unused()<while_stmt>decl<is><not><none><block_start>left=self.forced_var_expr(decl.name)<line_sep>expr=self.forced_random_binary_op(left expr)<line_sep>decl=self.find_unused()<block_end>decl=self.parent.find_unused()<while_stmt>decl<is><not><none><block_start>left=self.forced_fun_call(decl.name)<line_sep>expr=self.forced_random_binary_op(left expr)<line_sep>decl=self.parent.find_unused()<block_end><return>expr<block_end><def_stmt>random_const_expr self<block_start><return>ConstExpr(str(random.randint(1 1000)))<block_end><def_stmt>forced_var_expr self name<block_start>decl=self.env[name]<line_sep>decl.used=<true><line_sep><return>VarExpr(name)<block_end><def_stmt>random_var_expr self<block_start><if_stmt>self.id<eq>0<block_start><return>self.random_const_expr()<block_end>name=self.random_name('x')<line_sep><return>self.forced_var_expr(name)<block_end><def_stmt>forced_random_binary_op self left right#op = random.choice(["+", "-", "*", "|", "&", "^"])
<block_start>op=random.choice(["|" "&" "^"])<line_sep><return>BinOp(op left right)<block_end><def_stmt>random_binary_op self<block_start>left=self.random_expr()<line_sep>right=self.random_expr()<line_sep><return>self.forced_random_binary_op(left right)<block_end><def_stmt>forced_fun_call self name<block_start>decl=self.parent.env[name]<line_sep>decl.used=<true><line_sep><return>FunCallExpr(name)<block_end><def_stmt>random_fun_call self<block_start><if_stmt>self.parent.id<eq>0<block_start><return>self.random_const_expr()<block_end>name=self.parent.random_name('f')<line_sep><return>self.forced_fun_call(name)<block_end><def_stmt>random_statement self<block_start><return>random.choice([self.random_assignment self.random_var_decl ])()<block_end><def_stmt>random_assignment self<block_start>name=self.random_name('x')<line_sep>decl=self.env[name]<line_sep>expr=self.random_expr()<if_stmt><not>decl.used<block_start>left=self.forced_var_expr(name)<line_sep>expr=self.forced_random_binary_op(left expr)<block_end>decl.used=<false><line_sep>decl.mut=<true><line_sep><return>Assignment(name expr)<block_end><def_stmt>random_return_statement self<block_start><return>Return(self.force_use_expr() self)<block_end><def_stmt>random_print_statement self<block_start><return>Print(self.force_use_expr())<block_end><def_stmt>random_var_decl self<block_start>expr=self.random_expr()<line_sep>name=self.new_name('x')<line_sep>decl=VarDecl(name expr)<line_sep>self.env[name]=decl<line_sep><return>decl<block_end><def_stmt>random_fun_decl self num_statements return_type<block_start>local=Context(self)<line_sep>statements=[]<line_sep>statements.append(local.random_var_decl())<for_stmt>i 
range(num_statements)<block_start>statements.append(local.random_statement())<block_end><if_stmt>return_type<is><not><none><block_start>statements.append(local.random_return_statement())<block_end><else_stmt><block_start>statements.append(local.random_print_statement())<block_end>name=self.new_name('f')<line_sep>decl=FunDecl(name statements return_type)<line_sep>local.decl=decl<line_sep>self.env[name]=decl<line_sep><return>decl<block_end><def_stmt>random_program self num_funs max_statements_per_fun<block_start>functions=[]<for_stmt>i range(num_funs)<block_start>num_statements=random.randint(1 max_statements_per_fun)<line_sep>fun_decl=self.random_fun_decl(num_statements 'int')<line_sep>functions.append(fun_decl)<block_end>num_statements=random.randint(1 max_statements_per_fun)<line_sep>main=self.random_fun_decl(num_statements <none>)<line_sep><return>Program(main functions)<block_end><block_end>#----------------------------------------------------------
<class_stmt>Lang(object)<block_start>operators={'&':'&' '|':'|' '^':'^' }<def_stmt>__init__ self<block_start>self.indent=0<block_end><def_stmt>write_indent self f<block_start>f.write(' '<times>4<times>self.indent)<block_end><def_stmt>write_statement self f statement<block_start>handlers={VarDecl:self.write_var_decl Assignment:self.write_assignment Return:self.write_return Print:self.write_print }<line_sep>handler=handlers.get(type(statement))<if_stmt>handler<is><not><none><block_start>handler(f statement)<block_end><else_stmt><block_start><raise>Exception("Unknown kind of statement")<block_end><block_end><def_stmt>write_lval self f lval<block_start>f.write(lval)<block_end><def_stmt>write_expr self f expr needs_parens=<false><block_start>handlers={ConstExpr:self.write_const_expr VarExpr:self.write_var_expr BinOp:self.write_bin_op FunCallExpr:self.write_fun_call }<line_sep>handler=handlers.get(type(expr))<if_stmt>handler<is><not><none><block_start>handler(f expr needs_parens)<block_end><else_stmt><block_start><raise>Exception("Unknown kind of expr")<block_end><block_end><def_stmt>write_const_expr self f expr needs_parens<block_start>f.write(expr.val)<block_end><def_stmt>write_var_expr self f expr needs_parens<block_start>f.write(expr.name)<block_end><def_stmt>write_bin_op self f expr needs_parens<block_start><if_stmt>needs_parens<block_start>f.write("(")<block_end>self.write_expr(f expr.left needs_parens=<true>)<line_sep>f.write(" %s "%self.operators[expr.op])<line_sep>self.write_expr(f expr.right needs_parens=<true>)<if_stmt>needs_parens<block_start>f.write(")")<block_end><block_end><def_stmt>write_fun_call self f expr needs_parens<block_start>f.write("%s()"%expr.name)<block_end><block_end><class_stmt>CppLang(Lang)<block_start>ext='cpp'<line_sep>type_names={'int':'int' }<def_stmt>write_program self f program<block_start>f.write('#include <cstdio>\n\n')<for_stmt>fun_decl program.functions<block_start>self.write_fun_decl(f 
fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><def_stmt>write_fun_decl self f fun_decl main=<false><block_start><if_stmt>fun_decl.return_type<is><none><block_start>optional_result='int '<block_end><else_stmt><block_start>type_name=self.type_names[fun_decl.return_type]<line_sep>optional_result='%s '%type_name<block_end>fun_name='main'<if>main<else>fun_decl.name<line_sep>f.write('%s %s() {\n'%(optional_result fun_name))<line_sep>self.indent<augadd>1<for_stmt>statement fun_decl.statements<block_start>self.write_statement(f statement)<block_end>self.indent<augsub>1<line_sep>f.write('}\n')<block_end><def_stmt>write_var_decl self f var_decl<block_start>self.write_indent(f)<line_sep>f.write('int ')<line_sep>self.write_lval(f var_decl.name)<line_sep>f.write(' = ')<line_sep>self.write_expr(f var_decl.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_assignment self f assignment<block_start>self.write_indent(f)<line_sep>self.write_lval(f assignment.lval)<line_sep>f.write(' = ')<line_sep>self.write_expr(f assignment.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_return self f statement<block_start>self.write_indent(f)<line_sep>f.write('return ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_print self f statement<block_start>self.write_indent(f)<line_sep>f.write('printf("%i\\n", ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(');\n')<block_end><block_end><class_stmt>CLang(CppLang)<block_start>ext='c'<def_stmt>write_program self f program<block_start>f.write('#include <stdio.h>\n\n')<for_stmt>fun_decl program.functions<block_start>self.write_fun_decl(f fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><block_end><class_stmt>DLang(Lang)<block_start>ext='d'<line_sep>type_names={'int':'int' }<def_stmt>write_program self f program<block_start>f.write('import std.stdio;\n\n')<for_stmt>fun_decl 
program.functions<block_start>self.write_fun_decl(f fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><def_stmt>write_fun_decl self f fun_decl main=<false><block_start><if_stmt>fun_decl.return_type<is><none><block_start>optional_result='void '<block_end><else_stmt><block_start>type_name=self.type_names[fun_decl.return_type]<line_sep>optional_result='%s '%type_name<block_end>fun_name='main'<if>main<else>fun_decl.name<line_sep>f.write('%s %s() {\n'%(optional_result fun_name))<line_sep>self.indent<augadd>1<for_stmt>statement fun_decl.statements<block_start>self.write_statement(f statement)<block_end>self.indent<augsub>1<line_sep>f.write('}\n')<block_end><def_stmt>write_var_decl self f var_decl<block_start>self.write_indent(f)<line_sep>f.write('int ')<line_sep>self.write_lval(f var_decl.name)<line_sep>f.write(' = ')<line_sep>self.write_expr(f var_decl.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_assignment self f assignment<block_start>self.write_indent(f)<line_sep>self.write_lval(f assignment.lval)<line_sep>f.write(' = ')<line_sep>self.write_expr(f assignment.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_return self f statement<block_start>self.write_indent(f)<line_sep>f.write('return ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_print self f statement<block_start>self.write_indent(f)<line_sep>f.write('writefln("%d", ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(');\n')<block_end><block_end><class_stmt>GoLang(Lang)<block_start>ext='go'<line_sep>type_names={'int':'int' }<def_stmt>write_program self f program<block_start>f.write('package main\n\n')<line_sep>f.write('import "fmt"\n\n')<for_stmt>fun_decl program.functions<block_start>self.write_fun_decl(f fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><def_stmt>write_fun_decl self f fun_decl 
main=<false><block_start><if_stmt>fun_decl.return_type<is><none><block_start>optional_result=''<block_end><else_stmt><block_start>type_name=self.type_names[fun_decl.return_type]<line_sep>optional_result=' %s'%type_name<block_end>fun_name='main'<if>main<else>fun_decl.name<line_sep>f.write('func %s()%s {\n'%(fun_name optional_result))<line_sep>self.indent<augadd>1<for_stmt>statement fun_decl.statements<block_start>self.write_statement(f statement)<block_end>self.indent<augsub>1<line_sep>f.write('}\n')<block_end><def_stmt>write_var_decl self f var_decl<block_start>self.write_indent(f)<line_sep>self.write_lval(f var_decl.name)<line_sep>f.write(' := ')<line_sep>self.write_expr(f var_decl.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_assignment self f assignment<block_start>self.write_indent(f)<line_sep>self.write_lval(f assignment.lval)<line_sep>f.write(' = ')<line_sep>self.write_expr(f assignment.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_return self f statement<block_start>self.write_indent(f)<line_sep>f.write('return ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_print self f statement<block_start>self.write_indent(f)<line_sep>f.write('fmt.Printf("%d\\n", ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(')\n')<block_end><block_end><class_stmt>PascalLang(Lang)<block_start>ext='pas'<line_sep>type_names={'int':'integer' }<line_sep>operators={'&':'and' '|':'or' '^':'xor' }<def_stmt>write_program self f program<block_start>f.write('program main;\n\n')<for_stmt>fun_decl program.functions<block_start>self.write_fun_decl(f fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><def_stmt>write_fun_decl self f fun_decl main=<false><block_start><if_stmt><not>main<block_start>fun_name=fun_decl.name<line_sep>type_name=self.type_names[fun_decl.return_type]<line_sep>f.write('function %s() : %s;\n'%(fun_name type_name))<block_end>vars=[s<for>s 
fun_decl.statements<if>isinstance(s VarDecl)]<if_stmt>vars<block_start>f.write('var\n')<for_stmt>v vars<block_start>type_name=self.type_names['int']<line_sep>f.write(' %s : %s;\n'%(v.name type_name))<block_end><block_end>f.write('begin\n')<line_sep>self.indent<augadd>1<for_stmt>statement fun_decl.statements<block_start>self.write_statement(f statement)<block_end>self.indent<augsub>1<line_sep>f.write('end%s\n'%('.'<if>main<else>';'))<block_end><def_stmt>write_var_decl self f var_decl<block_start>self.write_indent(f)<line_sep>self.write_lval(f var_decl.name)<line_sep>f.write(' := ')<line_sep>self.write_expr(f var_decl.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_assignment self f assignment<block_start>self.write_indent(f)<line_sep>self.write_lval(f assignment.lval)<line_sep>f.write(' := ')<line_sep>self.write_expr(f assignment.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_return self f statement<block_start>self.write_indent(f)<line_sep>self.write_lval(f statement.context.decl.name)<line_sep>f.write(' := ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_print self f statement<block_start>self.write_indent(f)<line_sep>f.write('writeln(')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(');\n')<block_end><block_end><class_stmt>RustLang(Lang)<block_start>ext='rs'<line_sep>type_names={'int':'i32' }<def_stmt>write_program self f program<block_start><for_stmt>fun_decl program.functions<block_start>self.write_fun_decl(f fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><def_stmt>write_fun_decl self f fun_decl main=<false><block_start><if_stmt>fun_decl.return_type<is><none><block_start>optional_result=''<block_end><else_stmt><block_start>type_name=self.type_names[fun_decl.return_type]<line_sep>optional_result=' -> %s'%type_name<block_end>fun_name='main'<if>main<else>fun_decl.name<line_sep>f.write('fn %s()%s {\n'%(fun_name 
optional_result))<line_sep>self.indent<augadd>1<for_stmt>statement fun_decl.statements<block_start>self.write_statement(f statement)<block_end>self.indent<augsub>1<line_sep>f.write('}\n')<block_end><def_stmt>write_var_decl self f var_decl<block_start>self.write_indent(f)<line_sep>f.write('let ')<if_stmt>var_decl.mut<block_start>f.write('mut ')<block_end>self.write_lval(f var_decl.name)<line_sep>f.write(': i32')<line_sep>f.write(' = ')<line_sep>self.write_expr(f var_decl.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_const_expr self f expr needs_parens<block_start>f.write(expr.val+'i32')<block_end><def_stmt>write_assignment self f assignment<block_start>self.write_indent(f)<line_sep>self.write_lval(f assignment.lval)<line_sep>f.write(' = ')<line_sep>self.write_expr(f assignment.expr)<line_sep>f.write(';\n')<block_end><def_stmt>write_return self f statement<block_start>self.write_indent(f)<line_sep>self.write_expr(f statement.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_print self f statement<block_start>self.write_indent(f)<line_sep>f.write('println!("{}", ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(')\n')<block_end><block_end><class_stmt>TplLang(Lang)<block_start>ext='tpl'<line_sep>type_names={'int':'int32' }<def_stmt>write_program self f program<block_start><for_stmt>fun_decl program.functions<block_start>self.write_fun_decl(f fun_decl)<line_sep>f.write('\n')<block_end>self.write_fun_decl(f program.main main=<true>)<block_end><def_stmt>write_fun_decl self f fun_decl main=<false><block_start><if_stmt>fun_decl.return_type<is><none><block_start>optional_result=' -> int'<block_end><else_stmt><block_start>type_name=self.type_names[fun_decl.return_type]<line_sep>optional_result=' -> %s'%type_name<block_end>fun_name='main'<if>main<else>fun_decl.name<line_sep>f.write('fun %s()%s {\n'%(fun_name optional_result))<line_sep>self.indent<augadd>1<for_stmt>statement fun_decl.statements<block_start>self.write_statement(f 
statement)<block_end>self.indent<augsub>1<line_sep>f.write('}\n')<block_end><def_stmt>write_var_decl self f var_decl<block_start>self.write_indent(f)<line_sep>f.write('var ')<line_sep>self.write_lval(f var_decl.name)<line_sep>f.write(': int32')<line_sep>f.write(' = ')<line_sep>self.write_expr(f var_decl.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_assignment self f assignment<block_start>self.write_indent(f)<line_sep>self.write_lval(f assignment.lval)<line_sep>f.write(' = ')<line_sep>self.write_expr(f assignment.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_return self f statement<block_start>self.write_indent(f)<line_sep>f.write('return ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write('\n')<block_end><def_stmt>write_print self f statement<block_start>self.write_indent(f)<line_sep>f.write('println("{}", ')<line_sep>self.write_expr(f statement.expr)<line_sep>f.write(')\n')<block_end><block_end>#----------------------------------------------------------
seed=sys.argv[1]<line_sep>num_funs=sys.argv[2]<line_sep>random.seed(seed)<line_sep>c=Context()<line_sep>p=c.random_program(num_funs=int(num_funs) max_statements_per_fun=20)<line_sep>langs=[#CppLang(),
#CLang(),
#DLang(),
#GoLang(),
#PascalLang(),
#RustLang(),
TplLang() ]<for_stmt>lang langs<block_start>filename='test_%s_s%s_n%s.%s'%(lang.ext seed num_funs lang.ext)<with_stmt>open(filename 'w')<as>f<block_start>lang.write_program(f p)<block_end><block_end> |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>time<import_stmt>traceback<import_stmt>platform<import_stmt>random<import_stmt>socket<import_stmt>netifaces<import_from_stmt>calvin.utilities calvinlogger<import_from_stmt>calvin.runtime.south.storage.twistedimpl.securedht.service_discovery ServiceDiscoveryBase<line_sep>#from calvin.runtime.south.storage.twistedimpl.securedht.security_discovery_exchange import Ca
<import_from_stmt>twisted.internet.protocol DatagramProtocol<import_from_stmt>twisted.web.http datetimeToString<import_from_stmt>twisted.internet reactor defer<line_sep>_log=calvinlogger.get_logger(__name__)<line_sep>SSDP_ADDR='172.16.31.10'<line_sep>SSDP_PORT=1900<line_sep>__version_info__=(0 6 7)<line_sep>__version__='.'.join(map(str __version_info__))<line_sep>SERVER_ID=','.join([platform.system() platform.release() 'UPnP/1.0,Calvin UPnP framework' __version__])<line_sep>SERVICE_UUID='1693326a-abb9-11e4-8dfb-9cb654a16426'<line_sep>CA_SERVICE_UUID='58532fde-e793-11e5-965d-7cd1c3da1305'<line_sep>MS_BOOTSTRAP=('M-SEARCH * HTTP/1.1\r\nHOST: %s:%d\r\nMAN: "ssdp:discover"\r\n'+'MX: 2\r\nST: uuid:%s\r\n\r\n')%(SSDP_ADDR SSDP_PORT SERVICE_UUID)<line_sep>MS_CA=('M-SEARCH * HTTP/1.1\r\nHOST: %s:%d\r\nMAN: "ssdp:discover"\r\n'+'MX: 2\r\nST: uuid:%s\r\n\r\n')%(SSDP_ADDR SSDP_PORT CA_SERVICE_UUID)<line_sep>MS={SERVICE_UUID:MS_BOOTSTRAP CA_SERVICE_UUID:MS_CA}<line_sep>MS_BOOTSTRAP_RESP='HTTP/1.1 200 OK\r\n'+'USN: %s::upnp:rootdevice\r\n'%SERVICE_UUID+'SERVER: %s\r\n'+'last-seen: %s\r\n'+'EXT: \r\n'+'SERVICE: %s\r\n'+'LOCATION: %s\r\n'+'CACHE-CONTROL: max-age=1800\r\n'+'ST: uuid:%s\r\n'%SERVICE_UUID+'DATE: %s\r\n'<line_sep>MS_CA_RESP='HTTP/1.1 200 OK\r\n'+'USN: %s::upnp:rootdevice\r\n'%CA_SERVICE_UUID+'SERVER: %s\r\n'+'last-seen: %s\r\n'+'EXT: \r\n'+'LOCATION: %s\r\n'+'CACHE-CONTROL: max-age=1800\r\n'+'ST: uuid:%s\r\n'%CA_SERVICE_UUID+'DATE: %s\r\n'<line_sep>MS_RESP={SERVICE_UUID:MS_BOOTSTRAP_RESP CA_SERVICE_UUID:MS_CA_RESP}<def_stmt>parse_http_response data<block_start>""" don't try to get the body, there are reponses without """<line_sep>header=data.split('\r\n\r\n')[0]<line_sep>lines=header.split('\r\n')<line_sep>cmd=lines[0].split(' ')<line_sep>lines=map(<lambda>x:x.replace(': ' ':' 1) lines[1:])<line_sep>lines=filter(<lambda>x:len(x)<g>0 lines)<line_sep>headers=[x.split(':' 1)<for>x lines]<line_sep>headers=dict(map(<lambda>x:(x[0].lower() x[1]) 
headers))<line_sep><return>cmd headers<block_end><class_stmt>ServerBase(DatagramProtocol)<block_start><def_stmt>__init__ self node_id control_uri ips d=<none><block_start>_log.info("Serverbase::_init_: \n\tnode_id={}\n\tcontrol_uri={}\n\tips={}\n\tdserver={}".format(node_id control_uri ips d))<line_sep>self._services={}<line_sep>self._dstarted=d<line_sep>self.ignore_list=[]<line_sep>self.ips=ips<line_sep>self._msearches_resp={sid:{}<for>sid MS.keys()}<line_sep>self._node_id=node_id<line_sep>self._control_uri=control_uri<block_end><def_stmt>startProtocol self<block_start><if_stmt>self._dstarted<block_start>reactor.callLater(0 self._dstarted.callback <true>)<block_end><block_end><def_stmt>datagramReceived self datagram address# Broadcast
<block_start><try_stmt><block_start>cmd,headers=parse_http_response(datagram)<line_sep>_log.debug("ServerBase::Received %s, %s from %r"%(cmd headers address ))<if_stmt>cmd[0]<eq>'M-SEARCH'<and>cmd[1]<eq>'*'<block_start>_log.debug("Ignore list %s ignore %s"%(self.ignore_list address<not><in>self.ignore_list))<line_sep># Only reply to our requests
<if_stmt>SERVICE_UUID<in>headers['st']<and>address<not><in>self.ignore_list<block_start><for_stmt>k,addrs self._services.items()<block_start><for_stmt>addr addrs# Only tell local about local
<block_start><if_stmt>addr[0]<eq>"127.0.0.1"<and>address[0]<ne>"127.0.0.1"<block_start><continue><block_end>response=MS_RESP[SERVICE_UUID]%('%s:%d'%addr str(time.time()) k self._control_uri+"/node/"+self._node_id datetimeToString())<if_stmt>"cert"<in>self._msearches_resp[SERVICE_UUID].keys()<block_start>response<augadd>"CERTIFICATE: {}\r\n\r\n".format(self._msearches_resp[SERVICE_UUID]["cert"])<block_end>_log.debug("ServerBase::Sending response: %s"%repr(response))<line_sep>delay=random.randint(0 min(5 int(headers['mx'])))<line_sep>reactor.callLater(delay self.send_it response address)<block_end><block_end><block_end><elif_stmt>CA_SERVICE_UUID<in>headers['st']<and>address<not><in>self.ignore_list<and>self._msearches_resp[CA_SERVICE_UUID]["sign"]<block_start><for_stmt>k,addrs self._services.items()<block_start><for_stmt>addr addrs# Only tell local about local
<block_start><if_stmt>addr[0]<eq>"127.0.0.1"<and>address[0]<ne>"127.0.0.1"<block_start><continue><block_end><try_stmt><block_start>response=MS_RESP[CA_SERVICE_UUID]%(str(addr) str(time.time()) self._control_uri+"/node/"+self._node_id datetimeToString())<block_end><except_stmt>Exception<as>err<block_start>_log.error("Failed to create response, err={}".format(err))<line_sep><raise><block_end>_log.debug("ServerBase::Sending response: %s"%repr(response))<line_sep>delay=random.randint(0 min(5 int(headers['mx'])))<line_sep>reactor.callLater(delay self.send_it response address)<block_end><block_end><block_end><block_end><block_end><except_stmt>Exception<as>err<block_start>_log.exception("SSDP search received, but failed handling, err={}".format(err))<block_end><block_end><def_stmt>update_params self service_uuid **kwargs<block_start>self._msearches_resp[service_uuid].update(kwargs)<block_end><def_stmt>add_service self service ip port# Service on all interfaces
<block_start><if_stmt>ip<in>["0.0.0.0" ""]<block_start>self._services[service]=[]<for_stmt>a self.ips<block_start>_log.debug("Add service %s, %s:%s"%(service a port))<line_sep>self._services[service].append((a port))<block_end><block_end><else_stmt><block_start>_log.debug("Add service %s, %s:%s"%(service ip port))<line_sep>self._services[service]=[(ip port)]<block_end><block_end><def_stmt>remove_service self service<block_start><if_stmt>service<in>self._services<block_start><del_stmt>self._services[service]<block_end><block_end><def_stmt>set_ignore_list self list_<block_start>self.ignore_list=list_<block_end><def_stmt>send_it self response destination<block_start><try_stmt><block_start><if_stmt>self.transport<block_start>self.transport.write(response destination)<block_end><else_stmt><block_start>_log.debug("No transport yet!")<block_end><block_end><except_stmt>(AttributeError socket.error) msg<block_start>_log.exception("Error in send %s"%repr(msg))<block_end><block_end><def_stmt>stop self<block_start><pass><block_end><block_end><class_stmt>ClientBase(DatagramProtocol)<block_start><def_stmt>__init__ self dclient=<none><block_start>self._dstarted=dclient<line_sep>self._service=<none><line_sep>self._msearches={sid:{'cb':<none> 'stopped':<false> 'stop':<false>}<for>sid MS.keys()}<block_end><def_stmt>startProtocol self<block_start><if_stmt>self._dstarted<block_start>reactor.callLater(0 self._dstarted.callback <true>)<block_end><block_end><def_stmt>datagramReceived self datagram address# Broadcast
<block_start>cmd,headers=parse_http_response(datagram)<line_sep>_log.debug("ClientBase::Received %s, %s from %r"%(cmd headers address ))<if_stmt>cmd[0].startswith('HTTP/1.')<and>cmd[1]<eq>'200'<block_start><if_stmt>SERVICE_UUID<in>headers['st']<block_start>c_address=headers['server'].split(':')<line_sep>c_address[1]=int(c_address[1])<try_stmt><block_start>cert=headers['certificate'].split(':')<line_sep>c_address.extend(cert)<block_end><except_stmt>KeyError<block_start><pass><block_end># Filter on service calvin networks
<if_stmt>self._service<is><none><or>self._service<eq>headers['service']<block_start>_log.debug("ClientBase::Received service %s from %s"%(headers['service'] c_address ))<if_stmt>c_address<block_start><if_stmt>self._msearches[SERVICE_UUID]['cb']<block_start>self._msearches[SERVICE_UUID]['cb']([tuple(c_address)])<block_end><if_stmt>self._msearches[SERVICE_UUID]['stop']<block_start>self.stop(SERVICE_UUID)<block_end><block_end><block_end><block_end><elif_stmt>CA_SERVICE_UUID<in>headers['st']<block_start>_log.error("Deprecated")<block_end><block_end><block_end># c_address = headers['server'].split(':')
# c_address[1] = int(c_address[1])
# try:
# cert = headers['certificate']
# c_address.append(cert)
# except KeyError:
# pass
# # FIXME do we need service filtering for signed certificates
# if c_address and not self.is_stopped(CA_SERVICE_UUID):
# _log.debug("Signed Cert %s" % c_address)
# _log.debug("CA search data: %s" % self._msearches[CA_SERVICE_UUID])
# if self._msearches[CA_SERVICE_UUID]['cb']:
# self._msearches[CA_SERVICE_UUID]['cb'](tuple(c_address))
# if self._msearches[CA_SERVICE_UUID]['stop']:
# self.stop(CA_SERVICE_UUID)
<def_stmt>set_callback self service_uuid callback<block_start>self._msearches[service_uuid]['cb']=callback<block_end><def_stmt>set_service self service<block_start>self._service=service<block_end><def_stmt>is_stopped self service_uuid<block_start><return>self._msearches[service_uuid]['stopped']<block_end><def_stmt>set_autostop self service_uuid stop=<true><block_start>self._msearches[service_uuid]['stop']=stop<block_end><def_stmt>stop self service_uuid<block_start>self._msearches[service_uuid]['stopped']=<true><block_end><block_end><class_stmt>SSDPServiceDiscovery(ServiceDiscoveryBase)<block_start><def_stmt>__init__ self node_id control_uri iface='' ignore_self=<true><block_start>super(SSDPServiceDiscovery self).__init__()<line_sep>self.ignore_self=ignore_self<line_sep>self.iface=''#iface
self.ssdp=<none><line_sep>self.port=<none><line_sep>self.searches={}<line_sep>self.iface_send_list=[]<line_sep>self._node_id=node_id<line_sep>self._control_uri=control_uri<if_stmt>self.iface<in>["0.0.0.0" ""]<block_start><for_stmt>a netifaces.interfaces()<block_start>addrs=netifaces.ifaddresses(a)<line_sep># Ipv4 for now
<if_stmt>netifaces.AF_INET<in>addrs<block_start><for_stmt>a addrs[netifaces.AF_INET]<block_start>self.iface_send_list.append(a['addr'])<block_end><block_end><block_end><block_end><else_stmt><block_start>self.iface_send_list.append(iface)<block_end><block_end><def_stmt>start self<block_start>dserver=defer.Deferred()<line_sep>dclient=defer.Deferred()<try_stmt><block_start>self.ssdp=reactor.listenMulticast(SSDP_PORT ServerBase(self._node_id self._control_uri self.iface_send_list d=dserver) listenMultiple=<true>)<line_sep>self.ssdp.setTTL(5)<for_stmt>iface_ self.iface_send_list<block_start>d=self.ssdp.joinGroup(SSDP_ADDR interface=iface_)<line_sep>d.addErrback(<lambda>x:_log.error("Failed to join multicast group %s:%s, %s" iface_ SSDP_PORT x))<line_sep>d.addCallback(<lambda>x:_log.debug("Joined multicast group %s:%s, %s" iface_ SSDP_PORT x))<block_end><block_end><except_stmt><block_start>_log.exception("Multicast listen join failed!!")<line_sep># Dont start server some one is alerady running locally
<block_end># TODO: Do we need this ?
self.port=reactor.listenMulticast(0 ClientBase(dclient=dclient) interface=self.iface)<line_sep>_log.debug("SSDP Host: %s"%repr(self.port.getHost()))<line_sep># Set ignore port and ips
<if_stmt>self.ssdp<and>self.ignore_self<block_start>self.ssdp.protocol.set_ignore_list([(x self.port.getHost().port)<for>x self.iface_send_list])<block_end><return>dserver dclient<block_end><def_stmt>update_server_params self service_uuid **kwargs<block_start>self.ssdp.protocol.update_params(service_uuid **kwargs)<block_end><def_stmt>start_search self service_uuid **kwargs<block_start>callback=kwargs.pop('callback' <none>)<line_sep>stop=kwargs.pop('stop' <false>)<line_sep># Restart backoff
self.searches.setdefault(service_uuid {})["backoff"]=.2<def_stmt>local_start_msearch <block_start>self.port.protocol.set_callback(service_uuid callback)<line_sep>self.port.protocol.set_autostop(service_uuid stop)<line_sep>self._send_msearch(service_uuid once=<false> kwargs=kwargs)<block_end>reactor.callLater(0 local_start_msearch)<block_end><def_stmt>stop_all_search self<block_start><for_stmt>service_uuid MS.keys()<block_start>self.port.protocol.set_callback(service_uuid <none>)<line_sep>self.port.protocol.stop(service_uuid)<block_end><block_end><def_stmt>stop_search self service_uuid<block_start>_log.debug("Stop search of %s"%service_uuid)<line_sep>self.port.protocol.set_callback(service_uuid <none>)<line_sep>self.port.protocol.stop(service_uuid)<block_end><def_stmt>set_client_filter self service<block_start>self.port.protocol.set_service(service)<block_end><def_stmt>register_service self service ip port<block_start>self.ssdp.protocol.add_service(service ip port)<block_end><def_stmt>unregister_service self service<block_start>self.ssdp.protocol.remove_service(service)<block_end><def_stmt>_send_msearch self service_uuid once=<true> kwargs=<none><block_start><if_stmt>kwargs<is><none><block_start>kwargs={}<block_end><if_stmt>self.port<and><not>self.port.protocol.is_stopped(service_uuid)<block_start><for_stmt>src_ip self.iface_send_list<block_start>self.port.protocol.transport.setOutgoingInterface(src_ip)<line_sep>_log.debug("Sending M-SEARCH... 
on %s\n%s"%(src_ip MS[service_uuid].format(**kwargs)))<line_sep>self.port.write(MS[service_uuid].format(**kwargs) (SSDP_ADDR SSDP_PORT))<block_end><if_stmt><not>once<and><not>self.port.protocol.is_stopped(service_uuid)<block_start>reactor.callLater(self.searches[service_uuid]["backoff"] self._send_msearch service_uuid once=<false> kwargs=kwargs)<line_sep>_log.debug("Next M-SEARCH in %s seconds"%self.searches[service_uuid]["backoff"])<line_sep>self.searches[service_uuid]["backoff"]=min(600 self.searches[service_uuid]["backoff"]<times>1.5)<block_end><block_end><block_end><def_stmt>search self service_uuid callback **kwargs<block_start>self.port.protocol.set_callback(service_uuid callback)<line_sep>self._send_msearch(service_uuid once=<true> kwargs=kwargs)<block_end><def_stmt>stop self<block_start>dlist=[]<if_stmt>self.ssdp<block_start>dlist.append(self.ssdp.leaveGroup(SSDP_ADDR interface=self.iface))<line_sep>dlist.append(self.ssdp.stopListening())<line_sep>self.ssdp=<none><block_end><if_stmt>self.port<block_start>self.stop_all_search()<line_sep>dlist.append(self.port.stopListening())<line_sep>self.port=<none><block_end><return>defer.DeferredList(dlist)<block_end><block_end> |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test that the fuzzer works the way ClusterFuzz invokes it."""<import_stmt>glob<import_stmt>os<import_stmt>shutil<import_stmt>sys<import_stmt>tempfile<import_stmt>unittest<import_stmt>setup<class_stmt>WebBluetoothFuzzerTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self._output_dir=tempfile.mkdtemp()<line_sep>self._resources_path=setup.RetrieveResources()<block_end><def_stmt>tearDown self<block_start>shutil.rmtree(self._output_dir)<line_sep>shutil.rmtree(self._resources_path)<block_end><def_stmt>testCanGenerate100Files self<block_start>sys.argv=['fuzz_main_run.py' '--no_of_files=100' '--input_dir={}'.format(self._output_dir) '--output_dir={}'.format(self._output_dir)]<import_stmt>fuzz_main_run<line_sep>fuzz_main_run.main()<line_sep>written_files=glob.glob(os.path.join(self._output_dir '*.html'))<line_sep>self.assertEquals(100 len(written_files) 'Should have written 100 '<concat>'test files.')<for_stmt>test_case written_files<block_start>self.assertFalse('TRANSFORM'<in>open(test_case).read())<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end> |
# terrascript/data/cloudsmith-io/cloudsmith.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:14:19 UTC)
<import_stmt>terrascript<class_stmt>cloudsmith_namespace(terrascript.Data)<block_start><pass><block_end><class_stmt>cloudsmith_package_list(terrascript.Data)<block_start><pass><block_end><class_stmt>cloudsmith_repository(terrascript.Data)<block_start><pass><block_end>__all__=["cloudsmith_namespace" "cloudsmith_package_list" "cloudsmith_repository" ]<line_sep> |
# Copyright (c) 2017 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
<import_stmt>importlib<import_stmt>unittest<try_stmt><block_start><import_stmt>unittest.mock<as>mock<block_end><except_stmt>ImportError<block_start><import_stmt>mock<block_end><import_from_stmt>cloudbaseinit exception<import_from_stmt>cloudbaseinit.models network<as>network_model<line_sep>MODPATH="cloudbaseinit.utils.windows.netlbfo"<class_stmt>NetLBFOTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self._wmi_mock=mock.MagicMock()<line_sep>self._mi_mock=mock.MagicMock()<line_sep>self._module_patcher=mock.patch.dict('sys.modules' {'wmi':self._wmi_mock 'mi':self._mi_mock})<line_sep>self._module_patcher.start()<line_sep>self._netlbfo=importlib.import_module(MODPATH)<block_end><def_stmt>tearDown self<block_start>self._module_patcher.stop()<block_end>@mock.patch('time.sleep')@mock.patch(MODPATH+'.NetLBFOTeamManager._get_primary_adapter_name')@mock.patch(MODPATH+'.NetLBFOTeamManager._create_team')@mock.patch(MODPATH+'.NetLBFOTeamManager._add_team_member')@mock.patch(MODPATH+'.NetLBFOTeamManager._set_primary_nic_vlan_id')@mock.patch(MODPATH+'.NetLBFOTeamManager._wait_for_nic')@mock.patch(MODPATH+'.NetLBFOTeamManager.delete_team')<def_stmt>_test_create_team self mock_delete_team mock_wait_for_nic mock_set_primary_nic_vlan_id mock_add_team_member mock_create_team mock_primary_adapter_name mock_time_sleep mode_not_found=<false> lb_algo_not_found=<false> add_team_member_fail=<false><block_start>mock_primary_adapter_name.return_value=mock.sentinel.pri_nic_name<line_sep>mock_create_team.return_value=<none><line_sep>lacp_timer=network_model.BOND_LACP_RATE_FAST<line_sep>members=[mock.sentinel.pri_nic_name mock.sentinel.other_member]<line_sep>conn=self._wmi_mock.WMI.return_value<line_sep>mock_team=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeam.new.return_value=mock_team<line_sep>mock_team_nic=mock.Mock()<line_sep>mock_team_nic.Name=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeamNic.return_value=[mock_team_nic]<if_stmt>mode_not_found<block_start>mode="fake 
mode"<block_end><else_stmt><block_start>mode=network_model.BOND_TYPE_8023AD<block_end><if_stmt>lb_algo_not_found<block_start>lb_algo="fake lb algo"<block_end><else_stmt><block_start>lb_algo=network_model.BOND_LB_ALGO_L2<block_end><if_stmt>add_team_member_fail<block_start>ex=exception.CloudbaseInitException<line_sep>mock_add_team_member.side_effect=ex<block_end><if_stmt>mode_not_found<or>lb_algo_not_found<block_start>self.assertRaises(exception.ItemNotFoundException self._netlbfo.NetLBFOTeamManager().create_team mock.sentinel.team_name mode lb_algo members mock.sentinel.mac mock.sentinel.pri_nic_name mock.sentinel.vlan_id lacp_timer)<line_sep><return><block_end><elif_stmt>add_team_member_fail<block_start>self.assertRaises(exception.CloudbaseInitException self._netlbfo.NetLBFOTeamManager().create_team mock.sentinel.team_name mode lb_algo members mock.sentinel.mac mock.sentinel.pri_nic_name mock.sentinel.vlan_id lacp_timer)<block_end><else_stmt><block_start>self._netlbfo.NetLBFOTeamManager().create_team(mock.sentinel.team_name mode lb_algo members mock.sentinel.mac mock.sentinel.pri_nic_name mock.sentinel.vlan_id lacp_timer)<block_end><if_stmt><not>add_team_member_fail<block_start>mock_set_primary_nic_vlan_id.assert_called_once_with(conn mock.sentinel.team_name mock.sentinel.vlan_id)<line_sep>mock_create_team.assert_called_once_with(conn mock.sentinel.team_name mock.sentinel.pri_nic_name 2 3 mock.sentinel.pri_nic_name 1)<line_sep>mock_wait_for_nic.assert_called_once_with(mock_team_nic.Name)<line_sep>mock_add_team_member.assert_called_once_with(conn mock.sentinel.team_name mock.sentinel.other_member)<block_end><else_stmt><block_start>mock_add_team_member.assert_called_with(conn mock.sentinel.team_name mock.sentinel.other_member)<line_sep>mock_delete_team.assert_called_with(mock.sentinel.team_name)<line_sep>self.assertEqual(mock_add_team_member.call_count 6)<line_sep>self.assertEqual(mock_delete_team.call_count 6)<block_end><block_end><def_stmt>test_create_team 
self<block_start>self._test_create_team()<block_end><def_stmt>test_create_team_mode_not_found self<block_start>self._test_create_team(mode_not_found=<true>)<block_end><def_stmt>test_create_team_mode_lb_algo_not_found self<block_start>self._test_create_team(lb_algo_not_found=<true>)<block_end><def_stmt>test_create_team_add_team_member_fail self<block_start>self._test_create_team(add_team_member_fail=<true>)<block_end><def_stmt>test_delete_team self<block_start>conn=self._wmi_mock.WMI.return_value<line_sep>mock_team=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeam.return_value=[mock_team]<line_sep>self._netlbfo.NetLBFOTeamManager().delete_team(mock.sentinel.team_name)<line_sep>conn.MSFT_NetLbfoTeam.assert_called_once_with(name=mock.sentinel.team_name)<line_sep>mock_team.Delete_.assert_called_once_with()<block_end><def_stmt>test_create_team_private self<block_start>conn=self._wmi_mock.WMI.return_value<line_sep>mock_team=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeam.new.return_value=mock_team<line_sep>teaming_mode=1<line_sep>lb_algo=2<line_sep>lacp_timer=1<line_sep>custom_options=[{u'name':u'TeamMembers' u'value_type':self._mi_mock.MI_ARRAY|self._mi_mock.MI_STRING u'value':[mock.sentinel.private_nic_team]} {u'name':u'TeamNicName' u'value_type':self._mi_mock.MI_STRING u'value':mock.sentinel.team_nic_name}]<line_sep>operation_options={u'custom_options':custom_options}<line_sep>self._netlbfo.NetLBFOTeamManager()._create_team(conn mock.sentinel.team_name mock.sentinel.team_nic_name teaming_mode lb_algo mock.sentinel.private_nic_team lacp_timer)<line_sep>self.assertEqual(mock.sentinel.team_name mock_team.Name)<line_sep>self.assertEqual(teaming_mode mock_team.TeamingMode)<line_sep>self.assertEqual(lb_algo mock_team.LoadBalancingAlgorithm)<line_sep>self.assertEqual(lacp_timer mock_team.LacpTimer)<line_sep>mock_team.put.assert_called_once_with(operation_options=operation_options)<block_end>@mock.patch(MODPATH+'.NetLBFOTeamManager._wait_for_nic')<def_stmt>test_add_team_nic self 
mock_wait_for_nic<block_start>conn=self._wmi_mock.WMI.return_value<line_sep>mock_team_nic=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeamNIC.new.return_value=mock_team_nic<line_sep>self._netlbfo.NetLBFOTeamManager().add_team_nic(mock.sentinel.team_name mock.sentinel.nic_name mock.sentinel.vlan_id)<line_sep>self.assertEqual(mock.sentinel.team_name mock_team_nic.Team)<line_sep>self.assertEqual(mock.sentinel.nic_name mock_team_nic.Name)<line_sep>self.assertEqual(mock.sentinel.vlan_id mock_team_nic.VlanID)<line_sep>mock_team_nic.put.assert_called_once_with()<line_sep>mock_wait_for_nic.assert_called_once_with(mock_team_nic.Name)<block_end>@mock.patch('cloudbaseinit.osutils.factory.get_os_utils')<def_stmt>test_is_available self mock_get_os_utils<block_start>os_utils=mock_get_os_utils.return_value<line_sep>os_utils.check_os_version.return_value=<true><line_sep>os_utils.is_client_os.return_value=<false><with_stmt>mock.patch('sys.platform' 'win32')<block_start>self.assertEqual(<true> self._netlbfo.NetLBFOTeamManager.is_available())<block_end><block_end>@mock.patch('time.sleep')<def_stmt>test_wait_for_nic self mock_sleep<block_start>conn=self._wmi_mock.WMI.return_value<line_sep>conn.Win32_NetworkAdapter.side_effect=[[] [mock.sentinel.net_adapter]]<line_sep>self._netlbfo.NetLBFOTeamManager()._wait_for_nic(mock.sentinel.nic_name)<line_sep>conn.Win32_NetworkAdapter.assert_has_calls([mock.call(NetConnectionID=mock.sentinel.nic_name) mock.call(NetConnectionID=mock.sentinel.nic_name)])<line_sep>mock_sleep.assert_called_once_with(1)<block_end><def_stmt>test_set_primary_nic_vlan_id self<block_start>conn=mock.Mock()<line_sep>mock_team_nic=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeamNIC.return_value=[mock_team_nic]<line_sep>self._netlbfo.NetLBFOTeamManager()._set_primary_nic_vlan_id(conn mock.sentinel.team_name mock.sentinel.vlan_id)<line_sep>custom_options=[{u'name':u'VlanID' u'value_type':self._mi_mock.MI_UINT32 
u'value':mock.sentinel.vlan_id}]<line_sep>operation_options={u'custom_options':custom_options}<line_sep>mock_team_nic.put.assert_called_once_with(operation_options=operation_options)<block_end><def_stmt>test_add_team_member self<block_start>conn=mock.Mock()<line_sep>mock_team_member=mock.Mock()<line_sep>conn.MSFT_NetLbfoTeamMember.new.return_value=mock_team_member<line_sep>self._netlbfo.NetLBFOTeamManager()._add_team_member(conn mock.sentinel.team_name mock.sentinel.team_member)<line_sep>custom_options=[{u'name':u'Name' u'value_type':self._mi_mock.MI_STRING u'value':mock.sentinel.team_member}]<line_sep>operation_options={u'custom_options':custom_options}<line_sep>mock_team_member.put.assert_called_once_with(operation_options=operation_options)<line_sep>self.assertEqual(mock.sentinel.team_name mock_team_member.Team)<block_end><def_stmt>_test_get_primary_adapter_name self mac_not_found=<false> member_not_found=<false><block_start>mock_members=[mock.sentinel.team_member]<line_sep>conn=self._wmi_mock.WMI.return_value<if_stmt>mac_not_found<block_start>conn.Win32_NetworkAdapter.return_value=[]<block_end><else_stmt><block_start>conn.Win32_NetworkAdapter.return_value=[mock.sentinel.net_adapter]<block_end><if_stmt>member_not_found<block_start>net_conn_id=mock.sentinel.something_else<block_end><else_stmt><block_start>net_conn_id=mock.sentinel.team_member<block_end>mock.sentinel.net_adapter.NetConnectionID=net_conn_id<if_stmt>mac_not_found<or>member_not_found<block_start>self.assertRaises(exception.ItemNotFoundException self._netlbfo.NetLBFOTeamManager()._get_primary_adapter_name mock_members mock.sentinel.mac)<block_end><else_stmt><block_start>self.assertEqual(mock.sentinel.team_member self._netlbfo.NetLBFOTeamManager()._get_primary_adapter_name(mock_members mock.sentinel.mac))<block_end>conn.Win32_NetworkAdapter.assert_called_once_with(MACAddress=mock.sentinel.mac)<block_end><def_stmt>test_get_primary_adapter_name 
self<block_start>self._test_get_primary_adapter_name()<block_end><def_stmt>test_get_primary_adapter_name_mac_not_found self<block_start>self._test_get_primary_adapter_name(mac_not_found=<true>)<block_end><def_stmt>test_get_primary_adapter_name_member_not_found self<block_start>self._test_get_primary_adapter_name(member_not_found=<true>)<block_end><block_end> |
"""tests for pudl/output/epacems.py loading functions."""<import_from_stmt>pathlib Path<import_stmt>dask.dataframe<as>dd<import_stmt>pytest<import_from_stmt>pudl.output.epacems epacems<line_sep>@pytest.fixture(scope='module')<def_stmt>epacems_year_and_state etl_params<block_start>"""Find the year and state defined in pudl/package_data/settings/etl_*.yml."""<line_sep># the etl_params data structure alternates dicts and lists so indexing is a pain.
epacems=[item<for>item etl_params['datapkg_bundle_settings'][0]['datasets']<if>'epacems'<in>item.keys()]<line_sep>epacems=epacems[0]['epacems']<line_sep><return>{'years':epacems['epacems_years'] 'states':epacems['epacems_states']}<block_end>@pytest.fixture(scope='session')<def_stmt>epacems_parquet_path pudl_settings_fixture pudl_engine # implicit dependency; ensures .parquet files exist
<block_start>"""Get path to the directory of EPA CEMS .parquet data."""<line_sep>out_dir=Path(pudl_settings_fixture['parquet_dir'] 'epacems')<line_sep><return>out_dir<block_end><def_stmt>test_epacems_subset epacems_year_and_state epacems_parquet_path<block_start>"""Minimal integration test of epacems(). Check if it returns a DataFrame."""<line_sep>path=epacems_parquet_path<line_sep>years=epacems_year_and_state['years']<line_sep># Use only Idaho if multiple states are given
states=epacems_year_and_state['states']<if>len(epacems_year_and_state['states'])<eq>1<else>['ID']<line_sep>actual=epacems(columns=["gross_load_mw"] epacems_path=path years=years states=states)<assert_stmt>isinstance(actual dd.DataFrame)<assert_stmt>actual.shape[0].compute()<g>0<block_end># n rows
<def_stmt>test_epacems_subset_input_validation epacems_year_and_state epacems_parquet_path<block_start>"""Check if invalid inputs raise exceptions."""<line_sep>path=epacems_parquet_path<line_sep>valid_year=epacems_year_and_state['years'][-1]<line_sep>valid_state=epacems_year_and_state['states'][-1]<line_sep>valid_column="gross_load_mw"<line_sep>invalid_state='confederacy'<line_sep>invalid_year=1775<line_sep>invalid_column='clean_coal'<line_sep>combos=[dict(years=[valid_year] states=[valid_state] columns=[invalid_column] ) dict(years=[valid_year] states=[invalid_state] columns=[valid_column] ) dict(years=[invalid_year] states=[valid_state] columns=[valid_column] ) ]<for_stmt>combo combos<block_start><with_stmt>pytest.raises(ValueError)<block_start>epacems(epacems_path=path **combo)<block_end><block_end><block_end> |
<import_from_stmt>gazette.spiders.base.fecam FecamGazetteSpider<class_stmt>ScCuritibanosSpider(FecamGazetteSpider)<block_start>name="sc_curitibanos"<line_sep>FECAM_QUERY="cod_entidade:82"<line_sep>TERRITORY_ID="4204806"<block_end> |
<def_stmt>b <block_start><pass><block_end> |
<import_stmt>math<import_stmt>sys<import_stmt>pygame<import_from_stmt>pygame.constants K_w K_s<import_from_stmt>ple.games.utils.vec2d vec2d<import_from_stmt>ple.games.utils percent_round_int<line_sep>#import base
<import_from_stmt>ple.games.base.pygamewrapper PyGameWrapper<class_stmt>Ball(pygame.sprite.Sprite)<block_start><def_stmt>__init__ self radius speed rng pos_init SCREEN_WIDTH SCREEN_HEIGHT<block_start>pygame.sprite.Sprite.__init__(self)<line_sep>self.rng=rng<line_sep>self.radius=radius<line_sep>self.speed=speed<line_sep>self.pos=vec2d(pos_init)<line_sep>self.pos_before=vec2d(pos_init)<line_sep>self.vel=vec2d((speed -1.0<times>speed))<line_sep>self.SCREEN_HEIGHT=SCREEN_HEIGHT<line_sep>self.SCREEN_WIDTH=SCREEN_WIDTH<line_sep>image=pygame.Surface((radius<times>2 radius<times>2))<line_sep>image.fill((0 0 0 0))<line_sep>image.set_colorkey((0 0 0))<line_sep>pygame.draw.circle(image (255 255 255) (radius radius) radius 0)<line_sep>self.image=image<line_sep>self.rect=self.image.get_rect()<line_sep>self.rect.center=pos_init<block_end><def_stmt>line_intersection self p0_x p0_y p1_x p1_y p2_x p2_y p3_x p3_y<block_start>s1_x=p1_x-p0_x<line_sep>s1_y=p1_y-p0_y<line_sep>s2_x=p3_x-p2_x<line_sep>s2_y=p3_y-p2_y<line_sep>s=(-s1_y<times>(p0_x-p2_x)+s1_x<times>(p0_y-p2_y))/(-s2_x<times>s1_y+s1_x<times>s2_y)<line_sep>t=(s2_x<times>(p0_y-p2_y)-s2_y<times>(p0_x-p2_x))/(-s2_x<times>s1_y+s1_x<times>s2_y)<line_sep><return>(s<ge>0<and>s<le>1<and>t<ge>0<and>t<le>1)<block_end><def_stmt>update self agentPlayer cpuPlayer dt<block_start>self.pos.x<augadd>self.vel.x<times>dt<line_sep>self.pos.y<augadd>self.vel.y<times>dt<line_sep>is_pad_hit=<false><if_stmt>self.pos.x<le>agentPlayer.pos.x+agentPlayer.rect_width<block_start><if_stmt>self.line_intersection(self.pos_before.x self.pos_before.y self.pos.x self.pos.y agentPlayer.pos.x+agentPlayer.rect_width/2 agentPlayer.pos.y-agentPlayer.rect_height/2 agentPlayer.pos.x+agentPlayer.rect_width/2 agentPlayer.pos.y+agentPlayer.rect_height/2)<block_start>self.pos.x=max(0 
self.pos.x)<line_sep>self.vel.x=-1<times>(self.vel.x+self.speed<times>0.05)<line_sep>self.vel.y<augadd>agentPlayer.vel.y<times>2.0<line_sep>self.pos.x<augadd>self.radius<line_sep>is_pad_hit=<true><block_end><block_end><if_stmt>self.pos.x<ge>cpuPlayer.pos.x-cpuPlayer.rect_width<block_start><if_stmt>self.line_intersection(self.pos_before.x self.pos_before.y self.pos.x self.pos.y cpuPlayer.pos.x-cpuPlayer.rect_width/2 cpuPlayer.pos.y-cpuPlayer.rect_height/2 cpuPlayer.pos.x-cpuPlayer.rect_width/2 cpuPlayer.pos.y+cpuPlayer.rect_height/2)<block_start>self.pos.x=min(self.SCREEN_WIDTH self.pos.x)<line_sep>self.vel.x=-1<times>(self.vel.x+self.speed<times>0.05)<line_sep>self.vel.y<augadd>cpuPlayer.vel.y<times>0.006<line_sep>self.pos.x<augsub>self.radius<line_sep>is_pad_hit=<true><block_end><block_end># Little randomness in order not to stuck in a static loop
<if_stmt>is_pad_hit<block_start>self.vel.y<augadd>self.rng.random_sample()<times>0.001-0.0005<block_end><if_stmt>self.pos.y-self.radius<le>0<block_start>self.vel.y<augmul>-0.99<line_sep>self.pos.y<augadd>1.0<block_end><if_stmt>self.pos.y+self.radius<ge>self.SCREEN_HEIGHT<block_start>self.vel.y<augmul>-0.99<line_sep>self.pos.y<augsub>1.0<block_end>self.pos_before.x=self.pos.x<line_sep>self.pos_before.y=self.pos.y<line_sep>self.rect.center=(self.pos.x self.pos.y)<block_end><block_end><class_stmt>Player(pygame.sprite.Sprite)<block_start><def_stmt>__init__ self speed rect_width rect_height pos_init SCREEN_WIDTH SCREEN_HEIGHT<block_start>pygame.sprite.Sprite.__init__(self)<line_sep>self.speed=speed<line_sep>self.pos=vec2d(pos_init)<line_sep>self.vel=vec2d((0 0))<line_sep>self.rect_height=rect_height<line_sep>self.rect_width=rect_width<line_sep>self.SCREEN_HEIGHT=SCREEN_HEIGHT<line_sep>self.SCREEN_WIDTH=SCREEN_WIDTH<line_sep>image=pygame.Surface((rect_width rect_height))<line_sep>image.fill((0 0 0 0))<line_sep>image.set_colorkey((0 0 0))<line_sep>pygame.draw.rect(image (255 255 255) (0 0 rect_width rect_height) 0)<line_sep>self.image=image<line_sep>self.rect=self.image.get_rect()<line_sep>self.rect.center=pos_init<block_end><def_stmt>update self dy dt<block_start>self.vel.y<augadd>dy<times>dt<line_sep>self.vel.y<augmul>0.9<line_sep>self.pos.y<augadd>self.vel.y<if_stmt>self.pos.y-self.rect_height/2<le>0<block_start>self.pos.y=self.rect_height/2<line_sep>self.vel.y=0.0<block_end><if_stmt>self.pos.y+self.rect_height/2<ge>self.SCREEN_HEIGHT<block_start>self.pos.y=self.SCREEN_HEIGHT-self.rect_height/2<line_sep>self.vel.y=0.0<block_end>self.rect.center=(self.pos.x self.pos.y)<block_end><def_stmt>updateCpu self ball 
dt<block_start>dy=0.0<if_stmt>ball.vel.x<ge>0<and>ball.pos.x<ge>self.SCREEN_WIDTH/2<block_start>dy=self.speed<if_stmt>self.pos.y<g>ball.pos.y<block_start>dy=-1.0<times>dy<block_end><block_end><else_stmt><block_start>dy=1.0<times>self.speed/4.0<if_stmt>self.pos.y<g>self.SCREEN_HEIGHT/2.0<block_start>dy=-1.0<times>self.speed/4.0<block_end><block_end><if_stmt>self.pos.y-self.rect_height/2<le>0<block_start>self.pos.y=self.rect_height/2<line_sep>self.vel.y=0.0<block_end><if_stmt>self.pos.y+self.rect_height/2<ge>self.SCREEN_HEIGHT<block_start>self.pos.y=self.SCREEN_HEIGHT-self.rect_height/2<line_sep>self.vel.y=0.0<block_end>self.pos.y<augadd>dy<times>dt<line_sep>self.rect.center=(self.pos.x self.pos.y)<block_end><block_end><class_stmt>Pong(PyGameWrapper)<block_start>"""
Loosely based on code from marti1125's `pong game`_.
.. _pong game: https://github.com/marti1125/pong/
Parameters
----------
width : int
Screen width.
height : int
Screen height, recommended to be same dimension as width.
MAX_SCORE : int (default: 11)
The max number of points the agent or cpu need to score to cause a terminal state.
cpu_speed_ratio: float (default: 0.5)
Speed of opponent (useful for curriculum learning)
players_speed_ratio: float (default: 0.25)
Speed of player (useful for curriculum learning)
ball_speed_ratio: float (default: 0.75)
Speed of ball (useful for curriculum learning)
"""<def_stmt>__init__ self width=64 height=48 cpu_speed_ratio=0.6 players_speed_ratio=0.4 ball_speed_ratio=0.75 MAX_SCORE=11<block_start>actions={"up":K_w "down":K_s}<line_sep>PyGameWrapper.__init__(self width height actions=actions)<line_sep># the %'s come from original values, wanted to keep same ratio when you
# increase the resolution.
self.ball_radius=percent_round_int(height 0.03)<line_sep>self.cpu_speed_ratio=cpu_speed_ratio<line_sep>self.ball_speed_ratio=ball_speed_ratio<line_sep>self.players_speed_ratio=players_speed_ratio<line_sep>self.paddle_width=percent_round_int(width 0.023)<line_sep>self.paddle_height=percent_round_int(height 0.15)<line_sep>self.paddle_dist_to_wall=percent_round_int(width 0.0625)<line_sep>self.MAX_SCORE=MAX_SCORE<line_sep>self.dy=0.0<line_sep>self.score_sum=0.0# need to deal with 11 on either side winning
self.score_counts={"agent":0.0 "cpu":0.0}<block_end><def_stmt>_handle_player_events self<block_start>self.dy=0<if_stmt>__name__<eq>"__main__"# for debugging mode
<block_start>pygame.event.get()<line_sep>keys=pygame.key.get_pressed()<if_stmt>keys[self.actions['up']]<block_start>self.dy=-self.agentPlayer.speed<block_end><elif_stmt>keys[self.actions['down']]<block_start>self.dy=self.agentPlayer.speed<block_end><if_stmt>keys[pygame.QUIT]<block_start>pygame.quit()<line_sep>sys.exit()<block_end>pygame.event.pump()<block_end><else_stmt># consume events from act
<block_start><for_stmt>event pygame.event.get()<block_start><if_stmt>event.type<eq>pygame.QUIT<block_start>pygame.quit()<line_sep>sys.exit()<block_end><if_stmt>event.type<eq>pygame.KEYDOWN<block_start>key=event.key<if_stmt>key<eq>self.actions['up']<block_start>self.dy=-self.agentPlayer.speed<block_end><if_stmt>key<eq>self.actions['down']<block_start>self.dy=self.agentPlayer.speed<block_end><block_end><block_end><block_end><block_end><def_stmt>getGameState self<block_start>"""
Gets a non-visual state representation of the game.
Returns
-------
dict
* player y position.
* players velocity.
* cpu y position.
* ball x position.
* ball y position.
* ball x velocity.
* ball y velocity.
See code for structure.
"""<line_sep>state={"player_y":self.agentPlayer.pos.y "player_velocity":self.agentPlayer.vel.y "cpu_y":self.cpuPlayer.pos.y "ball_x":self.ball.pos.x "ball_y":self.ball.pos.y "ball_velocity_x":self.ball.vel.x "ball_velocity_y":self.ball.vel.y}<line_sep><return>state<block_end><def_stmt>getScore self<block_start><return>self.score_sum<block_end><def_stmt>game_over self# pong used 11 as max score
<block_start><return>(self.score_counts['agent']<eq>self.MAX_SCORE)<or>(self.score_counts['cpu']<eq>self.MAX_SCORE)<block_end><def_stmt>init self<block_start>self.score_counts={"agent":0.0 "cpu":0.0}<line_sep>self.score_sum=0.0<line_sep>self.ball=Ball(self.ball_radius self.ball_speed_ratio<times>self.height self.rng (self.width/2 self.height/2) self.width self.height)<line_sep>self.agentPlayer=Player(self.players_speed_ratio<times>self.height self.paddle_width self.paddle_height (self.paddle_dist_to_wall self.height/2) self.width self.height)<line_sep>self.cpuPlayer=Player(self.cpu_speed_ratio<times>self.height self.paddle_width self.paddle_height (self.width-self.paddle_dist_to_wall self.height/2) self.width self.height)<line_sep>self.players_group=pygame.sprite.Group()<line_sep>self.players_group.add(self.agentPlayer)<line_sep>self.players_group.add(self.cpuPlayer)<line_sep>self.ball_group=pygame.sprite.Group()<line_sep>self.ball_group.add(self.ball)<block_end><def_stmt>reset self<block_start>self.init()<line_sep># after game over set random direction of ball otherwise it will always be the same
self._reset_ball(1<if>self.rng.random_sample()<g>0.5<else>-1)<block_end><def_stmt>_reset_ball self direction<block_start>self.ball.pos.x=self.width/2# move it to the center
# we go in the same direction that they lost in but at starting vel.
self.ball.vel.x=self.ball.speed<times>direction<line_sep>self.ball.vel.y=(self.rng.random_sample()<times>self.ball.speed)-self.ball.speed<times>0.5<block_end><def_stmt>step self dt<block_start>dt<augdiv>1000.0<line_sep>self.screen.fill((0 0 0))<line_sep>self.agentPlayer.speed=self.players_speed_ratio<times>self.height<line_sep>self.cpuPlayer.speed=self.cpu_speed_ratio<times>self.height<line_sep>self.ball.speed=self.ball_speed_ratio<times>self.height<line_sep>self._handle_player_events()<line_sep># doesnt make sense to have this, but include if needed.
self.score_sum<augadd>self.rewards["tick"]<line_sep>self.ball.update(self.agentPlayer self.cpuPlayer dt)<line_sep>is_terminal_state=<false><line_sep># logic
<if_stmt>self.ball.pos.x<le>0<block_start>self.score_sum<augadd>self.rewards["negative"]<line_sep>self.score_counts["cpu"]<augadd>1.0<line_sep>self._reset_ball(-1)<line_sep>is_terminal_state=<true><block_end><if_stmt>self.ball.pos.x<ge>self.width<block_start>self.score_sum<augadd>self.rewards["positive"]<line_sep>self.score_counts["agent"]<augadd>1.0<line_sep>self._reset_ball(1)<line_sep>is_terminal_state=<true><block_end><if_stmt>is_terminal_state# winning
<block_start><if_stmt>self.score_counts['agent']<eq>self.MAX_SCORE<block_start>self.score_sum<augadd>self.rewards["win"]<block_end># losing
<if_stmt>self.score_counts['cpu']<eq>self.MAX_SCORE<block_start>self.score_sum<augadd>self.rewards["loss"]<block_end><block_end><else_stmt><block_start>self.agentPlayer.update(self.dy dt)<line_sep>self.cpuPlayer.updateCpu(self.ball dt)<block_end>self.players_group.draw(self.screen)<line_sep>self.ball_group.draw(self.screen)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start><import_stmt>numpy<as>np<line_sep>pygame.init()<line_sep>game=Pong(width=256 height=200)<line_sep>game.screen=pygame.display.set_mode(game.getScreenDims() 0 32)<line_sep>game.clock=pygame.time.Clock()<line_sep>game.rng=np.random.RandomState(24)<line_sep>game.init()<while_stmt><true><block_start>dt=game.clock.tick_busy_loop(60)<line_sep>game.step(dt)<line_sep>pygame.display.update()<block_end><block_end> |
<def_stmt>test <block_start><assert_stmt>("patterns = list(nlp.pipe(people))"<in>__solution__) "Você está usando nlp.pipe envolvido em uma lista (list)?"<line_sep>__msg__.good("Bom trabalho! Vamos seguir agora com um exemplo prático que "<concat>"usa nlp.pipe para processar documentos com metadados adicionais.")<block_end> |
<import_stmt>sys<import_from_stmt>pathlib Path<line_sep>sys.path.append(str(Path(__file__).resolve().parent))<import_stmt>unittest<import_stmt>nanopq<import_stmt>numpy<as>np<class_stmt>TestSuite(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>np.random.seed(123)<block_end><def_stmt>test_property self<block_start>opq=nanopq.OPQ(M=4 Ks=256)<line_sep>self.assertEqual((opq.M opq.Ks opq.verbose opq.code_dtype) (opq.pq.M opq.pq.Ks opq.pq.verbose opq.pq.code_dtype) )<block_end><def_stmt>test_fit self<block_start>N,D,M,Ks=100 12 4 10<line_sep>X=np.random.random((N D)).astype(np.float32)<line_sep>opq=nanopq.OPQ(M=M Ks=Ks)<line_sep>opq.fit(X)<line_sep>self.assertEqual(opq.Ds D/M)<line_sep>self.assertEqual(opq.codewords.shape (M Ks D/M))<line_sep>self.assertEqual(opq.R.shape (D D))<line_sep>opq2=nanopq.OPQ(M=M Ks=Ks).fit(X)# Can be called as a chain
self.assertTrue(np.allclose(opq.codewords opq2.codewords))<block_end><def_stmt>test_eq self<block_start><import_stmt>copy<line_sep>N,D,M,Ks=100 12 4 10<line_sep>X=np.random.random((N D)).astype(np.float32)<line_sep>opq1=nanopq.OPQ(M=M Ks=Ks)<line_sep>opq2=nanopq.OPQ(M=M Ks=Ks)<line_sep>opq3=copy.deepcopy(opq1)<line_sep>opq4=nanopq.OPQ(M=M Ks=2<times>Ks)<line_sep>self.assertTrue(opq1<eq>opq1)<line_sep>self.assertTrue(opq1<eq>opq2)<line_sep>self.assertTrue(opq1<eq>opq3)<line_sep>self.assertTrue(opq1<ne>opq4)<line_sep>opq1.fit(X)<line_sep>opq2.fit(X)<line_sep>opq3=copy.deepcopy(opq1)<line_sep>opq4.fit(X)<line_sep>self.assertTrue(opq1<eq>opq1)<line_sep>self.assertTrue(opq1<eq>opq2)<line_sep>self.assertTrue(opq1<eq>opq3)<line_sep>self.assertTrue(opq1<ne>opq4)<block_end><def_stmt>test_rotate self<block_start>N,D,M,Ks=100 12 4 10<line_sep>X=np.random.random((N D)).astype(np.float32)<line_sep>opq=nanopq.OPQ(M=M Ks=Ks)<line_sep>opq.fit(X)<line_sep>rotated_vec=opq.rotate(X[0])<line_sep>rotated_vecs=opq.rotate(X[:3])<line_sep>self.assertEqual(rotated_vec.shape (D ))<line_sep>self.assertEqual(rotated_vecs.shape (3 D))<line_sep># Because R is a rotation matrix (R^t * R = I), R^t should be R^(-1)
self.assertAlmostEqual(np.linalg.norm(opq.R.T-np.linalg.inv(opq.R)) 0.0 places=3)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end> |
# Copyright 2013 Viewfinder Inc. All Rights Reserved.
"""Viewfinder AddFollowersOperation.
This operation adds a set of contacts to an existing viewpoint as followers of that viewpoint.
If a contact is not yet a Viewfinder user, we create a prospective user and link the contact
to that.
"""<line_sep>__authors__=['<EMAIL> (<NAME>)']<import_stmt>json<import_from_stmt>tornado gen<import_from_stmt>viewfinder.backend.base.exceptions LimitExceededError PermissionError<import_from_stmt>viewfinder.backend.db.accounting AccountingAccumulator<import_from_stmt>viewfinder.backend.db.db_client DBKey<import_from_stmt>viewfinder.backend.db.followed Followed<import_from_stmt>viewfinder.backend.db.follower Follower<import_from_stmt>viewfinder.backend.db.lock Lock<import_from_stmt>viewfinder.backend.db.operation Operation<import_from_stmt>viewfinder.backend.db.user User<import_from_stmt>viewfinder.backend.db.viewpoint Viewpoint<import_from_stmt>viewfinder.backend.op.notification_manager NotificationManager<import_from_stmt>viewfinder.backend.op.viewfinder_op ViewfinderOperation<class_stmt>AddFollowersOperation(ViewfinderOperation)<block_start>"""The AddFollowers operation follows the four phase pattern described in the header of
operation_map.py.
"""<def_stmt>__init__ self client act_dict user_id viewpoint_id contact_dicts<block_start>super(AddFollowersOperation self).__init__(client)<line_sep>self._act_dict=act_dict<line_sep>self._user_id=user_id<line_sep>self._viewpoint_id=viewpoint_id<line_sep>self._contact_dicts=contact_dicts<block_end>@classmethod@gen.coroutine<def_stmt>Execute cls client activity user_id viewpoint_id contacts<block_start>"""Entry point called by the operation framework."""<line_sep><yield>AddFollowersOperation(client activity user_id viewpoint_id contacts)._AddFollowers()<block_end>@gen.coroutine<def_stmt>_AddFollowers self<block_start>"""Orchestrates the add followers operation by executing each of the phases in turn."""<line_sep># Lock the viewpoint while adding followers.
lock=<yield>gen.Task(Viewpoint.AcquireLock self._client self._viewpoint_id)<try_stmt><block_start><yield>self._Check()<line_sep>self._client.CheckDBNotModified()<line_sep><yield>self._Update()<line_sep><yield>self._Account()<line_sep><yield>Operation.TriggerFailpoint(self._client)<line_sep><yield>self._Notify()<block_end><finally_stmt><block_start><yield>gen.Task(Viewpoint.ReleaseLock self._client self._viewpoint_id lock)<block_end><block_end>@gen.coroutine<def_stmt>_Check self<block_start>"""Gathers pre-mutation information:
1. Queries for existing followers and viewpoint.
2. Checkpoints list of followers that need to be revived.
3. Checkpoints list of contacts that need to be made prospective users.
4. Checkpoints list of contacts that are already following the viewpoint.
Validates the following:
1. Max follower limit.
2. Permission to add followers.
"""<line_sep># Get the viewpoint to be modified, along with the follower that is adding the additional users.
# This state will not be changed by add followers, and so doesn't need to be part of the checkpoint.
self._viewpoint,self._follower=<yield>gen.Task(Viewpoint.QueryWithFollower self._client self._user_id self._viewpoint_id)<line_sep># Checks permission to add followers.
<if_stmt>self._follower<is><none><or><not>self._follower.CanContribute()<block_start><raise>PermissionError('User %d does not have permission to add followers to viewpoint "%s".'%(self._user_id self._viewpoint_id))<block_end># Start populating the checkpoint if this the first time the operation has been run.
<if_stmt>self._op.checkpoint<is><none># Get all existing followers.
<block_start>self._existing_followers,_=<yield>gen.Task(Viewpoint.QueryFollowers self._client self._viewpoint_id limit=Viewpoint.MAX_FOLLOWERS)<line_sep># Get list of followers which have removed themselves from the viewpoint and will need to be revived.
self._revive_follower_ids=self._GetRevivableFollowers(self._existing_followers)<line_sep># Get a tuple for each contact: (user_exists?, user_id, webapp_dev_id).
self._contact_ids=<yield>self._ResolveContactIds(self._contact_dicts)<line_sep># Set checkpoint.
# Existing followers, followers to revive, and list of contacts need to be check-pointed
# because these sets are changed in the UPDATE phase. If we fail after UPDATE, but before
# NOTIFY, we would not send correct notifications on retry.
checkpoint={'existing':[follower.user_id<for>follower self._existing_followers] 'revive':self._revive_follower_ids 'contacts':self._contact_ids}<line_sep><yield>self._op.SetCheckpoint(self._client checkpoint)<block_end><else_stmt># Restore state from checkpoint.
<block_start>follower_keys=[DBKey(follower_id self._viewpoint_id)<for>follower_id self._op.checkpoint['existing']]<line_sep>self._existing_followers=<yield>gen.Task(Follower.BatchQuery self._client follower_keys <none>)<line_sep>self._revive_follower_ids=self._op.checkpoint['revive']<line_sep>self._contact_ids=self._op.checkpoint['contacts']<block_end>self._contact_user_ids=[user_id<for>user_exists,user_id,webapp_dev_id self._contact_ids]<line_sep># Check if we're about to exceed follower limit on this viewpoint.
<if_stmt>len(self._existing_followers)+len(self._contact_dicts)<g>Viewpoint.MAX_FOLLOWERS<block_start><raise>LimitExceededError('User %d attempted to exceed follower limit on viewpoint "%s" by adding %d followers.'%(self._user_id self._viewpoint_id len(self._contact_dicts)))<block_end><block_end>@gen.coroutine<def_stmt>_Update self<block_start>"""Updates the database:
1. Revives any followers that have removed the viewpoint.
2. Creates prospective users.
3. Adds the followers to the viewpoint.
"""<line_sep># Create any prospective users (may create nested CreateProspective operations).
<yield>self._ResolveContacts(self._contact_dicts self._contact_ids reason='add_follower=%d'%self._user_id)<line_sep># Revive any REMOVED followers.
<yield>gen.Task(Follower.ReviveRemovedFollowers self._client self._existing_followers)<line_sep># Figure out which users need to be added as followers. Note that new followers exclude followers
# from the request that are already following the viewpoint (assuming they're not removed).
existing_follower_ids=set(follower.user_id<for>follower self._existing_followers<if><not>follower.IsRemoved())<line_sep>self._new_follower_ids=[user_id<for>user_id set(self._contact_user_ids)<if>user_id<not><in>existing_follower_ids]<line_sep># Now actually add the followers.
self._new_followers=<yield>self._viewpoint.AddFollowers(self._client self._user_id list(existing_follower_ids) self._new_follower_ids self._op.timestamp)<block_end>@gen.coroutine<def_stmt>_Account self<block_start>"""Makes accounting changes:
1. For revived followers.
2. For new followers.
"""<line_sep>acc_accum=AccountingAccumulator()<line_sep># Make accounting changes for any revived followers.
<yield>acc_accum.ReviveFollowers(self._client self._viewpoint_id self._revive_follower_ids)<line_sep># Make accounting changes for the new followers.
<yield>acc_accum.AddFollowers(self._client self._viewpoint_id self._new_follower_ids)<line_sep><yield>acc_accum.Apply(self._client)<block_end>@gen.coroutine<def_stmt>_Notify self<block_start>"""Creates notifications:
1. Notifies removed followers that conversation has new activity.
2. Notifies users with contacts that have become prospective users.
3. Notifies existing followers of the viewpoint that new followers have been added.
4. Notifies new followers that they have been added to a viewpoint.
"""<line_sep># Creates notifications for any new prospective users.
identity_keys=[contact_dict['identity']<for>contact_dict,(user_exists user_id webapp_dev_id) zip(self._contact_dicts self._contact_ids)<if><not>user_exists]<line_sep><yield>NotificationManager.NotifyCreateProspective(self._client identity_keys self._op.timestamp)<line_sep># Creates notifications for any revived followers.
<yield>NotificationManager.NotifyReviveFollowers(self._client self._viewpoint_id self._revive_follower_ids self._op.timestamp)<line_sep># Creates notification of new viewpoint for each new follower.
<yield>NotificationManager.NotifyAddFollowers(self._client self._viewpoint_id self._existing_followers self._new_followers self._contact_user_ids self._act_dict self._op.timestamp)<block_end><block_end> |
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
<import_from_future_stmt> annotations<import_from_stmt>textwrap dedent<import_stmt>pytest<import_from_stmt>pants.backend.shell.lint.shfmt.rules ShfmtFieldSet ShfmtRequest<import_from_stmt>pants.backend.shell.lint.shfmt.rules rules<as>shfmt_rules<import_from_stmt>pants.backend.shell.target_types ShellSourcesGeneratorTarget<import_from_stmt>pants.backend.shell.target_types rules<as>target_types_rules<import_from_stmt>pants.core.goals.fmt FmtResult<import_from_stmt>pants.core.goals.lint LintResult LintResults<import_from_stmt>pants.core.util_rules config_files external_tool source_files<import_from_stmt>pants.core.util_rules.source_files SourceFiles SourceFilesRequest<import_from_stmt>pants.engine.addresses Address<import_from_stmt>pants.engine.fs CreateDigest Digest FileContent<import_from_stmt>pants.engine.target Target<import_from_stmt>pants.testutil.rule_runner QueryRule RuleRunner<line_sep>@pytest.fixture<def_stmt>rule_runner <arrow>RuleRunner<block_start><return>RuleRunner(rules=[*shfmt_rules() *config_files.rules() *external_tool.rules() *source_files.rules() *target_types_rules() QueryRule(LintResults [ShfmtRequest]) QueryRule(FmtResult [ShfmtRequest]) QueryRule(SourceFiles [SourceFilesRequest]) ] target_types=[ShellSourcesGeneratorTarget] )<block_end>GOOD_FILE="! foo bar >a &\n"<line_sep>BAD_FILE="! foo bar >a &\n"<line_sep># If config is loaded correctly, shfmt will indent the case statements.
NEEDS_CONFIG_FILE=dedent("""\
case foo in
PATTERN_1)
\tbar
\t;;
*)
\tbaz
\t;;
esac
""")<line_sep>FIXED_NEEDS_CONFIG_FILE=dedent("""\
case foo in
\tPATTERN_1)
\t\tbar
\t\t;;
\t*)
\t\tbaz
\t\t;;
esac
""")<def_stmt>run_shfmt rule_runner:RuleRunner targets:list[Target] * extra_args:list[str]|<none>=<none> <arrow>tuple[tuple[LintResult <ellipsis>] FmtResult]<block_start>rule_runner.set_options(["--backend-packages=pants.backend.shell.lint.shfmt" *(extra_args<or>())] env_inherit={"PATH"} )<line_sep>field_sets=[ShfmtFieldSet.create(tgt)<for>tgt targets]<line_sep>lint_results=rule_runner.request(LintResults [ShfmtRequest(field_sets)])<line_sep>input_sources=rule_runner.request(SourceFiles [SourceFilesRequest(field_set.sources<for>field_set field_sets) ] )<line_sep>fmt_result=rule_runner.request(FmtResult [ShfmtRequest(field_sets prior_formatter_result=input_sources.snapshot) ] )<line_sep><return>lint_results.results fmt_result<block_end><def_stmt>get_digest rule_runner:RuleRunner source_files:dict[str str]<arrow>Digest<block_start>files=[FileContent(path content.encode())<for>path,content source_files.items()]<line_sep><return>rule_runner.request(Digest [CreateDigest(files)])<block_end><def_stmt>test_passing rule_runner:RuleRunner<arrow><none><block_start>rule_runner.write_files({"f.sh":GOOD_FILE "BUILD":"shell_sources(name='t')"})<line_sep>tgt=rule_runner.get_target(Address("" target_name="t" relative_file_path="f.sh"))<line_sep>lint_results,fmt_result=run_shfmt(rule_runner [tgt])<assert_stmt>len(lint_results)<eq>1<assert_stmt>lint_results[0].exit_code<eq>0<assert_stmt>lint_results[0].stderr<eq>""<assert_stmt>fmt_result.stdout<eq>""<assert_stmt>fmt_result.output<eq>get_digest(rule_runner {"f.sh":GOOD_FILE})<assert_stmt>fmt_result.did_change<is><false><block_end><def_stmt>test_failing rule_runner:RuleRunner<arrow><none><block_start>rule_runner.write_files({"f.sh":BAD_FILE "BUILD":"shell_sources(name='t')"})<line_sep>tgt=rule_runner.get_target(Address("" target_name="t" relative_file_path="f.sh"))<line_sep>lint_results,fmt_result=run_shfmt(rule_runner 
[tgt])<assert_stmt>len(lint_results)<eq>1<assert_stmt>lint_results[0].exit_code<eq>1<assert_stmt>"f.sh.orig"<in>lint_results[0].stdout<assert_stmt>fmt_result.stdout<eq>"f.sh\n"<assert_stmt>fmt_result.output<eq>get_digest(rule_runner {"f.sh":GOOD_FILE})<assert_stmt>fmt_result.did_change<is><true><block_end><def_stmt>test_multiple_targets rule_runner:RuleRunner<arrow><none><block_start>rule_runner.write_files({"good.sh":GOOD_FILE "bad.sh":BAD_FILE "BUILD":"shell_sources(name='t')"})<line_sep>tgts=[rule_runner.get_target(Address("" target_name="t" relative_file_path="good.sh")) rule_runner.get_target(Address("" target_name="t" relative_file_path="bad.sh")) ]<line_sep>lint_results,fmt_result=run_shfmt(rule_runner tgts)<assert_stmt>len(lint_results)<eq>1<assert_stmt>lint_results[0].exit_code<eq>1<assert_stmt>"bad.sh.orig"<in>lint_results[0].stdout<assert_stmt>"good.sh"<not><in>lint_results[0].stdout<assert_stmt>"bad.sh\n"<eq>fmt_result.stdout<assert_stmt>fmt_result.output<eq>get_digest(rule_runner {"good.sh":GOOD_FILE "bad.sh":GOOD_FILE})<assert_stmt>fmt_result.did_change<is><true><block_end><def_stmt>test_config_files rule_runner:RuleRunner<arrow><none><block_start>rule_runner.write_files({"a/f.sh":NEEDS_CONFIG_FILE "a/BUILD":"shell_sources()" "a/.editorconfig":"[*.sh]\nswitch_case_indent = true\n" "b/f.sh":NEEDS_CONFIG_FILE "b/BUILD":"shell_sources()" })<line_sep>tgts=[rule_runner.get_target(Address("a" relative_file_path="f.sh")) rule_runner.get_target(Address("b" relative_file_path="f.sh")) ]<line_sep>lint_results,fmt_result=run_shfmt(rule_runner tgts)<assert_stmt>len(lint_results)<eq>1<assert_stmt>lint_results[0].exit_code<eq>1<assert_stmt>"a/f.sh.orig"<in>lint_results[0].stdout<assert_stmt>"b/f.sh.orig"<not><in>lint_results[0].stdout<assert_stmt>fmt_result.stdout<eq>"a/f.sh\n"<assert_stmt>fmt_result.output<eq>get_digest(rule_runner {"a/f.sh":FIXED_NEEDS_CONFIG_FILE 
"b/f.sh":NEEDS_CONFIG_FILE})<assert_stmt>fmt_result.did_change<is><true><block_end><def_stmt>test_passthrough_args rule_runner:RuleRunner<arrow><none><block_start>rule_runner.write_files({"f.sh":NEEDS_CONFIG_FILE "BUILD":"shell_sources(name='t')"})<line_sep>tgt=rule_runner.get_target(Address("" target_name="t" relative_file_path="f.sh"))<line_sep>lint_results,fmt_result=run_shfmt(rule_runner [tgt] extra_args=["--shfmt-args=-ci"])<assert_stmt>len(lint_results)<eq>1<assert_stmt>lint_results[0].exit_code<eq>1<assert_stmt>"f.sh.orig"<in>lint_results[0].stdout<assert_stmt>fmt_result.stdout<eq>"f.sh\n"<assert_stmt>fmt_result.output<eq>get_digest(rule_runner {"f.sh":FIXED_NEEDS_CONFIG_FILE})<assert_stmt>fmt_result.did_change<is><true><block_end><def_stmt>test_skip rule_runner:RuleRunner<arrow><none><block_start>rule_runner.write_files({"f.sh":BAD_FILE "BUILD":"shell_sources(name='t')"})<line_sep>tgt=rule_runner.get_target(Address("" target_name="t" relative_file_path="f.sh"))<line_sep>lint_results,fmt_result=run_shfmt(rule_runner [tgt] extra_args=["--shfmt-skip"])<assert_stmt><not>lint_results<assert_stmt>fmt_result.skipped<is><true><assert_stmt>fmt_result.did_change<is><false><block_end> |
# -*- coding: utf-8 -*-
"""
equip.bytecode.decl
~~~~~~~~~~~~~~~~~~~
Structured representation of Module, Types, Method, Imports.
:copyright: (c) 2014 by <NAME> (@rgaucher)
:license: Apache 2, see LICENSE for more details.
"""<import_stmt>dis<import_from_stmt>operator attrgetter methodcaller<import_from_stmt>..utils.log logger<import_from_stmt>..visitors.bytecode BytecodeVisitor<import_from_stmt>.utils update_nested_code_object<class_stmt>Declaration(object)<block_start>"""
Base class for the declaration types of object.
"""<line_sep>MODULE=1<line_sep>TYPE=2<line_sep>METHOD=3<line_sep>FIELD=4<line_sep>IMPORT=5<def_stmt>__init__ self kind _code_object<block_start>self._kind=kind<line_sep>self._code_object=_code_object<line_sep>self._parent=<none><line_sep>self._children=[]<line_sep>self._lines=<none><line_sep>self._bytecode=[]<line_sep>self._bytecode_object=<none><line_sep>self._has_changes=<false><block_end>@property<def_stmt>lines self<block_start>"""
A tuple of start/end line numbers that encapsulates this declaration.
"""<line_sep><return>self._lines<block_end>@lines.setter<def_stmt>lines self value<block_start>self._lines=value<block_end>@property<def_stmt>start_lineno self<block_start>"""
Returns the start line number of the declaration.
"""<line_sep><return>self._lines[0]<if>self._lines<else>-1<block_end><def_stmt>get_start_lineno self<block_start><return>self.start_lineno<block_end>@property<def_stmt>end_lineno self<block_start>"""
Returns the end line number of the declaration.
"""<line_sep><return>self._lines[1]<if>self._lines<else>-1<block_end>@property<def_stmt>parent self<block_start>"""
Returns the parent of this declaration or ``None`` if there is
no parent (e.g., for a ``ModuleDeclaration``).
"""<line_sep><return>self._parent<block_end>@parent.setter<def_stmt>parent self value# logger.debug("Set parent. %s", value)
<block_start>self._parent=value<line_sep>self._parent.add_child(self)<block_end>@property<def_stmt>children self<block_start>"""
Returns the children of this declaration.
"""<line_sep><return>self._children<block_end><def_stmt>add_child self child<block_start>"""
Adds a child to this declaration.
:param child: A ``Declaration`` that is a child of the current declaration.
"""<line_sep>self._children.append(child)<line_sep># logger.debug("add_child:: Children: %s", self.children)
# Keep sorting by line number
self._children=sorted(self._children key=methodcaller('get_start_lineno'))<block_end>@property<def_stmt>parent_module self<block_start>"""
Returns the parent module (a ``ModuleDeclaration``) for this declaration.
"""<line_sep><return>self.__get_parent_kind(ModuleDeclaration)<block_end>@property<def_stmt>parent_class self<block_start>"""
Returns the parent class (a ``TypeDeclaration``) for this declaration.
"""<line_sep><return>self.__get_parent_kind(TypeDeclaration)<block_end>@property<def_stmt>parent_method self<block_start>"""
Returns the parent method (a ``MethodDeclaration``) for this declaration.
"""<line_sep><return>self.__get_parent_kind(MethodDeclaration)<block_end><def_stmt>__get_parent_kind self kind<block_start>p=self.parent<while_stmt>p<is><not><none><block_start><if_stmt>isinstance(p kind)<block_start><return>p<block_end>p=p.parent<block_end><return><none><block_end>@property<def_stmt>bytecode self<block_start>"""
Returns the bytecode associated with this declaration.
"""<line_sep><return>self._bytecode<block_end>@bytecode.setter<def_stmt>bytecode self value<block_start>self._bytecode=value<block_end>@property<def_stmt>code_object self<block_start><return>self._code_object<block_end>@code_object.setter<def_stmt>code_object self value<block_start>self._code_object=value<block_end><def_stmt>update_nested_code_object self original_co new_co<block_start>self._code_object=update_nested_code_object(self._code_object original_co new_co)<line_sep>self._has_changes=<true><block_end>@property<def_stmt>has_changes self<block_start><return>self._has_changes<block_end>@has_changes.setter<def_stmt>has_changes self value<block_start>self._has_changes=value<block_end># Mostly reserved
@property<def_stmt>bytecode_object self<block_start><return>self._bytecode_object<block_end>@bytecode_object.setter<def_stmt>bytecode_object self value<block_start>self._bytecode_object=value<block_end><def_stmt>accept self visitor<block_start><if_stmt>isinstance(visitor BytecodeVisitor)<block_start><for_stmt>i xrange(len(self._bytecode))<block_start>index,lineno,op,arg,cflow_in,_=self._bytecode[i]<line_sep>visitor.visit(index op arg=arg lineno=lineno cflow_in=cflow_in)<block_end><block_end><block_end>is_module=<lambda>self:self.kind<eq>Declaration.MODULE<line_sep>is_type=<lambda>self:self.kind<eq>Declaration.TYPE<line_sep>is_method=<lambda>self:self.kind<eq>Declaration.METHOD<line_sep>is_field=<lambda>self:self.kind<eq>Declaration.FIELD<line_sep>is_import=<lambda>self:self.kind<eq>Declaration.IMPORT<line_sep>@property<def_stmt>kind self<block_start><return>self._kind<block_end><block_end><class_stmt>ImportDeclaration(Declaration)<block_start>"""
Models an import statement. It handles relatives/absolute
imports, as well as aliases.
"""<def_stmt>__init__ self code_object<block_start>Declaration.__init__(self Declaration.IMPORT code_object)<line_sep>self._root=<none><line_sep>self._aliases=<none><line_sep>self._live_names=<none><line_sep>self._dots=-1<line_sep>self._star=<false><block_end>@property<def_stmt>star self<block_start><return>self._star<block_end>@star.setter<def_stmt>star self value<block_start>self._star=value<block_end>@property<def_stmt>aliases self<block_start><return>self._aliases<block_end>@aliases.setter<def_stmt>aliases self value<block_start>self._aliases=value<block_end>@property<def_stmt>live_names self<block_start><if_stmt>self._live_names<is><none><block_start>self._live_names=set()<for_stmt>(name alias) self.aliases<block_start><if_stmt>alias<is><none><block_start><if_stmt>'.'<not><in>name<block_start>self._live_names.add(name)<block_end><else_stmt><block_start>live_name=name[:name.rfind('.')]<line_sep>self._live_names.add(live_name)<block_end><block_end><else_stmt><block_start>self._live_names.add(alias)<block_end><block_end><block_end><return>self._live_names<block_end>@property<def_stmt>dots self<block_start><return>self._dots<block_end>@dots.setter<def_stmt>dots self value<block_start>self._dots=value<block_end>@property<def_stmt>root self<block_start><return>self._root<block_end>@root.setter<def_stmt>root self value<block_start>self._root=value<block_end><def_stmt>__eq__ self obj<block_start><return>self.root<eq>obj.root<and>self.aliases<eq>obj.aliases<and>self.dots<eq>obj.dots<block_end><def_stmt>__repr__ self<block_start>skip_import_root=<false><line_sep>import_buffer=''<if_stmt>self.dots<g>0<block_start>import_buffer<augadd>'from '+'.'<times>self.dots<if_stmt>self.root<block_start>import_buffer<augadd>self.root<line_sep>skip_import_root=<true><block_end>import_buffer<augadd>' import '<block_end><elif_stmt>self.root<block_start>import_buffer<augadd>'from '<block_end><else_stmt><block_start>import_buffer<augadd>'import 
'<block_end><if_stmt>self.root<and><not>skip_import_root<block_start>import_buffer<augadd>self.root+' import '<block_end><if_stmt>self.star<block_start>import_buffer<augadd>'*'<block_end>import_list=[]<for_stmt>aliased_name self.aliases<block_start>local_import=aliased_name[0]<if_stmt>aliased_name[1]<block_start>local_import<augadd>' as '+aliased_name[1]<block_end>import_list.append(local_import)<block_end><if_stmt>import_list<block_start>import_buffer<augadd>', '.join(import_list)<block_end><return>'Import(%s)'%import_buffer<block_end><block_end><class_stmt>ModuleDeclaration(Declaration)<block_start>"""
The module is the object that captures everything under one pyc file.
It contains nested classes and functions, as well as import statements.
"""<def_stmt>__init__ self module_path code_object<block_start>Declaration.__init__(self Declaration.MODULE code_object)<line_sep>self._module_path=module_path<line_sep>self._imports=[]<line_sep>self._classes=<none><line_sep>self._functions=<none><block_end><def_stmt>add_import self importDecl<block_start><if_stmt>importDecl<not><in>self._imports<block_start>self._imports.append(importDecl)<block_end><block_end>@property<def_stmt>imports self<block_start><return>self._imports<block_end>@property<def_stmt>module_path self<block_start><return>self._module_path<block_end>@property<def_stmt>classes self<block_start><if_stmt>self._classes<is><none><block_start>self._classes=[c<for>c self.children<if>c.is_type()]<block_end><return>self._classes<block_end>@property<def_stmt>functions self<block_start><if_stmt>self._functions<is><none><block_start>self._functions=[f<for>f self.children<if>f.is_method()]<block_end><return>self._functions<block_end><def_stmt>__repr__ self<block_start><return>'ModuleDeclaration(path=%s, co=%s)'%(self.module_path self.code_object)<block_end><block_end><class_stmt>TypeDeclaration(Declaration)<block_start>"""
Represent a class declaration. It has a name, as well as a hierarchy
(superclass). The type contains several methods and fields, and can
have nested types.
"""<def_stmt>__init__ self type_name code_object<block_start>Declaration.__init__(self Declaration.TYPE code_object)<line_sep>self._type_name=type_name<line_sep>self._superclasses=set()<line_sep>self._methods=<none><line_sep>self._fields=<none><line_sep>self._nested_types=<none><block_end>@property<def_stmt>type_name self<block_start>"""
Returns the name of the type.
"""<line_sep><return>self._type_name<block_end>@property<def_stmt>superclasses self<block_start><return>self._superclasses<block_end><def_stmt>add_superclass self type_name<block_start>self._superclasses.add(type_name)<block_end>@property<def_stmt>methods self<block_start>"""
Returns a list of ``MethodDeclaration`` that belong to this type.
"""<if_stmt>self._methods<is><none><block_start>self._methods=[f<for>f self.children<if>f.is_method()]<block_end><return>self._methods<block_end>@property<def_stmt>fields self<block_start><return>self.fields<block_end>@property<def_stmt>nested_types self<block_start>"""
Returns a list of ``TypeDeclaration`` that belong to this type.
"""<if_stmt>self._nested_types<is><none><block_start>self._nested_types=[c<for>c self.children<if>c.is_type()]<block_end><return>self._nested_types<block_end><def_stmt>__repr__ self<block_start><return>'TypeDeclaration#%d(name=%s, co=%s, super=%s)'%(self.start_lineno self.type_name self.code_object self.superclasses)<block_end><block_end><class_stmt>MethodDeclaration(Declaration)<block_start>"""
The declaration of a method or a function.
"""<def_stmt>__init__ self method_name code_object<block_start>Declaration.__init__(self Declaration.METHOD code_object)<line_sep>self._method_name=method_name<line_sep>self._formal_parameters=[]<line_sep>self._body=<none><line_sep>self._labels=dis.findlabels(code_object.co_code)<line_sep>self._nested_types=[]<block_end>@property<def_stmt>body self<block_start><return>self._body<block_end>@body.setter<def_stmt>body self value<block_start>self._body=value<block_end>@property<def_stmt>labels self<block_start><return>self._labels<block_end>@property<def_stmt>is_lambda self<block_start><return>self.method_name<eq>'<lambda>'<block_end>@property<def_stmt>formal_parameters self<block_start><return>self._formal_parameters<block_end>@formal_parameters.setter<def_stmt>formal_parameters self value<block_start>self._formal_parameters=value<block_end>@property<def_stmt>method_name self<block_start><return>self._method_name<block_end>@property<def_stmt>nested_types self<block_start><return>self._nested_types<block_end><def_stmt>__repr__ self<block_start><return>'MethodDeclaration#%d(name=%s, args=%s, co=%s)'%(self.start_lineno self.method_name self.formal_params self.code_object)<block_end><block_end><class_stmt>FieldDeclaration(Declaration)<block_start><def_stmt>__init__ self field_name code_object<block_start>Declaration.__init__(self Declaration.FIELD code_object)<line_sep>self._field_name=field_name<block_end>@property<def_stmt>field_name self<block_start><return>self._field_name<block_end><block_end> |
<import_from_stmt>typing List<import_from_stmt>unittest.case TestCase<import_from_stmt>uuid uuid4<import_from_stmt>eventsourcing.application Application<import_from_stmt>eventsourcing.persistence Notification<import_from_stmt>eventsourcing.system AlwaysPull Follower Leader NeverPull ProcessApplication Promptable PullGaps System <import_from_stmt>eventsourcing.tests.test_application_with_popo BankAccounts<import_from_stmt>eventsourcing.tests.test_processapplication EmailProcess<import_from_stmt>eventsourcing.utils get_topic<class_stmt>TestSystem(TestCase)<block_start><def_stmt>test_graph self<block_start>system=System(pipes=[[BankAccounts EmailProcess ] [Application] ])<line_sep>self.assertEqual(len(system.nodes) 3)<line_sep>self.assertEqual(system.nodes["BankAccounts"] get_topic(BankAccounts))<line_sep>self.assertEqual(system.nodes["EmailProcess"] get_topic(EmailProcess))<line_sep>self.assertEqual(system.nodes["Application"] get_topic(Application))<line_sep>self.assertEqual(system.leaders ["BankAccounts"])<line_sep>self.assertEqual(system.followers ["EmailProcess"])<line_sep>self.assertEqual(system.singles ["Application"])<line_sep>self.assertEqual(len(system.edges) 1)<line_sep>self.assertIn(("BankAccounts" "EmailProcess" ) system.edges )<line_sep>self.assertEqual(len(system.singles) 1)<block_end><def_stmt>test_raises_type_error_not_a_follower self<block_start><with_stmt>self.assertRaises(TypeError)<as>cm<block_start>System(pipes=[[BankAccounts Leader ] ])<block_end>exception=cm.exception<line_sep>self.assertEqual(exception.args[0] "Not a follower class: <class 'eventsourcing.system.Leader'>" )<block_end><def_stmt>test_raises_type_error_not_a_processor self<block_start><with_stmt>self.assertRaises(TypeError)<as>cm<block_start>System(pipes=[[BankAccounts Follower EmailProcess ] ])<block_end>exception=cm.exception<line_sep>self.assertEqual(exception.args[0] "Not a process application class: <class 'eventsourcing.system.Follower'>" 
)<block_end><def_stmt>test_is_leaders_only self<block_start>system=System(pipes=[[Leader ProcessApplication ProcessApplication ] ])<line_sep>self.assertEqual(list(system.leaders_only) ["Leader"])<block_end><def_stmt>test_leader_class self<block_start>system=System(pipes=[[Application ProcessApplication ProcessApplication ] ])<line_sep>self.assertTrue(issubclass(system.leader_cls("Application") Leader))<line_sep>self.assertTrue(issubclass(system.leader_cls("ProcessApplication") Leader))<block_end><block_end><class_stmt>TestLeader(TestCase)<block_start><def_stmt>test self# Define fixture that receives prompts.
<block_start><class_stmt>FollowerFixture(Promptable)<block_start><def_stmt>__init__ self<block_start>self.num_prompts=0<block_end><def_stmt>receive_notifications self leader_name:str notifications:List[Notification]<arrow><none><block_start>self.num_prompts<augadd>1<block_end><block_end># Test fixture is working.
follower=FollowerFixture()<line_sep>follower.receive_notifications("" [])<line_sep>self.assertEqual(follower.num_prompts 1)<line_sep># Construct leader.
leader=Leader()<line_sep>leader.lead(follower)<line_sep># Check follower receives a prompt when there are new events.
leader.notify([Notification(id=1 originator_id=uuid4() originator_version=0 topic="topic1" state=b"" )])<line_sep>self.assertEqual(follower.num_prompts 2)<line_sep># Check follower doesn't receive prompt when no new events.
leader.save()<line_sep>self.assertEqual(follower.num_prompts 2)<block_end><block_end><class_stmt>TestPullMode(TestCase)<block_start><def_stmt>test_always_pull self<block_start>mode=AlwaysPull()<line_sep>self.assertTrue(mode.chose_to_pull(1 1))<line_sep>self.assertTrue(mode.chose_to_pull(2 1))<block_end><def_stmt>test_never_pull self<block_start>mode=NeverPull()<line_sep>self.assertFalse(mode.chose_to_pull(1 1))<line_sep>self.assertFalse(mode.chose_to_pull(2 1))<block_end><def_stmt>test_pull_gaps self<block_start>mode=PullGaps()<line_sep>self.assertFalse(mode.chose_to_pull(1 1))<line_sep>self.assertTrue(mode.chose_to_pull(2 1))<block_end><block_end> |
<import_stmt>os<import_stmt>os.path<as>osp<import_stmt>random<as>rd<import_stmt>subprocess<import_from_stmt>typing Optional Tuple Union<import_stmt>click<import_from_stmt>mim.click CustomCommand param2lowercase<import_from_stmt>mim.utils echo_success exit_with_error get_installed_path highlighted_error is_installed module_full_name recursively_find <line_sep>@click.command(name='train' context_settings=dict(ignore_unknown_options=<true>) cls=CustomCommand)@click.argument('package' type=str callback=param2lowercase)@click.argument('config' type=str)@click.option('-l' '--launcher' type=click.Choice(['none' 'pytorch' 'slurm'] case_sensitive=<false>) default='none' help='Job launcher')@click.option('--port' type=int default=<none> help=('The port used for inter-process communication (only applicable to '<concat>'slurm / pytorch launchers). If set to None, will randomly choose '<concat>'a port between 20000 and 30000. '))@click.option('-G' '--gpus' type=int default=1 help='Number of gpus to use')@click.option('-g' '--gpus-per-node' type=int help=('Number of gpus per node to use '<concat>'(only applicable to launcher == "slurm")'))@click.option('-c' '--cpus-per-task' type=int default=2 help='Number of cpus per task (only applicable to launcher == "slurm")')@click.option('-p' '--partition' type=str help='The partition to use (only applicable to launcher == "slurm")')@click.option('--srun-args' type=str help='Other srun arguments that might be used')@click.option('-y' '--yes' is_flag=<true> help='Don’t ask for confirmation.')@click.argument('other_args' nargs=-1 type=click.UNPROCESSED)<def_stmt>cli package:str config:str gpus:int gpus_per_node:int partition:str cpus_per_task:int=2 launcher:str='none' port:int=<none> srun_args:Optional[str]=<none> yes:bool=<false> other_args:tuple=()<arrow><none><block_start>"""Perform Training.
Example:
\b
# Train models on a single server with CPU by setting `gpus` to 0 and
# 'launcher' to 'none' (if applicable). The training script of the
# corresponding codebase will fail if it doesn't support CPU training.
> mim train mmcls resnet101_b16x8_cifar10.py --work-dir tmp --gpus 0
# Train models on a single server with one GPU
> mim train mmcls resnet101_b16x8_cifar10.py --work-dir tmp --gpus 1
# Train models on a single server with 4 GPUs and pytorch distributed
> mim train mmcls resnet101_b16x8_cifar10.py --work-dir tmp --gpus 4 \
--launcher pytorch
# Train models on a slurm HPC with one 8-GPU node
> mim train mmcls resnet101_b16x8_cifar10.py --launcher slurm --gpus 8 \
--gpus-per-node 8 --partition partition_name --work-dir tmp
# Print help messages of sub-command train
> mim train -h
# Print help messages of sub-command train and the training script of mmcls
> mim train mmcls -h
"""<line_sep>is_success,msg=train(package=package config=config gpus=gpus gpus_per_node=gpus_per_node cpus_per_task=cpus_per_task partition=partition launcher=launcher port=port srun_args=srun_args yes=yes other_args=other_args)<if_stmt>is_success<block_start>echo_success(msg)# type: ignore
<block_end><else_stmt><block_start>exit_with_error(msg)<block_end><block_end><def_stmt>train package:str config:str gpus:int gpus_per_node:int=<none> cpus_per_task:int=2 partition:str=<none> launcher:str='none' port:int=<none> srun_args:Optional[str]=<none> yes:bool=<true> other_args:tuple=()<arrow>Tuple[bool Union[str Exception]]<block_start>"""Train a model with given config.
Args:
package (str): The codebase name.
config (str): The config file path. If not exists, will search in the
config files of the codebase.
gpus (int): Number of gpus used for training.
gpus_per_node (int, optional): Number of gpus per node to use
(only applicable to launcher == "slurm"). Defaults to None.
cpus_per_task (int, optional): Number of cpus per task to use
(only applicable to launcher == "slurm"). Defaults to None.
partition (str, optional): The partition name
(only applicable to launcher == "slurm"). Defaults to None.
launcher (str, optional): The launcher used to launch jobs.
Defaults to 'none'.
port (int | None, optional): The port used for inter-process
communication (only applicable to slurm / pytorch launchers).
Default to None. If set to None, will randomly choose a port
between 20000 and 30000.
srun_args (str, optional): Other srun arguments that might be
used, all arguments should be in a string. Defaults to None.
yes (bool): Don’t ask for confirmation. Default: True.
other_args (tuple, optional): Other arguments, will be passed to the
codebase's training script. Defaults to ().
"""<line_sep>full_name=module_full_name(package)<if_stmt>full_name<eq>''<block_start>msg=f"Can't determine a unique package given abbreviation {package}"<line_sep><raise>ValueError(highlighted_error(msg))<block_end>package=full_name<line_sep># If launcher == "slurm", must have following args
<if_stmt>launcher<eq>'slurm'<block_start>msg=('If launcher is slurm, '<concat>'gpus-per-node and partition should not be None')<line_sep>flag=(gpus_per_node<is><not><none>)<and>(partition<is><not><none>)<assert_stmt>flag msg<block_end><if_stmt>port<is><none><block_start>port=rd.randint(20000 30000)<block_end><if_stmt>launcher<in>['slurm' 'pytorch']<block_start>click.echo(f'Using port {port} for synchronization. ')<block_end><if_stmt><not>is_installed(package)<block_start>msg=(f'The codebase {package} is not installed, '<concat>'do you want to install the latest release? ')<if_stmt>yes<or>click.confirm(msg)<block_start>click.echo(f'Installing {package}')<line_sep>cmd=['mim' 'install' package]<line_sep>ret=subprocess.check_call(cmd)<if_stmt>ret<ne>0<block_start>msg=f'{package} is not successfully installed'<line_sep><raise>RuntimeError(highlighted_error(msg))<block_end><else_stmt><block_start>click.echo(f'{package} is successfully installed')<block_end><block_end><else_stmt><block_start>msg=f'You can not train this model without {package} installed.'<line_sep><return><false> msg<block_end><block_end>pkg_root=get_installed_path(package)<if_stmt><not>osp.exists(config)# configs is put in pkg/.mim in PR #68
<block_start>config_root=osp.join(pkg_root '.mim' 'configs')<if_stmt><not>osp.exists(config_root)# If not pkg/.mim/config, try to search the whole pkg root.
<block_start>config_root=pkg_root<block_end># pkg/.mim/configs is a symbolic link to the real config folder,
# so we need to follow links.
files=recursively_find(pkg_root osp.basename(config) followlinks=<true>)<if_stmt>len(files)<eq>0<block_start>msg=(f"The path {config} doesn't exist and we can not find "<concat>f'the config file in codebase {package}.')<line_sep><raise>ValueError(highlighted_error(msg))<block_end><elif_stmt>len(files)<g>1<block_start>msg=(f"The path {config} doesn't exist and we find multiple "<concat>f'config files with same name in codebase {package}: {files}.')<line_sep><raise>ValueError(highlighted_error(msg))<block_end># Use realpath instead of the symbolic path in pkg/.mim
config_path=osp.realpath(files[0])<line_sep>click.echo(f"The path {config} doesn't exist but we find the config file "<concat>f'in codebase {package}, will use {config_path} instead.')<line_sep>config=config_path<block_end># tools will be put in package/.mim in PR #68
train_script=osp.join(pkg_root '.mim' 'tools' 'train.py')<if_stmt><not>osp.exists(train_script)<block_start>train_script=osp.join(pkg_root 'tools' 'train.py')<block_end>common_args=['--launcher' launcher]+list(other_args)<if_stmt>launcher<eq>'none'<block_start><if_stmt>gpus<block_start>cmd=['python' train_script config '--gpus' str(gpus)]+common_args<block_end><else_stmt><block_start>cmd=['python' train_script config '--device' 'cpu']+common_args<block_end><block_end><elif_stmt>launcher<eq>'pytorch'<block_start>cmd=['python' '-m' 'torch.distributed.launch' f'--nproc_per_node={gpus}' f'--master_port={port}' train_script config]+common_args<block_end><elif_stmt>launcher<eq>'slurm'<block_start>parsed_srun_args=srun_args.split()<if>srun_args<else>[]<line_sep>has_job_name=any([('--job-name'<in>x)<or>('-J'<in>x)<for>x parsed_srun_args])<if_stmt><not>has_job_name<block_start>job_name=osp.splitext(osp.basename(config))[0]<line_sep>parsed_srun_args.append(f'--job-name={job_name}_train')<block_end>cmd=['srun' '-p' f'{partition}' f'--gres=gpu:{gpus_per_node}' f'--ntasks={gpus}' f'--ntasks-per-node={gpus_per_node}' f'--cpus-per-task={cpus_per_task}' '--kill-on-bad-exit=1']+parsed_srun_args+['python' '-u' train_script config]+common_args<block_end>cmd_text=' '.join(cmd)<line_sep>click.echo(f'Training command is {cmd_text}. ')<line_sep>ret=subprocess.check_call(cmd env=dict(os.environ MASTER_PORT=str(port)))<if_stmt>ret<eq>0<block_start><return><true> 'Training finished successfully. '<block_end><else_stmt><block_start><return><false> 'Training not finished successfully. '<block_end><block_end> |
<import_from_stmt>steamctl.argparser register_command<line_sep>epilog="""\
"""<line_sep>@register_command('assistant' help='Helpful automation' epilog=epilog)<def_stmt>cmd_parser cp<block_start><def_stmt>print_help *args **kwargs<block_start>cp.print_help()<block_end>cp.set_defaults(_cmd_func=print_help)<line_sep>sub_cp=cp.add_subparsers(metavar='<subcommand>' dest='subcommand' title='List of sub-commands' description='' )<line_sep>scp_i=sub_cp.add_parser("idle-games" help="Idle up to 32 games for game time")<line_sep>scp_i.set_defaults(_cmd_func=__name__+'.card_idler:cmd_assistant_idle_games')<line_sep>scp_i.add_argument('app_ids' nargs='+' metavar='AppID' type=int help='App ID(s) to idle')<line_sep>scp_i=sub_cp.add_parser("idle-cards" help="Automatic idling for game cards")<line_sep>scp_i.set_defaults(_cmd_func=__name__+'.card_idler:cmd_assistant_idle_cards')<line_sep>scp_i=sub_cp.add_parser("discovery-queue" help="Explore a single discovery queue")<line_sep>scp_i.set_defaults(_cmd_func=__name__+'.discovery_queue:cmd_assistant_discovery_queue')<block_end> |
<import_from_stmt>_Qdoffs *<line_sep> |
r"""
Subwords
A subword of a word `w` is a word obtained by deleting the letters at some
(non necessarily adjacent) positions in `w`. It is not to be confused with the
notion of factor where one keeps adjacent positions in `w`. Sometimes it is
useful to allow repeated uses of the same letter of `w` in a "generalized"
subword. We call this a subword with repetitions.
For example:
- "bnjr" is a subword of the word "bonjour" but not a factor;
- "njo" is both a factor and a subword of the word "bonjour";
- "nr" is a subword of "bonjour";
- "rn" is not a subword of "bonjour";
- "nnu" is not a subword of "bonjour";
- "nnu" is a subword with repetitions of "bonjour";
A word can be given either as a string, as a list or as a tuple.
As repetition can occur in the initial word, the subwords of a given words is
not a set in general but an enumerated multiset!
.. TODO::
- implement subwords with repetitions
- implement the category of EnumeratedMultiset and inheritate from
when needed (i.e. the initial word has repeated letters)
AUTHORS:
- <NAME>: initial version
- <NAME> (2009/02/06): doc improvements + new methods + bug fixes
"""<line_sep>#*****************************************************************************
# Copyright (C) 2007 <NAME> <<EMAIL>>,
# 2014 <NAME> <<EMAIL>>,
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
<import_stmt>itertools<import_from_stmt>sage.structure.parent Parent<import_from_stmt>sage.categories.finite_enumerated_sets FiniteEnumeratedSets<import_stmt>sage.arith.all<as>arith<import_stmt>sage.misc.prandom<as>prandom<import_from_stmt>sage.rings.integer Integer<import_from_stmt>sage.sets.finite_enumerated_set FiniteEnumeratedSet<def_stmt>_stringification data<block_start>r"""
TESTS::
sage: from sage.combinat.subword import _stringification
sage: _stringification(['a','b','c'])
'abc'
"""<line_sep><return>''.join(data)<block_end><def_stmt>Subwords w k=<none> element_constructor=<none><block_start>"""
Return the set of subwords of ``w``.
INPUT:
- ``w`` -- a word (can be a list, a string, a tuple or a word)
- ``k`` -- an optional integer to specify the length of subwords
- ``element_constructor`` -- an optional function that will be used
to build the subwords
EXAMPLES::
sage: S = Subwords(['a','b','c']); S
Subwords of ['a', 'b', 'c']
sage: S.first()
[]
sage: S.last()
['a', 'b', 'c']
sage: S.list()
[[], ['a'], ['b'], ['c'], ['a', 'b'], ['a', 'c'], ['b', 'c'], ['a', 'b', 'c']]
The same example using string, tuple or a word::
sage: S = Subwords('abc'); S
Subwords of 'abc'
sage: S.list()
['', 'a', 'b', 'c', 'ab', 'ac', 'bc', 'abc']
sage: S = Subwords((1,2,3)); S
Subwords of (1, 2, 3)
sage: S.list()
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
sage: w = Word([1,2,3])
sage: S = Subwords(w); S
Subwords of word: 123
sage: S.list()
[word: , word: 1, word: 2, word: 3, word: 12, word: 13, word: 23, word: 123]
Using word with specified length::
sage: S = Subwords(['a','b','c'], 2); S
Subwords of ['a', 'b', 'c'] of length 2
sage: S.list()
[['a', 'b'], ['a', 'c'], ['b', 'c']]
An example that uses the ``element_constructor`` argument::
sage: p = Permutation([3,2,1])
sage: Subwords(p, element_constructor=tuple).list()
[(), (3,), (2,), (1,), (3, 2), (3, 1), (2, 1), (3, 2, 1)]
sage: Subwords(p, 2, element_constructor=tuple).list()
[(3, 2), (3, 1), (2, 1)]
"""<if_stmt>element_constructor<is><none><block_start>datatype=type(w)# 'datatype' is the type of w
<if_stmt>datatype<is>list<or>datatype<is>tuple<block_start>element_constructor=datatype<block_end><elif_stmt>datatype<is>str<block_start>element_constructor=_stringification<block_end><else_stmt><block_start><import_from_stmt>sage.combinat.words.words Words<try_stmt><block_start>alphabet=w.parent().alphabet()<line_sep>element_constructor=Words(alphabet)<block_end><except_stmt>AttributeError<block_start>element_constructor=list<block_end><block_end><block_end><if_stmt>k<is><none><block_start><return>Subwords_w(w element_constructor)<block_end><if_stmt><not>isinstance(k (int Integer))<block_start><raise>ValueError("k should be an integer")<block_end><if_stmt>k<l>0<or>k<g>len(w)<block_start><return>FiniteEnumeratedSet([])<block_end><return>Subwords_wk(w k element_constructor)<block_end><class_stmt>Subwords_w(Parent)<block_start>r"""
Subwords of a given word.
"""<def_stmt>__init__ self w element_constructor<block_start>"""
TESTS::
sage: TestSuite(Subwords([1,2,3])).run()
sage: TestSuite(Subwords('sage')).run()
"""<line_sep>Parent.__init__(self category=FiniteEnumeratedSets())<line_sep>self._w=w<line_sep>self._build=element_constructor<block_end><def_stmt>__eq__ self other<block_start>r"""
Equality test.
TESTS::
sage: Subwords([1,2,3]) == Subwords([1,2,3])
True
sage: Subwords([1,2,3]) == Subwords([1,3,2])
False
"""<line_sep><return>self.__class__<eq>other.__class__<and>self._w<eq>other._w<and>self._build<eq>other._build<block_end><def_stmt>__ne__ self other<block_start>r"""
TESTS::
sage: Subwords([1,2,3]) != Subwords([1,2,3])
False
sage: Subwords([1,2,3]) != Subwords([1,3,2])
True
"""<line_sep><return><not>self<eq>other<block_end><def_stmt>__reduce__ self<block_start>r"""
Pickle (how to construct back the object).
TESTS::
sage: S = Subwords((1,2,3))
sage: S == loads(dumps(S))
True
sage: S = Subwords('123')
sage: S == loads(dumps(S))
True
sage: S = Subwords(('a',(1,2,3),('a','b'),'ir'))
sage: S == loads(dumps(S))
True
"""<line_sep><return>(Subwords_w (self._w self._build))<block_end><def_stmt>__repr__ self<block_start>"""
TESTS::
sage: repr(Subwords([1,2,3])) # indirect doctest
'Subwords of [1, 2, 3]'
"""<line_sep><return>"Subwords of {!r}".format(self._w)<block_end><def_stmt>__contains__ self w<block_start>"""
TESTS::
sage: [] in Subwords([1,2,3,4,3,4,4])
True
sage: [2,3,3,4] in Subwords([1,2,3,4,3,4,4])
True
sage: [5,5,3] in Subwords([1,3,3,5,4,5,3,5])
True
sage: [3,5,5,3] in Subwords([1,3,3,5,4,5,3,5])
True
sage: [3,5,5,3,4] in Subwords([1,3,3,5,4,5,3,5])
False
sage: [2,3,3,4] in Subwords([1,2,3,4,3,4,4])
True
sage: [2,3,3,1] in Subwords([1,2,3,4,3,4,4])
False
"""<line_sep><return>smallest_positions(self._w w)<is><not><false><block_end><def_stmt>cardinality self<block_start>"""
EXAMPLES::
sage: Subwords([1,2,3]).cardinality()
8
"""<line_sep><return>Integer(1)<lshift>len(self._w)<block_end><def_stmt>first self<block_start>"""
EXAMPLES::
sage: Subwords([1,2,3]).first()
[]
sage: Subwords((1,2,3)).first()
()
sage: Subwords('123').first()
''
"""<line_sep><return>self._build([])<block_end><def_stmt>last self<block_start>"""
EXAMPLES::
sage: Subwords([1,2,3]).last()
[1, 2, 3]
sage: Subwords((1,2,3)).last()
(1, 2, 3)
sage: Subwords('123').last()
'123'
"""<line_sep><return>self._build(self._w)<block_end><def_stmt>random_element self<block_start>r"""
Return a random subword with uniform law.
EXAMPLES::
sage: S1 = Subwords([1,2,3,2,1,3])
sage: S2 = Subwords([4,6,6,6,7,4,5,5])
sage: for i in range(100):
....: w = S1.random_element()
....: if w in S2:
....: assert(w == [])
sage: for i in range(100):
....: w = S2.random_element()
....: if w in S1:
....: assert(w == [])
"""<line_sep><return>self._build(elt<for>elt self._w<if>prandom.randint(0 1))<block_end><def_stmt>__iter__ self<block_start>r"""
EXAMPLES::
sage: Subwords([1,2,3]).list()
[[], [1], [2], [3], [1, 2], [1, 3], [2, 3], [1, 2, 3]]
sage: Subwords((1,2,3)).list()
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
sage: Subwords('123').list()
['', '1', '2', '3', '12', '13', '23', '123']
"""<line_sep><return>itertools.chain(*[Subwords_wk(self._w i self._build)<for>i range(len(self._w)+1)])<block_end><block_end><class_stmt>Subwords_wk(Subwords_w)<block_start>r"""
Subwords with fixed length of a given word.
"""<def_stmt>__init__ self w k element_constructor<block_start>"""
TESTS::
sage: S = Subwords([1,2,3],2)
sage: S == loads(dumps(S))
True
sage: TestSuite(S).run()
"""<line_sep>Subwords_w.__init__(self w element_constructor)<line_sep>self._k=k<block_end><def_stmt>__eq__ self other<block_start>r"""
Equality test.
TESTS::
sage: Subwords([1,2,3],2) == Subwords([1,2,3],2)
True
sage: Subwords([1,2,3],2) == Subwords([1,3,2],2)
False
sage: Subwords([1,2,3],2) == Subwords([1,2,3],3)
False
"""<line_sep><return>Subwords_w.__eq__(self other)<and>self._k<eq>other._k<block_end><def_stmt>__reduce__ self<block_start>r"""
Pickle (how to construct back the object).
TESTS::
sage: S = Subwords('abc',2)
sage: S == loads(dumps(S))
True
sage: S = Subwords(('a',1,'45',(1,2)))
sage: S == loads(dumps(S))
True
"""<line_sep><return>(Subwords_wk (self._w self._k self._build))<block_end><def_stmt>__repr__ self<block_start>"""
TESTS::
sage: repr(Subwords([1,2,3],2)) # indirect doctest
'Subwords of [1, 2, 3] of length 2'
"""<line_sep><return>"{} of length {}".format(Subwords_w.__repr__(self) self._k)<block_end><def_stmt>__contains__ self w<block_start>"""
TESTS::
sage: [] in Subwords([1, 3, 3, 5, 4, 5, 3, 5],0)
True
sage: [2,3,3,4] in Subwords([1,2,3,4,3,4,4],4)
True
sage: [2,3,3,4] in Subwords([1,2,3,4,3,4,4],3)
False
sage: [5,5,3] in Subwords([1,3,3,5,4,5,3,5],3)
True
sage: [5,5,3] in Subwords([1,3,3,5,4,5,3,5],4)
False
"""<line_sep><return>len(w)<eq>self._k<and>Subwords_w.__contains__(self w)<block_end><def_stmt>cardinality self<block_start>r"""
Returns the number of subwords of w of length k.
EXAMPLES::
sage: Subwords([1,2,3], 2).cardinality()
3
"""<line_sep><return>arith.binomial(Integer(len(self._w)) self._k)<block_end><def_stmt>first self<block_start>r"""
EXAMPLES::
sage: Subwords([1,2,3],2).first()
[1, 2]
sage: Subwords([1,2,3],0).first()
[]
sage: Subwords((1,2,3),2).first()
(1, 2)
sage: Subwords((1,2,3),0).first()
()
sage: Subwords('123',2).first()
'12'
sage: Subwords('123',0).first()
''
"""<line_sep><return>self._build(self._w[i]<for>i range(self._k))<block_end><def_stmt>last self<block_start>r"""
EXAMPLES::
sage: Subwords([1,2,3],2).last()
[2, 3]
sage: Subwords([1,2,3],0).last()
[]
sage: Subwords((1,2,3),2).last()
(2, 3)
sage: Subwords((1,2,3),0).last()
()
sage: Subwords('123',2).last()
'23'
sage: Subwords('123',0).last()
''
TESTS::
sage: Subwords('123', 0).last() # trac 10534
''
"""<line_sep>n=len(self._w)<line_sep><return>self._build(self._w[i]<for>i range(n-self._k n))<block_end><def_stmt>random_element self<block_start>r"""
Return a random subword of given length with uniform law.
EXAMPLES::
sage: S1 = Subwords([1,2,3,2,1],3)
sage: S2 = Subwords([4,4,5,5,4,5,4,4],3)
sage: for i in range(100):
....: w = S1.random_element()
....: if w in S2:
....: assert(w == [])
sage: for i in range(100):
....: w = S2.random_element()
....: if w in S1:
....: assert(w == [])
"""<line_sep>sample=prandom.sample(self._w self._k)<if_stmt>self._build<is>list<block_start><return>sample<block_end><return>self._build(sample)<block_end><def_stmt>__iter__ self<block_start>"""
EXAMPLES::
sage: Subwords([1,2,3],2).list()
[[1, 2], [1, 3], [2, 3]]
sage: Subwords([1,2,3],0).list()
[[]]
sage: Subwords((1,2,3),2).list()
[(1, 2), (1, 3), (2, 3)]
sage: Subwords((1,2,3),0).list()
[()]
sage: Subwords('abc',2).list()
['ab', 'ac', 'bc']
sage: Subwords('abc',0).list()
['']
"""<if_stmt>self._k<g>len(self._w)<block_start><return>iter([])<block_end>iterator=itertools.combinations(self._w self._k)<if_stmt>self._build<is>tuple<block_start><return>iterator<block_end><else_stmt><block_start><return>(self._build(x)<for>x iterator)<block_end><block_end><block_end><def_stmt>smallest_positions word subword pos=0<block_start>"""
Return the smallest positions for which ``subword`` appears as a
subword of ``word``. If ``pos`` is specified, then it returns the positions
of the first appearance of subword starting at ``pos``.
If ``subword`` is not found in ``word``, then return ``False``.
EXAMPLES::
sage: sage.combinat.subword.smallest_positions([1,2,3,4], [2,4])
[1, 3]
sage: sage.combinat.subword.smallest_positions([1,2,3,4,4], [2,4])
[1, 3]
sage: sage.combinat.subword.smallest_positions([1,2,3,3,4,4], [3,4])
[2, 4]
sage: sage.combinat.subword.smallest_positions([1,2,3,3,4,4], [3,4],2)
[2, 4]
sage: sage.combinat.subword.smallest_positions([1,2,3,3,4,4], [3,4],3)
[3, 4]
sage: sage.combinat.subword.smallest_positions([1,2,3,4], [2,3])
[1, 2]
sage: sage.combinat.subword.smallest_positions([1,2,3,4], [5,5])
False
sage: sage.combinat.subword.smallest_positions([1,3,3,4,5],[3,5])
[1, 4]
sage: sage.combinat.subword.smallest_positions([1,3,3,5,4,5,3,5],[3,5,3])
[1, 3, 6]
sage: sage.combinat.subword.smallest_positions([1,3,3,5,4,5,3,5],[3,5,3],2)
[2, 3, 6]
sage: sage.combinat.subword.smallest_positions([1,2,3,4,3,4,4],[2,3,3,1])
False
sage: sage.combinat.subword.smallest_positions([1,3,3,5,4,5,3,5],[3,5,3],3)
False
TESTS:
We check for :trac:`5534`::
sage: w = ["a", "b", "c", "d"]; ww = ["b", "d"]
sage: x = sage.combinat.subword.smallest_positions(w, ww); ww
['b', 'd']
"""<line_sep>pos<augsub>1<line_sep>res=[<none>]<times>len(subword)<for_stmt>i range(len(subword))<block_start><for_stmt>j range(pos+1 len(word)+1)<block_start><if_stmt>j<eq>len(word)<block_start><return><false><block_end><if_stmt>word[j]<eq>subword[i]<block_start>pos=j<line_sep><break><block_end><block_end><if_stmt>pos<ne>j<block_start><return><false><block_end>res[i]=pos<block_end><return>res<block_end> |
from collections.abc import AsyncIterator
from typing import Any

from ...database import DatabaseClient
from .operations import MockedDatabaseOperation


class MockedDatabaseClient(DatabaseClient):
    """A stub database client used by the test suite."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Keep the constructor keyword arguments so tests can inspect them.
        self.kwargs = kwargs
        # Rows that the next fetch will stream back; empty until an
        # operation is executed.
        self._response = tuple()

    async def _reset(self, **kwargs) -> None:
        """Drop any previously recorded response."""
        self._response = tuple()

    async def _execute(self, operation: MockedDatabaseOperation) -> None:
        """Record the canned response carried by ``operation``."""
        self._response = operation.response

    async def _fetch_all(self, *args, **kwargs) -> AsyncIterator[Any]:
        """Stream back the rows recorded by the last executed operation."""
        for row in self._response:
            yield row
import torch
import numpy as np
import shutil
import os

from data import ljspeech
import hparams as hp


def preprocess_ljspeech(filename):
    """Build mel-spectrogram ground truth for the LJSpeech corpus at ``filename``.

    Writes the mels into ``hp.mel_ground_truth`` and relocates the generated
    ``train.txt`` metadata file into the ``data`` directory.
    """
    source_dir = filename
    target_dir = hp.mel_ground_truth
    if not os.path.exists(target_dir):
        os.makedirs(target_dir, exist_ok=True)
    entries = ljspeech.build_from_path(source_dir, target_dir)
    write_metadata(entries, target_dir)
    # The metadata is first written next to the mels; move it where the
    # training code expects it.
    shutil.move(os.path.join(hp.mel_ground_truth, "train.txt"),
                os.path.join("data", "train.txt"))


def write_metadata(metadata, out_dir):
    """Write one metadata entry per line to ``out_dir/train.txt`` (UTF-8)."""
    with open(os.path.join(out_dir, 'train.txt'), 'w', encoding='utf-8') as handle:
        for entry in metadata:
            handle.write(entry + '\n')


def main():
    """Entry point: preprocess the default LJSpeech-1.1 location."""
    corpus = os.path.join("data", "LJSpeech-1.1")
    preprocess_ljspeech(corpus)


if __name__ == "__main__":
    main()
uctable=[[48] [49] [50] [51] [52] [53] [54] [55] [56] [57] [194 178] [194 179] [194 185] [194 188] [194 189] [194 190] [217 160] [217 161] [217 162] [217 163] [217 164] [217 165] [217 166] [217 167] [217 168] [217 169] [219 176] [219 177] [219 178] [219 179] [219 180] [219 181] [219 182] [219 183] [219 184] [219 185] [223 128] [223 129] [223 130] [223 131] [223 132] [223 133] [223 134] [223 135] [223 136] [223 137] [224 165 166] [224 165 167] [224 165 168] [224 165 169] [224 165 170] [224 165 171] [224 165 172] [224 165 173] [224 165 174] [224 165 175] [224 167 166] [224 167 167] [224 167 168] [224 167 169] [224 167 170] [224 167 171] [224 167 172] [224 167 173] [224 167 174] [224 167 175] [224 167 180] [224 167 181] [224 167 182] [224 167 183] [224 167 184] [224 167 185] [224 169 166] [224 169 167] [224 169 168] [224 169 169] [224 169 170] [224 169 171] [224 169 172] [224 169 173] [224 169 174] [224 169 175] [224 171 166] [224 171 167] [224 171 168] [224 171 169] [224 171 170] [224 171 171] [224 171 172] [224 171 173] [224 171 174] [224 171 175] [224 173 166] [224 173 167] [224 173 168] [224 173 169] [224 173 170] [224 173 171] [224 173 172] [224 173 173] [224 173 174] [224 173 175] [224 173 178] [224 173 179] [224 173 180] [224 173 181] [224 173 182] [224 173 183] [224 175 166] [224 175 167] [224 175 168] [224 175 169] [224 175 170] [224 175 171] [224 175 172] [224 175 173] [224 175 174] [224 175 175] [224 175 176] [224 175 177] [224 175 178] [224 177 166] [224 177 167] [224 177 168] [224 177 169] [224 177 170] [224 177 171] [224 177 172] [224 177 173] [224 177 174] [224 177 175] [224 177 184] [224 177 185] [224 177 186] [224 177 187] [224 177 188] [224 177 189] [224 177 190] [224 179 166] [224 179 167] [224 179 168] [224 179 169] [224 179 170] [224 179 171] [224 179 172] [224 179 173] [224 179 174] [224 179 175] [224 181 166] [224 181 167] [224 181 168] [224 181 169] [224 181 170] [224 181 171] [224 181 172] [224 181 173] [224 181 174] [224 181 175] [224 181 
176] [224 181 177] [224 181 178] [224 181 179] [224 181 180] [224 181 181] [224 183 166] [224 183 167] [224 183 168] [224 183 169] [224 183 170] [224 183 171] [224 183 172] [224 183 173] [224 183 174] [224 183 175] [224 185 144] [224 185 145] [224 185 146] [224 185 147] [224 185 148] [224 185 149] [224 185 150] [224 185 151] [224 185 152] [224 185 153] [224 187 144] [224 187 145] [224 187 146] [224 187 147] [224 187 148] [224 187 149] [224 187 150] [224 187 151] [224 187 152] [224 187 153] [224 188 160] [224 188 161] [224 188 162] [224 188 163] [224 188 164] [224 188 165] [224 188 166] [224 188 167] [224 188 168] [224 188 169] [224 188 170] [224 188 171] [224 188 172] [224 188 173] [224 188 174] [224 188 175] [224 188 176] [224 188 177] [224 188 178] [224 188 179] [225 129 128] [225 129 129] [225 129 130] [225 129 131] [225 129 132] [225 129 133] [225 129 134] [225 129 135] [225 129 136] [225 129 137] [225 130 144] [225 130 145] [225 130 146] [225 130 147] [225 130 148] [225 130 149] [225 130 150] [225 130 151] [225 130 152] [225 130 153] [225 141 169] [225 141 170] [225 141 171] [225 141 172] [225 141 173] [225 141 174] [225 141 175] [225 141 176] [225 141 177] [225 141 178] [225 141 179] [225 141 180] [225 141 181] [225 141 182] [225 141 183] [225 141 184] [225 141 185] [225 141 186] [225 141 187] [225 141 188] [225 155 174] [225 155 175] [225 155 176] [225 159 160] [225 159 161] [225 159 162] [225 159 163] [225 159 164] [225 159 165] [225 159 166] [225 159 167] [225 159 168] [225 159 169] [225 159 176] [225 159 177] [225 159 178] [225 159 179] [225 159 180] [225 159 181] [225 159 182] [225 159 183] [225 159 184] [225 159 185] [225 160 144] [225 160 145] [225 160 146] [225 160 147] [225 160 148] [225 160 149] [225 160 150] [225 160 151] [225 160 152] [225 160 153] [225 165 134] [225 165 135] [225 165 136] [225 165 137] [225 165 138] [225 165 139] [225 165 140] [225 165 141] [225 165 142] [225 165 143] [225 167 144] [225 167 145] [225 167 146] [225 167 147] [225 
167 148] [225 167 149] [225 167 150] [225 167 151] [225 167 152] [225 167 153] [225 167 154] [225 170 128] [225 170 129] [225 170 130] [225 170 131] [225 170 132] [225 170 133] [225 170 134] [225 170 135] [225 170 136] [225 170 137] [225 170 144] [225 170 145] [225 170 146] [225 170 147] [225 170 148] [225 170 149] [225 170 150] [225 170 151] [225 170 152] [225 170 153] [225 173 144] [225 173 145] [225 173 146] [225 173 147] [225 173 148] [225 173 149] [225 173 150] [225 173 151] [225 173 152] [225 173 153] [225 174 176] [225 174 177] [225 174 178] [225 174 179] [225 174 180] [225 174 181] [225 174 182] [225 174 183] [225 174 184] [225 174 185] [225 177 128] [225 177 129] [225 177 130] [225 177 131] [225 177 132] [225 177 133] [225 177 134] [225 177 135] [225 177 136] [225 177 137] [225 177 144] [225 177 145] [225 177 146] [225 177 147] [225 177 148] [225 177 149] [225 177 150] [225 177 151] [225 177 152] [225 177 153] [226 129 176] [226 129 180] [226 129 181] [226 129 182] [226 129 183] [226 129 184] [226 129 185] [226 130 128] [226 130 129] [226 130 130] [226 130 131] [226 130 132] [226 130 133] [226 130 134] [226 130 135] [226 130 136] [226 130 137] [226 133 144] [226 133 145] [226 133 146] [226 133 147] [226 133 148] [226 133 149] [226 133 150] [226 133 151] [226 133 152] [226 133 153] [226 133 154] [226 133 155] [226 133 156] [226 133 157] [226 133 158] [226 133 159] [226 133 160] [226 133 161] [226 133 162] [226 133 163] [226 133 164] [226 133 165] [226 133 166] [226 133 167] [226 133 168] [226 133 169] [226 133 170] [226 133 171] [226 133 172] [226 133 173] [226 133 174] [226 133 175] [226 133 176] [226 133 177] [226 133 178] [226 133 179] [226 133 180] [226 133 181] [226 133 182] [226 133 183] [226 133 184] [226 133 185] [226 133 186] [226 133 187] [226 133 188] [226 133 189] [226 133 190] [226 133 191] [226 134 128] [226 134 129] [226 134 130] [226 134 133] [226 134 134] [226 134 135] [226 134 136] [226 134 137] [226 145 160] [226 145 161] [226 145 162] 
[226 145 163] [226 145 164] [226 145 165] [226 145 166] [226 145 167] [226 145 168] [226 145 169] [226 145 170] [226 145 171] [226 145 172] [226 145 173] [226 145 174] [226 145 175] [226 145 176] [226 145 177] [226 145 178] [226 145 179] [226 145 180] [226 145 181] [226 145 182] [226 145 183] [226 145 184] [226 145 185] [226 145 186] [226 145 187] [226 145 188] [226 145 189] [226 145 190] [226 145 191] [226 146 128] [226 146 129] [226 146 130] [226 146 131] [226 146 132] [226 146 133] [226 146 134] [226 146 135] [226 146 136] [226 146 137] [226 146 138] [226 146 139] [226 146 140] [226 146 141] [226 146 142] [226 146 143] [226 146 144] [226 146 145] [226 146 146] [226 146 147] [226 146 148] [226 146 149] [226 146 150] [226 146 151] [226 146 152] [226 146 153] [226 146 154] [226 146 155] [226 147 170] [226 147 171] [226 147 172] [226 147 173] [226 147 174] [226 147 175] [226 147 176] [226 147 177] [226 147 178] [226 147 179] [226 147 180] [226 147 181] [226 147 182] [226 147 183] [226 147 184] [226 147 185] [226 147 186] [226 147 187] [226 147 188] [226 147 189] [226 147 190] [226 147 191] [226 157 182] [226 157 183] [226 157 184] [226 157 185] [226 157 186] [226 157 187] [226 157 188] [226 157 189] [226 157 190] [226 157 191] [226 158 128] [226 158 129] [226 158 130] [226 158 131] [226 158 132] [226 158 133] [226 158 134] [226 158 135] [226 158 136] [226 158 137] [226 158 138] [226 158 139] [226 158 140] [226 158 141] [226 158 142] [226 158 143] [226 158 144] [226 158 145] [226 158 146] [226 158 147] [226 179 189] [227 128 135] [227 128 161] [227 128 162] [227 128 163] [227 128 164] [227 128 165] [227 128 166] [227 128 167] [227 128 168] [227 128 169] [227 128 184] [227 128 185] [227 128 186] [227 134 146] [227 134 147] [227 134 148] [227 134 149] [227 136 160] [227 136 161] [227 136 162] [227 136 163] [227 136 164] [227 136 165] [227 136 166] [227 136 167] [227 136 168] [227 136 169] [227 137 136] [227 137 137] [227 137 138] [227 137 139] [227 137 140] [227 137 
141] [227 137 142] [227 137 143] [227 137 145] [227 137 146] [227 137 147] [227 137 148] [227 137 149] [227 137 150] [227 137 151] [227 137 152] [227 137 153] [227 137 154] [227 137 155] [227 137 156] [227 137 157] [227 137 158] [227 137 159] [227 138 128] [227 138 129] [227 138 130] [227 138 131] [227 138 132] [227 138 133] [227 138 134] [227 138 135] [227 138 136] [227 138 137] [227 138 177] [227 138 178] [227 138 179] [227 138 180] [227 138 181] [227 138 182] [227 138 183] [227 138 184] [227 138 185] [227 138 186] [227 138 187] [227 138 188] [227 138 189] [227 138 190] [227 138 191] [234 152 160] [234 152 161] [234 152 162] [234 152 163] [234 152 164] [234 152 165] [234 152 166] [234 152 167] [234 152 168] [234 152 169] [234 155 166] [234 155 167] [234 155 168] [234 155 169] [234 155 170] [234 155 171] [234 155 172] [234 155 173] [234 155 174] [234 155 175] [234 160 176] [234 160 177] [234 160 178] [234 160 179] [234 160 180] [234 160 181] [234 163 144] [234 163 145] [234 163 146] [234 163 147] [234 163 148] [234 163 149] [234 163 150] [234 163 151] [234 163 152] [234 163 153] [234 164 128] [234 164 129] [234 164 130] [234 164 131] [234 164 132] [234 164 133] [234 164 134] [234 164 135] [234 164 136] [234 164 137] [234 167 144] [234 167 145] [234 167 146] [234 167 147] [234 167 148] [234 167 149] [234 167 150] [234 167 151] [234 167 152] [234 167 153] [234 167 176] [234 167 177] [234 167 178] [234 167 179] [234 167 180] [234 167 181] [234 167 182] [234 167 183] [234 167 184] [234 167 185] [234 169 144] [234 169 145] [234 169 146] [234 169 147] [234 169 148] [234 169 149] [234 169 150] [234 169 151] [234 169 152] [234 169 153] [234 175 176] [234 175 177] [234 175 178] [234 175 179] [234 175 180] [234 175 181] [234 175 182] [234 175 183] [234 175 184] [234 175 185] [239 188 144] [239 188 145] [239 188 146] [239 188 147] [239 188 148] [239 188 149] [239 188 150] [239 188 151] [239 188 152] [239 188 153] [240 144 132 135] [240 144 132 136] [240 144 132 137] [240 144 
132 138] [240 144 132 139] [240 144 132 140] [240 144 132 141] [240 144 132 142] [240 144 132 143] [240 144 132 144] [240 144 132 145] [240 144 132 146] [240 144 132 147] [240 144 132 148] [240 144 132 149] [240 144 132 150] [240 144 132 151] [240 144 132 152] [240 144 132 153] [240 144 132 154] [240 144 132 155] [240 144 132 156] [240 144 132 157] [240 144 132 158] [240 144 132 159] [240 144 132 160] [240 144 132 161] [240 144 132 162] [240 144 132 163] [240 144 132 164] [240 144 132 165] [240 144 132 166] [240 144 132 167] [240 144 132 168] [240 144 132 169] [240 144 132 170] [240 144 132 171] [240 144 132 172] [240 144 132 173] [240 144 132 174] [240 144 132 175] [240 144 132 176] [240 144 132 177] [240 144 132 178] [240 144 132 179] [240 144 133 128] [240 144 133 129] [240 144 133 130] [240 144 133 131] [240 144 133 132] [240 144 133 133] [240 144 133 134] [240 144 133 135] [240 144 133 136] [240 144 133 137] [240 144 133 138] [240 144 133 139] [240 144 133 140] [240 144 133 141] [240 144 133 142] [240 144 133 143] [240 144 133 144] [240 144 133 145] [240 144 133 146] [240 144 133 147] [240 144 133 148] [240 144 133 149] [240 144 133 150] [240 144 133 151] [240 144 133 152] [240 144 133 153] [240 144 133 154] [240 144 133 155] [240 144 133 156] [240 144 133 157] [240 144 133 158] [240 144 133 159] [240 144 133 160] [240 144 133 161] [240 144 133 162] [240 144 133 163] [240 144 133 164] [240 144 133 165] [240 144 133 166] [240 144 133 167] [240 144 133 168] [240 144 133 169] [240 144 133 170] [240 144 133 171] [240 144 133 172] [240 144 133 173] [240 144 133 174] [240 144 133 175] [240 144 133 176] [240 144 133 177] [240 144 133 178] [240 144 133 179] [240 144 133 180] [240 144 133 181] [240 144 133 182] [240 144 133 183] [240 144 133 184] [240 144 134 138] [240 144 134 139] [240 144 139 161] [240 144 139 162] [240 144 139 163] [240 144 139 164] [240 144 139 165] [240 144 139 166] [240 144 139 167] [240 144 139 168] [240 144 139 169] [240 144 139 170] [240 144 
139 171] [240 144 139 172] [240 144 139 173] [240 144 139 174] [240 144 139 175] [240 144 139 176] [240 144 139 177] [240 144 139 178] [240 144 139 179] [240 144 139 180] [240 144 139 181] [240 144 139 182] [240 144 139 183] [240 144 139 184] [240 144 139 185] [240 144 139 186] [240 144 139 187] [240 144 140 160] [240 144 140 161] [240 144 140 162] [240 144 140 163] [240 144 141 129] [240 144 141 138] [240 144 143 145] [240 144 143 146] [240 144 143 147] [240 144 143 148] [240 144 143 149] [240 144 146 160] [240 144 146 161] [240 144 146 162] [240 144 146 163] [240 144 146 164] [240 144 146 165] [240 144 146 166] [240 144 146 167] [240 144 146 168] [240 144 146 169] [240 144 161 152] [240 144 161 153] [240 144 161 154] [240 144 161 155] [240 144 161 156] [240 144 161 157] [240 144 161 158] [240 144 161 159] [240 144 161 185] [240 144 161 186] [240 144 161 187] [240 144 161 188] [240 144 161 189] [240 144 161 190] [240 144 161 191] [240 144 162 167] [240 144 162 168] [240 144 162 169] [240 144 162 170] [240 144 162 171] [240 144 162 172] [240 144 162 173] [240 144 162 174] [240 144 162 175] [240 144 163 187] [240 144 163 188] [240 144 163 189] [240 144 163 190] [240 144 163 191] [240 144 164 150] [240 144 164 151] [240 144 164 152] [240 144 164 153] [240 144 164 154] [240 144 164 155] [240 144 166 188] [240 144 166 189] [240 144 167 128] [240 144 167 129] [240 144 167 130] [240 144 167 131] [240 144 167 132] [240 144 167 133] [240 144 167 134] [240 144 167 135] [240 144 167 136] [240 144 167 137] [240 144 167 138] [240 144 167 139] [240 144 167 140] [240 144 167 141] [240 144 167 142] [240 144 167 143] [240 144 167 146] [240 144 167 147] [240 144 167 148] [240 144 167 149] [240 144 167 150] [240 144 167 151] [240 144 167 152] [240 144 167 153] [240 144 167 154] [240 144 167 155] [240 144 167 156] [240 144 167 157] [240 144 167 158] [240 144 167 159] [240 144 167 160] [240 144 167 161] [240 144 167 162] [240 144 167 163] [240 144 167 164] [240 144 167 165] [240 144 
167 166] [240 144 167 167] [240 144 167 168] [240 144 167 169] [240 144 167 170] [240 144 167 171] [240 144 167 172] [240 144 167 173] [240 144 167 174] [240 144 167 175] [240 144 167 176] [240 144 167 177] [240 144 167 178] [240 144 167 179] [240 144 167 180] [240 144 167 181] [240 144 167 182] [240 144 167 183] [240 144 167 184] [240 144 167 185] [240 144 167 186] [240 144 167 187] [240 144 167 188] [240 144 167 189] [240 144 167 190] [240 144 167 191] [240 144 169 128] [240 144 169 129] [240 144 169 130] [240 144 169 131] [240 144 169 132] [240 144 169 133] [240 144 169 134] [240 144 169 135] [240 144 169 189] [240 144 169 190] [240 144 170 157] [240 144 170 158] [240 144 170 159] [240 144 171 171] [240 144 171 172] [240 144 171 173] [240 144 171 174] [240 144 171 175] [240 144 173 152] [240 144 173 153] [240 144 173 154] [240 144 173 155] [240 144 173 156] [240 144 173 157] [240 144 173 158] [240 144 173 159] [240 144 173 184] [240 144 173 185] [240 144 173 186] [240 144 173 187] [240 144 173 188] [240 144 173 189] [240 144 173 190] [240 144 173 191] [240 144 174 169] [240 144 174 170] [240 144 174 171] [240 144 174 172] [240 144 174 173] [240 144 174 174] [240 144 174 175] [240 144 179 186] [240 144 179 187] [240 144 179 188] [240 144 179 189] [240 144 179 190] [240 144 179 191] [240 144 185 160] [240 144 185 161] [240 144 185 162] [240 144 185 163] [240 144 185 164] [240 144 185 165] [240 144 185 166] [240 144 185 167] [240 144 185 168] [240 144 185 169] [240 144 185 170] [240 144 185 171] [240 144 185 172] [240 144 185 173] [240 144 185 174] [240 144 185 175] [240 144 185 176] [240 144 185 177] [240 144 185 178] [240 144 185 179] [240 144 185 180] [240 144 185 181] [240 144 185 182] [240 144 185 183] [240 144 185 184] [240 144 185 185] [240 144 185 186] [240 144 185 187] [240 144 185 188] [240 144 185 189] [240 144 185 190] [240 145 129 146] [240 145 129 147] [240 145 129 148] [240 145 129 149] [240 145 129 150] [240 145 129 151] [240 145 129 152] [240 145 
129 153] [240 145 129 154] [240 145 129 155] [240 145 129 156] [240 145 129 157] [240 145 129 158] [240 145 129 159] [240 145 129 160] [240 145 129 161] [240 145 129 162] [240 145 129 163] [240 145 129 164] [240 145 129 165] [240 145 129 166] [240 145 129 167] [240 145 129 168] [240 145 129 169] [240 145 129 170] [240 145 129 171] [240 145 129 172] [240 145 129 173] [240 145 129 174] [240 145 129 175] [240 145 131 176] [240 145 131 177] [240 145 131 178] [240 145 131 179] [240 145 131 180] [240 145 131 181] [240 145 131 182] [240 145 131 183] [240 145 131 184] [240 145 131 185] [240 145 132 182] [240 145 132 183] [240 145 132 184] [240 145 132 185] [240 145 132 186] [240 145 132 187] [240 145 132 188] [240 145 132 189] [240 145 132 190] [240 145 132 191] [240 145 135 144] [240 145 135 145] [240 145 135 146] [240 145 135 147] [240 145 135 148] [240 145 135 149] [240 145 135 150] [240 145 135 151] [240 145 135 152] [240 145 135 153] [240 145 135 161] [240 145 135 162] [240 145 135 163] [240 145 135 164] [240 145 135 165] [240 145 135 166] [240 145 135 167] [240 145 135 168] [240 145 135 169] [240 145 135 170] [240 145 135 171] [240 145 135 172] [240 145 135 173] [240 145 135 174] [240 145 135 175] [240 145 135 176] [240 145 135 177] [240 145 135 178] [240 145 135 179] [240 145 135 180] [240 145 139 176] [240 145 139 177] [240 145 139 178] [240 145 139 179] [240 145 139 180] [240 145 139 181] [240 145 139 182] [240 145 139 183] [240 145 139 184] [240 145 139 185] [240 145 147 144] [240 145 147 145] [240 145 147 146] [240 145 147 147] [240 145 147 148] [240 145 147 149] [240 145 147 150] [240 145 147 151] [240 145 147 152] [240 145 147 153] [240 145 153 144] [240 145 153 145] [240 145 153 146] [240 145 153 147] [240 145 153 148] [240 145 153 149] [240 145 153 150] [240 145 153 151] [240 145 153 152] [240 145 153 153] [240 145 155 128] [240 145 155 129] [240 145 155 130] [240 145 155 131] [240 145 155 132] [240 145 155 133] [240 145 155 134] [240 145 155 135] [240 145 
155 136] [240 145 155 137] [240 145 156 176] [240 145 156 177] [240 145 156 178] [240 145 156 179] [240 145 156 180] [240 145 156 181] [240 145 156 182] [240 145 156 183] [240 145 156 184] [240 145 156 185] [240 145 156 186] [240 145 156 187] [240 145 163 160] [240 145 163 161] [240 145 163 162] [240 145 163 163] [240 145 163 164] [240 145 163 165] [240 145 163 166] [240 145 163 167] [240 145 163 168] [240 145 163 169] [240 145 163 170] [240 145 163 171] [240 145 163 172] [240 145 163 173] [240 145 163 174] [240 145 163 175] [240 145 163 176] [240 145 163 177] [240 145 163 178] [240 146 144 128] [240 146 144 129] [240 146 144 130] [240 146 144 131] [240 146 144 132] [240 146 144 133] [240 146 144 134] [240 146 144 135] [240 146 144 136] [240 146 144 137] [240 146 144 138] [240 146 144 139] [240 146 144 140] [240 146 144 141] [240 146 144 142] [240 146 144 143] [240 146 144 144] [240 146 144 145] [240 146 144 146] [240 146 144 147] [240 146 144 148] [240 146 144 149] [240 146 144 150] [240 146 144 151] [240 146 144 152] [240 146 144 153] [240 146 144 154] [240 146 144 155] [240 146 144 156] [240 146 144 157] [240 146 144 158] [240 146 144 159] [240 146 144 160] [240 146 144 161] [240 146 144 162] [240 146 144 163] [240 146 144 164] [240 146 144 165] [240 146 144 166] [240 146 144 167] [240 146 144 168] [240 146 144 169] [240 146 144 170] [240 146 144 171] [240 146 144 172] [240 146 144 173] [240 146 144 174] [240 146 144 175] [240 146 144 176] [240 146 144 177] [240 146 144 178] [240 146 144 179] [240 146 144 180] [240 146 144 181] [240 146 144 182] [240 146 144 183] [240 146 144 184] [240 146 144 185] [240 146 144 186] [240 146 144 187] [240 146 144 188] [240 146 144 189] [240 146 144 190] [240 146 144 191] [240 146 145 128] [240 146 145 129] [240 146 145 130] [240 146 145 131] [240 146 145 132] [240 146 145 133] [240 146 145 134] [240 146 145 135] [240 146 145 136] [240 146 145 137] [240 146 145 138] [240 146 145 139] [240 146 145 140] [240 146 145 141] [240 146 
145 142] [240 146 145 143] [240 146 145 144] [240 146 145 145] [240 146 145 146] [240 146 145 147] [240 146 145 148] [240 146 145 149] [240 146 145 150] [240 146 145 151] [240 146 145 152] [240 146 145 153] [240 146 145 154] [240 146 145 155] [240 146 145 156] [240 146 145 157] [240 146 145 158] [240 146 145 159] [240 146 145 160] [240 146 145 161] [240 146 145 162] [240 146 145 163] [240 146 145 164] [240 146 145 165] [240 146 145 166] [240 146 145 167] [240 146 145 168] [240 146 145 169] [240 146 145 170] [240 146 145 171] [240 146 145 172] [240 146 145 173] [240 146 145 174] [240 150 169 160] [240 150 169 161] [240 150 169 162] [240 150 169 163] [240 150 169 164] [240 150 169 165] [240 150 169 166] [240 150 169 167] [240 150 169 168] [240 150 169 169] [240 150 173 144] [240 150 173 145] [240 150 173 146] [240 150 173 147] [240 150 173 148] [240 150 173 149] [240 150 173 150] [240 150 173 151] [240 150 173 152] [240 150 173 153] [240 150 173 155] [240 150 173 156] [240 150 173 157] [240 150 173 158] [240 150 173 159] [240 150 173 160] [240 150 173 161] [240 157 141 160] [240 157 141 161] [240 157 141 162] [240 157 141 163] [240 157 141 164] [240 157 141 165] [240 157 141 166] [240 157 141 167] [240 157 141 168] [240 157 141 169] [240 157 141 170] [240 157 141 171] [240 157 141 172] [240 157 141 173] [240 157 141 174] [240 157 141 175] [240 157 141 176] [240 157 141 177] [240 157 159 142] [240 157 159 143] [240 157 159 144] [240 157 159 145] [240 157 159 146] [240 157 159 147] [240 157 159 148] [240 157 159 149] [240 157 159 150] [240 157 159 151] [240 157 159 152] [240 157 159 153] [240 157 159 154] [240 157 159 155] [240 157 159 156] [240 157 159 157] [240 157 159 158] [240 157 159 159] [240 157 159 160] [240 157 159 161] [240 157 159 162] [240 157 159 163] [240 157 159 164] [240 157 159 165] [240 157 159 166] [240 157 159 167] [240 157 159 168] [240 157 159 169] [240 157 159 170] [240 157 159 171] [240 157 159 172] [240 157 159 173] [240 157 159 174] [240 157 
159 175] [240 157 159 176] [240 157 159 177] [240 157 159 178] [240 157 159 179] [240 157 159 180] [240 157 159 181] [240 157 159 182] [240 157 159 183] [240 157 159 184] [240 157 159 185] [240 157 159 186] [240 157 159 187] [240 157 159 188] [240 157 159 189] [240 157 159 190] [240 157 159 191] [240 158 163 135] [240 158 163 136] [240 158 163 137] [240 158 163 138] [240 158 163 139] [240 158 163 140] [240 158 163 141] [240 158 163 142] [240 158 163 143] [240 159 132 128] [240 159 132 129] [240 159 132 130] [240 159 132 131] [240 159 132 132] [240 159 132 133] [240 159 132 134] [240 159 132 135] [240 159 132 136] [240 159 132 137] [240 159 132 138] [240 159 132 139] [240 159 132 140]]<line_sep> |
# Original script: https://github.com/bulletphysics/bullet3/blob/master/examples/pybullet/examples/heightfield.py
import pybullet_data as pd
import rex_gym.util.pybullet_data as rpd
import pybullet as p
import random

from rex_gym.util import flag_mapper

# Heightmap image used for each image-backed terrain flavour.
FLAG_TO_FILENAME = {'mounts': "heightmaps/wm_height_out.png",
                    'maze': "heightmaps/Maze.png"}
# Robot spawn height per terrain flavour.
ROBOT_INIT_POSITION = {'mounts': [0, 0, .85],
                       'plane': [0, 0, 0.21],
                       'hills': [0, 0, 1.98],
                       'maze': [0, 0, 0.21],
                       'random': [0, 0, 0.21]}


class Terrain:
    """Creates and refreshes PyBullet heightfield terrains for the simulation."""

    def __init__(self, terrain_source, terrain_id, columns=256, rows=256):
        # Fixed seed so generated terrains are reproducible across runs.
        random.seed(10)
        self.terrain_source = terrain_source
        self.terrain_id = terrain_id
        self.columns = columns
        self.rows = rows

    def _random_height_data(self, height_perturbation_range):
        """Return a rows*columns height list filled with random 2x2 bumps."""
        heights = [0] * self.columns * self.rows
        for col in range(int(self.columns / 2)):
            for row in range(int(self.rows / 2)):
                bump = random.uniform(0, height_perturbation_range)
                # Each bump covers a 2x2 patch so neighbouring cells agree.
                heights[2 * row + 2 * col * self.rows] = bump
                heights[2 * row + 1 + 2 * col * self.rows] = bump
                heights[2 * row + (2 * col + 1) * self.rows] = bump
                heights[2 * row + 1 + (2 * col + 1) * self.rows] = bump
        return heights

    def generate_terrain(self, env, height_perturbation_range=0.05):
        """Create the terrain body in ``env`` according to ``terrain_source``.

        Supported sources: 'random' (procedural bumps), 'csv' (text
        heightfield + grass texture) and 'png' (image heightmap).
        """
        client = env.pybullet_client
        client.setAdditionalSearchPath(pd.getDataPath())
        client.configureDebugVisualizer(client.COV_ENABLE_RENDERING, 0)
        if self.terrain_source == 'random':
            terrain_data = self._random_height_data(height_perturbation_range)
            terrain_shape = client.createCollisionShape(
                shapeType=client.GEOM_HEIGHTFIELD,
                meshScale=[.05, .05, 1],
                heightfieldTextureScaling=(self.rows - 1) / 2,
                heightfieldData=terrain_data,
                numHeightfieldRows=self.rows,
                numHeightfieldColumns=self.columns)
            terrain = client.createMultiBody(0, terrain_shape)
            client.resetBasePositionAndOrientation(terrain, [0, 0, 0], [0, 0, 0, 1])
        if self.terrain_source == 'csv':
            terrain_shape = client.createCollisionShape(
                shapeType=client.GEOM_HEIGHTFIELD,
                meshScale=[.5, .5, .5],
                fileName="heightmaps/ground0.txt",
                heightfieldTextureScaling=128)
            terrain = client.createMultiBody(0, terrain_shape)
            textureId = client.loadTexture(f"{rpd.getDataPath()}/grass.png")
            client.changeVisualShape(terrain, -1, textureUniqueId=textureId)
            client.resetBasePositionAndOrientation(terrain, [1, 0, 2], [0, 0, 0, 1])
        # TODO do this better..
        if self.terrain_source == 'png':
            terrain_shape = client.createCollisionShape(
                shapeType=client.GEOM_HEIGHTFIELD,
                meshScale=[.1, .1, 24 if self.terrain_id == "mounts" else 1],
                fileName=FLAG_TO_FILENAME[self.terrain_id])
            terrain = client.createMultiBody(0, terrain_shape)
            if self.terrain_id == "mounts":
                textureId = client.loadTexture("heightmaps/gimp_overlay_out.png")
                client.changeVisualShape(terrain, -1, textureUniqueId=textureId)
                client.resetBasePositionAndOrientation(terrain, [0, 0, 2], [0, 0, 0, 1])
            else:
                client.resetBasePositionAndOrientation(terrain, [0, 0, 0], [0, 0, 0, 1])
        # NOTE(review): if terrain_source is none of 'random'/'csv'/'png',
        # ``terrain``/``terrain_shape`` are unbound here and this raises
        # NameError — confirm callers only pass those three values.
        self.terrain_shape = terrain_shape
        client.changeVisualShape(terrain, -1, rgbaColor=[1, 1, 1, 1])
        # client.configureDebugVisualizer(client.COV_ENABLE_RENDERING, 1)

    def update_terrain(self, height_perturbation_range=0.05):
        """Regenerate the random heightfield in place (random source only).

        NOTE(review): this compares against flag_mapper.TERRAIN_TYPE['random']
        while generate_terrain() compares against the literal 'random' —
        verify the two conditions actually agree.
        """
        if self.terrain_source == flag_mapper.TERRAIN_TYPE['random']:
            terrain_data = self._random_height_data(height_perturbation_range)
            # GEOM_CONCAVE_INTERNAL_EDGE may help avoid getting stuck at an
            # internal (shared) edge of the triangle/heightfield.
            # GEOM_CONCAVE_INTERNAL_EDGE is a bit slower to build though.
            flags = p.GEOM_CONCAVE_INTERNAL_EDGE
            # flags = 0
            self.terrain_shape = p.createCollisionShape(
                shapeType=p.GEOM_HEIGHTFIELD,
                flags=flags,
                meshScale=[.05, .05, 1],
                heightfieldTextureScaling=(self.rows - 1) / 2,
                heightfieldData=terrain_data,
                numHeightfieldRows=self.rows,
                numHeightfieldColumns=self.columns,
                replaceHeightfieldIndex=self.terrain_shape)
"""
Dictionary with auto-expiring values for caching purposes.
Expiration happens on any access, object is locked during cleanup from expired
values. Can not store more than max_len elements - the oldest will be deleted.
>>> ExpiringDict(max_len=100, max_age_seconds=10)
The values stored in the following way:
{
key1: (value1, created_time1),
key2: (value2, created_time2)
}
NOTE: iteration over dict and also keys() do not remove expired values!
"""<import_stmt>time<import_from_stmt>threading RLock<import_stmt>sys<import_from_stmt>typing Any Union<try_stmt><block_start><import_from_stmt>collections OrderedDict<block_end><except_stmt>ImportError# Python < 2.7
<block_start><import_from_stmt>ordereddict OrderedDict<block_end><class_stmt>ExpiringDict(OrderedDict)<block_start><def_stmt>__init__ self max_len max_age_seconds items=<none># type: (Union[int, None], Union[float, None], Union[None,dict,OrderedDict,ExpiringDict]) -> None
<block_start><if_stmt><not>self.__is_instance_of_expiring_dict(items)<block_start>self.__assertions(max_len max_age_seconds)<block_end>OrderedDict.__init__(self)<line_sep>self.max_len=max_len<line_sep>self.max_age=max_age_seconds<line_sep>self.lock=RLock()<if_stmt>sys.version_info<ge>(3 5)<block_start>self._safe_keys=<lambda>:list(self.keys())<block_end><else_stmt><block_start>self._safe_keys=self.keys<block_end><if_stmt>items<is><not><none><block_start><if_stmt>self.__is_instance_of_expiring_dict(items)<block_start>self.__copy_expiring_dict(max_len max_age_seconds items)<block_end><elif_stmt>self.__is_instance_of_dict(items)<block_start>self.__copy_dict(items)<block_end><elif_stmt>self.__is_reduced_result(items)<block_start>self.__copy_reduced_result(items)<block_end><else_stmt><block_start><raise>ValueError('can not unpack items')<block_end><block_end><block_end><def_stmt>__contains__ self key<block_start>""" Return True if the dict has a key, else return False. """<try_stmt><block_start><with_stmt>self.lock<block_start>item=OrderedDict.__getitem__(self key)<if_stmt>time.time()-item[1]<l>self.max_age<block_start><return><true><block_end><else_stmt><block_start><del_stmt>self[key]<block_end><block_end><block_end><except_stmt>KeyError<block_start><pass><block_end><return><false><block_end><def_stmt>__getitem__ self key with_age=<false><block_start>""" Return the item of the dict.
Raises a KeyError if key is not in the map.
"""<with_stmt>self.lock<block_start>item=OrderedDict.__getitem__(self key)<line_sep>item_age=time.time()-item[1]<if_stmt>item_age<l>self.max_age<block_start><if_stmt>with_age<block_start><return>item[0] item_age<block_end><else_stmt><block_start><return>item[0]<block_end><block_end><else_stmt><block_start><del_stmt>self[key]<line_sep><raise>KeyError(key)<block_end><block_end><block_end><def_stmt>__setitem__ self key value set_time=<none><block_start>""" Set d[key] to value. """<with_stmt>self.lock<block_start><if_stmt>len(self)<eq>self.max_len<block_start><if_stmt>key<in>self<block_start><del_stmt>self[key]<block_end><else_stmt><block_start><try_stmt><block_start>self.popitem(last=<false>)<block_end><except_stmt>KeyError<block_start><pass><block_end><block_end><block_end><if_stmt>set_time<is><none><block_start>set_time=time.time()<block_end>OrderedDict.__setitem__(self key (value set_time))<block_end><block_end><def_stmt>pop self key default=<none><block_start>""" Get item from the dict and remove it.
Return default if expired or does not exist. Never raise KeyError.
"""<with_stmt>self.lock<block_start><try_stmt><block_start>item=OrderedDict.__getitem__(self key)<del_stmt>self[key]<line_sep><return>item[0]<block_end><except_stmt>KeyError<block_start><return>default<block_end><block_end><block_end><def_stmt>ttl self key<block_start>""" Return TTL of the `key` (in seconds).
Returns None for non-existent or expired keys.
"""<line_sep>key_value,key_age=self.get(key with_age=<true>)# type: Any, Union[None, float]
<if_stmt>key_age<block_start>key_ttl=self.max_age-key_age<if_stmt>key_ttl<g>0<block_start><return>key_ttl<block_end><block_end><return><none><block_end><def_stmt>get self key default=<none> with_age=<false><block_start>""" Return the value for key if key is in the dictionary, else default. """<try_stmt><block_start><return>self.__getitem__(key with_age)<block_end><except_stmt>KeyError<block_start><if_stmt>with_age<block_start><return>default <none><block_end><else_stmt><block_start><return>default<block_end><block_end><block_end><def_stmt>items self<block_start>""" Return a copy of the dictionary's list of (key, value) pairs. """<line_sep>r=[]<for_stmt>key self._safe_keys()<block_start><try_stmt><block_start>r.append((key self[key]))<block_end><except_stmt>KeyError<block_start><pass><block_end><block_end><return>r<block_end><def_stmt>items_with_timestamp self<block_start>""" Return a copy of the dictionary's list of (key, value, timestamp) triples. """<line_sep>r=[]<for_stmt>key self._safe_keys()<block_start><try_stmt><block_start>r.append((key OrderedDict.__getitem__(self key)))<block_end><except_stmt>KeyError<block_start><pass><block_end><block_end><return>r<block_end><def_stmt>values self<block_start>""" Return a copy of the dictionary's list of values.
See the note for dict.items(). """<line_sep>r=[]<for_stmt>key self._safe_keys()<block_start><try_stmt><block_start>r.append(self[key])<block_end><except_stmt>KeyError<block_start><pass><block_end><block_end><return>r<block_end><def_stmt>fromkeys self<block_start>""" Create a new dictionary with keys from seq and values set to value. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>iteritems self<block_start>""" Return an iterator over the dictionary's (key, value) pairs. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>itervalues self<block_start>""" Return an iterator over the dictionary's values. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>viewitems self<block_start>""" Return a new view of the dictionary's items ((key, value) pairs). """<line_sep><raise>NotImplementedError()<block_end><def_stmt>viewkeys self<block_start>""" Return a new view of the dictionary's keys. """<line_sep><raise>NotImplementedError()<block_end><def_stmt>viewvalues self<block_start>""" Return a new view of the dictionary's values. 
"""<line_sep><raise>NotImplementedError()<block_end><def_stmt>__reduce__ self<block_start>reduced=self.__class__ (self.max_len self.max_age ('reduce_result' self.items_with_timestamp()))<line_sep><return>reduced<block_end><def_stmt>__assertions self max_len max_age_seconds<block_start>self.__assert_max_len(max_len)<line_sep>self.__assert_max_age_seconds(max_age_seconds)<block_end>@staticmethod<def_stmt>__assert_max_len max_len<block_start><assert_stmt>max_len<ge>1<block_end>@staticmethod<def_stmt>__assert_max_age_seconds max_age_seconds<block_start><assert_stmt>max_age_seconds<ge>0<block_end>@staticmethod<def_stmt>__is_reduced_result items<block_start><if_stmt>len(items)<eq>2<and>items[0]<eq>'reduce_result'<block_start><return><true><block_end><return><false><block_end>@staticmethod<def_stmt>__is_instance_of_expiring_dict items<block_start><if_stmt>items<is><not><none><block_start><if_stmt>isinstance(items ExpiringDict)<block_start><return><true><block_end><block_end><return><false><block_end>@staticmethod<def_stmt>__is_instance_of_dict items<block_start><if_stmt>isinstance(items dict)<block_start><return><true><block_end><return><false><block_end><def_stmt>__copy_expiring_dict self max_len max_age_seconds items# type: (Union[int, None], Union[float, None], Any) -> None
<block_start><if_stmt>max_len<is><not><none><block_start>self.__assert_max_len(max_len)<line_sep>self.max_len=max_len<block_end><else_stmt><block_start>self.max_len=items.max_len<block_end><if_stmt>max_age_seconds<is><not><none><block_start>self.__assert_max_age_seconds(max_age_seconds)<line_sep>self.max_age=max_age_seconds<block_end><else_stmt><block_start>self.max_age=items.max_age<block_end>[self.__setitem__(key value set_time)<for>key,(value set_time) items.items_with_timestamp()]<block_end><def_stmt>__copy_dict self items# type: (dict) -> None
<block_start>[self.__setitem__(key value)<for>key,value items.items()]<block_end><def_stmt>__copy_reduced_result self items<block_start>[self.__setitem__(key value set_time)<for>key,(value set_time) items[1]]<block_end><block_end> |
<import_stmt>h5py<import_stmt>numpy<as>np<import_from_stmt>keras.datasets mnist<import_from_stmt>keras.utils to_categorical<line_sep># input image dimensions
img_rows,img_cols=28 28<line_sep># the data, shuffled and split between train and test sets
(x_train y_train),(x_test y_test)=mnist.load_data()<line_sep>x_train=x_train.reshape(x_train.shape[0] img_rows img_cols 1)<line_sep>x_test=x_test.reshape(x_test.shape[0] img_rows img_cols 1)<line_sep>input_shape=(img_rows img_cols 1)<line_sep>x_train=x_train.astype('float16')<line_sep>x_test=x_test.astype('float16')<line_sep>inputs=np.concatenate((x_train x_test))/255<line_sep>labels=np.concatenate((y_train y_test))# ints, 0 to 10
###########################################
# fix mis-labeled image(s) in Keras dataset
labels[10994]=9<line_sep>###########################################
targets=to_categorical(labels).astype("uint8")<line_sep>string=h5py.special_dtype(vlen=str)<line_sep>labels=np.array([str(label)<for>label labels] dtype=string)<line_sep>print("creating h5...")<with_stmt>h5py.File("mnist.h5" "w")<as>h5<block_start>dset=h5.create_dataset('inputs' data=[inputs] compression='gzip' compression_opts=9)<line_sep>dset=h5.create_dataset('targets' data=[targets] compression='gzip' compression_opts=9)<line_sep>dset=h5.create_dataset('labels' data=[labels] compression='gzip' compression_opts=9)<block_end>print("done!")<line_sep> |
# pylint: disable=missing-docstring,invalid-name,too-few-public-methods
<class_stmt>A<block_start>myfield:int<block_end><class_stmt>B(A)<block_start><pass><block_end><class_stmt>C<block_start><pass><block_end><class_stmt>D(C B)<block_start><pass><block_end>a=A()<line_sep>print(a.myfield)<line_sep>b=B()<line_sep>print(b.myfield)<line_sep>d=D()<line_sep>print(d.myfield)<line_sep>c=C()<line_sep>print(c.myfield)# [no-member]
|
# PyZX - Python library for quantum circuit rewriting
# and optimization using the ZX-calculus
# Copyright (C) 2018 - <NAME> and <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_from_stmt>typing Optional<import_from_stmt>.base BaseGraph<import_from_stmt>.graph_s GraphS<try_stmt><block_start><import_stmt>quizx# type: ignore
<block_end><except_stmt>ImportError<block_start>quizx=<none><block_end>backends={'simple':<true> 'quizx-vec':<true>}<def_stmt>Graph backend:Optional[str]=<none><arrow>BaseGraph<block_start>"""Returns an instance of an implementation of :class:`~pyzx.graph.base.BaseGraph`.
By default :class:`~pyzx.graph.graph_s.GraphS` is used.
Currently ``backend`` is allowed to be `simple` (for the default),
or 'graph_tool' and 'igraph'.
This method is the preferred way to instantiate a ZX-diagram in PyZX.
Example:
To construct an empty ZX-diagram, just write::
g = zx.Graph()
"""<if_stmt>backend<is><none><block_start>backend='simple'<block_end><if_stmt>backend<not><in>backends<block_start><raise>KeyError("Unavailable backend '{}'".format(backend))<block_end><if_stmt>backend<eq>'simple'<block_start><return>GraphS()<block_end><if_stmt>backend<eq>'graph_tool'<block_start><return>GraphGT()<block_end><if_stmt>backend<eq>'igraph'<block_start><return>GraphIG()<block_end><if_stmt>backend<eq>'quizx-vec'<block_start><return>quizx.VecGraph()# type: ignore
<block_end><return>GraphS()<block_end>Graph.from_json=GraphS.from_json# type: ignore
Graph.from_tikz=GraphS.from_tikz# type: ignore
<try_stmt><block_start><import_stmt>graph_tool.all<as>gt<import_from_stmt>.graph_gt GraphGT<line_sep>backends['graph_tool']=gt<block_end><except_stmt>ImportError<block_start><pass><block_end><try_stmt><block_start><import_stmt>igraph<as>ig<import_from_stmt>.graph_ig GraphIG<line_sep>backends['igraph']=ig<block_end><except_stmt>ImportError<block_start><pass><block_end> |
# SAME BSTS
# O(N^2) time and space
<def_stmt>sameBsts arrayOne arrayTwo# Write your code here.
<block_start><if_stmt>len(arrayOne)<ne>len(arrayTwo)<block_start><return><false><block_end><if_stmt>len(arrayOne)<eq>0<block_start><return><true><block_end><if_stmt>arrayOne[0]<ne>arrayTwo[0]<block_start><return><false><block_end>leftSubtreeFirst=[num<for>num arrayOne[1:]<if>num<l>arrayOne[0]]<line_sep>rightSubtreeFirst=[num<for>num arrayOne[1:]<if>num<ge>arrayOne[0]]<line_sep>leftSubtreeSecond=[num<for>num arrayTwo[1:]<if>num<l>arrayTwo[0]]<line_sep>rightSubtreeSecond=[num<for>num arrayTwo[1:]<if>num<ge>arrayTwo[0]]<line_sep><return>sameBsts(leftSubtreeFirst leftSubtreeSecond)<and>sameBsts(rightSubtreeFirst rightSubtreeSecond)<block_end># O(N^2) time and O(d) space
<def_stmt>sameBsts arrayOne arrayTwo# Write your code here.
<block_start><return>areSameBsts(arrayOne arrayTwo 0 0 float('-inf') float('inf'))<block_end><def_stmt>areSameBsts arrayOne arrayTwo rootIdxOne rootIdxTwo minVal maxVal<block_start><if_stmt>rootIdxOne<eq>-1<or>rootIdxTwo<eq>-1<block_start><return>rootIdxOne<eq>rootIdxTwo<block_end><if_stmt>arrayOne[rootIdxOne]<ne>arrayTwo[rootIdxTwo]<block_start><return><false><block_end>leftRootIdxOne=getIdxOfFirstSmaller(arrayOne rootIdxOne minVal)<line_sep>leftRootIdxTwo=getIdxOfFirstSmaller(arrayTwo rootIdxTwo minVal)<line_sep>rightRootIdxOne=getIdxOfFirstBiggerOrEqual(arrayOne rootIdxOne maxVal)<line_sep>rightRootIdxTwo=getIdxOfFirstBiggerOrEqual(arrayTwo rootIdxTwo maxVal)<line_sep>currentValue=arrayOne[rootIdxOne]<line_sep>leftAreSame=areSameBsts(arrayOne arrayTwo leftRootIdxOne leftRootIdxTwo minVal currentValue)<line_sep>rightAreSame=areSameBsts(arrayOne arrayTwo rightRootIdxOne rightRootIdxTwo currentValue maxVal)<line_sep><return>leftAreSame<and>rightAreSame<block_end><def_stmt>getIdxOfFirstSmaller array startingIdx minVal<block_start><for_stmt>i range(startingIdx+1 len(array))<block_start><if_stmt>array[i]<l>array[startingIdx]<and>array[i]<ge>minVal<block_start><return>i<block_end><block_end><return>-1<block_end><def_stmt>getIdxOfFirstBiggerOrEqual array startingIdx maxVal<block_start><for_stmt>i range(startingIdx+1 len(array))<block_start><if_stmt>array[i]<ge>array[startingIdx]<and>array[i]<l>maxVal<block_start><return>i<block_end><block_end><return>-1<block_end> |
# -*- coding: utf-8 -*-
"""
IHLS Colour Encoding
====================
Defines the :math:`IHLS` (Improved HLS) colourspace related transformations:
- :func:`colour.RGB_to_IHLS`
- :func:`colour.IHLS_to_RGB`
References
----------
- :cite:`Hanbury2003` : <NAME>. (2003). A 3D-Polar Coordinate Colour
Representation Well Adapted to Image Analysis. In <NAME> & <NAME>
(Eds.), Image Analysis (pp. 804–811). Springer Berlin Heidelberg.
ISBN:978-3-540-45103-7
"""<import_stmt>numpy<as>np<import_from_stmt>colour.algebra vector_dot<import_from_stmt>colour.utilities from_range_1 to_domain_1 tstack tsplit zeros <line_sep>__author__='Colour Developers'<line_sep>__copyright__='Copyright (C) 2013-2021 - Colour Developers'<line_sep>__license__='New BSD License - https://opensource.org/licenses/BSD-3-Clause'<line_sep>__maintainer__='Colour Developers'<line_sep>__email__='<EMAIL>'<line_sep>__status__='Production'<line_sep>__all__=['RGB_to_IHLS' 'IHLS_to_RGB']<line_sep>MATRIX_RGB_TO_YC_1_C_2=np.array([[0.2126 0.7152 0.0722] [1 -0.5 -0.5] [0 -np.sqrt(3)/2 np.sqrt(3)/2] ])<line_sep>"""
*RGB* colourspace to *YC_1C_2* colourspace matrix.
MATRIX_RGB_TO_YC_1_C_2 : array_like, (3, 3)
"""<line_sep>MATRIX_YC_1_C_2_TO_RGB=np.linalg.inv(MATRIX_RGB_TO_YC_1_C_2)<line_sep>"""
*YC_1C_2* colourspace to *RGB* colourspace matrix.
MATRIX_YC_1_C_2_TO_RGB : array_like, (3, 3)
"""<def_stmt>RGB_to_IHLS RGB<block_start>"""
Converts from *RGB* colourspace to *IHLS* (Improved HLS) colourspace.
Parameters
----------
RGB : array-like
*RGB* colourspace array.
Returns
-------
ndarray
*HYS* colourspace array.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``RGB`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``HYS`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Hanbury2003`
Examples
--------
>>> RGB = np.array([0.45595571, 0.03039702, 0.04087245])
>>> RGB_to_IHLS(RGB) # doctest: +ELLIPSIS
array([ 6.2616051..., 0.1216271..., 0.4255586...])
"""<line_sep>RGB=to_domain_1(RGB)<line_sep>R,G,B=tsplit(RGB)<line_sep>Y,C_1,C_2=tsplit(vector_dot(MATRIX_RGB_TO_YC_1_C_2 RGB))<line_sep>C=np.sqrt(C_1<power>2+C_2<power>2)<line_sep>acos_C_1_C_2=zeros(C.shape)<line_sep>acos_C_1_C_2[C<ne>0]=np.arccos(C_1[C<ne>0]/C[C<ne>0])<line_sep>H=np.where(C_2<le>0 acos_C_1_C_2 (np.pi<times>2)-acos_C_1_C_2)<line_sep>S=np.maximum(np.maximum(R G) B)-np.minimum(np.minimum(R G) B)<line_sep>HYS=tstack([H Y S])<line_sep><return>from_range_1(HYS)<block_end><def_stmt>IHLS_to_RGB HYS<block_start>"""
Converts from *IHLS* (Improved HLS) colourspace to *RGB* colourspace.
Parameters
----------
HYS : array-like
*IHLS* colourspace array.
Returns
-------
ndarray
*RGB* colourspace array.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``HYS`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``RGB`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Hanbury2003`
Examples
--------
>>> HYS = np.array([6.26160518, 0.12162712, 0.42555869])
>>> IHLS_to_RGB(HYS) # doctest: +ELLIPSIS
array([ 0.4559557..., 0.0303970..., 0.0408724...])
"""<line_sep>H,Y,S=tsplit(to_domain_1(HYS))<line_sep>pi_3=np.pi/3<line_sep>k=np.floor(H/(pi_3))<line_sep>H_s=H-k<times>(pi_3)<line_sep>C=(np.sqrt(3)<times>S)/(2<times>np.sin((2<times>pi_3)-H_s))<line_sep>C_1=C<times>np.cos(H)<line_sep>C_2=-C<times>np.sin(H)<line_sep>RGB=vector_dot(MATRIX_YC_1_C_2_TO_RGB tstack([Y C_1 C_2]))<line_sep><return>from_range_1(RGB)<block_end> |
<import_stmt>re<import_from_stmt>vaurien.protocols.base BaseProtocol<import_from_stmt>vaurien.util chunked<line_sep>RE_LEN=re.compile('Content-Length: (\d+)' re.M|re.I)<line_sep>RE_KEEPALIVE=re.compile('Connection: Keep-Alive')<line_sep>RE_MEMCACHE_COMMAND=re.compile('(.*)\r\n')<line_sep>EOH='\r\n\r\n'<line_sep>CRLF='\r\n'<class_stmt>Memcache(BaseProtocol)<block_start>"""Memcache protocol.
"""<line_sep>name='memcache'<def_stmt>_handle self source dest to_backend on_between_handle# https://github.com/memcached/memcached/blob/master/doc/protocol.txt
# Sending the query
<block_start>buffer=self._get_data(source)<if_stmt><not>buffer<block_start>self._abort_handling(to_backend dest)<line_sep><return><block_end># sending the first packet
dest.sendall(buffer)<line_sep>on_between_handle()<line_sep># finding the command we sent.
cmd=RE_MEMCACHE_COMMAND.search(buffer)<if_stmt>cmd<is><none># wat ?
<block_start>self._abort_handling(to_backend dest)<line_sep><return><block_end># looking at the command
cmd=cmd.groups()[0]<line_sep>buffer_size=self.option('buffer')<line_sep>cmd_parts=cmd.split()<line_sep>mcmd=cmd_parts[0]<if_stmt>mcmd<in>('set' 'add' 'replace' 'append')<block_start>cmd_size=len(cmd)+len(CRLF)<line_sep>data_size=int(cmd_parts[-1])<line_sep>total_size=cmd_size+data_size<line_sep># grabbing more data if needed
left_to_read=total_size-len(buffer)+len(CRLF)<if_stmt>left_to_read<g>0<block_start><for_stmt>chunk chunked(left_to_read buffer_size)<block_start>data=source.recv(chunk)<line_sep>buffer<augadd>data<line_sep>dest.sendall(data)<block_end><block_end><block_end># Receiving the response now
buffer=self._get_data(dest buffer_size)<line_sep>source.sendall(buffer)<if_stmt>buffer.startswith('VALUE')# we're getting back a value.
<block_start>EOW='END'+CRLF<block_end><else_stmt><block_start>EOW=CRLF<block_end><while_stmt><not>buffer.endswith(EOW)<block_start>data=self._get_data(dest buffer_size)<line_sep>buffer<augadd>data<line_sep>source.sendall(data)<block_end># we're done
<return><true><block_end># keeping connected
<block_end> |
<import_stmt>os<import_stmt>cv2<import_stmt>glob<import_stmt>sys<line_sep>sys.path.append('/usr/local/lib/python2.7/site-packages/')<line_sep>#os.system('/home/adrian/dense_flow/build/extract_cpu -f={} -x={} -y={} -i=tmp/image -b 20 -t 1 -d 3 -o=dir'.format('test.avi', '/flow_x', '/flow_y'))
data_folder='FDD_images/'<line_sep>output_path='FDD_OF/'<line_sep>i=0<if_stmt><not>os.path.exists(output_path)<block_start>os.mkdir(output_path)<block_end>folders=[f<for>f os.listdir(data_folder)<if>os.path.isdir(os.path.join(data_folder f))]<line_sep>folders.sort()<for_stmt>folder folders<block_start>event_folders=[f<for>f os.listdir(data_folder+folder)<if>os.path.isdir(os.path.join(data_folder+folder+'/' f))]<line_sep>event_folders.sort()<for_stmt>event_folder event_folders<block_start>path=data_folder+folder+'/'+event_folder<line_sep>flow=output_path+folder+'/'+event_folder<if_stmt><not>os.path.exists(flow)<block_start>os.makedirs(flow)<block_end><block_end>os.system('/home/anunez/dense_flow2/build/extract_cpu -f={} -x={} -y={} -i=tmp/image -b=20 -t=1 -d=0 -s=1 -o=dir'.format(path flow+'/flow_x' flow+'/flow_y'))<block_end> |
<import_stmt>sys<import_from_stmt>time time<class_stmt>Progress<block_start>"""
"""<def_stmt>__init__ self iterable size=<none> interval=0.1<block_start>"""
Args:
iterable
size (int): max size of iterable
interval (float): update bar interval second, default is `0.1`
Attrs:
BAR_LENGTH (int): bar length, default is `32`
SYMBOL_DONE (str): symbol indicating complation
SYMBOL_REST (str): symbol indicating remaining
prefix (str): string template before progress bar
suffix (str): string template after progress bar
template (str): string template for rendering, `{prefix} {bar} {suffix}`
"""<line_sep>self.iterable=iterable<line_sep>self.interval=interval<line_sep>self.batch=1<line_sep>self.size=size<if_stmt>hasattr(iterable '__len__')<block_start>self.size=len(iterable)<block_end># is pytorch dataloader
<if_stmt>hasattr(iterable 'batch_size')<block_start>self.batch=getattr(iterable 'batch_size')<line_sep>self.size=len(iterable.dataset)<block_end>self.idx=0<line_sep>self.time=<none><line_sep>self.BAR_LENGTH=32<line_sep>self.SYMBOL_DONE='█'<line_sep>self.SYMBOL_REST='.'<line_sep>self.prefix=""<line_sep>self.suffix=""<if_stmt>self.size<is><none><block_start>self.template="{prefix} {done} iters {time:.2f}s {suffix}"<block_end><else_stmt><block_start>self.template="{prefix} {percent:3.0f}%|{bar}| [{done}/{size}] {time:.2f}s {suffix}"<block_end><block_end><def_stmt>__len__ self<block_start><return>self.size<block_end><def_stmt>__iter__ self<block_start>self.reset()<line_sep># reset time
start=time()<line_sep>last_time=start<for_stmt>item self.iterable<block_start><yield>item<line_sep>self.idx<augadd>1<line_sep>curr_time=time()<line_sep>self.time=curr_time-start<line_sep># skip update if delta is too small
<if_stmt>curr_time-last_time<l>self.interval<block_start><continue><block_end>last_time=curr_time<line_sep># update bar
self.flush()<block_end># finally updating for the status of end
self.flush()<line_sep>self.end()<block_end><def_stmt>reset self# reset index
<block_start>self.idx=0<block_end><def_stmt>end self<block_start>self.print('\n')<block_end><def_stmt>flush self<block_start><if_stmt>self.size<is><none><block_start>done=self.idx<times>self.batch<line_sep>percent=0<line_sep>bar=<none><block_end><else_stmt><block_start>done=min(self.idx<times>self.batch self.size)<line_sep>percent=done/self.size<line_sep>bar=(self.SYMBOL_DONE<times>int(percent<times>self.BAR_LENGTH)).ljust(self.BAR_LENGTH self.SYMBOL_REST)<block_end>self.print('\r'+self.template.format(percent=percent<times>100 bar=bar done=done size=self.size time=self.time tps=done/self.time prefix=self.prefix suffix=self.suffix ))<block_end><def_stmt>print self text<block_start>sys.stdout.write(text)<line_sep>sys.stdout.flush()<block_end><block_end> |
<import_from_stmt>django.forms.utils flatatt<import_from_stmt>django.utils.html format_html<import_from_stmt>django_bootstrap5.text text_value<import_from_stmt>django_bootstrap5.utils get_url_attrs<def_stmt>render_script_tag url<block_start>"""Build a script tag."""<line_sep><return>render_tag("script" get_url_attrs(url attr_name="src"))<block_end><def_stmt>render_link_tag url<block_start>"""Build a link tag."""<line_sep>attrs=get_url_attrs(url attr_name="href")<line_sep>attrs["rel"]="stylesheet"<line_sep><return>render_tag("link" attrs=attrs close=<false>)<block_end><def_stmt>render_tag tag attrs=<none> content=<none> close=<true><block_start>"""Render an HTML tag."""<line_sep>attrs_string=flatatt(attrs)<if>attrs<else>""<line_sep>builder="<{tag}{attrs}>{content}"<line_sep>content_string=text_value(content)<if_stmt>content_string<or>close<block_start>builder<augadd>"</{tag}>"<block_end><return>format_html(builder tag=tag attrs=attrs_string content=content_string)<block_end> |
# Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
# Project developers. See the top-level LICENSE file for dates and other
# details. No copyright assignment is required to contribute to VisIt.
"""
file: vpe_flow_npy_ops_example_1.py
author: <NAME> <<EMAIL>>
created: 3/28/2012
description:
vpe flow example demonstrating use of flow.filters.npy_ops.
"""<import_from_stmt>flow *<import_from_stmt>flow.filters pyocl_compile<def_stmt>setup_workspace <block_start>w=Workspace()<line_sep>w.register_filters(pyocl_compile)<line_sep>ctx=w.add_context("pyocl_compile" "root")<line_sep>ctx.start()<line_sep>ctx.add_filter("decompose" "dwdx" {"index":0})<line_sep>ctx.add_filter("decompose" "dwdy" {"index":1})<line_sep>ctx.add_filter("decompose" "dwdz" {"index":2})<line_sep>ctx.add_filter("grad" "dw")<line_sep>ctx.add_filter("mult" "vx_sq")<line_sep>ctx.add_filter("mult" "vy_sq")<line_sep>ctx.add_filter("mult" "vz_sq")<line_sep>ctx.add_filter("add" "v_add_1")<line_sep>ctx.add_filter("add" "v_add")<line_sep>ctx.add_filter("sqrt" "v_sqrt")<line_sep>ctx.connect(":vz" "dw:in")<line_sep>ctx.connect(":dims" "dw:dims")<line_sep>ctx.connect(":x" "dw:x")<line_sep>ctx.connect(":y" "dw:y")<line_sep>ctx.connect(":z" "dw:z")<line_sep>ctx.connect("dw" "dwdx:in")<line_sep>ctx.connect("dw" "dwdy:in")<line_sep>ctx.connect("dw" "dwdz:in")<line_sep>ctx.connect("dwdx" "vx_sq:in_a")<line_sep>ctx.connect("dwdx" "vx_sq:in_b")<line_sep>ctx.connect("dwdy" "vy_sq:in_a")<line_sep>ctx.connect("dwdy" "vy_sq:in_b")<line_sep>ctx.connect("dwdz" "vz_sq:in_a")<line_sep>ctx.connect("dwdz" "vz_sq:in_b")<line_sep>ctx.connect("vx_sq" "v_add_1:in_a")<line_sep>ctx.connect("vy_sq" "v_add_1:in_b")<line_sep>ctx.connect("v_add_1" "v_add:in_a")<line_sep>ctx.connect("vz_sq" "v_add:in_b")<line_sep>ctx.connect("v_add" "v_sqrt:in")<line_sep><return>w<block_end> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.