ngram
listlengths
0
67.8k
[ "use poetry for our build, but this file seems to be required #", "to be required # in order to get GitHub dependencies graph to work", "#!/usr/bin/env python # we use poetry for our build, but this file seems", "seems to be required # in order to get GitHub dependencies graph to", "but this file seems to be required # in order to get GitHub", "our build, but this file seems to be required # in order to", "in order to get GitHub dependencies graph to work import setuptools if __name__", "required # in order to get GitHub dependencies graph to work import setuptools", "order to get GitHub dependencies graph to work import setuptools if __name__ ==", "python # we use poetry for our build, but this file seems to", "for our build, but this file seems to be required # in order", "file seems to be required # in order to get GitHub dependencies graph", "to get GitHub dependencies graph to work import setuptools if __name__ == \"__main__\":", "<reponame>huonw/strawberry<gh_stars>0 #!/usr/bin/env python # we use poetry for our build, but this file", "poetry for our build, but this file seems to be required # in", "build, but this file seems to be required # in order to get", "this file seems to be required # in order to get GitHub dependencies", "# in order to get GitHub dependencies graph to work import setuptools if", "be required # in order to get GitHub dependencies graph to work import", "# we use poetry for our build, but this file seems to be", "we use poetry for our build, but this file seems to be required", "get GitHub dependencies graph to work import setuptools if __name__ == \"__main__\": setuptools.setup(name=\"strawberry-graphql\")" ]
[ "Update from vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\"", "it logs all of its updates in a port with name given by", "version of process_class. \"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class must be a", "with 'Logging_'. Args: process_class: The Process class to be logged logging_port_name: Name of", "= {logging_port_name: update} # log the update return {**update, **log_update} logging_process.ports_schema = ports_schema", "same except that it logs all of its updates in a port with", "if not issubclass(process_class, Process): raise ValueError(f'process_class must be a subclass of Process.') logging_process", "Schema, State, Update from vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) ->", "not issubclass(process_class, Process): raise ValueError(f'process_class must be a subclass of Process.') logging_process =", "super().ports_schema() # type: ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit': True} #", "'_updater': 'set', '_emit': True} # add a new port return ports def next_update(", "The Process class to be logged logging_port_name: Name of the port in which", "return ports def next_update( self, timestep: Union[float, int], states: State ) -> Update:", "so super() knows what to do def ports_schema( self ) -> Schema: ports", "ports_schema # type: ignore logging_process.next_update = next_update # type: ignore return logging_process def", "ports def next_update( self, timestep: Union[float, int], states: State ) -> Update: update", "vivarium.core.process import Process from vivarium.core.types import Schema, State, Update from vivarium.composites.toys import ToyProcess", "update} # log the update return {**update, **log_update} logging_process.ports_schema = ports_schema # type:", "return logging_process def test_logging_process(): 
logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema()", "all of its updates in a port with name given by logging_port_name. The", "the port in which updates will be stored ('log_update' by default.) Returns: logging_process:", "type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process # set __class__ manually so super() knows", "updates will be stored ('log_update' by default.) Returns: logging_process: the logging version of", "Name of the port in which updates will be stored ('log_update' by default.)", "return {**update, **log_update} logging_process.ports_schema = ports_schema # type: ignore logging_process.next_update = next_update #", "prefixed with 'Logging_'. Args: process_class: The Process class to be logged logging_port_name: Name", "type: ignore logging_process.next_update = next_update # type: ignore return logging_process def test_logging_process(): logging_toy", "<reponame>vivarium-collective/vivarium-core<filename>vivarium/library/wrappers.py from typing import Union from vivarium.core.process import Process from vivarium.core.types import Schema,", "type: ignore return logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports", "# log the update return {**update, **log_update} logging_process.ports_schema = ports_schema # type: ignore", "Process from vivarium.core.types import Schema, State, Update from vivarium.composites.toys import ToyProcess def make_logging_process(", "of its updates in a port with name given by logging_port_name. The returned", "name as process_class, but prefixed with 'Logging_'. Args: process_class: The Process class to", "process_class. 
\"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class must be a subclass of", "logging_port_name=\"log_update\" ) -> type: \"\"\" Given a subclass of Process, returns a new", "import Process from vivarium.core.types import Schema, State, Update from vivarium.composites.toys import ToyProcess def", "behaves exactly the same except that it logs all of its updates in", "do def ports_schema( self ) -> Schema: ports = super().ports_schema() # type: ignore", "a new port return ports def next_update( self, timestep: Union[float, int], states: State", "('log_update' by default.) Returns: logging_process: the logging version of process_class. \"\"\" if not", "a new subclass that behaves exactly the same except that it logs all", "update = super().next_update(timestep, states) # type: ignore log_update = {logging_port_name: update} # log", "a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process #", "updates in a port with name given by logging_port_name. The returned class has", "def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given a subclass of Process,", "ports_schema( self ) -> Schema: ports = super().ports_schema() # type: ignore ports[logging_port_name] =", "the update return {**update, **log_update} logging_process.ports_schema = ports_schema # type: ignore logging_process.next_update =", "the logging version of process_class. \"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class must", "logging_port_name: Name of the port in which updates will be stored ('log_update' by", "same name as process_class, but prefixed with 'Logging_'. 
Args: process_class: The Process class", "subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process # set", "'_emit': True} # add a new port return ports def next_update( self, timestep:", "ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit': True} # add a new port", "self ) -> Schema: ports = super().ports_schema() # type: ignore ports[logging_port_name] = {'_default':", "logging_process.next_update = next_update # type: ignore return logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess)", "ignore logging_process.next_update = next_update # type: ignore return logging_process def test_logging_process(): logging_toy =", "# type: ignore logging_process.next_update = next_update # type: ignore return logging_process def test_logging_process():", "that it logs all of its updates in a port with name given", "True} # add a new port return ports def next_update( self, timestep: Union[float,", "-> type: \"\"\" Given a subclass of Process, returns a new subclass that", "in a port with name given by logging_port_name. 
The returned class has the", "states) # type: ignore log_update = {logging_port_name: update} # log the update return", "logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in ports", "-> Update: update = super().next_update(timestep, states) # type: ignore log_update = {logging_port_name: update}", "{}, '_updater': 'set', '_emit': True} # add a new port return ports def", "logged logging_port_name: Name of the port in which updates will be stored ('log_update'", "self, timestep: Union[float, int], states: State ) -> Update: update = super().next_update(timestep, states)", "Update: update = super().next_update(timestep, states) # type: ignore log_update = {logging_port_name: update} #", "make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in ports if __name__", "= super().ports_schema() # type: ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit': True}", "**log_update} logging_process.ports_schema = ports_schema # type: ignore logging_process.next_update = next_update # type: ignore", "logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in ports if __name__ ==", "log the update return {**update, **log_update} logging_process.ports_schema = ports_schema # type: ignore logging_process.next_update", "but prefixed with 'Logging_'. Args: process_class: The Process class to be logged logging_port_name:", "knows what to do def ports_schema( self ) -> Schema: ports = super().ports_schema()", "process_class: The Process class to be logged logging_port_name: Name of the port in", "ports = super().ports_schema() # type: ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit':", "a port with name given by logging_port_name. 
The returned class has the same", "# type: ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit': True} # add", "Process): raise ValueError(f'process_class must be a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,),", "issubclass(process_class, Process): raise ValueError(f'process_class must be a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\",", "def ports_schema( self ) -> Schema: ports = super().ports_schema() # type: ignore ports[logging_port_name]", "name given by logging_port_name. The returned class has the same name as process_class,", "{'_default': {}, '_updater': 'set', '_emit': True} # add a new port return ports", "new subclass that behaves exactly the same except that it logs all of", "= super().next_update(timestep, states) # type: ignore log_update = {logging_port_name: update} # log the", "import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given a subclass", "of Process, returns a new subclass that behaves exactly the same except that", "by default.) Returns: logging_process: the logging version of process_class. \"\"\" if not issubclass(process_class,", "= make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in ports if", "super() knows what to do def ports_schema( self ) -> Schema: ports =", "which updates will be stored ('log_update' by default.) 
Returns: logging_process: the logging version", "logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in ports if __name__ == '__main__': test_logging_process()", "ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given a subclass of", "states: State ) -> Update: update = super().next_update(timestep, states) # type: ignore log_update", "has the same name as process_class, but prefixed with 'Logging_'. Args: process_class: The", "be stored ('log_update' by default.) Returns: logging_process: the logging version of process_class. \"\"\"", "be logged logging_port_name: Name of the port in which updates will be stored", "# add a new port return ports def next_update( self, timestep: Union[float, int],", "{logging_port_name: update} # log the update return {**update, **log_update} logging_process.ports_schema = ports_schema #", "# set __class__ manually so super() knows what to do def ports_schema( self", "in which updates will be stored ('log_update' by default.) Returns: logging_process: the logging", "stored ('log_update' by default.) Returns: logging_process: the logging version of process_class. \"\"\" if", "timestep: Union[float, int], states: State ) -> Update: update = super().next_update(timestep, states) #", "must be a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ =", "State, Update from vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type:", "port with name given by logging_port_name. 
The returned class has the same name", "exactly the same except that it logs all of its updates in a", "new port return ports def next_update( self, timestep: Union[float, int], states: State )", "super().next_update(timestep, states) # type: ignore log_update = {logging_port_name: update} # log the update", "subclass that behaves exactly the same except that it logs all of its", "raise ValueError(f'process_class must be a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {})", "= logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in ports if __name__ == '__main__':", ") -> Update: update = super().next_update(timestep, states) # type: ignore log_update = {logging_port_name:", "vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given a", ") -> type: \"\"\" Given a subclass of Process, returns a new subclass", "ignore return logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports =", "Process, returns a new subclass that behaves exactly the same except that it", "the same except that it logs all of its updates in a port", "logging_process: the logging version of process_class. \"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class", "'Logging_'. Args: process_class: The Process class to be logged logging_port_name: Name of the", "set __class__ manually so super() knows what to do def ports_schema( self )", "ignore log_update = {logging_port_name: update} # log the update return {**update, **log_update} logging_process.ports_schema", "will be stored ('log_update' by default.) 
Returns: logging_process: the logging version of process_class.", "logging_process # set __class__ manually so super() knows what to do def ports_schema(", "logs all of its updates in a port with name given by logging_port_name.", "as process_class, but prefixed with 'Logging_'. Args: process_class: The Process class to be", "Schema: ports = super().ports_schema() # type: ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set',", "next_update # type: ignore return logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance =", "def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update'", "that behaves exactly the same except that it logs all of its updates", "to do def ports_schema( self ) -> Schema: ports = super().ports_schema() # type:", "with name given by logging_port_name. The returned class has the same name as", "from vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given", "Union[float, int], states: State ) -> Update: update = super().next_update(timestep, states) # type:", "import Schema, State, Update from vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\" )", "type: \"\"\" Given a subclass of Process, returns a new subclass that behaves", "type: ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit': True} # add a", "logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert", "be a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process", "logging_process.ports_schema = ports_schema # type: ignore 
logging_process.next_update = next_update # type: ignore return", "class has the same name as process_class, but prefixed with 'Logging_'. Args: process_class:", "{**update, **log_update} logging_process.ports_schema = ports_schema # type: ignore logging_process.next_update = next_update # type:", "(process_class,), {}) __class__ = logging_process # set __class__ manually so super() knows what", "of process_class. \"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class must be a subclass", "log_update = {logging_port_name: update} # log the update return {**update, **log_update} logging_process.ports_schema =", "to be logged logging_port_name: Name of the port in which updates will be", "-> Schema: ports = super().ports_schema() # type: ignore ports[logging_port_name] = {'_default': {}, '_updater':", "\"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class must be a subclass of Process.')", "default.) Returns: logging_process: the logging version of process_class. 
\"\"\" if not issubclass(process_class, Process):", "subclass of Process, returns a new subclass that behaves exactly the same except", "ValueError(f'process_class must be a subclass of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__", "returns a new subclass that behaves exactly the same except that it logs", "vivarium.core.types import Schema, State, Update from vivarium.composites.toys import ToyProcess def make_logging_process( process_class, logging_port_name=\"log_update\"", "Process class to be logged logging_port_name: Name of the port in which updates", "from typing import Union from vivarium.core.process import Process from vivarium.core.types import Schema, State,", "{}) __class__ = logging_process # set __class__ manually so super() knows what to", "'set', '_emit': True} # add a new port return ports def next_update( self,", "Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process # set __class__ manually", "= logging_process # set __class__ manually so super() knows what to do def", "= next_update # type: ignore return logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance", "manually so super() knows what to do def ports_schema( self ) -> Schema:", "State ) -> Update: update = super().next_update(timestep, states) # type: ignore log_update =", "of Process.') logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process # set __class__", "Returns: logging_process: the logging version of process_class. \"\"\" if not issubclass(process_class, Process): raise", "given by logging_port_name. 
The returned class has the same name as process_class, but", "process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given a subclass of Process, returns a", ") -> Schema: ports = super().ports_schema() # type: ignore ports[logging_port_name] = {'_default': {},", "class to be logged logging_port_name: Name of the port in which updates will", "logging_port_name. The returned class has the same name as process_class, but prefixed with", "__class__ manually so super() knows what to do def ports_schema( self ) ->", "Union from vivarium.core.process import Process from vivarium.core.types import Schema, State, Update from vivarium.composites.toys", "typing import Union from vivarium.core.process import Process from vivarium.core.types import Schema, State, Update", "= type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process # set __class__ manually so super()", "a subclass of Process, returns a new subclass that behaves exactly the same", "of the port in which updates will be stored ('log_update' by default.) Returns:", "its updates in a port with name given by logging_port_name. The returned class", "port in which updates will be stored ('log_update' by default.) 
Returns: logging_process: the", "from vivarium.core.types import Schema, State, Update from vivarium.composites.toys import ToyProcess def make_logging_process( process_class,", "import Union from vivarium.core.process import Process from vivarium.core.types import Schema, State, Update from", "next_update( self, timestep: Union[float, int], states: State ) -> Update: update = super().next_update(timestep,", "type: ignore log_update = {logging_port_name: update} # log the update return {**update, **log_update}", "logging_process = type(f\"Logging_{process_class.__name__}\", (process_class,), {}) __class__ = logging_process # set __class__ manually so", "# type: ignore return logging_process def test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy()", "Args: process_class: The Process class to be logged logging_port_name: Name of the port", "make_logging_process( process_class, logging_port_name=\"log_update\" ) -> type: \"\"\" Given a subclass of Process, returns", "add a new port return ports def next_update( self, timestep: Union[float, int], states:", "the same name as process_class, but prefixed with 'Logging_'. Args: process_class: The Process", "except that it logs all of its updates in a port with name", "port return ports def next_update( self, timestep: Union[float, int], states: State ) ->", "update return {**update, **log_update} logging_process.ports_schema = ports_schema # type: ignore logging_process.next_update = next_update", "= ports_schema # type: ignore logging_process.next_update = next_update # type: ignore return logging_process", "int], states: State ) -> Update: update = super().next_update(timestep, states) # type: ignore", "\"\"\" Given a subclass of Process, returns a new subclass that behaves exactly", "The returned class has the same name as process_class, but prefixed with 'Logging_'.", "logging version of process_class. 
\"\"\" if not issubclass(process_class, Process): raise ValueError(f'process_class must be", "= {'_default': {}, '_updater': 'set', '_emit': True} # add a new port return", "def next_update( self, timestep: Union[float, int], states: State ) -> Update: update =", "from vivarium.core.process import Process from vivarium.core.types import Schema, State, Update from vivarium.composites.toys import", "by logging_port_name. The returned class has the same name as process_class, but prefixed", "what to do def ports_schema( self ) -> Schema: ports = super().ports_schema() #", "Given a subclass of Process, returns a new subclass that behaves exactly the", "test_logging_process(): logging_toy = make_logging_process(ToyProcess) logging_toy_instance = logging_toy() ports = logging_toy_instance.ports_schema() assert 'log_update' in", "returned class has the same name as process_class, but prefixed with 'Logging_'. Args:", "process_class, but prefixed with 'Logging_'. Args: process_class: The Process class to be logged", "ignore ports[logging_port_name] = {'_default': {}, '_updater': 'set', '_emit': True} # add a new", "# type: ignore log_update = {logging_port_name: update} # log the update return {**update,", "__class__ = logging_process # set __class__ manually so super() knows what to do" ]
[ "batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center, context = next(iter(samples))", "distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert center.dtype == np.int64 assert", "0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist() == [1,", "License for the # specific language governing permissions and limitations # under the", "@pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def", "(ASF) under one # or more contributor license agreements. See the NOTICE file", "stream else 74 elif not reduce_window_size_randomly: assert len(batches) == 363 if not stream", "2, 1, 3] else: assert center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist() ==", "not stream else 74 elif not reduce_window_size_randomly: assert len(batches) == 363 if not", "assert center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert", "software distributed under the License is distributed on an # \"AS IS\" BASIS,", "permissions and limitations # under the License. from __future__ import print_function import itertools", "[True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow)", "import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False])", "and limitations # under the License. 
from __future__ import print_function import itertools import", "2] assert contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3] else: assert center.asnumpy().tolist() ==", "nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches", "= nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center, context = next(iter(samples)) (contexts_data,", "under the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "additional information # regarding copyright ownership. The ASF licenses this file # to", "cbow: assert center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5,", "# \"License\"); you may not use this file except in compliance # with", "len(batches) == 363 if not stream else 726 else: pass @pytest.mark.parametrize('cbow', [True, False])", "batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches =", "Licensed to the Apache Software Foundation (ASF) under one # or more contributor", "or more contributor license agreements. See the NOTICE file # distributed with this", "batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset,", "OR CONDITIONS OF ANY # KIND, either express or implied. See the License", "Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE", "center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert center.dtype == np.int64", "Apache Software Foundation (ASF) under one # or more contributor license agreements. See", "2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0,", "itertools import pytest import numpy as np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True,", "in compliance # with the License. You may obtain a copy of the", "implied. See the License for the # specific language governing permissions and limitations", "[True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream):", "or agreed to in writing, # software distributed under the License is distributed", "center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist()", "license agreements. See the NOTICE file # distributed with this work for additional", "0, 2, 1, 3] else: assert center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist()", "\"License\"); you may not use this file except in compliance # with the", "itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches = list(samples) if cbow: assert len(batches) ==", "else 74 elif not reduce_window_size_randomly: assert len(batches) == 363 if not stream else", "reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify)))", "either express or implied. 
See the License for the # specific language governing", "= nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset])", "assert center.dtype == np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64 assert", "== [1, 1, 1] assert contexts_row.asnumpy().tolist() == [0, 1, 2] assert contexts_col.asnumpy().tolist() ==", "not use this file except in compliance # with the License. You may", "cbow, stream): dataset = [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "# or more contributor license agreements. See the NOTICE file # distributed with", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the", "stream): dataset = [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle,", "74 elif not reduce_window_size_randomly: assert len(batches) == 363 if not stream else 726", "1, 1] assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist() == [0, 1,", "# coding: utf-8 # Licensed to the Apache Software Foundation (ASF) under one", "import print_function import itertools import pytest import numpy as np import gluonnlp as", "assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1,", "print_function import itertools import pytest import numpy as np import gluonnlp as nlp", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "[1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2]", "726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify =", "3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream =", "stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset)", "# regarding copyright ownership. The ASF licenses this file # to you under", "more contributor license agreements. 
See the NOTICE file # distributed with this work", "context assert center.dtype == np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list(", "batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col)", "not reduce_window_size_randomly: assert len(batches) == 363 if not stream else 726 else: pass", "False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples", "CONDITIONS OF ANY # KIND, either express or implied. See the License for", "work for additional information # regarding copyright ownership. 
The ASF licenses this file", "1, 1, 2, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3] else:", "assert center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5,", "@pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset =", "licenses this file # to you under the Apache License, Version 2.0 (the", "False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle,", "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either", "= [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center, context", "express or implied. See the License for the # specific language governing permissions", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "# specific language governing permissions and limitations # under the License. from __future__", "the License. 
from __future__ import print_function import itertools import pytest import numpy as", "if not stream else 726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset", "False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()] *", "you under the Apache License, Version 2.0 (the # \"License\"); you may not", "if cbow: assert center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5,", "License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches = list(samples) if", "def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify(", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "elif not reduce_window_size_randomly: assert len(batches) == 363 if not stream else 726 else:", "= batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert center.dtype", "under the Apache License, Version 2.0 (the # \"License\"); you may not use", "@pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()] * 3", "contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist()", "[1, 0, 2, 1, 3] else: assert center.asnumpy().tolist() == [0, 1, 1] assert", "0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist()", "License. 
You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist() == [0, 1, 2] assert contexts_col.asnumpy().tolist()", "cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples", "or implied. See the License for the # specific language governing permissions and", "np.int64 if cbow: assert center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist() == [1,", "next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert center.dtype == np.int64 assert contexts_data.dtype ==", "reduce_window_size_randomly: assert len(batches) == 363 if not stream else 726 else: pass @pytest.mark.parametrize('cbow',", "[0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3]", "window_size=1, cbow=cbow) samples = batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) =", "distributed under the License is distributed on an # \"AS IS\" BASIS, WITHOUT", "center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5]", "under the License. from __future__ import print_function import itertools import pytest import numpy", "== np.int64 assert contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist() == [0, 1,", "as np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow',", "== 363 if not stream else 726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def", "Unless required by applicable law or agreed to in writing, # software distributed", "distributed with this work for additional information # regarding copyright ownership. 
The ASF", "cbow=cbow) samples = batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context", "regarding copyright ownership. The ASF licenses this file # to you under the", "3] else: assert center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist() == [1, 1,", "# KIND, either express or implied. See the License for the # specific", "utf-8 # Licensed to the Apache Software Foundation (ASF) under one # or", "cbow: assert len(batches) == 37 if not stream else 74 elif not reduce_window_size_randomly:", "this work for additional information # regarding copyright ownership. The ASF licenses this", "ANY # KIND, either express or implied. See the License for the #", "contributor license agreements. See the NOTICE file # distributed with this work for", "nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row,", "assert len(batches) == 363 if not stream else 726 else: pass @pytest.mark.parametrize('cbow', [True,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "= [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if", "list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches = list(samples) if cbow: assert len(batches)", "else: assert center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist() == [1, 1, 1]", "assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist() == [0, 1, 2] assert", "__future__ import print_function import itertools import pytest import numpy as np import gluonnlp", "dataset = [np.arange(100).tolist()] batchify = 
nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center,", "[True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()]", "363 if not stream else 726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow):", "See the License for the # specific language governing permissions and limitations #", "np.int64 assert contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist() == [0, 1, 2]", "assert contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist() ==", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "2.0 (the # \"License\"); you may not use this file except in compliance", "else 726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify", "[True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()] * 3 batchify", "if cbow: assert len(batches) == 37 if not stream else 74 elif not", "KIND, either express or implied. See the License for the # specific language", "1] assert contexts_row.asnumpy().tolist() == [0, 1, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2]", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "compliance # with the License. You may obtain a copy of the License", "language governing permissions and limitations # under the License. from __future__ import print_function", "== np.int64 if cbow: assert center.asnumpy().tolist() == [0, 1, 2] assert contexts_data.asnumpy().tolist() ==", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See", "with the License. 
You may obtain a copy of the License at #", "else: samples = batchify(dataset) batches = list(samples) if cbow: assert len(batches) == 37", "information # regarding copyright ownership. The ASF licenses this file # to you", "= batchify(dataset) batches = list(samples) if cbow: assert len(batches) == 37 if not", "1, 2, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3] else: assert", "stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches", "samples = batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "one # or more contributor license agreements. See the NOTICE file # distributed", "except in compliance # with the License. You may obtain a copy of", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "you may not use this file except in compliance # with the License.", "1, 1] assert contexts_row.asnumpy().tolist() == [0, 1, 2] assert contexts_col.asnumpy().tolist() == [1, 0,", "an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64 if cbow:", "1, 3] else: assert center.asnumpy().tolist() == [0, 1, 1] assert contexts_data.asnumpy().tolist() == [1,", "if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples =", "0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2] assert", "else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify(", "assert 
contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3] else: assert center.asnumpy().tolist() == [0,", "this file # to you under the Apache License, Version 2.0 (the #", "(contexts_data, contexts_row, contexts_col) = context assert center.dtype == np.int64 assert contexts_data.dtype == np.float32", "# # Unless required by applicable law or agreed to in writing, #", "1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() ==", "= list(samples) if cbow: assert len(batches) == 37 if not stream else 74", "Version 2.0 (the # \"License\"); you may not use this file except in", "for the # specific language governing permissions and limitations # under the License.", "center.dtype == np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype", "nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False])", "test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset)", "* 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream: stream", "shuffle=shuffle, cbow=cbow) if stream: stream = nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else:", "OF ANY # KIND, either express or implied. 
See the License for the", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied.", "1] assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist() == [0, 1, 2]", "assert len(batches) == 37 if not stream else 74 elif not reduce_window_size_randomly: assert", "[np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow) if stream:", "License, Version 2.0 (the # \"License\"); you may not use this file except", "2, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3] else: assert center.asnumpy().tolist()", "this file except in compliance # with the License. You may obtain a", "from __future__ import print_function import itertools import pytest import numpy as np import", "list(samples) if cbow: assert len(batches) == 37 if not stream else 74 elif", "[True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly,", "may not use this file except in compliance # with the License. You", "contexts_col) = context assert center.dtype == np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype", "ASF licenses this file # to you under the Apache License, Version 2.0", "nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches = list(samples)", "# distributed with this work for additional information # regarding copyright ownership. 
The", "batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches = list(samples) if cbow:", "on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset", "limitations # under the License. from __future__ import print_function import itertools import pytest", "with this work for additional information # regarding copyright ownership. The ASF licenses", "the License. You may obtain a copy of the License at # #", "dataset = [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5, reduce_window_size_randomly=reduce_window_size_randomly, shuffle=shuffle, cbow=cbow)", "as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True,", "governing permissions and limitations # under the License. from __future__ import print_function import", "agreements. 
See the NOTICE file # distributed with this work for additional information", "writing, # software distributed under the License is distributed on an # \"AS", "== np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype ==", "NOTICE file # distributed with this work for additional information # regarding copyright", "stream else 726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()]", "[np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples = batchify(dataset) center, context =", "False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()] * 3 batchify =", "the Apache License, Version 2.0 (the # \"License\"); you may not use this", "contexts_row, contexts_col) = context assert center.dtype == np.int64 assert contexts_data.dtype == np.float32 assert", "The ASF licenses this file # to you under the Apache License, Version", "file except in compliance # with the License. You may obtain a copy", "file # to you under the Apache License, Version 2.0 (the # \"License\");", "test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow, stream): dataset = [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8,", "specific language governing permissions and limitations # under the License. 
from __future__ import", "assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist() == [1, 0,", "(the # \"License\"); you may not use this file except in compliance #", "samples = batchify(dataset) batches = list(samples) if cbow: assert len(batches) == 37 if", "@pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream', [True, False]) def test_center_context_batchify_stream(reduce_window_size_randomly, shuffle, cbow,", "== np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64 if cbow: assert", "== [0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2, 1,", "law or agreed to in writing, # software distributed under the License is", "# software distributed under the License is distributed on an # \"AS IS\"", "to you under the Apache License, Version 2.0 (the # \"License\"); you may", "@pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1,", "file # distributed with this work for additional information # regarding copyright ownership.", "# Licensed to the Apache Software Foundation (ASF) under one # or more", "0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist() ==", "contexts_col.asnumpy().tolist() == [1, 0, 2, 1, 3] else: assert center.asnumpy().tolist() == [0, 1,", "copyright ownership. The ASF licenses this file # to you under the Apache", "pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3,", "ownership. 
The ASF licenses this file # to you under the Apache License,", "[0, 1, 1] assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist() == [0,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "not stream else 726 else: pass @pytest.mark.parametrize('cbow', [True, False]) def test_center_context_batchify(cbow): dataset =", "# Unless required by applicable law or agreed to in writing, # software", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1,", "assert contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist() == [0, 1, 2] assert", "pytest import numpy as np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle',", "to in writing, # software distributed under the License is distributed on an", "= list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches = list(samples) if cbow: assert", "<filename>tests/unittest/batchify/test_batchify_embedding.py # coding: utf-8 # Licensed to the Apache Software Foundation (ASF) under", "agreed to in writing, # software distributed under the License is distributed on", "[1, 1, 1] assert contexts_row.asnumpy().tolist() == [0, 1, 2] assert contexts_col.asnumpy().tolist() == [1,", "batches = list(samples) if cbow: assert len(batches) == 37 if not stream else", "if not stream else 74 elif not reduce_window_size_randomly: assert len(batches) == 363 if", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) 
@pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True,", "to the Apache Software Foundation (ASF) under one # or more contributor license", "= nlp.data.SimpleDataStream([dataset, dataset]) batches = list( itertools.chain.from_iterable(stream.transform(batchify))) else: samples = batchify(dataset) batches =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "contexts_row.asnumpy().tolist() == [0, 1, 1, 2, 2] assert contexts_col.asnumpy().tolist() == [1, 0, 2,", "use this file except in compliance # with the License. You may obtain", "the # specific language governing permissions and limitations # under the License. from", "Software Foundation (ASF) under one # or more contributor license agreements. See the", "gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False]) @pytest.mark.parametrize('cbow', [True, False]) @pytest.mark.parametrize('stream',", "the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist() == [0,", "[0, 1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist()", "np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64 if cbow: assert center.asnumpy().tolist()", "See the NOTICE file # distributed with this work for additional information #", "the NOTICE file # distributed with this work for additional information # regarding", "in writing, # software distributed under the License is distributed on an #", "the Apache Software Foundation (ASF) under one # or more contributor license agreements.", "== 37 if not stream else 74 elif not reduce_window_size_randomly: assert len(batches) ==", "numpy as np import gluonnlp as nlp 
@pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True, False])", "batchify(dataset) batches = list(samples) if cbow: assert len(batches) == 37 if not stream", "License. from __future__ import print_function import itertools import pytest import numpy as np", "== [1, 0, 2, 1, 3] else: assert center.asnumpy().tolist() == [0, 1, 1]", "== [1, 0.5, 0.5, 0.5, 0.5] assert contexts_row.asnumpy().tolist() == [0, 1, 1, 2,", "batchify(dataset) center, context = next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert center.dtype ==", "coding: utf-8 # Licensed to the Apache Software Foundation (ASF) under one #", "37 if not stream else 74 elif not reduce_window_size_randomly: assert len(batches) == 363", "# under the License. from __future__ import print_function import itertools import pytest import", "# with the License. You may obtain a copy of the License at", "np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64", "def test_center_context_batchify(cbow): dataset = [np.arange(100).tolist()] batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=3, window_size=1, cbow=cbow) samples =", "Apache License, Version 2.0 (the # \"License\"); you may not use this file", "= next(iter(samples)) (contexts_data, contexts_row, contexts_col) = context assert center.dtype == np.int64 assert contexts_data.dtype", "shuffle, cbow, stream): dataset = [np.arange(100).tolist()] * 3 batchify = nlp.data.batchify.EmbeddingCenterContextBatchify( batch_size=8, window_size=5,", "under one # or more contributor license agreements. 
See the NOTICE file #", "# to you under the Apache License, Version 2.0 (the # \"License\"); you", "required by applicable law or agreed to in writing, # software distributed under", "import numpy as np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False]) @pytest.mark.parametrize('shuffle', [True,", "by applicable law or agreed to in writing, # software distributed under the", "for additional information # regarding copyright ownership. The ASF licenses this file #", "len(batches) == 37 if not stream else 74 elif not reduce_window_size_randomly: assert len(batches)", "the License for the # specific language governing permissions and limitations # under", "assert contexts_data.dtype == np.float32 assert contexts_row.dtype == np.int64 assert contexts_col.dtype == np.int64 if", "applicable law or agreed to in writing, # software distributed under the License", "== [0, 1, 2] assert contexts_data.asnumpy().tolist() == [1, 0.5, 0.5, 0.5, 0.5] assert", "import itertools import pytest import numpy as np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly',", "import pytest import numpy as np import gluonnlp as nlp @pytest.mark.parametrize('reduce_window_size_randomly', [True, False])", "= context assert center.dtype == np.int64 assert contexts_data.dtype == np.float32 assert contexts_row.dtype ==", "== [0, 1, 1] assert contexts_data.asnumpy().tolist() == [1, 1, 1] assert contexts_row.asnumpy().tolist() ==" ]
[ "fp.read() # Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for", "sys import setuptools # Get text from README.txt with open(\"README.md\", \"r\") as fp:", "with open(\"README.md\", \"r\") as fp: readme_text = fp.read() # Get __version__ without importing", "line in f: if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect", "if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it", "classifiers=[ \"Intended Audience :: Science/Research\", \"License :: OSI Approved :: MIT License\", \"Operating", "\"__init__.py\"), \"r\") as f: for line in f: if line.startswith(\"__version__ = \"): exec(line.strip())", "as fp: readme_text = fp.read() # Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"),", "to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience", "author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License :: OSI", "\"Intended Audience :: Science/Research\", \"License :: OSI Approved :: MIT License\", \"Operating System", "beam it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[", "fp: readme_text = fp.read() # Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", 
\"__init__.py\"), \"r\")", "exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it into static artifacts and", "OSI Approved :: MIT License\", \"Operating System :: OS Independent\", \"Programming Language ::", "as f: for line in f: if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup(", "long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License :: OSI Approved ::", "it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended", "line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it into", "long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License :: OSI Approved :: MIT", "README.txt with open(\"README.md\", \"r\") as fp: readme_text = fp.read() # Get __version__ without", "for line in f: if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__,", "python3 import os, sys import setuptools # Get text from README.txt with open(\"README.md\",", "version=__version__, description=\"Collect data, turn it into static artifacts and beam it to a", "data, turn it into static artifacts and beam it to a vault.\", license=\"MIT\",", "with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for line in f: if line.startswith(\"__version__ =", ":: OSI Approved :: MIT License\", \"Operating System :: OS Independent\", \"Programming Language", "open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for line in 
f: if line.startswith(\"__version__ = \"):", "import os, sys import setuptools # Get text from README.txt with open(\"README.md\", \"r\")", "readme_text = fp.read() # Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as", "import setuptools # Get text from README.txt with open(\"README.md\", \"r\") as fp: readme_text", "#!/usr/bin/env python3 import os, sys import setuptools # Get text from README.txt with", "\"r\") as f: for line in f: if line.startswith(\"__version__ = \"): exec(line.strip()) break", "description=\"Collect data, turn it into static artifacts and beam it to a vault.\",", ":: MIT License\", \"Operating System :: OS Independent\", \"Programming Language :: Python\", ],", "= \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it into static", "into static artifacts and beam it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\",", "Approved :: MIT License\", \"Operating System :: OS Independent\", \"Programming Language :: Python\",", "\"Operating System :: OS Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"],", "License\", \"Operating System :: OS Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\",", "setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it into static artifacts and beam it", "in f: if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data,", "= fp.read() # Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f:", "without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for 
line in f: if", "Science/Research\", \"License :: OSI Approved :: MIT License\", \"Operating System :: OS Independent\",", "\"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it into static artifacts", "\"License :: OSI Approved :: MIT License\", \"Operating System :: OS Independent\", \"Programming", "f: for line in f: if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\",", "Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"], extras_require={\"dev\": [\"sphinx\", \"sphinx_rtd_theme>=0.4.3\", \"pre-commit\", \"black\"],}, )", "# Get text from README.txt with open(\"README.md\", \"r\") as fp: readme_text = fp.read()", "from README.txt with open(\"README.md\", \"r\") as fp: readme_text = fp.read() # Get __version__", "artifacts and beam it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\",", "vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\",", "f: if line.startswith(\"__version__ = \"): exec(line.strip()) break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn", "break setuptools.setup( name=\"ndsb\", version=__version__, description=\"Collect data, turn it into static artifacts and beam", "name=\"ndsb\", version=__version__, description=\"Collect data, turn it into static artifacts and beam it to", "System :: OS Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"], extras_require={\"dev\":", "# Get __version__ without importing with 
open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for line", "__version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for line in f:", "author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License ::", "turn it into static artifacts and beam it to a vault.\", license=\"MIT\", author=\"<NAME>\",", "\"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"], extras_require={\"dev\": [\"sphinx\", \"sphinx_rtd_theme>=0.4.3\", \"pre-commit\", \"black\"],},", "static artifacts and beam it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text,", "open(\"README.md\", \"r\") as fp: readme_text = fp.read() # Get __version__ without importing with", "Audience :: Science/Research\", \"License :: OSI Approved :: MIT License\", \"Operating System ::", "url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License :: OSI Approved", ":: OS Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"], extras_require={\"dev\": [\"sphinx\",", "Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"], extras_require={\"dev\": [\"sphinx\", \"sphinx_rtd_theme>=0.4.3\", \"pre-commit\",", "setuptools # Get text from README.txt with open(\"README.md\", \"r\") as fp: readme_text =", "text from README.txt with open(\"README.md\", \"r\") as fp: readme_text = fp.read() # Get", "importing 
with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for line in f: if line.startswith(\"__version__", "Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\", \"__init__.py\"), \"r\") as f: for line in", "and beam it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"],", "packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License :: OSI Approved :: MIT License\",", ":: Science/Research\", \"License :: OSI Approved :: MIT License\", \"Operating System :: OS", "\"r\") as fp: readme_text = fp.read() # Get __version__ without importing with open(os.path.join(os.path.dirname(__file__),\"ndsb\",", "OS Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\", \"requests\", \"requests-toolbelt\"], extras_require={\"dev\": [\"sphinx\", \"sphinx_rtd_theme>=0.4.3\",", "MIT License\", \"Operating System :: OS Independent\", \"Programming Language :: Python\", ], install_requires=[\"portalocker\",", "Get text from README.txt with open(\"README.md\", \"r\") as fp: readme_text = fp.read() #", "os, sys import setuptools # Get text from README.txt with open(\"README.md\", \"r\") as", "it into static artifacts and beam it to a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], classifiers=[ \"Intended Audience :: Science/Research\", \"License", "a vault.\", license=\"MIT\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/dbbs-lab/ndsb\", long_description=readme_text, long_description_content_type=\"text/markdown\", packages=[\"ndsb\"], 
classifiers=[ \"Intended Audience ::" ]
[ "cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should probably have one", "ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill", "Y_test) # print(result) # # use the test set to see how we", "= class_dict[pred[0]] object_label = \"obj\" + str(object_num) + \"_pred\" + str(pred[0]) print(object_label) #", "ret3, th3 def process_image_make_predictions(self, input_image, model): predictive_model = model area_th = 400 seg_img", "# # median filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) #", "color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty)", "plt.show() # print(labels) # take the row # end result should be a", "+ conditional # print(f\"i = {i}, prior = {prior}, conditional = {conditional}, posterior", "for predictions # results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) # print(result)", "and I had a typo that cost me hours numerator = np.exp(-((x-mean)**2 /", "isnan import pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix,", "import ndimage from skimage import morphology from skimage import exposure import os from", "x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): # this is the probability, post", "= (179, 255, 30) # self.hsv_lower = (0, 0, 100) # self.hsv_upper =", "hsv_upper = np.array([h_max, s_max, v_max]) black_lower = np.array([0, 0, 0]) black_upper = np.array([179,", "the original image output_image = input_image.copy() # find the contours of the detected", "cimg_justthiscontour = np.zeros_like(input_image) # draw the contours on the blank canvas which is", "== 255) hue = img_subset_hsv[pts[0], pts[1], 0] 
sat = img_subset_hsv[pts[0], pts[1], 1] val", "on the blank canvas which is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255,", "import randint, uniform import numpy as np from matplotlib import pyplot as plt", "cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle as a subset of the", "= predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) # print(result) # # use the", "fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1,", "h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows()", "extent = float(area/ rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area", "img_subset = input_image[y:y+h, x:x+w, :] # convert to hsv for extracting those values", "the random chance of drawing a particular class based on its proportion in", "cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower =", "\"and I'm gonna have a hard time with that\" return result def bg_segmentation(self,", "255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the object", "preprocessing import time from os import listdir from os.path import isfile, join from", "and export the image for later analysis with something else like a neural", "prob of c occuring indpendently. 
# P(x) is the predictor prior probability, or", "the object_num object_num += 1 # # end result should be a pandas", "this loop is increment the object_num object_num += 1 # AFTER ALL CONTOURS", "= cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image", "values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES # get", "cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max',", "from math import pi from math import isnan import pandas as pd from", "mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it is for testing", "import confusion_matrix, precision_score, recall_score from skimage.filters import sobel # set random seed np.random.seed(26)", "features.shape[1] self.rows = features.shape[0] # calculate statistics for all those features self.calc_statistics(features, target)", "7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\" + str(object_num) + \"_pred\"", "c) for i, c in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) # fig", "the cimg for identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy - 5), fontFace=", "5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df", "= np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0] # calculate", "255, 255) # self.hsv_lower = (0, 0, 70) # self.hsv_upper = (179, 34,", "dummy_method(self, a): if type(a) is np.ndarray: result = \"object is a numpy.ndarray, this", "features, target): # calculate mean, variance for each column and convert to numpy", "we will add each 
classes posterior prob to posteriors = [] # iterate", "img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF # large wait time", "import listdir from os.path import isfile, join from random import randint, uniform import", "the blank canvas which is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255),", "of the detected objects in the image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)", "# last thing we do on this loop is increment the object_num object_num", "self.calc_statistics(features, target) # prior is the random chance of drawing a particular class", "wrote for assignment 6 in BSYSE_530, modified a little for this purpose class", "# take this rectangle as a subset of the input_image, and calculate things", "THIS ONE not the others # this is a mask cimg_justthiscontour = np.zeros_like(input_image)", "= recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is {prec}, recall is {rec}, accuracy =", "to put the contour onto, JUST THIS ONE not the others # this", "= self.mean[class_idx] var = self.var[class_idx] # this part sucked and I had a", "target): # define class variables self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums =", "self.prior def calc_posterior(self, x): # this is the probability, post evidence # x", "image, mode=\"hsv\", show_img=False): # create an hsv mask for red colors hsv_mask =", "image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\")", "matplotlib import pyplot as plt import cv2 as cv from scipy import ndimage", "num object_num = 0 for cnt in contours: # draw contours on the", "(correct / float(len(test))) # TODO: read these and see how it works #", "I should probably have one image processing class that takes in a single", "fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, 
lineType=cv.LINE_AA) # last thing we do on this", "SHAPE FEATURES # get the x, y, w, h of the bounding rect", "return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img =", "submit the df to the model for predictions # results = predictive_model.blind_predictions() #", "the observation pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target): # calculate", "denominator = np.sqrt(2 * np.pi * var) return numerator / denominator def pdf(self,", "its proportion in the dataset self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector): predictions", "trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars',", "plt.figure() # ax = fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion matrix of", "the contours of the detected objects in the image contours, hier = cv.findContours(mask,", "predictor prior probability, or the prob of x occuring independently def fit(self, features,", "= cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max =", "= cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy=", "cv.contourArea(cnt) rect_area = w * h fullosity = area / rect_area aspect_ratio =", "h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max", "numerator / denominator def pdf(self, x, mean, stdev): # calculate probability density function", "= pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3", "x:x+w, :] # convert to hsv for extracting those values 
img_subset_hsv = cv.cvtColor(img_subset,", "output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3", "acc = nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec =", "output_image = input_image.copy() # find the contours of the detected objects in the", "i, c in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) # fig = plt.figure()", "# contour features area = cv.contourArea(cnt) rect_area = w * h fullosity =", "empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars',", "for i, row in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df #", "posteriors = [] # iterate through the classes for i in range(0, self.count):", "# replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor activated! 
use 'process_image_to_df()' to", "y, w, h = cv.boundingRect(cnt) # contour features area = cv.contourArea(cnt) rect_area =", "= pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\",", "ALL THE CONTOUR SHAPE FEATURES # get the x, y, w, h of", "# erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to", "1 # AFTER ALL CONTOURS HAVE BEEN DONE submit the df to the", "return self.prior def calc_posterior(self, x): # this is the probability, post evidence #", "create an hsv mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8)", "image with opencv # this will be a BGR format, because that is", "the prior probability for the class prior = self.prior[i] # calculate the conditional", "float(w)/h extent = float(area/ rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity =", "cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255,", "https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my testing purposes class MatlabSurrogate(): def __init__(self):", "set is bigger # output_df = input_df.copy() # for rep in range(0, reps):", "fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g']", "this will be a BGR format, because that is how opencv rolls kinect_image", "sat = img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0],", "matrix # labels = [(i, c) for i, c in class_dict.items()] # cm", "with contours drawn on the original image output_image = input_image.copy() # find the", "the contours on the blank canvas which is original sized 
cv.drawContours(cimg_justthiscontour, [cnt], 0,", "cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last thing we do on this loop", "image, imdiv = 4): imdiv = int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\",", "math import isnan import pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics", "= np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct", "correct += 1 return (correct / float(len(test))) # TODO: read these and see", "calc_statistics(self, features, target): # calculate mean, variance for each column and convert to", "(179, 255, 90) # self.black_lower = (0, 0, 203) # self.black_upper = (43,", "seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask a binary thresholded image", "objects that are probably noisef if area > area_th: # draw a blank", "# P(x|c) is the likelihood # P(c) is the class prior probability, or", "where the object is pts = np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1],", "== 113 or k == 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper =", "fake_df(self, input_df, reps = 3): # # creates a bunch of fake adjustments", "detected objects in the image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create", "features, target): # this is the probability of picking one of a class", "= {conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): #", "return result else: result = \"object is a \" + str(type(a)) + \"and", "= fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] +", "179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) 
cv.createTrackbar('sat_max',", "cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True: # get image img_hsv = cv.cvtColor(image,", "= (0,0,0) # hsv_upper = (179,234,77) def dummy_method(self, a): if type(a) is np.ndarray:", "255, empty) while True: # get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get", "the row # end result should be a pandas dataframe and the contour", "self.var def calc_prior(self, features, target): # this is the probability of picking one", "img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean() / 255), 'g': (g.mean() / 255),", "from matplotlib import pyplot as plt import cv2 as cv from scipy import", "probability, or the prob of c occuring indpendently. # P(x) is the predictor", "cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours", "the contour onto, JUST THIS ONE not the others # this is a", "so my train set is bigger # output_df = input_df.copy() # for rep", "is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1) # now take", "m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask and return the", "the probability, post evidence # x is a numpy array # x is", "take the row # end result should be a pandas dataframe and the", "with opencv # this will be a BGR format, because that is how", "= [] # iterate through the classes for i in range(0, self.count): #", "convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var", "w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179,", "not the others # this is a mask cimg_justthiscontour = np.zeros_like(input_image) # draw", "object_num, 'r': r.mean(), 'g': g.mean(), 'b': b.mean(), 
'hue': hue.mean(), 'sat': sat.mean(), 'val': val.mean()", "hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to despeckle # hsv_mask =", "is a numpy array # x is feature vector for one observation #", "/ (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct = 0", "on the original image output_image = input_image.copy() # find the contours of the", "# # end result should be a pandas dataframe and the contour image", "black_lower, black_upper) mask = color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image',", "scipy import ndimage from skimage import morphology from skimage import exposure import os", "is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, class_list): for i, row in", "= np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the", "from skimage import morphology from skimage import exposure import os from math import", "labels = [(i, c) for i, c in class_dict.items()] # cm = confusion_matrix(y_test,", "image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) #", "and then spits out a dataframe that could be used for prediction #", "this image df = pd.DataFrame(columns=['color']) # # reset the object num object_num =", "the mask and return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image,", "255), 'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict =", "the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to despeckle #", "255, 30) self.hsv_lower = (0, 0, 0) self.hsv_upper = (179, 255, 90) #", "def empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) 
cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w)", "= features.shape[1] self.rows = features.shape[0] # calculate statistics for all those features self.calc_statistics(features,", "the subset of just the area around the contour of interest cimg_subset =", "extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES", "import cv2 as cv from scipy import ndimage from skimage import morphology from", "prior probability for the class prior = self.prior[i] # calculate the conditional probability", "'g': (g.mean() / 255), 'b': (b.mean() / 255), 'hue': (hue.mean() / 255), 'sat':", "this is a mask cimg_justthiscontour = np.zeros_like(input_image) # draw the contours on the", "is increment the object_num object_num += 1 # # end result should be", "if test.iloc[i] == predictions[i]: correct += 1 return (correct / float(len(test))) # TODO:", "in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df # def fake_df(self, input_df,", "v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0, 0) # self.black_upper =", "pts[1], 0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] #", "should be a pandas dataframe and the contour image with numbers return df.sort_values(by='object_num',", "recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is {prec}, recall is {rec}, accuracy = {acc}\")", "only for my testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" def", "255, 30) # self.hsv_lower = (0, 0, 100) # self.hsv_upper = (179, 255,", "(0, 0, 70) # self.hsv_upper = (179, 34, 255) # NOT mask for", "result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation): #call the calc_posterior function", "TODO: remove this it is for testing purposes to show the segmentation if", "import 
isfile, join from random import randint, uniform import numpy as np from", "return df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def empty(a): pass h, w", "= {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text =", "fucntion (normal dist) mean = self.mean[class_idx] var = self.var[class_idx] # this part sucked", "def pdf(self, x, mean, stdev): # calculate probability density function exponent = np.exp(-((x-mean)**2", "1] val = img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1], 0] g =", "5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(),", "> area_th: # draw a blank canvas to put the contour onto, JUST", "for this image df = pd.DataFrame(columns=['color']) # # reset the object num object_num", "fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df =", "on the observation pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target): #", "through the classes for i in range(0, self.count): # for each class look", "# for each class look at the prior probability for the class prior", "# output image with contours drawn on the original image output_image = input_image.copy()", "are probably noisef if area > area_th: # draw a blank canvas to", "in the image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df", "CALCULATE ALL THE CONTOUR SHAPE FEATURES # get the x, y, w, h", "the image RGB order or BGR?\" return result else: result = \"object is", "hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) 
M=", "predictions): correct = 0 for i in range(len(test)): if test.iloc[i] == predictions[i]: correct", "fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i,", "classifier I wrote for assignment 6 in BSYSE_530, modified a little for this", "to hsv for extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT", "cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle as a subset of", "= np.exp(-((x-mean)**2 / (2 * var))) denominator = np.sqrt(2 * np.pi * var)", "def acquire_kinect_image(self, filename): # give this function a filename, and it will load", "5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\" + str(object_num)", "image with numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def empty(a):", "# draw contours on the output image for our personal enjoyment cv.drawContours(output_image, [cnt],", "target): # calculate mean, variance for each column and convert to numpy array", "of x occuring independently def fit(self, features, target): # define class variables self.classes", "cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False) # #", "+ str(type(a)) + \"and I'm gonna have a hard time with that\" return", "+= 1 # # end result should be a pandas dataframe and the", "process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask a", "30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask =", "is pts = np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1], 0] sat =", "class_idx, x): # calc probability from gaussian 
denssityy fucntion (normal dist) mean =", "a subset of the input_image, and calculate things within it img_subset = input_image[y:y+h,", "for the contour x, y, w, h = cv.boundingRect(cnt) # contour features area", "features.shape[0] # calculate statistics for all those features self.calc_statistics(features, target) # prior is", "'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0,", "binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask =", "255, 255), thickness=5) # CALCULATE ALL THE CONTOUR SHAPE FEATURES # get the", "labels to the cimg for identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy -", "BGR format, because that is how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has", "time with that\" return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an", "ax = fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion matrix of the classifier')", "display images resized, using opencv def imshow(self, image, imdiv = 4): imdiv =", "return pred_class def calc_statistics(self, features, target): # calculate mean, variance for each column", "cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that we'll return for this image", "recall_score from skimage.filters import sobel # set random seed np.random.seed(26) # the NaiveBayes", "function on the observation pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target):", "calc_posterior(self, x): # this is the probability, post evidence # x is a", "P(x|c) is the likelihood # P(c) is the class prior probability, or the", "if area > area_th: # draw a blank canvas to put the contour", "mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw 
contours on", "for this purpose class NaiveBayes: # P(c|x) = P(x|c) * P(c) / P(x)", "mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF # large wait time if", "+ str(pred[0]) print(object_label) # add the object labels to the cimg for identification", "color=(255, 255, 255), thickness=5) # CALCULATE ALL THE CONTOUR SHAPE FEATURES # get", "imshow(self, image, imdiv = 4): imdiv = int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv)", "P(x|c) * P(c) / P(x) # P(x|x) is the posterior probability # P(x|c)", "0, 'x': x, 'y': y, 'object_num': object_num, 'r': r.mean(), 'g': g.mean(), 'b': b.mean(),", "self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename): # give this function a filename, and", "np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower = np.array([0, 0, 0])", "the calc_posterior function on the observation pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self,", "return self.mean, self.var def calc_prior(self, features, target): # this is the probability of", "pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1], 0] g", "of fake adjustments to the dataframe so my train set is bigger #", "0, color=(255, 255, 255), thickness=-1) # now take the subset of just the", "is {rec}, accuracy = {acc}\") # # confusion matrix # labels = [(i,", "morphology from skimage import exposure import os from math import pi from math", "had a typo that cost me hours numerator = np.exp(-((x-mean)**2 / (2 *", "return kinect_image # function to display images resized, using opencv def imshow(self, image,", "ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output image for our", "ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor activated! 
use 'process_image_to_df()' to get back", "result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False)", "this function a filename, and it will load that image with opencv #", "{0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]]", "# use this as a NOT mask hsv_mask = np.where(hsv_mask > 1, 0,", "predictions.append(result) return predictions def predict(self, observation): #call the calc_posterior function on the observation", "i, row in fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1)", "our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) # CALCULATE ALL", "len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): # this is the probability, post evidence", "= np.sqrt(2 * np.pi * var) return numerator / denominator def pdf(self, x,", "== True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask and", "cv.waitKey(1000) & 0xFF # large wait time if k == 113 or k", "# this exists only for my testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind", "sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1) # now take the subset", "= (0, 0, 0) self.hsv_upper = (179, 255, 90) # self.black_lower = (0,", "'val': val.mean() }, ignore_index=True) # last thing we do on this loop is", "int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color mask", "return output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = 
cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3,", "m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask and return the result return cv.bitwise_and(image,", "r = img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0],", "mask = color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k", "correct = 0 for i in range(len(test)): if test.iloc[i] == predictions[i]: correct +=", "= {hsv_upper}') def label_dataframe(self, image_df, class_list): for i, row in image_df.iterrows(): image_df.loc[i, 'color']", "# NOT mask for lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper = (179,234,77)", "def label_dataframe(self, image_df, class_list): for i, row in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i]", "this is the probability, post evidence # x is a numpy array #", "the conditional = np.sum(self.gaussian_density(i, x)) posterior = prior + conditional # print(f\"i =", "sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters import sobel # set random seed", "x)) posterior = prior + conditional # print(f\"i = {i}, prior = {prior},", "= (0, 0, 203) # self.black_upper = (43, 255, 255) # self.hsv_lower =", "= pd.DataFrame(columns=['color']) # # reset the object num object_num = 0 for cnt", "os import listdir from os.path import isfile, join from random import randint, uniform", "# binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask", "predictive_model = model area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False) # # make", "return (correct / float(len(test))) # TODO: read these and see how it works", "[] for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self,", "np.array([h_max, s_max, v_max]) 
black_lower = np.array([0, 0, 0]) black_upper = np.array([179, 255, 30])", "image RGB order or BGR?\" return result else: result = \"object is a", "mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min',", "be a BGR format, because that is how opencv rolls kinect_image = cv.imread(filename)", "113 or k == 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}')", "output image with contours drawn on the original image output_image = input_image.copy() #", "# color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179,", "255), 'hue': (hue.mean() / 255), 'sat': (sat.mean() / 255), 'val': (val.mean() / 255)}]", "return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc probability from gaussian denssityy fucntion", "cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max',", "cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output image for", "= self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation): #call the calc_posterior function on", "# get rid of tiny objects that are probably noisef if area >", "a hard time with that\" return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): #", "= float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take", "spits out a dataframe that could be used for prediction # replaces ImageSegmenter", "x, y, w, h of the bounding rect for the contour x, y,", "make a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = 
cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) #", "# create an hsv mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower,", "testing purposes to show the segmentation if (show_img == True): m = MatlabSurrogate()", "make a list that we will add each classes posterior prob to posteriors", "class variables self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows =", "= ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) #", "exists only for my testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\"", "= precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is", "= cv.contourArea(cnt) rect_area = w * h fullosity = area / rect_area aspect_ratio", "creates a bunch of fake adjustments to the dataframe so my train set", "= color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k =", "the probability of picking one of a class at random from the dataset", "black_upper = np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv,", "# hsv_lower = (0,0,0) # hsv_upper = (179,234,77) def dummy_method(self, a): if type(a)", "255), thickness=5) # CALCULATE ALL THE CONTOUR SHAPE FEATURES # get the x,", "ascending=True), output_image def hsv_slide_tool(self, image): def empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2)", "image with contours drawn on the original image output_image = input_image.copy() # find", "cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) # CALCULATE ALL THE CONTOUR SHAPE", "denominator def pdf(self, x, mean, stdev): # calculate probability density function 
exponent =", "because that is how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired the", "x, y, w, h = cv.boundingRect(cnt) # contour features area = cv.contourArea(cnt) rect_area", "float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this", "train set is bigger # output_df = input_df.copy() # for rep in range(0,", "in a single image and then spits out a dataframe that could be", "# and export the image for later analysis with something else like a", "a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels to the cimg", "range(0, self.count): # for each class look at the prior probability for the", "for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as", "features self.calc_statistics(features, target) # prior is the random chance of drawing a particular", "'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255,", "network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels to the cimg for identification", "300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179,", "= self.var[class_idx] # this part sucked and I had a typo that cost", "the area around the contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] #", "h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should probably have one image processing", "1] b = img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean() / 255), 'g':", "& 0xFF # large wait time if k == 113 or k ==", "= [] for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) 
return predictions def", "confusion matrix # labels = [(i, c) for i, c in class_dict.items()] #", "'b': (b.mean() / 255), 'hue': (hue.mean() / 255), 'sat': (sat.mean() / 255), 'val':", "* var) return numerator / denominator def pdf(self, x, mean, stdev): # calculate", "proportion in the dataset self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector): predictions =", "join from random import randint, uniform import numpy as np from matplotlib import", "interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary mask cimg_mask =", "= float(w)/h extent = float(area/ rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity", "define class variables self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows", "cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask)", "255), 'b': (b.mean() / 255), 'hue': (hue.mean() / 255), 'sat': (sat.mean() / 255),", "object labels to the cimg for identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy", "self.mean[class_idx] var = self.var[class_idx] # this part sucked and I had a typo", "= np.sum(self.gaussian_density(i, x)) posterior = prior + conditional # print(f\"i = {i}, prior", "a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw", "do on this loop is increment the object_num object_num += 1 # #", "(43, 255, 255) # self.hsv_lower = (0, 0, 70) # self.hsv_upper = (179,", "y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\")", "y_test_predictions, average=\"micro\") # print(f\"precision is {prec}, recall is {rec}, accuracy = {acc}\") #", "a pandas df\") self.black_lower = (0, 0, 0) self.black_upper = (179, 255, 30)", 
"modified a little for this purpose class NaiveBayes: # P(c|x) = P(x|c) *", "c in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() #", "# for i, row in fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] +", "thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color' : 0, 'x':", "the classes for i in range(0, self.count): # for each class look at", "empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars',", "cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, class_list): for i,", "loop is increment the object_num object_num += 1 # # end result should", "rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi)", "i in range(0, self.count): # for each class look at the prior probability", "if type(a) is np.ndarray: result = \"object is a numpy.ndarray, this is perfect.", "[cnt], 0, color=(255, 255, 255), thickness=5) # CALCULATE ALL THE CONTOUR SHAPE FEATURES", "NOT mask for lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper = (179,234,77) def", "stdev): # calculate probability density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent", "cax = ax.matshow(cm) # plt.title('confusion matrix of the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted')", "cv.waitKey(0) cv.destroyAllWindows() # I should probably have one image processing class that takes", "one observation # make a list that we will add each classes posterior", "0 for i in range(len(test)): if test.iloc[i] == predictions[i]: correct += 1 return", "we'll return for this image df = pd.DataFrame(columns=['color']) # # reset the object", "* P(c) / P(x) # P(x|x) is the posterior probability # P(x|c) is", "cv.boundingRect(cnt) # 
contour features area = cv.contourArea(cnt) rect_area = w * h fullosity", "= img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1],", "= confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() # ax = fig.add_subplot(111) # cax", "from gaussian denssityy fucntion (normal dist) mean = self.mean[class_idx] var = self.var[class_idx] #", "= cv.waitKey(1000) & 0xFF # large wait time if k == 113 or", "solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) #", "# get the x, y, w, h of the bounding rect for the", "= features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features, target): #", "from random import randint, uniform import numpy as np from matplotlib import pyplot", "from sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters import", "cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn on the original image output_image =", "mask=mask).astype(np.uint8) # calculate where the object is pts = np.where(cimg_subset == 255) hue", "that\" return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an hsv mask", "hard time with that\" return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create", "255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max',", "use 'process_image_to_df()' to get back a pandas df\") self.black_lower = (0, 0, 0)", "occuring indpendently. 
# P(x) is the predictor prior probability, or the prob of", "the df to the model for predictions # results = predictive_model.blind_predictions() # result", "(179, 255, 30) self.hsv_lower = (0, 0, 0) self.hsv_upper = (179, 255, 90)", "back a pandas df\") self.black_lower = (0, 0, 0) self.black_upper = (179, 255,", "class at random from the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior", "object_num object_num += 1 # # end result should be a pandas dataframe", "= morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode", "70) # self.hsv_upper = (179, 34, 255) # NOT mask for lego_imgs[14] #", "h = cv.boundingRect(cnt) # contour features area = cv.contourArea(cnt) rect_area = w *", "255), thickness=-1) # now take the subset of just the area around the", "org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(),", "the object num object_num = 0 for cnt in contours: # draw contours", "1] b = img_subset[pts[0], pts[1], 2] # and export the image for later", "# this is the probability, post evidence # x is a numpy array", "0, 0]) black_upper = np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask", "img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES # get rid", "P(x) is the predictor prior probability, or the prob of x occuring independently", "self.hsv_lower = (0, 0, 70) # self.hsv_upper = (179, 34, 255) # NOT", "cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output image for our personal enjoyment cv.drawContours(output_image,", "should be a pandas dataframe and the contour image with numbers return output_image", "pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) 
cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\",", "target) # prior is the random chance of drawing a particular class based", "OUT THE WEIRD ONES # get rid of tiny objects that are probably", "int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I", "= np.array([0, 0, 0]) black_upper = np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower,", "random seed np.random.seed(26) # the NaiveBayes classifier I wrote for assignment 6 in", "output_df = input_df.copy() # for rep in range(0, reps): # fake_df = input_df.copy()", "area = cv.contourArea(cnt) rect_area = w * h fullosity = area / rect_area", "export the image for later analysis with something else like a neural network", "y, 'object_num': object_num, 'r': r.mean(), 'g': g.mean(), 'b': b.mean(), 'hue': hue.mean(), 'sat': sat.mean(),", "# large wait time if k == 113 or k == 27: break", "[(i, c) for i, c in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) #", "the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): #", "those features self.calc_statistics(features, target) # prior is the random chance of drawing a", "(0, 0, 203) # self.black_upper = (43, 255, 255) # self.hsv_lower = (0,", "get_predictions(self, input_vector): predictions = [] for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result)", "FEATURES # get the x, y, w, h of the bounding rect for", "class prior = self.prior[i] # calculate the conditional probability for the conditional =", "+ black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) &", "= (features.groupby(target).apply(lambda x: 
len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): # this is the", "+ uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1) #", "if k == 113 or k == 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower},", "cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with", "calc_prior(self, features, target): # this is the probability of picking one of a", ".1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df =", "cm = confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() # ax = fig.add_subplot(111) #", "# print(f\"precision is {prec}, recall is {rec}, accuracy = {acc}\") # # confusion", "0, 100) # self.hsv_upper = (179, 255, 255) # create mask hsv_lower =", "sat.mean(), 'val': val.mean() }, ignore_index=True) # last thing we do on this loop", "= self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target): # calculate mean, variance for", "for later analysis with something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) #", "we do on this loop is increment the object_num object_num += 1 #", "# fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels) # take the", "despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask,", "self.hsv_lower = (0, 0, 100) # self.hsv_upper = (179, 255, 255) # create", "contours on the output image for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255,", "= cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower", "5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), 
thickness=3, lineType=cv.LINE_AA) # last thing we", "np from matplotlib import pyplot as plt import cv2 as cv from scipy", "self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target): # calculate mean, variance for each", "y_test_predictions) # fig = plt.figure() # ax = fig.add_subplot(111) # cax = ax.matshow(cm)", "for i in range(0, self.count): # for each class look at the prior", "= nb.get_predictions(X_test) # # scores # acc = nb.get_accuracy(y_test, y_test_predictions) # prec =", "# rec = recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is {prec}, recall is {rec},", "THE CONTOUR SHAPE FEATURES # get the x, y, w, h of the", "= \"obj\" + str(object_num) + \"_pred\" + str(pred[0]) print(object_label) # add the object", "to the cimg for identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy - 5),", "of the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels)", "exposure import os from math import pi from math import isnan import pandas", "image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df # def fake_df(self, input_df, reps =", "canvas which is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1) #", "area / rect_area aspect_ratio = float(w)/h extent = float(area/ rect_area) hull = cv.convexHull(cnt)", "lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper = (179,234,77) def dummy_method(self, a): if", "denssityy fucntion (normal dist) mean = self.mean[class_idx] var = self.var[class_idx] # this part", "= 400 seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask a binary", "how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my", "M= cv.moments(cnt) cx= 
int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle as a subset", "hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) #", "self.black_upper = (179, 255, 30) # self.hsv_lower = (0, 0, 100) # self.hsv_upper", "0] sat = img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2] r =", "BSYSE_530, modified a little for this purpose class NaiveBayes: # P(c|x) = P(x|c)", "x, mean, stdev): # calculate probability density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2)))", "cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a NOT mask hsv_mask = np.where(hsv_mask", "predictions # results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) # print(result) #", "skimage.filters import sobel # set random seed np.random.seed(26) # the NaiveBayes classifier I", "(val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\",", "# acc = nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec", "it img_subset = input_image[y:y+h, x:x+w, :] # convert to hsv for extracting those", "* var))) denominator = np.sqrt(2 * np.pi * var) return numerator / denominator", "h of the bounding rect for the contour x, y, w, h =", "(2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct = 0 for", "for i, c in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) # fig =", "# get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min =", "the output image for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255),", "cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, 
lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color'", "print(f\"i = {i}, prior = {prior}, conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior)", "it is for testing purposes to show the segmentation if (show_img == True):", "the NaiveBayes classifier I wrote for assignment 6 in BSYSE_530, modified a little", "cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0,", "cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\",", "pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\",", "average=\"micro\") # print(f\"precision is {prec}, recall is {rec}, accuracy = {acc}\") # #", "# self.black_lower = (0, 0, 203) # self.black_upper = (43, 255, 255) #", "= (43, 255, 255) # self.hsv_lower = (0, 0, 70) # self.hsv_upper =", "uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self, image):", "'sat': (sat.mean() / 255), 'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred =", "filename): # give this function a filename, and it will load that image", "0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask = morphology.erosion(hsv_mask,", "list that we will add each classes posterior prob to posteriors = []", "object_num += 1 # AFTER ALL CONTOURS HAVE BEEN DONE submit the df", "df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def empty(a): pass h, w =", "prior = self.prior[i] # calculate the conditional probability for the conditional = np.sum(self.gaussian_density(i,", 
"rect for the contour x, y, w, h = cv.boundingRect(cnt) # contour features", "3): # # creates a bunch of fake adjustments to the dataframe so", "from math import isnan import pandas as pd from sklearn.model_selection import train_test_split from", "binary thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask =", "def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an hsv mask for red colors", "str(type(a)) + \"and I'm gonna have a hard time with that\" return result", "# self.black_upper = (43, 255, 255) # self.hsv_lower = (0, 0, 70) #", "= cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max =", "recall is {rec}, accuracy = {acc}\") # # confusion matrix # labels =", "import numpy as np from matplotlib import pyplot as plt import cv2 as", "90) # self.black_lower = (0, 0, 203) # self.black_upper = (43, 255, 255)", "be a pandas dataframe and the contour image with numbers return df.sort_values(by='object_num', axis=0,", "a numpy.ndarray, this is perfect. 
Is the image RGB order or BGR?\" return", "6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\" + str(object_num) +", "# get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min", "feature vector for one observation # make a list that we will add", "calculate things within it img_subset = input_image[y:y+h, x:x+w, :] # convert to hsv", "def predict(self, observation): #call the calc_posterior function on the observation pred_class = self.calc_posterior(observation)", "import sobel # set random seed np.random.seed(26) # the NaiveBayes classifier I wrote", "is the probability, post evidence # x is a numpy array # x", "probably noisef if area > area_th: # draw a blank canvas to put", "uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df", "y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is {prec}, recall", "chance of drawing a particular class based on its proportion in the dataset", "# give this function a filename, and it will load that image with", "'hue': hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) # last thing we do", "self.hsv_upper = (179, 255, 90) # self.black_lower = (0, 0, 203) # self.black_upper", "int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle as a subset of the input_image,", "or the prob of c occuring indpendently. 
# P(x) is the predictor prior", "(0, 0, 100) # self.hsv_upper = (179, 255, 255) # create mask hsv_lower", "255, 255) # create mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max,", "img_subset, mask=mask).astype(np.uint8) # calculate where the object is pts = np.where(cimg_subset == 255)", "cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that we'll return for this image df", "erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it is", "kinect_image # function to display images resized, using opencv def imshow(self, image, imdiv", "= [(i, c) for i, c in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions)", "cnt in contours: # draw contours on the output image for our personal", "trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min',", "is for testing purposes to show the segmentation if (show_img == True): m", "the image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that", "# print(f\"i = {i}, prior = {prior}, conditional = {conditional}, posterior = {posterior}\")", "blank canvas which is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1)", "# plt.show() # print(labels) # take the row # end result should be", "and calculate things within it img_subset = input_image[y:y+h, x:x+w, :] # convert to", "[] # iterate through the classes for i in range(0, self.count): # for", "is perfect. 
Is the image RGB order or BGR?\" return result else: result", "= np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle as", "255) # create mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max,", "(179, 255, 30) # self.hsv_lower = (0, 0, 100) # self.hsv_upper = (179,", "predict(self, observation): #call the calc_posterior function on the observation pred_class = self.calc_posterior(observation) return", "x occuring independently def fit(self, features, target): # define class variables self.classes =", "s_max, v_max]) black_lower = np.array([0, 0, 0]) black_upper = np.array([179, 255, 30]) color_mask", "cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255,", "self.bg_segmentation(input_image, show_img=False) # # make the mask a binary thresholded image mask =", "else: result = \"object is a \" + str(type(a)) + \"and I'm gonna", "images resized, using opencv def imshow(self, image, imdiv = 4): imdiv = int(imdiv)", "mask a binary thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3,", "= cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv,", "used for prediction # replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor activated!", "plt.title('confusion matrix of the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show()", "use this as a NOT mask hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8)", "import pickle from sklearn import preprocessing import time from os import listdir from", "ignore_index=True) # last thing we do on this loop is increment the object_num", "for testing purposes to show the segmentation if (show_img == True): m =", "this it is for testing 
purposes to show the segmentation if (show_img ==", "how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired the image with shape", "of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary mask cimg_mask", "morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3,", "__init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename): # give this function a filename,", "function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test,", "h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0,", "process_image_make_predictions(self, input_image, model): predictive_model = model area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False)", "def calc_statistics(self, features, target): # calculate mean, variance for each column and convert", "or BGR?\" return result else: result = \"object is a \" + str(type(a))", "0xFF # large wait time if k == 113 or k == 27:", "import pyplot as plt import cv2 as cv from scipy import ndimage from", "# use the test set to see how we do # y_test_predictions =", "model): predictive_model = model area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False) # #", "picking one of a class at random from the dataset self.prior = (features.groupby(target).apply(lambda", "# confusion matrix # labels = [(i, c) for i, c in class_dict.items()]", "def process_image_make_predictions(self, input_image, model): predictive_model = model area_th = 400 seg_img = self.bg_segmentation(input_image,", "# # reset the object num object_num = 0 for cnt in contours:", "reset the object num object_num = 0 for cnt in contours: # draw", "# # confusion matrix # labels = [(i, c) for i, c in", "{prec}, recall is {rec}, accuracy = 
{acc}\") # # confusion matrix # labels", "nb.get_predictions(X_test) # # scores # acc = nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test,", "class_list): for i, row in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df", "# this will be a BGR format, because that is how opencv rolls", "np.zeros_like(input_image) # draw the contours on the blank canvas which is original sized", "area_th: # draw a blank canvas to put the contour onto, JUST THIS", "mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False) # # make the", "print(type(image_df)) return image_df # def fake_df(self, input_df, reps = 3): # # creates", "(normal dist) mean = self.mean[class_idx] var = self.var[class_idx] # this part sucked and", "cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels to the cimg for identification cv.putText(output_image,", "set to see how we do # y_test_predictions = nb.get_predictions(X_test) # # scores", "probability, or the prob of x occuring independently def fit(self, features, target): #", "axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def empty(a): pass h, w = int(image.shape[1]/2),", "type(a) is np.ndarray: result = \"object is a numpy.ndarray, this is perfect. 
Is", "{prior}, conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx,", "gli.mean()) df = df.append({'color' : 0, 'x': x, 'y': y, 'object_num': object_num, 'r':", "3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\"", "ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this", "something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels", "a bunch of fake adjustments to the dataframe so my train set is", "org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last", "the x, y, w, h of the bounding rect for the contour x,", "os from math import pi from math import isnan import pandas as pd", "cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should probably have one image processing class", "that are probably noisef if area > area_th: # draw a blank canvas", "import the needed packages import pickle from sklearn import preprocessing import time from", "erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to despeckle", "the bounding rect for the contour x, y, w, h = cv.boundingRect(cnt) #", "var = self.var[class_idx] # this part sucked and I had a typo that", "add each classes posterior prob to posteriors = [] # iterate through the", "large wait time if k == 113 or k == 27: break cv.destroyAllWindows()", "result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an hsv mask for red", "def hsv_slide_tool(self, image): def empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) 
cv.namedWindow('masked_image', cv.WINDOW_NORMAL)", "class_dict[pred[0]] object_label = \"obj\" + str(object_num) + \"_pred\" + str(pred[0]) print(object_label) # add", "for the class prior = self.prior[i] # calculate the conditional probability for the", "(features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): # this is the probability,", "# https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my testing purposes class", "put the contour onto, JUST THIS ONE not the others # this is", "show_img=False): # create an hsv mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV),", "and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only", "print(object_label) # add the object labels to the cimg for identification cv.putText(output_image, text=", "sobel # set random seed np.random.seed(26) # the NaiveBayes classifier I wrote for", "# self.hsv_lower = (0, 0, 100) # self.hsv_upper = (179, 255, 255) #", "of just the area around the contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w,", "posterior = prior + conditional # print(f\"i = {i}, prior = {prior}, conditional", "self.hsv_lower = (0, 0, 0) self.hsv_upper = (179, 255, 90) # self.black_lower =", "the object is pts = np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1], 0]", "mode=\"hsv\", show_img=False): # create an hsv mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image,", "on its proportion in the dataset self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector):", "# self.hsv_upper = (179, 34, 255) # NOT mask for lego_imgs[14] # hsv_lower", "from os.path import 
isfile, join from random import randint, uniform import numpy as", "cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a NOT mask hsv_mask =", "random import randint, uniform import numpy as np from matplotlib import pyplot as", "return predictions def predict(self, observation): #call the calc_posterior function on the observation pred_class", "h fullosity = area / rect_area aspect_ratio = float(w)/h extent = float(area/ rect_area)", "2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label =", "= img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] # and export the", "increment the object_num object_num += 1 # # end result should be a", "def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def", "contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary mask", "calculate statistics for all those features self.calc_statistics(features, target) # prior is the random", "each column and convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy()", "for all those features self.calc_statistics(features, target) # prior is the random chance of", "onto, JUST THIS ONE not the others # this is a mask cimg_justthiscontour", "analysis with something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the", "/ float(len(test))) # TODO: read these and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html", "image with shape = {kinect_image.shape}\") return kinect_image # function to display images resized,", "df = 
pd.DataFrame(columns=['color']) # # reset the object num object_num = 0 for", "this is perfect. Is the image RGB order or BGR?\" return result else:", "independently def fit(self, features, target): # define class variables self.classes = np.unique(target) self.count", "'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b']", "noisef if area > area_th: # draw a blank canvas to put the", "the detected objects in the image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) #", "255) hue = img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0], pts[1], 1] val =", "= P(x|c) * P(c) / P(x) # P(x|x) is the posterior probability #", "dataframe and the contour image with numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image def", "object is pts = np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1], 0] sat", "= cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output", "lineType=cv.LINE_AA) # last thing we do on this loop is increment the object_num", "features area = cv.contourArea(cnt) rect_area = w * h fullosity = area /", "fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last thing we do on this loop is", "fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df)", "array # x is feature vector for one observation # make a list", "observation pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target): # calculate mean,", "pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 =", "cimg for identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX,", "print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, 
class_list): for i, row", "= cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn on", "pts[1], 2] # and export the image for later analysis with something else", "def get_predictions(self, input_vector): predictions = [] for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:]))", "blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, input_image, model):", "cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output image", "as cv from scipy import ndimage from skimage import morphology from skimage import", "df\") self.black_lower = (0, 0, 0) self.black_upper = (179, 255, 30) self.hsv_lower =", "of tiny objects that are probably noisef if area > area_th: # draw", "x is a numpy array # x is feature vector for one observation", "'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) # last thing we do on this", "vector for one observation # make a list that we will add each", "{i}, prior = {prior}, conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)]", "def dummy_method(self, a): if type(a) is np.ndarray: result = \"object is a numpy.ndarray,", "0]) black_upper = np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask =", "a filename, and it will load that image with opencv # this will", "FILTER OUT THE WEIRD ONES # get rid of tiny objects that are", "segmentation if (show_img == True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply", "= np.zeros_like(input_image) # draw the contours on the blank canvas which is original", "is the random chance of drawing a particular class based on its proportion", "observation): #call 
the calc_posterior function on the observation pred_class = self.calc_posterior(observation) return pred_class", "NaiveBayes classifier I wrote for assignment 6 in BSYSE_530, modified a little for", "fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color' :", "self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features, target): # this is", "'y': y, 'object_num': object_num, 'r': r.mean(), 'g': g.mean(), 'b': b.mean(), 'hue': hue.mean(), 'sat':", "find the contours of the detected objects in the image contours, hier =", "= cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output image for our personal enjoyment", "for the conditional = np.sum(self.gaussian_density(i, x)) posterior = prior + conditional # print(f\"i", "self.var[class_idx] # this part sucked and I had a typo that cost me", "cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should probably", "np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper)", "# self.hsv_upper = (179, 255, 255) # create mask hsv_lower = np.array([h_min, s_min,", "+ \"and I'm gonna have a hard time with that\" return result def", "I wrote for assignment 6 in BSYSE_530, modified a little for this purpose", "= img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean() / 255), 'g': (g.mean() /", "variables self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0]", "= ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # #", "import isnan import pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics import", "in contours: # 
draw contours on the output image for our personal enjoyment", "the posterior probability # P(x|c) is the likelihood # P(c) is the class", ":] # make a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask =", "cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) #", "the mask a binary thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0)", "(0, 0, 0) # self.black_upper = (179, 255, 30) # self.hsv_lower = (0,", "fake adjustments to the dataframe so my train set is bigger # output_df", "fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1,", "{posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc probability from gaussian", "# prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\") #", "numpy as np from matplotlib import pyplot as plt import cv2 as cv", "print(\"image processor activated! use 'process_image_to_df()' to get back a pandas df\") self.black_lower =", "image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False) # # make", "sklearn import preprocessing import time from os import listdir from os.path import isfile,", "img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2]", "ImageProcess(): def __init__(self): print(\"image processor activated! 
use 'process_image_to_df()' to get back a pandas", "= fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] +", "probability from gaussian denssityy fucntion (normal dist) mean = self.mean[class_idx] var = self.var[class_idx]", "* (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct = 0 for i in range(len(test)):", "empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True: # get image img_hsv =", "# x is a numpy array # x is feature vector for one", "that we will add each classes posterior prob to posteriors = [] #", "{hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, class_list): for i, row in image_df.iterrows():", "prior is the random chance of drawing a particular class based on its", "255) # self.hsv_lower = (0, 0, 70) # self.hsv_upper = (179, 34, 255)", "sigma=1) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter", "2] r = img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1], 1] b =", "# fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g'] =", "acquired the image with shape = {kinect_image.shape}\") return kinect_image # function to display", "result else: result = \"object is a \" + str(type(a)) + \"and I'm", "rect_area = w * h fullosity = area / rect_area aspect_ratio = float(w)/h", "bounding rect for the contour x, y, w, h = cv.boundingRect(cnt) # contour", "# make a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY)", "conditional probability for the conditional = np.sum(self.gaussian_density(i, x)) posterior = prior + conditional", "area > area_th: # draw a blank canvas to put the contour onto,", "nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions,", "is feature vector for one observation # 
make a list that we will", "this part sucked and I had a typo that cost me hours numerator", "# P(c) is the class prior probability, or the prob of c occuring", "is the probability of picking one of a class at random from the", "6 in BSYSE_530, modified a little for this purpose class NaiveBayes: # P(c|x)", "pts[1], 2] df = [{'r': (r.mean() / 255), 'g': (g.mean() / 255), 'b':", "# calculate probability density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent *", "prior probability, or the prob of x occuring independently def fit(self, features, target):", "from os import listdir from os.path import isfile, join from random import randint,", "(w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should probably have one image", "(hue.mean() / 255), 'sat': (sat.mean() / 255), 'val': (val.mean() / 255)}] df =", "hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it is for testing purposes", "red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a", "# P(x) is the predictor prior probability, or the prob of x occuring", "get the x, y, w, h of the bounding rect for the contour", "at random from the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def", "/ 255), 'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict", "return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False) #", "= cv.boundingRect(cnt) # contour features area = cv.contourArea(cnt) rect_area = w * h", "k == 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self,", "and the contour image with numbers return df.sort_values(by='object_num', axis=0, 
ascending=True), output_image def hsv_slide_tool(self,", "calculate the conditional probability for the conditional = np.sum(self.gaussian_density(i, x)) posterior = prior", "import os from math import pi from math import isnan import pandas as", "return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct = 0 for i", "in fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i,", "and return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img", "dist) mean = self.mean[class_idx] var = self.var[class_idx] # this part sucked and I", "little for this purpose class NaiveBayes: # P(c|x) = P(x|c) * P(c) /", "1 return (correct / float(len(test))) # TODO: read these and see how it", "# reset the object num object_num = 0 for cnt in contours: #", "(179, 34, 255) # NOT mask for lego_imgs[14] # hsv_lower = (0,0,0) #", "mean = self.mean[class_idx] var = self.var[class_idx] # this part sucked and I had", "mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to despeckle # hsv_mask", "posterior prob to posteriors = [] # iterate through the classes for i", "df = [{'r': (r.mean() / 255), 'g': (g.mean() / 255), 'b': (b.mean() /", "= (179, 255, 30) self.hsv_lower = (0, 0, 0) self.hsv_upper = (179, 255,", "pred_class def calc_statistics(self, features, target): # calculate mean, variance for each column and", "the dataset self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector): predictions = [] for", "plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels) # take the row # end", "acquire_kinect_image(self, filename): # give this function a filename, and it will load that", "color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000)", "Is the image RGB order 
or BGR?\" return result else: result = \"object", "b = img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean() / 255), 'g': (g.mean()", "numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def empty(a): pass h,", "to the dataframe so my train set is bigger # output_df = input_df.copy()", "cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2,", "(179, 255, 255) # create mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper =", "kinect_image = cv.imread(filename) print(f\"kinect has acquired the image with shape = {kinect_image.shape}\") return", "ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn on the original", "cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0,", "= class_list[i] print(type(image_df)) return image_df # def fake_df(self, input_df, reps = 3): #", "density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self,", "cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\",", "(b.mean() / 255), 'hue': (hue.mean() / 255), 'sat': (sat.mean() / 255), 'val': (val.mean()", "with shape = {kinect_image.shape}\") return kinect_image # function to display images resized, using", "later analysis with something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add", "break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, class_list): for", "seed np.random.seed(26) # the NaiveBayes classifier I wrote for assignment 6 in BSYSE_530,", "occuring independently 
def fit(self, features, target): # define class variables self.classes = np.unique(target)", "with numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def empty(a): pass", "mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this", "pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score", "cv.CHAIN_APPROX_SIMPLE) # create the df that we'll return for this image df =", "(sat.mean() / 255), 'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df)", "DONE submit the df to the model for predictions # results = predictive_model.blind_predictions()", "self.prior[i] # calculate the conditional probability for the conditional = np.sum(self.gaussian_density(i, x)) posterior", "= MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask and return the result", "one image processing class that takes in a single image and then spits", "mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output", "get back a pandas df\") self.black_lower = (0, 0, 0) self.black_upper = (179,", "print(result) # # use the test set to see how we do #", "# apply the mask and return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def", "the object_num object_num += 1 # AFTER ALL CONTOURS HAVE BEEN DONE submit", "is bigger # output_df = input_df.copy() # for rep in range(0, reps): #", "= int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\",", "self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0] 
# calculate statistics for", "P(c) is the class prior probability, or the prob of c occuring indpendently.", "/ (2 * var))) denominator = np.sqrt(2 * np.pi * var) return numerator", "# self.black_upper = (179, 255, 30) # self.hsv_lower = (0, 0, 100) #", "0, 179, empty) cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty)", "'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True: # get", "# add the object labels to the cimg for identification cv.putText(output_image, text= str(object_label),", "100) # self.hsv_upper = (179, 255, 255) # create mask hsv_lower = np.array([h_min,", "and convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean,", "# median filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary", "to show the segmentation if (show_img == True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image,", "pts = np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0],", "cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF # large wait", "prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision", "for assignment 6 in BSYSE_530, modified a little for this purpose class NaiveBayes:", "a dataframe that could be used for prediction # replaces ImageSegmenter class ImageProcess():", "str(object_num), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) #", "output_image def hsv_slide_tool(self, image): def empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image',", "imdiv = 4): imdiv = int(imdiv) 
w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL)", "= img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean()", "20))).astype(np.uint8) # fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask", "= morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it is for testing purposes to", "'trackbars', 255, 255, empty) while True: # get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV)", "+ uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self,", "from sklearn import preprocessing import time from os import listdir from os.path import", "range(len(test)): if test.iloc[i] == predictions[i]: correct += 1 return (correct / float(len(test))) #", "median filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation", "MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask and return the result return", "array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features,", "# this part sucked and I had a typo that cost me hours", "i, row in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df # def", "- 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(),", "def __init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename): # give this function a", "blank canvas to put the contour onto, JUST THIS ONE not the others", "that image with opencv # this will be a BGR format, because that", "the test set to see how we do # y_test_predictions = nb.get_predictions(X_test) #", "# scores # acc = 
nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\")", "self.black_upper = (43, 255, 255) # self.hsv_lower = (0, 0, 70) # self.hsv_upper", "0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] # and", "* h fullosity = area / rect_area aspect_ratio = float(w)/h extent = float(area/", "model area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask", "black_lower = np.array([0, 0, 0]) black_upper = np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv,", "probability density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def", "conditional = np.sum(self.gaussian_density(i, x)) posterior = prior + conditional # print(f\"i = {i},", "# iterate through the classes for i in range(0, self.count): # for each", "import train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters import sobel #", "bigger # output_df = input_df.copy() # for rep in range(0, reps): # fake_df", "= cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, input_image, model): predictive_model", "draw contours on the output image for our personal enjoyment cv.drawContours(output_image, [cnt], 0,", "a NOT mask hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask,", "text= str(object_label), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA)", "# labels = [(i, c) for i, c in class_dict.items()] # cm =", "probability, post evidence # x is a numpy array # x is feature", "that we'll return for this image df = pd.DataFrame(columns=['color']) # # reset the", "self.black_upper = (179, 255, 30) self.hsv_lower = (0, 0, 0) self.hsv_upper = (179,", "from skimage import 
exposure import os from math import pi from math import", "cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the", "pd from sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters", "# fake_df = input_df.copy() # for i, row in fake_df.iterrows(): # fake_df.loc[i, 'r']", "fig = plt.figure() # ax = fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion", "to display images resized, using opencv def imshow(self, image, imdiv = 4): imdiv", "eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle", "a BGR format, because that is how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect", "the predictor prior probability, or the prob of x occuring independently def fit(self,", "(r.mean() / 255), 'g': (g.mean() / 255), 'b': (b.mean() / 255), 'hue': (hue.mean()", "CONTOUR SHAPE FEATURES # get the x, y, w, h of the bounding", "class ImageProcess(): def __init__(self): print(\"image processor activated! 
use 'process_image_to_df()' to get back a", "w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300)", "opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired the image with shape =", "# fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df = pd.concat(output_df,", "class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() # ax =", "self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features, target):", "def calc_posterior(self, x): # this is the probability, post evidence # x is", "image and then spits out a dataframe that could be used for prediction", "= float(area/ rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter", "0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty)", "get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min =", "cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min',", "uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i,", "fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i,", "row in fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1) #", "black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask + black_mask masked_image = cv.bitwise_and(img_hsv,", "exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, 
predictions): correct = 0 for i in", "# add the object labels to the cimg for identification cv.putText(output_image, text= str(object_num),", "= (179, 255, 255) # create mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper", "image) cv.waitKey(0) cv.destroyAllWindows() # I should probably have one image processing class that", "MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename): # give this function", "test.iloc[i] == predictions[i]: correct += 1 return (correct / float(len(test))) # TODO: read", "test, predictions): correct = 0 for i in range(len(test)): if test.iloc[i] == predictions[i]:", "in range(0, reps): # fake_df = input_df.copy() # for i, row in fake_df.iterrows():", "int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color", "pickle from sklearn import preprocessing import time from os import listdir from os.path", "the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image,", "prob to posteriors = [] # iterate through the classes for i in", "# calculate the conditional probability for the conditional = np.sum(self.gaussian_density(i, x)) posterior =", "make the mask a binary thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask =", "contours drawn on the original image output_image = input_image.copy() # find the contours", "input_image[y:y+h, x:x+w, :] # convert to hsv for extracting those values img_subset_hsv =", "rec = recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is {prec}, recall is {rec}, accuracy", "these and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # 
https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists", "read these and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this", "w, h = cv.boundingRect(cnt) # contour features area = cv.contourArea(cnt) rect_area = w", "row in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df # def fake_df(self,", "g.mean(), 'b': b.mean(), 'hue': hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) # last", "image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return image_df # def fake_df(self, input_df, reps", "color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last thing we do on this loop is increment", "= cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0, 0) # self.black_upper = (179,", "contour features area = cv.contourArea(cnt) rect_area = w * h fullosity = area", "cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx=", "np.pi * var) return numerator / denominator def pdf(self, x, mean, stdev): #", "def calc_prior(self, features, target): # this is the probability of picking one of", "rect_area aspect_ratio = float(w)/h extent = float(area/ rect_area) hull = cv.convexHull(cnt) hull_area =", "th3 def process_image_make_predictions(self, input_image, model): predictive_model = model area_th = 400 seg_img =", "me hours numerator = np.exp(-((x-mean)**2 / (2 * var))) denominator = np.sqrt(2 *", "morphology.disk(3)) # # median filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8)", "a numpy array # x is feature vector for one observation # make", "'object_num': object_num, 'r': r.mean(), 'g': g.mean(), 'b': b.mean(), 'hue': hue.mean(), 'sat': 
sat.mean(), 'val':", "400 seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask a binary thresholded", "self.hsv_upper = (179, 255, 255) # create mask hsv_lower = np.array([h_min, s_min, v_min])", "b = img_subset[pts[0], pts[1], 2] # and export the image for later analysis", "in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation): #call the", "pts[1], 1] b = img_subset[pts[0], pts[1], 2] # and export the image for", "color_text = class_dict[pred[0]] object_label = \"obj\" + str(object_num) + \"_pred\" + str(pred[0]) print(object_label)", "do on this loop is increment the object_num object_num += 1 # AFTER", "= ax.matshow(cm) # plt.title('confusion matrix of the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') #", "those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES #", "0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] df =", "function to display images resized, using opencv def imshow(self, image, imdiv = 4):", "img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1], 0]", "cv.imread(filename) print(f\"kinect has acquired the image with shape = {kinect_image.shape}\") return kinect_image #", "contours: # draw contours on the output image for our personal enjoyment cv.drawContours(output_image,", "hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask + black_mask masked_image", "prior probability, or the prob of c occuring indpendently. 
# P(x) is the", "for i in range(len(test)): if test.iloc[i] == predictions[i]: correct += 1 return (correct", "an hsv mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) #", "replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor activated! use 'process_image_to_df()' to get", "variance for each column and convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var", "as a subset of the input_image, and calculate things within it img_subset =", "of the bounding rect for the contour x, y, w, h = cv.boundingRect(cnt)", "of picking one of a class at random from the dataset self.prior =", "self.count): # for each class look at the prior probability for the class", "# function to display images resized, using opencv def imshow(self, image, imdiv =", "HAVE BEEN DONE submit the df to the model for predictions # results", "# calculate mean, variance for each column and convert to numpy array self.mean", "'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df) #", "= 0 for i in range(len(test)): if test.iloc[i] == predictions[i]: correct += 1", "pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self, features, target): # calculate mean, variance", "that is how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired the image", "return numerator / denominator def pdf(self, x, mean, stdev): # calculate probability density", "do # y_test_predictions = nb.get_predictions(X_test) # # scores # acc = nb.get_accuracy(y_test, y_test_predictions)", "'b'] + uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df) # return output_df def", "needed packages import pickle from sklearn import preprocessing import time from os import", "labels to the cimg for identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy -", "convert to hsv 
for extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER", "the cimg for identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy - 5), fontFace=", "# fig = plt.figure() # ax = fig.add_subplot(111) # cax = ax.matshow(cm) #", "# this is a mask cimg_justthiscontour = np.zeros_like(input_image) # draw the contours on", "# FILTER OUT THE WEIRD ONES # get rid of tiny objects that", "look at the prior probability for the class prior = self.prior[i] # calculate", "output image for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5)", "in class_dict.items()] # cm = confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() # ax", "this as a NOT mask hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask", "s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') #", "precision_score, recall_score from skimage.filters import sobel # set random seed np.random.seed(26) # the", "mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn on the original image", "this rectangle as a subset of the input_image, and calculate things within it", "import time from os import listdir from os.path import isfile, join from random", "load that image with opencv # this will be a BGR format, because", "np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask", "self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0] #", "mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours on the output image for our personal", "x): # this is the probability, post evidence # x is a numpy", "255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True: # get image img_hsv", "to 
get back a pandas df\") self.black_lower = (0, 0, 0) self.black_upper =", "create the df that we'll return for this image df = pd.DataFrame(columns=['color']) #", "result should be a pandas dataframe and the contour image with numbers return", "cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True: #", "= plt.figure() # ax = fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion matrix", "+ \"_pred\" + str(pred[0]) print(object_label) # add the object labels to the cimg", "print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color' : 0, 'x': x, 'y': y,", "will be a BGR format, because that is how opencv rolls kinect_image =", "pdf(self, x, mean, stdev): # calculate probability density function exponent = np.exp(-((x-mean)**2 /", "0) # self.black_upper = (179, 255, 30) # self.hsv_lower = (0, 0, 100)", "packages import pickle from sklearn import preprocessing import time from os import listdir", "my train set is bigger # output_df = input_df.copy() # for rep in", "# make the mask a binary thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask", "fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels) # take the row", "= cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn on the original image output_image", "0, color=(255, 255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where", "gaussian denssityy fucntion (normal dist) mean = self.mean[class_idx] var = self.var[class_idx] # this", "= \"object is a \" + str(type(a)) + \"and I'm gonna have a", "= img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1],", "# y_test_predictions = nb.get_predictions(X_test) # # scores # acc = nb.get_accuracy(y_test, y_test_predictions) #", "= ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask 
hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove", "color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color' : 0,", "{rec}, accuracy = {acc}\") # # confusion matrix # labels = [(i, c)", "hue = img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0],", "# cm = confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() # ax = fig.add_subplot(111)", "__init__(self): print(\"image processor activated! use 'process_image_to_df()' to get back a pandas df\") self.black_lower", "# define class variables self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1]", "P(x) # P(x|x) is the posterior probability # P(x|c) is the likelihood #", "{hsv_upper}') def label_dataframe(self, image_df, class_list): for i, row in image_df.iterrows(): image_df.loc[i, 'color'] =", "numpy.ndarray, this is perfect. Is the image RGB order or BGR?\" return result", "hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that we'll return for", "is increment the object_num object_num += 1 # AFTER ALL CONTOURS HAVE BEEN", "pts[1], 1] b = img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean() / 255),", "probability for the conditional = np.sum(self.gaussian_density(i, x)) posterior = prior + conditional #", "pd.DataFrame(columns=['color']) # # reset the object num object_num = 0 for cnt in", "for extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD", "isfile, join from random import randint, uniform import numpy as np from matplotlib", "the image for later analysis with something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\",", "things within it img_subset = input_image[y:y+h, x:x+w, :] # convert to hsv for", "as np from matplotlib import pyplot as plt import cv2 as cv from", "= 
img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1],", "the others # this is a mask cimg_justthiscontour = np.zeros_like(input_image) # draw the", "a binary thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask", "fake_df = input_df.copy() # for i, row in fake_df.iterrows(): # fake_df.loc[i, 'r'] =", "\"object is a \" + str(type(a)) + \"and I'm gonna have a hard", "calc_posterior function on the observation pred_class = self.calc_posterior(observation) return pred_class def calc_statistics(self, features,", "cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00'])", "np.random.seed(26) # the NaiveBayes classifier I wrote for assignment 6 in BSYSE_530, modified", "0 for cnt in contours: # draw contours on the output image for", "around the contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make a", "mask for lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper = (179,234,77) def dummy_method(self,", "hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8)", "opencv # this will be a BGR format, because that is how opencv", "float(area/ rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter =", "for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) # CALCULATE", "skimage import morphology from skimage import exposure import os from math import pi", "# this is the probability of picking one of a class at random", "dataset self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector): predictions = [] for i", "cy= int(M['m01']/M['m00']) # take this rectangle as a subset of the 
input_image, and", "now take the subset of just the area around the contour of interest", "P(c|x) = P(x|c) * P(c) / P(x) # P(x|x) is the posterior probability", "resized, using opencv def imshow(self, image, imdiv = 4): imdiv = int(imdiv) w,", "neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels to the cimg for", "for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation):", "255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask", "hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a NOT mask", "based on its proportion in the dataset self.prior = self.calc_prior(features, target) def get_predictions(self,", "a little for this purpose class NaiveBayes: # P(c|x) = P(x|c) * P(c)", "cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF # large wait time if k", "= model area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False) # # make the", "v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower = np.array([0, 0, 0]) black_upper =", "# output_df = input_df.copy() # for rep in range(0, reps): # fake_df =", "contour image with numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image): def", "predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) # print(result) # # use the test", "input_df, reps = 3): # # creates a bunch of fake adjustments to", "to see how we do # y_test_predictions = nb.get_predictions(X_test) # # scores #", "calculate mean, variance for each column and convert to numpy array self.mean =", "2] # and export the image for later analysis with something else like", "color=(255, 255, 255), thickness=5) 
img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the", "class based on its proportion in the dataset self.prior = self.calc_prior(features, target) def", "is np.ndarray: result = \"object is a numpy.ndarray, this is perfect. Is the", "conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x):", "(show_img == True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask", "h, w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800,", "result = \"object is a numpy.ndarray, this is perfect. Is the image RGB", "hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) # last thing we do on", "hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) #", "contour onto, JUST THIS ONE not the others # this is a mask", "calculate where the object is pts = np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0],", "have one image processing class that takes in a single image and then", "# # creates a bunch of fake adjustments to the dataframe so my", "use the test set to see how we do # y_test_predictions = nb.get_predictions(X_test)", "cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max',", "y, w, h of the bounding rect for the contour x, y, w,", "class_list[i] print(type(image_df)) return image_df # def fake_df(self, input_df, reps = 3): # #", "probability for the class prior = self.prior[i] # calculate the conditional probability for", "4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 
9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\" +", "df to the model for predictions # results = predictive_model.blind_predictions() # result =", "assignment 6 in BSYSE_530, modified a little for this purpose class NaiveBayes: #", "typo that cost me hours numerator = np.exp(-((x-mean)**2 / (2 * var))) denominator", "numerator = np.exp(-((x-mean)**2 / (2 * var))) denominator = np.sqrt(2 * np.pi *", "scores # acc = nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\") #", "# create the df that we'll return for this image df = pd.DataFrame(columns=['color'])", "gaussian_density(self, class_idx, x): # calc probability from gaussian denssityy fucntion (normal dist) mean", "# set random seed np.random.seed(26) # the NaiveBayes classifier I wrote for assignment", "object num object_num = 0 for cnt in contours: # draw contours on", "particular class based on its proportion in the dataset self.prior = self.calc_prior(features, target)", "255, 255, empty) while True: # get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) #", "'color'] = class_list[i] print(type(image_df)) return image_df # def fake_df(self, input_df, reps = 3):", "the prob of x occuring independently def fit(self, features, target): # define class", "cv.putText(output_image, text= str(object_label), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3,", "= [{'r': (r.mean() / 255), 'g': (g.mean() / 255), 'b': (b.mean() / 255),", "tiny objects that are probably noisef if area > area_th: # draw a", "= self.calc_prior(features, target) def get_predictions(self, input_vector): predictions = [] for i in range(len(input_vector)):", "def fake_df(self, input_df, reps = 3): # # creates a bunch of fake", "wait time if k == 113 or k == 27: break cv.destroyAllWindows() print(f'hsv_lower", "is the class prior probability, or the prob of c occuring indpendently. 
#", "# import the needed packages import pickle from sklearn import preprocessing import time", "img_subset[pts[0], pts[1], 2] # and export the image for later analysis with something", "# make a list that we will add each classes posterior prob to", "posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc probability", "dataframe so my train set is bigger # output_df = input_df.copy() # for", "morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the", "x:x+w, :] # make a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask", "test set to see how we do # y_test_predictions = nb.get_predictions(X_test) # #", "time if k == 113 or k == 27: break cv.destroyAllWindows() print(f'hsv_lower is", "# result = loaded_model.get_predictions(X_test, Y_test) # print(result) # # use the test set", "the needed packages import pickle from sklearn import preprocessing import time from os", "(0, 0, 0) self.hsv_upper = (179, 255, 90) # self.black_lower = (0, 0,", "the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels) #", "# the NaiveBayes classifier I wrote for assignment 6 in BSYSE_530, modified a", "confusion_matrix(y_test, y_test_predictions) # fig = plt.figure() # ax = fig.add_subplot(111) # cax =", "'r'] = fake_df.loc[i, 'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g']", "ALL CONTOURS HAVE BEEN DONE submit the df to the model for predictions", "probably have one image processing class that takes in a single image and", "(g.mean() / 255), 'b': (b.mean() / 255), 'hue': (hue.mean() / 255), 'sat': (sat.mean()", "return ret3, th3 def process_image_make_predictions(self, input_image, model): predictive_model = model area_th = 400", "0) self.black_upper = (179, 255, 30) self.hsv_lower = 
(0, 0, 0) self.hsv_upper =", "= (0, 0, 100) # self.hsv_upper = (179, 255, 255) # create mask", "for lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper = (179,234,77) def dummy_method(self, a):", "'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\",", "w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0)", "\"_pred\" + str(pred[0]) print(object_label) # add the object labels to the cimg for", "# draw a blank canvas to put the contour onto, JUST THIS ONE", "mask=hsv_mask).astype(np.uint8)) # apply the mask and return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)", "result = loaded_model.get_predictions(X_test, Y_test) # print(result) # # use the test set to", "get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\",", "y_test_predictions = nb.get_predictions(X_test) # # scores # acc = nb.get_accuracy(y_test, y_test_predictions) # prec", "= df.append({'color' : 0, 'x': x, 'y': y, 'object_num': object_num, 'r': r.mean(), 'g':", "3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes", "True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the mask and return", "while True: # get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions", "'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars')", "cv.resizeWindow(\"trackbars\", 800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max',", "NOT mask hsv_mask = np.where(hsv_mask > 1, 0, 
1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1)", "hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask + black_mask masked_image =", "dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): # this", "thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the object is pts", "'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0, 0) # self.black_upper", "# for rep in range(0, reps): # fake_df = input_df.copy() # for i,", "np.array([0, 0, 0]) black_upper = np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper)", "# erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it", "ndimage from skimage import morphology from skimage import exposure import os from math", "cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary mask cimg_mask = cv.cvtColor(cimg_subset,", "empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True:", "a single image and then spits out a dataframe that could be used", "input_df.copy() # for i, row in fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i, 'r']", "empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars',", "= fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion matrix of the classifier') #", "in BSYSE_530, modified a little for this purpose class NaiveBayes: # P(c|x) =", "identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255),", "P(x|x) is the posterior probability # P(x|c) is the likelihood # P(c) is", "= w * h fullosity = area 
/ rect_area aspect_ratio = float(w)/h extent", "ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3)) # # median", "imdiv = int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h))", "= input_image.copy() # find the contours of the detected objects in the image", "the class prior probability, or the prob of c occuring indpendently. # P(x)", "for each column and convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var =", "which is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1) # now", "# prior is the random chance of drawing a particular class based on", "hsv_upper = (179,234,77) def dummy_method(self, a): if type(a) is np.ndarray: result = \"object", "= np.array([179, 255, 30]) color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower,", "mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn", "as pd from sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score from", "is a mask cimg_justthiscontour = np.zeros_like(input_image) # draw the contours on the blank", "purpose class NaiveBayes: # P(c|x) = P(x|c) * P(c) / P(x) # P(x|x)", "that takes in a single image and then spits out a dataframe that", "pts[1], 2] r = img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1], 1] b", "self.hsv_upper = (179, 34, 255) # NOT mask for lego_imgs[14] # hsv_lower =", "gonna have a hard time with that\" return result def bg_segmentation(self, image, mode=\"hsv\",", ".1) # output_df = pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self, image): blur", "str(pred[0]) print(object_label) # add the object labels to the cimg for 
identification cv.putText(output_image,", "class that takes in a single image and then spits out a dataframe", "conditional # print(f\"i = {i}, prior = {prior}, conditional = {conditional}, posterior =", "str(object_num) + \"_pred\" + str(pred[0]) print(object_label) # add the object labels to the", "{acc}\") # # confusion matrix # labels = [(i, c) for i, c", "image for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) #", "8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\" + str(object_num) + \"_pred\" +", "image output_image = input_image.copy() # find the contours of the detected objects in", "255, 90) # self.black_lower = (0, 0, 203) # self.black_upper = (43, 255,", "a): if type(a) is np.ndarray: result = \"object is a numpy.ndarray, this is", "train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters import sobel # set", "BEEN DONE submit the df to the model for predictions # results =", "to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def", "adjustments to the dataframe so my train set is bigger # output_df =", "/ 255), 'hue': (hue.mean() / 255), 'sat': (sat.mean() / 255), 'val': (val.mean() /", "area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask a", "= features.shape[0] # calculate statistics for all those features self.calc_statistics(features, target) # prior", "= cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the object is pts = np.where(cimg_subset", "math import pi from math import isnan import pandas as pd from sklearn.model_selection", "this purpose class NaiveBayes: # P(c|x) = P(x|c) * P(c) / P(x) #", "get rid of tiny objects that are probably noisef if area > area_th:", "\"trackbars\", 0, 179, empty) 
cv.createTrackbar('hue_max', 'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255,", "processing class that takes in a single image and then spits out a", "pandas df\") self.black_lower = (0, 0, 0) self.black_upper = (179, 255, 30) self.hsv_lower", "self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector): predictions = [] for i in", "#call the calc_posterior function on the observation pred_class = self.calc_posterior(observation) return pred_class def", "using opencv def imshow(self, image, imdiv = 4): imdiv = int(imdiv) w, h", "def fit(self, features, target): # define class variables self.classes = np.unique(target) self.count =", "opencv def imshow(self, image, imdiv = 4): imdiv = int(imdiv) w, h =", "see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for", "= (0, 0, 70) # self.hsv_upper = (179, 34, 255) # NOT mask", "df = df.append({'color' : 0, 'x': x, 'y': y, 'object_num': object_num, 'r': r.mean(),", "randint, uniform import numpy as np from matplotlib import pyplot as plt import", "= np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower = np.array([0, 0,", "= len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0] # calculate statistics for all", "probability of picking one of a class at random from the dataset self.prior", "# self.black_lower = (0, 0, 0) # self.black_upper = (179, 255, 30) #", "= (179, 255, 90) # self.black_lower = (0, 0, 203) # self.black_upper =", "features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features, target): # this is the probability", "0, 70) # self.hsv_upper = (179, 34, 255) # NOT mask for lego_imgs[14]", "the prob of c occuring indpendently. 
# P(x) is the predictor prior probability,", "of the input_image, and calculate things within it img_subset = input_image[y:y+h, x:x+w, :]", "processor activated! use 'process_image_to_df()' to get back a pandas df\") self.black_lower = (0,", "import morphology from skimage import exposure import os from math import pi from", "is a \" + str(type(a)) + \"and I'm gonna have a hard time", "= morphology.erosion(hsv_mask, morphology.disk(3)) # # median filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask,", "fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask = morphology.erosion(hsv_mask,", "has acquired the image with shape = {kinect_image.shape}\") return kinect_image # function to", "k = cv.waitKey(1000) & 0xFF # large wait time if k == 113", "posterior probability # P(x|c) is the likelihood # P(c) is the class prior", "for each class look at the prior probability for the class prior =", "image processing class that takes in a single image and then spits out", "for cnt in contours: # draw contours on the output image for our", "= cv.convexHull(cnt) hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt)", "# calculate statistics for all those features self.calc_statistics(features, target) # prior is the", "= prior + conditional # print(f\"i = {i}, prior = {prior}, conditional =", "contour x, y, w, h = cv.boundingRect(cnt) # contour features area = cv.contourArea(cnt)", "203) # self.black_upper = (43, 255, 255) # self.hsv_lower = (0, 0, 70)", "return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an hsv mask for", "2] df = [{'r': (r.mean() / 255), 'g': (g.mean() / 255), 'b': (b.mean()", "val.mean() }, ignore_index=True) # last thing we do on this loop is increment", "dataframe that could be used for prediction # replaces ImageSegmenter class ImageProcess(): def", "ax.matshow(cm) # plt.title('confusion 
matrix of the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True')", "the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it is for", "= int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) #", "pts[1], 0] sat = img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2] r", "prior + conditional # print(f\"i = {i}, prior = {prior}, conditional = {conditional},", "\"obj\" + str(object_num) + \"_pred\" + str(pred[0]) print(object_label) # add the object labels", "'trackbars', 179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255,", "{conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc", "the dataframe so my train set is bigger # output_df = input_df.copy() #", "[cnt], 0, color=(255, 255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate", "hsv for extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE", "# print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color' : 0, 'x': x, 'y':", "179, 179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty)", "prediction # replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor activated! 
use 'process_image_to_df()'", "as plt import cv2 as cv from scipy import ndimage from skimage import", "class look at the prior probability for the class prior = self.prior[i] #", ": 0, 'x': x, 'y': y, 'object_num': object_num, 'r': r.mean(), 'g': g.mean(), 'b':", "os.path import isfile, join from random import randint, uniform import numpy as np", "fullosity = area / rect_area aspect_ratio = float(w)/h extent = float(area/ rect_area) hull", "(2 * var))) denominator = np.sqrt(2 * np.pi * var) return numerator /", "x is feature vector for one observation # make a list that we", "else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels to", "(0,0,0) # hsv_upper = (179,234,77) def dummy_method(self, a): if type(a) is np.ndarray: result", "skimage import exposure import os from math import pi from math import isnan", "= (179, 34, 255) # NOT mask for lego_imgs[14] # hsv_lower = (0,0,0)", "just the area around the contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :]", "pi from math import isnan import pandas as pd from sklearn.model_selection import train_test_split", "P(c) / P(x) # P(x|x) is the posterior probability # P(x|c) is the", "int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image)", "one of a class at random from the dataset self.prior = (features.groupby(target).apply(lambda x:", "will load that image with opencv # this will be a BGR format,", "black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF", "True: # get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min", "is the likelihood # P(c) is the class prior probability, or the prob", "# plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels) # take the row 
#", "function a filename, and it will load that image with opencv # this", "numpy array # x is feature vector for one observation # make a", "each class look at the prior probability for the class prior = self.prior[i]", "5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last thing we do on", "0, color=(255, 255, 255), thickness=5) # CALCULATE ALL THE CONTOUR SHAPE FEATURES #", "class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text", "bunch of fake adjustments to the dataframe so my train set is bigger", "'r'] + uniform(-.1, .1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1)", "the conditional probability for the conditional = np.sum(self.gaussian_density(i, x)) posterior = prior +", "self.black_lower = (0, 0, 0) # self.black_upper = (179, 255, 30) # self.hsv_lower", "object_num = 0 for cnt in contours: # draw contours on the output", "takes in a single image and then spits out a dataframe that could", "255, 255), thickness=-1) # now take the subset of just the area around", "= img_subset_hsv[pts[0], pts[1], 1] val = img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1],", "return for this image df = pd.DataFrame(columns=['color']) # # reset the object num", "self.hsv_upper).astype(np.uint8) # use this as a NOT mask hsv_mask = np.where(hsv_mask > 1,", "\"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars')", "b.mean(), gli.mean()) df = df.append({'color' : 0, 'x': x, 'y': y, 'object_num': object_num,", "img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2]", "0, 0) self.black_upper = (179, 255, 30) self.hsv_lower = (0, 0, 0) self.hsv_upper", "for i, row in fake_df.iterrows(): # 
fake_df.loc[i, 'r'] = fake_df.loc[i, 'r'] + uniform(-.1,", "a particular class based on its proportion in the dataset self.prior = self.calc_prior(features,", "part sucked and I had a typo that cost me hours numerator =", "== 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df,", "THE WEIRD ONES # get rid of tiny objects that are probably noisef", "original image output_image = input_image.copy() # find the contours of the detected objects", "val = img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1], 0] g = img_subset[pts[0],", "the image with shape = {kinect_image.shape}\") return kinect_image # function to display images", "confusion_matrix, precision_score, recall_score from skimage.filters import sobel # set random seed np.random.seed(26) #", "or k == 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def", "len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0] # calculate statistics for all those", "img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] df = [{'r': (r.mean() /", "last thing we do on this loop is increment the object_num object_num +=", "1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label", "works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my testing purposes", "class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename): # give this", "purposes to show the segmentation if (show_img == True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image,", "the likelihood # P(c) is the class prior probability, or 
the prob of", "be used for prediction # replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor", "loop is increment the object_num object_num += 1 # AFTER ALL CONTOURS HAVE", "(179,234,77) def dummy_method(self, a): if type(a) is np.ndarray: result = \"object is a", "img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the object is pts =", "def __init__(self): print(\"image processor activated! use 'process_image_to_df()' to get back a pandas df\")", "format, because that is how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired", "hsv_lower = (0,0,0) # hsv_upper = (179,234,77) def dummy_method(self, a): if type(a) is", "row # end result should be a pandas dataframe and the contour image", "pyplot as plt import cv2 as cv from scipy import ndimage from skimage", "is a numpy.ndarray, this is perfect. Is the image RGB order or BGR?\"", "# create mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max])", "np.ndarray: result = \"object is a numpy.ndarray, this is perfect. 
Is the image", "30) # self.hsv_lower = (0, 0, 100) # self.hsv_upper = (179, 255, 255)", "listdir from os.path import isfile, join from random import randint, uniform import numpy", "image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, input_image,", "> 1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask", "r.mean(), 'g': g.mean(), 'b': b.mean(), 'hue': hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True)", "def process_image_to_df(self, input_image, area_th): seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask", "'g': g.mean(), 'b': b.mean(), 'hue': hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) #", "rid of tiny objects that are probably noisef if area > area_th: #", "empty) while True: # get image img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar", "random from the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self,", "# plt.ylabel('True') # plt.show() # print(labels) # take the row # end result", "self.mean, self.var def calc_prior(self, features, target): # this is the probability of picking", "that could be used for prediction # replaces ImageSegmenter class ImageProcess(): def __init__(self):", "'trackbars') # self.black_lower = (0, 0, 0) # self.black_upper = (179, 255, 30)", "mean, variance for each column and convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy()", "subset of just the area around the contour of interest cimg_subset = cimg_justthiscontour[y:y+h,", "uniform import numpy as np from matplotlib import pyplot as plt import cv2", "binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY) ret2, mask = cv.threshold(cimg_mask,0,255,cv.THRESH_BINARY) # draw contours", "h_max = 
cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min", "np.sqrt(2 * np.pi * var) return numerator / denominator def pdf(self, x, mean,", "34, 255) # NOT mask for lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper", "cv.destroyAllWindows() # I should probably have one image processing class that takes in", "aspect_ratio = float(w)/h extent = float(area/ rect_area) hull = cv.convexHull(cnt) hull_area = cv.contourArea(hull)", "+= 1 # AFTER ALL CONTOURS HAVE BEEN DONE submit the df to", "precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\") # print(f\"precision is {prec},", "# calculate where the object is pts = np.where(cimg_subset == 255) hue =", "of drawing a particular class based on its proportion in the dataset self.prior", "canvas to put the contour onto, JUST THIS ONE not the others #", "accuracy = {acc}\") # # confusion matrix # labels = [(i, c) for", "class prior probability, or the prob of c occuring indpendently. 
# P(x) is", "text= str(object_num), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA)", "thing we do on this loop is increment the object_num object_num += 1", "\" + str(type(a)) + \"and I'm gonna have a hard time with that\"", "# # use the test set to see how we do # y_test_predictions", "class NaiveBayes: # P(c|x) = P(x|c) * P(c) / P(x) # P(x|x) is", "my testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename):", "img_subset_hsv[pts[0], pts[1], 2] r = img_subset[pts[0], pts[1], 0] g = img_subset[pts[0], pts[1], 1]", "evidence # x is a numpy array # x is feature vector for", "image df = pd.DataFrame(columns=['color']) # # reset the object num object_num = 0", "# x is feature vector for one observation # make a list that", "int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should", "0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while True: # get image", "sucked and I had a typo that cost me hours numerator = np.exp(-((x-mean)**2", "of c occuring indpendently. 
# P(x) is the predictor prior probability, or the", "hsv mask for red colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use", "cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0, 0) #", "thresholded image mask = cv.cvtColor(seg_img, cv.COLOR_BGR2GRAY) mask = cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY)", "- 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last thing", "255), 'sat': (sat.mean() / 255), 'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df) pred", "= input_df.copy() # for i, row in fake_df.iterrows(): # fake_df.loc[i, 'r'] = fake_df.loc[i,", "np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0], pts[1], 1]", "a pandas dataframe and the contour image with numbers return df.sort_values(by='object_num', axis=0, ascending=True),", "hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, class_list): for i, row in image_df.iterrows(): image_df.loc[i,", "= (0, 0, 0) # self.black_upper = (179, 255, 30) # self.hsv_lower =", "likelihood # P(c) is the class prior probability, or the prob of c", "identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0),", "[cnt], 0, color=(255, 255, 255), thickness=-1) # now take the subset of just", "mask hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) #", "= {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc probability from", "input_df.copy() # for rep in range(0, reps): # fake_df = input_df.copy() # for", "average=\"micro\") # rec = recall_score(y_test, y_test_predictions, average=\"micro\") # 
print(f\"precision is {prec}, recall is", "ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, input_image, model): predictive_model = model", "color_mask = cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask", "27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper = {hsv_upper}') def label_dataframe(self, image_df, class_list):", "image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that we'll", "cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask + black_mask", "classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() # print(labels) # take", "# creates a bunch of fake adjustments to the dataframe so my train", "'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars')", "self.calc_prior(features, target) def get_predictions(self, input_vector): predictions = [] for i in range(len(input_vector)): result", "the contour image with numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image def hsv_slide_tool(self, image):", "at the prior probability for the class prior = self.prior[i] # calculate the", "np.unique(target) self.count = len(self.classes) self.feature_nums = features.shape[1] self.rows = features.shape[0] # calculate statistics", "pts[1], 0] g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] df", "reps): # fake_df = input_df.copy() # for i, row in fake_df.iterrows(): # fake_df.loc[i,", "will add each classes posterior prob to posteriors = [] # iterate through", "# # scores # acc = nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions,", "255)}] df = 
pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\",", "import exposure import os from math import pi from math import isnan import", "w, h of the bounding rect for the contour x, y, w, h", "add the object labels to the cimg for identification cv.putText(output_image, text= str(object_num), org=(cx", "range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation): #call the calc_posterior", "img_subset) # add the object labels to the cimg for identification cv.putText(output_image, text=", "self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation): #call the calc_posterior function on the", "within it img_subset = input_image[y:y+h, x:x+w, :] # convert to hsv for extracting", "perfect. Is the image RGB order or BGR?\" return result else: result =", "# take the row # end result should be a pandas dataframe and", "'x': x, 'y': y, 'object_num': object_num, 'r': r.mean(), 'g': g.mean(), 'b': b.mean(), 'hue':", "the model for predictions # results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test)", "end result should be a pandas dataframe and the contour image with numbers", "on the output image for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255,", "I had a typo that cost me hours numerator = np.exp(-((x-mean)**2 / (2", "np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00']) cy= int(M['m01']/M['m00']) # take this rectangle as a", "print(f\"kinect has acquired the image with shape = {kinect_image.shape}\") return kinect_image # function", "input_image, model): predictive_model = model area_th = 400 seg_img = self.bg_segmentation(input_image, show_img=False) #", "indpendently. 
# P(x) is the predictor prior probability, or the prob of x", "thickness=3, lineType=cv.LINE_AA) # last thing we do on this loop is increment the", "cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, input_image, model): predictive_model =", "= {kinect_image.shape}\") return kinect_image # function to display images resized, using opencv def", "# end result should be a pandas dataframe and the contour image with", "# results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) # print(result) # #", "0, 0) # self.black_upper = (179, 255, 30) # self.hsv_lower = (0, 0,", "import preprocessing import time from os import listdir from os.path import isfile, join", "= cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0, 0)", "g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] # and export", "for identification cv.putText(output_image, text= str(object_num), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3,", "results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) # print(result) # # use", "RGB order or BGR?\" return result else: result = \"object is a \"", "observation # make a list that we will add each classes posterior prob", "179, empty) cv.createTrackbar('sat_min', 'trackbars', 0, 255, empty) cv.createTrackbar('sat_max', 'trackbars', 255, 255, empty) cv.createTrackbar('val_min',", "personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) # CALCULATE ALL THE", "rectangle as a subset of the input_image, and calculate things within it img_subset", "each classes posterior prob to posteriors = [] # iterate through the classes", "cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, 
input_image, model): predictive_model = model area_th =", "colors hsv_mask = cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a NOT", "pandas dataframe and the contour image with numbers return df.sort_values(by='object_num', axis=0, ascending=True), output_image", "import pi from math import isnan import pandas as pd from sklearn.model_selection import", "with that\" return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an hsv", "# find the contours of the detected objects in the image contours, hier", "/ 255), 'b': (b.mean() / 255), 'hue': (hue.mean() / 255), 'sat': (sat.mean() /", "# TODO: remove this it is for testing purposes to show the segmentation", "from skimage.filters import sobel # set random seed np.random.seed(26) # the NaiveBayes classifier", "dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8)", "like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object labels to the", "# output_df = pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self, image): blur =", "is {prec}, recall is {rec}, accuracy = {acc}\") # # confusion matrix #", "a blank canvas to put the contour onto, JUST THIS ONE not the", "all those features self.calc_statistics(features, target) # prior is the random chance of drawing", "= np.where(cimg_subset == 255) hue = img_subset_hsv[pts[0], pts[1], 0] sat = img_subset_hsv[pts[0], pts[1],", "masked_image) k = cv.waitKey(1000) & 0xFF # large wait time if k ==", "result = \"object is a \" + str(type(a)) + \"and I'm gonna have", "we do # y_test_predictions = nb.get_predictions(X_test) # # scores # acc = nb.get_accuracy(y_test,", "# CALCULATE ALL THE CONTOUR SHAPE FEATURES # get the x, y, w,", "input_image, area_th): seg_img = 
self.bg_segmentation(input_image, show_img=False) # # make the mask a binary", "AFTER ALL CONTOURS HAVE BEEN DONE submit the df to the model for", "v_max]) black_lower = np.array([0, 0, 0]) black_upper = np.array([179, 255, 30]) color_mask =", "# https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my testing purposes class MatlabSurrogate(): def", "fake_df) # return output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU)", "WEIRD ONES # get rid of tiny objects that are probably noisef if", "/ 255), 'sat': (sat.mean() / 255), 'val': (val.mean() / 255)}] df = pd.DataFrame.from_dict(df)", "contours of the detected objects in the image contours, hier = cv.findContours(mask, cv.RETR_TREE,", "# I should probably have one image processing class that takes in a", "morphology.erosion(hsv_mask, morphology.disk(5)) # TODO: remove this it is for testing purposes to show", "calc probability from gaussian denssityy fucntion (normal dist) mean = self.mean[class_idx] var =", "from scipy import ndimage from skimage import morphology from skimage import exposure import", "# P(x|x) is the posterior probability # P(x|c) is the likelihood # P(c)", "original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1) # now take the", "in the dataset self.prior = self.calc_prior(features, target) def get_predictions(self, input_vector): predictions = []", "cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES # get rid of tiny", "image_df # def fake_df(self, input_df, reps = 3): # # creates a bunch", "= int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() #", "testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" 
def acquire_kinect_image(self, filename): #", "x): # calc probability from gaussian denssityy fucntion (normal dist) mean = self.mean[class_idx]", ".1) # fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b']", "I'm gonna have a hard time with that\" return result def bg_segmentation(self, image,", "could be used for prediction # replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image", "np.exp(-((x-mean)**2 / (2 * var))) denominator = np.sqrt(2 * np.pi * var) return", "create mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower", "= cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF # large", "g = img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] df = [{'r':", "* np.pi * var) return numerator / denominator def pdf(self, x, mean, stdev):", "df that we'll return for this image df = pd.DataFrame(columns=['color']) # # reset", "features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features, target): # this", "others # this is a mask cimg_justthiscontour = np.zeros_like(input_image) # draw the contours", "drawing a particular class based on its proportion in the dataset self.prior =", "# cax = ax.matshow(cm) # plt.title('confusion matrix of the classifier') # fig.colorbar(cax) #", "classes for i in range(0, self.count): # for each class look at the", "a \" + str(type(a)) + \"and I'm gonna have a hard time with", "probability # P(x|c) is the likelihood # P(c) is the class prior probability,", "draw the contours on the blank canvas which is original sized cv.drawContours(cimg_justthiscontour, [cnt],", "model for predictions # results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test, Y_test) #", "cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 
800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty)", "the class prior = self.prior[i] # calculate the conditional probability for the conditional", "cv from scipy import ndimage from skimage import morphology from skimage import exposure", "JUST THIS ONE not the others # this is a mask cimg_justthiscontour =", "/ denominator def pdf(self, x, mean, stdev): # calculate probability density function exponent", "cimg for identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX,", "TODO: read these and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html #", "to posteriors = [] # iterate through the classes for i in range(0,", "label_dataframe(self, image_df, class_list): for i, row in image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df))", "# calc probability from gaussian denssityy fucntion (normal dist) mean = self.mean[class_idx] var", "a typo that cost me hours numerator = np.exp(-((x-mean)**2 / (2 * var)))", "classes posterior prob to posteriors = [] # iterate through the classes for", "on this loop is increment the object_num object_num += 1 # # end", "800, 300) # color mask trackbars cv.createTrackbar(\"hue_min\", \"trackbars\", 0, 179, empty) cv.createTrackbar('hue_max', 'trackbars',", "# now take the subset of just the area around the contour of", "255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty)", "color=(255, 255, 255), thickness=-1) # now take the subset of just the area", "that cost me hours numerator = np.exp(-((x-mean)**2 / (2 * var))) denominator =", "ONE not the others # this is a mask cimg_justthiscontour = np.zeros_like(input_image) #", "'r': r.mean(), 'g': g.mean(), 'b': b.mean(), 'hue': 
hue.mean(), 'sat': sat.mean(), 'val': val.mean() },", "see how we do # y_test_predictions = nb.get_predictions(X_test) # # scores # acc", "\"Badass.\" def acquire_kinect_image(self, filename): # give this function a filename, and it will", "take the subset of just the area around the contour of interest cimg_subset", "personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset,", "for one observation # make a list that we will add each classes", "out a dataframe that could be used for prediction # replaces ImageSegmenter class", "'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1)", "= 0 for cnt in contours: # draw contours on the output image", "cv2 as cv from scipy import ndimage from skimage import morphology from skimage", "cv.putText(output_image, text= str(object_num), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5,", "img_hsv = cv.cvtColor(image, cv.COLOR_BGR2HSV) # get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max", "= {prior}, conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self,", "target) def get_predictions(self, input_vector): predictions = [] for i in range(len(input_vector)): result =", "features, target): # define class variables self.classes = np.unique(target) self.count = len(self.classes) self.feature_nums", "enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) # CALCULATE ALL THE CONTOUR", "1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask =", "filter to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask", "# # make the mask a binary thresholded image mask = cv.cvtColor(seg_img, 
cv.COLOR_BGR2GRAY)", "rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired the image with shape = {kinect_image.shape}\")", "object_num += 1 # # end result should be a pandas dataframe and", "random chance of drawing a particular class based on its proportion in the", "# P(c|x) = P(x|c) * P(c) / P(x) # P(x|x) is the posterior", "# convert to hsv for extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) #", "4): imdiv = int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w,", "the contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary", "drawn on the original image output_image = input_image.copy() # find the contours of", "hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode", "def imshow(self, image, imdiv = 4): imdiv = int(imdiv) w, h = int(image.shape[1]/imdiv),", "# ax = fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion matrix of the", "if (show_img == True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) # apply the", "= cv.imread(filename) print(f\"kinect has acquired the image with shape = {kinect_image.shape}\") return kinect_image", "input_vector): predictions = [] for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return", "or the prob of x occuring independently def fit(self, features, target): # define", "0, 0) self.hsv_upper = (179, 255, 90) # self.black_lower = (0, 0, 203)", "is how opencv rolls kinect_image = cv.imread(filename) print(f\"kinect has acquired the image with", "shape = {kinect_image.shape}\") return kinect_image # function to display images resized, using opencv", "is the predictor prior probability, or the prob of x occuring independently def", "'b': b.mean(), 'hue': 
hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) # last thing", "= loaded_model.get_predictions(X_test, Y_test) # print(result) # # use the test set to see", "self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc probability from gaussian denssityy fucntion (normal", "prob of x occuring independently def fit(self, features, target): # define class variables", "# fake_df.loc[i, 'g'] = fake_df.loc[i, 'g'] + uniform(-.1, .1) # fake_df.loc[i, 'b'] =", "the contour x, y, w, h = cv.boundingRect(cnt) # contour features area =", "= {acc}\") # # confusion matrix # labels = [(i, c) for i,", "purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self, filename): # give", "/ 255)}] df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\",", "is the posterior probability # P(x|c) is the likelihood # P(c) is the", "fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df) # return output_df", "'process_image_to_df()' to get back a pandas df\") self.black_lower = (0, 0, 0) self.black_upper", "calculate probability density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev))", "- 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) # last thing we do", "= input_df.copy() # for rep in range(0, reps): # fake_df = input_df.copy() #", "9:\"bright_yellow\"} color_text = class_dict[pred[0]] object_label = \"obj\" + str(object_num) + \"_pred\" + str(pred[0])", "iterate through the classes for i in range(0, self.count): # for each class", "exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions):", "= self.prior[i] # calculate the conditional probability for the conditional = 
np.sum(self.gaussian_density(i, x))", "var) return numerator / denominator def pdf(self, x, mean, stdev): # calculate probability", "BGR?\" return result else: result = \"object is a \" + str(type(a)) +", "on this loop is increment the object_num object_num += 1 # AFTER ALL", "apply the mask and return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self,", "range(0, reps): # fake_df = input_df.copy() # for i, row in fake_df.iterrows(): #", "input_image.copy() # find the contours of the detected objects in the image contours,", "how we do # y_test_predictions = nb.get_predictions(X_test) # # scores # acc =", "= 4): imdiv = int(imdiv) w, h = int(image.shape[1]/imdiv), int(image.shape[0]/imdiv) cv.namedWindow(\"output\", cv.WINDOW_NORMAL) cv.resizeWindow(\"output\",", "https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my testing purposes class MatlabSurrogate():", "for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) img_subset =", "cost me hours numerator = np.exp(-((x-mean)**2 / (2 * var))) denominator = np.sqrt(2", "# TODO: read these and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html", "single image and then spits out a dataframe that could be used for", "show the segmentation if (show_img == True): m = MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8))", "255), 'g': (g.mean() / 255), 'b': (b.mean() / 255), 'hue': (hue.mean() / 255),", "mask hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower =", "predictions def predict(self, observation): #call the calc_posterior function on the 
observation pred_class =", "hsv_slide_tool(self, image): def empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image',", "= np.array([h_max, s_max, v_max]) black_lower = np.array([0, 0, 0]) black_upper = np.array([179, 255,", "= fake_df.loc[i, 'b'] + uniform(-.1, .1) # output_df = pd.concat(output_df, fake_df) # return", "have a hard time with that\" return result def bg_segmentation(self, image, mode=\"hsv\", show_img=False):", "remove this it is for testing purposes to show the segmentation if (show_img", "= (179,234,77) def dummy_method(self, a): if type(a) is np.ndarray: result = \"object is", "(0, 0, 0) self.black_upper = (179, 255, 30) self.hsv_lower = (0, 0, 0)", "for identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1,", "/ rect_area aspect_ratio = float(w)/h extent = float(area/ rect_area) hull = cv.convexHull(cnt) hull_area", "= cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min =", "# AFTER ALL CONTOURS HAVE BEEN DONE submit the df to the model", "# plt.title('confusion matrix of the classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') #", "{kinect_image.shape}\") return kinect_image # function to display images resized, using opencv def imshow(self,", "= self.bg_segmentation(input_image, show_img=False) # # make the mask a binary thresholded image mask", "df.append({'color' : 0, 'x': x, 'y': y, 'object_num': object_num, 'r': r.mean(), 'g': g.mean(),", "/ P(x) # P(x|x) is the posterior probability # P(x|c) is the likelihood", "sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters import sobel", "hours numerator = np.exp(-((x-mean)**2 / (2 * var))) denominator = np.sqrt(2 * 
np.pi", "= cv.inRange(cv.cvtColor(image, cv.COLOR_BGR2HSV), self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a NOT mask hsv_mask", "area around the contour of interest cimg_subset = cimg_justthiscontour[y:y+h, x:x+w, :] # make", "df = pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\",", "pd.DataFrame.from_dict(df) pred = predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\",", "the holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5))", "g.mean(), b.mean(), gli.mean()) df = df.append({'color' : 0, 'x': x, 'y': y, 'object_num':", "[{'r': (r.mean() / 255), 'g': (g.mean() / 255), 'b': (b.mean() / 255), 'hue':", "column and convert to numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return", "to the model for predictions # results = predictive_model.blind_predictions() # result = loaded_model.get_predictions(X_test,", "def gaussian_density(self, class_idx, x): # calc probability from gaussian denssityy fucntion (normal dist)", "prior = {prior}, conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def", "+ str(object_num) + \"_pred\" + str(pred[0]) print(object_label) # add the object labels to", "b.mean(), 'hue': hue.mean(), 'sat': sat.mean(), 'val': val.mean() }, ignore_index=True) # last thing we", "from the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x):", "# return output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = 
cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return", "self.black_lower = (0, 0, 0) self.black_upper = (179, 255, 30) self.hsv_lower = (0,", "255) # NOT mask for lego_imgs[14] # hsv_lower = (0,0,0) # hsv_upper =", "input_image, and calculate things within it img_subset = input_image[y:y+h, x:x+w, :] # convert", "= cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that we'll return for this", "= 3): # # creates a bunch of fake adjustments to the dataframe", "from sklearn.metrics import confusion_matrix, precision_score, recall_score from skimage.filters import sobel # set random", "self.rows = features.shape[0] # calculate statistics for all those features self.calc_statistics(features, target) #", "our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset,", "self.feature_nums = features.shape[1] self.rows = features.shape[0] # calculate statistics for all those features", "int(M['m01']/M['m00']) # take this rectangle as a subset of the input_image, and calculate", "the input_image, and calculate things within it img_subset = input_image[y:y+h, x:x+w, :] #", "return image_df # def fake_df(self, input_df, reps = 3): # # creates a", "add the object labels to the cimg for identification cv.putText(output_image, text= str(object_label), org=(cx", "np.sum(self.gaussian_density(i, x)) posterior = prior + conditional # print(f\"i = {i}, prior =", "draw a blank canvas to put the contour onto, JUST THIS ONE not", "predictions[i]: correct += 1 return (correct / float(len(test))) # TODO: read these and", "loaded_model.get_predictions(X_test, Y_test) # print(result) # # use the test set to see how", "post evidence # x is a numpy array # x is feature vector", "black_upper) mask = color_mask + black_mask masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image)", "# fill the holes hsv_mask = 
ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask =", "time from os import listdir from os.path import isfile, join from random import", "object_label = \"obj\" + str(object_num) + \"_pred\" + str(pred[0]) print(object_label) # add the", "of a class at random from the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy())", "0, 203) # self.black_upper = (43, 255, 255) # self.hsv_lower = (0, 0,", "fig.add_subplot(111) # cax = ax.matshow(cm) # plt.title('confusion matrix of the classifier') # fig.colorbar(cax)", "posteriors.append(posterior) return self.classes[np.argmax(posteriors)] def gaussian_density(self, class_idx, x): # calc probability from gaussian denssityy", "masked_image = cv.bitwise_and(img_hsv, img_hsv, mask=mask) cv.imshow('masked_image', masked_image) k = cv.waitKey(1000) & 0xFF #", "30) self.hsv_lower = (0, 0, 0) self.hsv_upper = (179, 255, 90) # self.black_lower", "order or BGR?\" return result else: result = \"object is a \" +", "np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask", "1 # # end result should be a pandas dataframe and the contour", "predictions = [] for i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions", "then spits out a dataframe that could be used for prediction # replaces", "cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the object is pts = np.where(cimg_subset ==", "cv.WINDOW_NORMAL) cv.resizeWindow(\"output\", (w, h)) cv.imshow(\"output\", image) cv.waitKey(0) cv.destroyAllWindows() # I should probably have", "this loop is increment the object_num object_num += 1 # # end result", "to the cimg for identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy - 5),", "= (0, 0, 0) self.black_upper = (179, 255, 30) self.hsv_lower = (0, 0,", "matrix of the 
classifier') # fig.colorbar(cax) # plt.xlabel('Predicted') # plt.ylabel('True') # plt.show() #", "255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255, empty) while", "mask cimg_justthiscontour = np.zeros_like(input_image) # draw the contours on the blank canvas which", "= features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self, features, target): # this is the", "in range(0, self.count): # for each class look at the prior probability for", "print(f\"precision is {prec}, recall is {rec}, accuracy = {acc}\") # # confusion matrix", "a list that we will add each classes posterior prob to posteriors =", "= input_image[y:y+h, x:x+w, :] # convert to hsv for extracting those values img_subset_hsv", "this exists only for my testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind =", "area_th): seg_img = self.bg_segmentation(input_image, show_img=False) # # make the mask a binary thresholded", "take this rectangle as a subset of the input_image, and calculate things within", "cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\") cv.resizeWindow(\"trackbars\", 800, 300) # color mask trackbars", "cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES # get rid of tiny objects", "s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max = cv.getTrackbarPos('sat_max', 'trackbars') v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max", "w * h fullosity = area / rect_area aspect_ratio = float(w)/h extent =", "= \"Badass.\" def acquire_kinect_image(self, filename): # give this function a filename, and it", "+= 1 return (correct / float(len(test))) # TODO: read these and see how", "contours on the blank canvas which is original sized cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255,", "contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the df that we'll 
return", "image for later analysis with something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset)", "/ 255), 'g': (g.mean() / 255), 'b': (b.mean() / 255), 'hue': (hue.mean() /", "= cv.cvtColor(img_subset, cv.COLOR_BGR2HSV) # FILTER OUT THE WEIRD ONES # get rid of", "= cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self, input_image, model): predictive_model = model area_th", "var))) denominator = np.sqrt(2 * np.pi * var) return numerator / denominator def", "i in range(len(test)): if test.iloc[i] == predictions[i]: correct += 1 return (correct /", "activated! use 'process_image_to_df()' to get back a pandas df\") self.black_lower = (0, 0,", "\"object is a numpy.ndarray, this is perfect. Is the image RGB order or", "s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower = np.array([0, 0, 0]) black_upper", "holes hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) #", "cv.GaussianBlur(mask,(5,5),0) ret3, mask = cv.threshold(mask,0,255,cv.THRESH_BINARY) # output image with contours drawn on the", "show_img=False) # # make the mask a binary thresholded image mask = cv.cvtColor(seg_img,", "for rep in range(0, reps): # fake_df = input_df.copy() # for i, row", "mask and return the result return cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8) def process_image_to_df(self, input_image, area_th):", "= img_subset[pts[0], pts[1], 2] # and export the image for later analysis with", "the object labels to the cimg for identification cv.putText(output_image, text= str(object_label), org=(cx -", "otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0) ret3,th3 = cv.threshold(blur,0,255,cv.THRESH_BINARY+cv.THRESH_OTSU) return ret3, th3 def process_image_make_predictions(self,", "image_df, class_list): for i, row in 
image_df.iterrows(): image_df.loc[i, 'color'] = class_list[i] print(type(image_df)) return", "for prediction # replaces ImageSegmenter class ImageProcess(): def __init__(self): print(\"image processor activated! use", "objects in the image contours, hier = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE) # create the", "import pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics import confusion_matrix, precision_score,", "numpy array self.mean = features.groupby(target).apply(np.mean).to_numpy() self.var = features.groupby(target).apply(np.var).to_numpy() return self.mean, self.var def calc_prior(self,", "'hue': (hue.mean() / 255), 'sat': (sat.mean() / 255), 'val': (val.mean() / 255)}] df", "# self.hsv_lower = (0, 0, 70) # self.hsv_upper = (179, 34, 255) #", "cv.drawContours(cimg_justthiscontour, [cnt], 0, color=(255, 255, 255), thickness=-1) # now take the subset of", "= nb.get_accuracy(y_test, y_test_predictions) # prec = precision_score(y_test, y_test_predictions, average=\"micro\") # rec = recall_score(y_test,", "= cimg_justthiscontour[y:y+h, x:x+w, :] # make a binary mask cimg_mask = cv.cvtColor(cimg_subset, cv.COLOR_BGR2GRAY)", "filename, and it will load that image with opencv # this will be", "lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean()) df = df.append({'color' : 0, 'x': x,", "(1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct = 0 for i in range(len(test)): if", "a class at random from the dataset self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return", "# def fake_df(self, input_df, reps = 3): # # creates a bunch of", "hull_area = cv.contourArea(hull) solidity = float(area)/hull_area eq_diameter = np.sqrt(4*area/np.pi) M= cv.moments(cnt) cx= int(M['m10']/M['m00'])", "CONTOURS HAVE BEEN DONE submit the df to the model for predictions #", "statistics for all those features self.calc_statistics(features, target) # prior is 
the random chance", "thickness=5) # CALCULATE ALL THE CONTOUR SHAPE FEATURES # get the x, y,", "size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20, 20))).astype(np.uint8) # fill the", "def get_accuracy(self, test, predictions): correct = 0 for i in range(len(test)): if test.iloc[i]", "'trackbars', 255, 255, empty) cv.createTrackbar('val_min', 'trackbars', 0, 255, empty) cv.createTrackbar('val_max', 'trackbars', 255, 255,", "== predictions[i]: correct += 1 return (correct / float(len(test))) # TODO: read these", "= {i}, prior = {prior}, conditional = {conditional}, posterior = {posterior}\") posteriors.append(posterior) return", "np.exp(-((x-mean)**2 / (2*stdev**2))) return exponent * (1/(np.sqrt(2*np.pi)*stdev)) def get_accuracy(self, test, predictions): correct =", "the df that we'll return for this image df = pd.DataFrame(columns=['color']) # #", "as a NOT mask hsv_mask = np.where(hsv_mask > 1, 0, 1).astype(np.uint8) hsv_mask =", "c occuring indpendently. 
# P(x) is the predictor prior probability, or the prob", "}, ignore_index=True) # last thing we do on this loop is increment the", "# hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask = morphology.binary_dilation(hsv_mask, np.ones((20,", "with something else like a neural network cv.imwrite(f\"images/train/XX_{object_num}_{randint(10000,99999)}.png\", img_subset) # add the object", "bg_segmentation(self, image, mode=\"hsv\", show_img=False): # create an hsv mask for red colors hsv_mask", "self.black_lower = (0, 0, 203) # self.black_upper = (43, 255, 255) # self.hsv_lower", "the object labels to the cimg for identification cv.putText(output_image, text= str(object_num), org=(cx -", "hsv_lower = np.array([h_min, s_min, v_min]) hsv_upper = np.array([h_max, s_max, v_max]) black_lower = np.array([0,", "it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html # https://www.mathworks.com/help/matlab/matlab_external/passing-data-to-python.html # this exists only for my testing", "to despeckle # hsv_mask = ndimage.median_filter(hsv_mask, size=(3, 3)).astype(np.uint8) # binary dilation hsv_mask =", "255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8) # calculate where the object is", "str(object_label), org=(cx - 5,cy - 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(0,255,0), thickness=3, lineType=cv.LINE_AA) #", "set random seed np.random.seed(26) # the NaiveBayes classifier I wrote for assignment 6", "NaiveBayes: # P(c|x) = P(x|c) * P(c) / P(x) # P(x|x) is the", "cv.COLOR_BGR2HSV) # get trackbar positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars')", "# print(labels) # take the row # end result should be a pandas", "+ uniform(-.1, .1) # fake_df.loc[i, 'b'] = fake_df.loc[i, 'b'] + uniform(-.1, .1) #", "x, 'y': y, 'object_num': object_num, 'r': r.mean(), 'g': 
g.mean(), 'b': b.mean(), 'hue': hue.mean(),", "get_accuracy(self, test, predictions): correct = 0 for i in range(len(test)): if test.iloc[i] ==", "ONES # get rid of tiny objects that are probably noisef if area", "hsv_mask = ndimage.binary_fill_holes(hsv_mask).astype(np.uint8) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(5)) # TODO:", "image): def empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h,", "object labels to the cimg for identification cv.putText(output_image, text= str(object_label), org=(cx - 5,cy", "= cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars') s_max =", "object_num object_num += 1 # AFTER ALL CONTOURS HAVE BEEN DONE submit the", "rep in range(0, reps): # fake_df = input_df.copy() # for i, row in", "plt.ylabel('True') # plt.show() # print(labels) # take the row # end result should", "positions h_min = cv.getTrackbarPos(\"hue_min\", \"trackbars\") h_max = cv.getTrackbarPos('hue_max', 'trackbars') s_min = cv.getTrackbarPos('sat_min', 'trackbars')", "enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) img_subset = cv.bitwise_and(img_subset, img_subset, mask=mask).astype(np.uint8)", "plt import cv2 as cv from scipy import ndimage from skimage import morphology", "image, mask=hsv_mask).astype(np.uint8)) # apply the mask and return the result return cv.bitwise_and(image, image,", "- 5), fontFace= cv.FONT_HERSHEY_SIMPLEX, fontScale=3, color=(255,255,255), thickness=5, lineType=cv.LINE_AA) # print(r.mean(), g.mean(), b.mean(), gli.mean())", "reps = 3): # # creates a bunch of fake adjustments to the", "= area / rect_area aspect_ratio = float(w)/h extent = float(area/ rect_area) hull =", "a mask cimg_justthiscontour = np.zeros_like(input_image) # draw the contours on the blank canvas", "and it will load 
that image with opencv # this will be a", ":] # convert to hsv for extracting those values img_subset_hsv = cv.cvtColor(img_subset, cv.COLOR_BGR2HSV)", "= cv.inRange(img_hsv, hsv_lower, hsv_upper) black_mask = cv.inRange(img_hsv, black_lower, black_upper) mask = color_mask +", "k == 113 or k == 27: break cv.destroyAllWindows() print(f'hsv_lower is {hsv_lower}, hsv_upper", "fit(self, features, target): # define class variables self.classes = np.unique(target) self.count = len(self.classes)", "image for our personal enjoyment cv.drawContours(output_image, [cnt], 0, color=(255, 255, 255), thickness=5) img_subset", "morphology.disk(5)) # TODO: remove this it is for testing purposes to show the", "predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\", 9:\"bright_yellow\"}", "1).astype(np.uint8) hsv_mask = ndimage.gaussian_filter(hsv_mask, sigma=1) # erode the mask hsv_mask = morphology.erosion(hsv_mask, morphology.disk(3))", "in range(len(test)): if test.iloc[i] == predictions[i]: correct += 1 return (correct / float(len(test)))", "subset of the input_image, and calculate things within it img_subset = input_image[y:y+h, x:x+w,", "i in range(len(input_vector)): result = self.calc_posterior((input_vector.iloc[i,:])) predictions.append(result) return predictions def predict(self, observation): #call", "self.hsv_lower, self.hsv_upper).astype(np.uint8) # use this as a NOT mask hsv_mask = np.where(hsv_mask >", "self.prior = (features.groupby(target).apply(lambda x: len(x)/self.rows).to_numpy()) return self.prior def calc_posterior(self, x): # this is", "# hsv_upper = (179,234,77) def dummy_method(self, a): if type(a) is np.ndarray: result =", "img_subset[pts[0], pts[1], 1] b = img_subset[pts[0], pts[1], 2] # and export the image", "it will load that image with opencv # this will be a BGR", "= \"object is a 
numpy.ndarray, this is perfect. Is the image RGB order", "give this function a filename, and it will load that image with opencv", "mean, stdev): # calculate probability density function exponent = np.exp(-((x-mean)**2 / (2*stdev**2))) return", "# draw the contours on the blank canvas which is original sized cv.drawContours(cimg_justthiscontour,", "float(len(test))) # TODO: read these and see how it works # https://www.mathworks.com/help/matlab/matlab_external/matlab-arrays-as-python-variables.html #", "increment the object_num object_num += 1 # AFTER ALL CONTOURS HAVE BEEN DONE", "empty(a): pass h, w = int(image.shape[1]/2), int(image.shape[0]/2) cv.namedWindow('masked_image', cv.WINDOW_NORMAL) cv.resizeWindow('masked_image', h, w) cv.namedWindow(\"trackbars\")", "v_min = cv.getTrackbarPos('val_min', 'trackbars') v_max = cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0,", "for my testing purposes class MatlabSurrogate(): def __init__(self): self.state_of_mind = \"Badass.\" def acquire_kinect_image(self,", "should probably have one image processing class that takes in a single image", "cv.getTrackbarPos('val_max', 'trackbars') # self.black_lower = (0, 0, 0) # self.black_upper = (179, 255,", "output_df = pd.concat(output_df, fake_df) # return output_df def otsu_threshold(self, image): blur = cv.GaussianBlur(image,(5,5),0)", "thickness=-1) # now take the subset of just the area around the contour", "print(labels) # take the row # end result should be a pandas dataframe", "= predictive_model.get_predictions(df) class_dict = {0:\"medium_blue\", 1:\"black\", 2:\"darK_stone_gray\", 3:\"bright_green\", 4:\"light_green\", 5:\"bright_orange\", 6:\"bright_red\", 7:\"bright_blue\", 8:\"white\",", "0) self.hsv_upper = (179, 255, 90) # self.black_lower = (0, 0, 203) #", "target): # this is the probability of picking one of a class at", "this is the probability of picking one of a class at random from", "the segmentation if (show_img == True): m = 
MatlabSurrogate() m.imshow(cv.bitwise_and(image, image, mask=hsv_mask).astype(np.uint8)) #", "# print(result) # # use the test set to see how we do" ]
[ "deleteBelowHorizonLine(self): i = 0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1", "= list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()", "[{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point):", "#### # # Python-based Tool for interaction with the 10micron mounts # GUI", "= self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def", "list() self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs)", "hoursPathLengthPreview): # we have no position of the mount -> therefore we can't", "eastSide = [] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)): if", "points file', '/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value))", "f: if ':' in line: # model maker format m = line.rstrip('\\n').split(':') else:", "from astrometry import transform class ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app): self.app", 
"numberOfPathPoints, hoursPathLengthPreview): # we have no position of the mount -> therefore we", "doSortingPoints): west = list() east = list() off = -5 i = 0", "Filename given!') return msg try: fileHandle = open(horizonPointsFileName + '.txt', 'w') for i", "generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list() for i in range(0, numberOfPoints): azp", "altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a in zip(x, y)] return msg def", "az, alt = self.transform.transformERFA(ra, dec, 1) if alt > 0: self.modelPoints.append((az, alt)) if", "points filename given !' return msg if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName", "east.insert(0, (az, alt)) else: west.append((az, alt)) else: for ha in range(-120 + off,", "self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial model points file',", "* hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec, 1) if", "+ '/config/' + horizonPointsFileName + '.txt'): msg = 'Horizon points file does not", "'/config', 'Horizon mask files (*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere()", "file selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full model points file',", "= self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model points files (*.txt)', True)", "= self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = 
self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] =", "self.logger.error('Error loading horizon points: {0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp)", "else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p = [] number =", "self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName'])", "[] off = -5 i = 0 for dec in range(-15, 90, 15):", "self.transform.transformERFA(ra, dec, 1) if alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))", "ha in range(-120 + off, 120 + off, step): az, alt = self.transform.topocentricToAzAlt(ha", "ext = self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model points files (*.txt)',", "-2): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: self.celestialEquator.append((az,", "config.cfg could not be initialize, error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text()", "msg = None if modelPointsFileName.strip() == '': msg = 'No model points filename", "= [] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)): if a[i][0]", "'Open initial model points file', '/config', 'Model points files (*.txt)', True) if value", "if mountwizzard3, it's native version 3 convertedLine 
= line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: #", "loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg", "def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off = -5", "return msg try: fileHandle = open(modelPointsFileName + '.txt', 'w') for i in range(0,", "in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg = 'Error", "self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text()", "15): if dec < 60: step = 10 else: step = 20 if", "if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints,", "i in hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a)", "self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app,", "-step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: if", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, 
numberOfColumns, altitudeMin, altitudeMax): west = list()", "points file', '/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value))", "p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same as Per's Model Maker convertedLine =", "except Exception as e: msg = 'Error loading modeling points from file [{0}]", "= west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def", "x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype", "def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename,", "east = list() off = -5 i = 0 for dec in range(-15,", "p.append(point) except Exception as e: msg = 'Error loading modeling points from file", "= [] if not (horizonByFile or horizonByAltitude): return hp = [] msg =", "self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): #", "# # # # # # # ### # # ## # ##", "line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same as Per's Model Maker convertedLine", "while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i] def", "= self.loadModelPoints(filename, 'Initial') 
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg =", "self.horizonPoints = [] if not (horizonByFile or horizonByAltitude): return hp = [] msg", "= 'Horizon points file does not exist !' self.logger.warning('Horizon points file does not", "numberOfPoints): self.modelPoints = list() for i in range(0, numberOfPoints): azp = i *", "eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile,", "return p, msg def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are no points", "limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints", "self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self): value, ext =", "file', '/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value)", "in range(120 + off, -120 + off, -step): az, alt = self.transform.topocentricToAzAlt(ha /", "if dec < 60: step = 15 else: step = 30 if i", "fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point): x = range(0, 361) y =", "= 0 for alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin) /", "# saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except 
Exception as e:", "int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list()", "if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point): x = range(0, 361) y", "westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide", "Model Points Filename given!' self.logger.warning('No Model Points Filename given!') return msg try: fileHandle", "sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are no points to sort') return westSide", "= os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName,", "zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle = None", "step = 10 else: step = 30 if i % 2: for ha", "file', '/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value)", "return msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp = ((0,", "# Python-based Tool for interaction with the 10micron mounts # GUI with PyQT5", "dec < 70: step = 10 else: step = 30 if i %", "!= '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self): value,", "key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def loadHorizonPoints(self, 
horizonPointsFileName,", "loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile or horizonByAltitude):", "alt)) i += 1 self.modelPoints = west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if", "# # # # # #### # ## ## # ## # #", "i = 0 for dec in range(-15, 90, 10): if dec < 30:", "app): self.app = app self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list()", "isAboveHorizonLine(self, point): x = range(0, 361) y = numpy.interp(x, [i[0] for i in", "'Open horizon mask file', '/config', 'Horizon mask files (*.txt)', True) if value !=", "'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full')", "self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app,", "= list() east = list() i = 0 for alt in range(altitudeMin, altitudeMax", "if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows,", "line.rstrip('\\n').split(':') else: # carte du ciel / skychart format m = line.rstrip('\\n').split(' ')", "transform class ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app): self.app = app self.transform", "points files (*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value)", "elif line.startswith('MW-3'): # if mountwizzard3, 
it's native version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]),", "os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype):", "to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points to file [{0}]", "= line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype == 'Refinement'", "dec) if alt > 0: if az > 180: east.insert(0, (az, alt)) else:", "# # # # # # # #### # ## ## # ##", "'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and", "self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None fileHandle = None if modelPointsFileName.strip() ==", "0, -int(360 / numberOfColumns)): if alt > 0: if az > 180: east.insert(0,", "therefore we can't calculate the path if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints", "= sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0],", "value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath =", "(az, alt)) else: west.append((az, alt)) else: for ha in range(-120 + off, 120", "# (c) 2016, 2017, 2018 # # Licence APL2.0 # ########################################################### import logging", "True) if value != '': value = os.path.basename(value) 
self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file", "self.logger.warning('Error loading modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle:", "i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i] def deletePoints(self):", "= 'No horizon points filename given !' return msg if not os.path.isfile(os.getcwd() +", "if i % 2: for az in range(365 - int(360 / numberOfColumns), 0,", "for python # Python v3.6.4 # # <NAME> # (c) 2016, 2017, 2018", "if alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self,", "in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None, period=None) if point[1] >", "else: west.append((az, alt)) i += 1 self.modelPoints = west + east if limitByHorizonMask:", "None if modelPointsFileName.strip() == '': msg = 'No model points filename given!' 
self.logger.warning('No", "self.modelPoints = west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()", "for i in hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints =", "for dec in range(-15, 90, 10): if dec < 30: step = 10", "if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for az", "import logging import os import PyQt5 import time import copy import operator import", "list() self.modelPoints = list() self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs)", "= (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as e: msg = 'Error loading", "361) y = numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for i in", "modelPointsFileName.strip() == '': msg = 'No model points filename given!' 
self.logger.warning('No model points", "eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide", "showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask,", "step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: if", "= None fileHandle = None if horizonPointsFileName.strip() == '': msg = 'No horizon", "fileHandle = None if horizonPointsFileName.strip() == '': msg = 'No horizon points filename", "msg try: fileHandle = open(horizonPointsFileName + '.txt', 'w') for i in range(0, len(self.horizonPoints)):", "we have no position of the mount -> therefore we can't calculate the", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east = list() off", "## # ## # # # # # # # # # #", "in range(0, len(self.horizonPoints)): # saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except", "in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg =", "if modeltype == 'Refinement' and number > 3: p.append(point) elif modeltype == 'Base'", "saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points", "= [] msg = None if horizonByFile: if horizonPointsFileName == '': msg =", "self.app.config and 'CheckUseMinimumHorizonLine' in 
self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config:", "i = 0 for alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin)", "self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save horizon mask points file', '/config',", "180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for ha in range(-120 +", "os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save", "+ horizonPointsFileName + '.txt') as f: for line in f: if ':' in", "format m = line.rstrip('\\n').split(':') else: # carte du ciel / skychart format m", "Model Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2", "self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self): value, ext =", "doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have", "az in range(365 - int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)): if alt", "error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point): x", "selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial model points file', '/config',", "(az, alt)) else: west.append((az, alt)) i += 1 self.modelPoints = west + east", "or horizonByAltitude): return hp = [] msg = None if horizonByFile: if 
horizonPointsFileName", "self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config:", "= 10 else: step = 20 if i % 2: for ha in", "numberOfPoints + azimuth if azp > 360: azp -= 360 azp = int(azp)", "calculate the path if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra", "dec in range(-15, 90, 15): if dec < 60: step = 15 else:", "in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine'", "self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints,", "hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a", "= transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list() self.celestialEquator = list() # signal", "modelPointsFileName): msg = None fileHandle = None if modelPointsFileName.strip() == '': msg =", "-> therefore we can't calculate the path if 'RaJNow' not in self.app.workerMountDispatcher.data: return", "self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) 
self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName'", "modelPointsFileName + '.txt', 'r') as fileHandle: for line in fileHandle: if line.startswith('GRID'): #", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west = list() east", "files (*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points", "90, 10): if dec < 30: step = 10 elif dec < 70:", "= 0 for dec in range(-15, 90, 10): if dec < 30: step", "msg = 'No Model Points Filename given!' self.logger.warning('No Model Points Filename given!') return", "float(convertedLine[3])) number += 1 if modeltype == 'Refinement' and number > 3: p.append(point)", "!' 
self.logger.warning('Horizon points file does not exist') else: try: with open(os.getcwd() + '/config/'", "__init__(self, app): self.app = app self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints =", "numberOfPoints): azp = i * 360 / numberOfPoints + azimuth if azp >", "value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None fileHandle", "2: for az in range(365 - int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)):", "def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile or", "'Base' and number <= 3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's native", "numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a in zip(x, y)] return msg", "e: msg = 'Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)", "-5 i = 0 for dec in range(-15, 90, 10): if dec <", "for a in zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg = None", "+ '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save full", "file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points to file [{0}] error:", "az in range(5, 360, int(360 / numberOfColumns)): if alt > 0: if az", "i * 360 / numberOfPoints + azimuth if azp > 360: azp -=", "format m = line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception", "'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: 
self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'],", "value, ext = self.app.selectFile(self.app, 'Save full model points file', '/config', 'Model point files", "we can't calculate the path if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints =", "self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName)", "slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self):", "modeltype == 'Refinement' and number > 3: p.append(point) elif modeltype == 'Base' and", "files (*.txt)', True) if value != '': value = os.path.basename(value) 
self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else:", "key=operator.itemgetter(0)) if len(hp) == 0: hp = ((0, 0), (360, 0)) x =", "self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in config.cfg could", "'Model points files (*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value)", "e) self.logger.warning('Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return", "azimuth, altitude, numberOfPoints): self.modelPoints = list() for i in range(0, numberOfPoints): azp =", "########################################################### import logging import os import PyQt5 import time import copy import operator", "'Refinement' and number > 3: p.append(point) elif modeltype == 'Base' and number <=", "selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model points", "alt = self.transform.transformERFA(ra, dec, 1) if alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask:", "= 'Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading", "convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype ==", "value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file", "msg = 'Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error", "i = 0 for dec in range(-15, 
90, 15): if dec < 60:", "# ### # # ## # ## ## # # # # #", "signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def", "self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full", "self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model points files (*.txt)', True) if", "mount -> therefore we can't calculate the path if 'RaJNow' not in self.app.workerMountDispatcher.data:", "does not exist !' 
self.logger.warning('Horizon points file does not exist') else: try: with", "as Per's Model Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine)", "# # # # # # # # # # ### # #", "fileHandle = None if modelPointsFileName.strip() == '': msg = 'No Model Points Filename", "+ 1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))): if i % 2:", "[i[0] for i in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None, period=None)", "= list() for i in range(0, numberOfPoints): azp = i * 360 /", "for i in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None, period=None) if", "return msg def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle = None if horizonPointsFileName.strip()", "for i in range(0, len(self.horizonPoints)): # saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1]))))", "maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg = 'Error saving", "= self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() + '/config/' +", "self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()", "if len(hp) == 0: hp = ((0, 0), (360, 0)) x = [i[0]", "self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in config.cfg could not", "in config.cfg could not be initialize, 
error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] =", "{1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally:", "2016, 2017, 2018 # # Licence APL2.0 # ########################################################### import logging import os", "value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self,", "'Save horizon mask points file', '/config', 'Model point files (*.txt)', False) if value", "= ra - float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt =", "if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) i += 1", "doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east =", "# #### # ## ## # ## # # # # # #", "fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text()", "> 360: azp -= 360 azp = int(azp) point = (azp, altitude) self.modelPoints.append(point)", "off, -120 + off, -step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if", "azp = i * 360 / numberOfPoints + azimuth if azp > 360:", "limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints()", "'/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) 
else:", "limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east =", "self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west =", "alt)) else: west.append((az, alt)) else: for az in range(5, 360, int(360 / numberOfColumns)):", "y[int(point[0])]: return True else: return False def deleteBelowHorizonLine(self): i = 0 while i", "points to file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg", "self.modelPoints = list() self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName)", "self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon", "Filename given!') return msg try: fileHandle = open(modelPointsFileName + '.txt', 'w') for i", "horizonByAltitude): return hp = [] msg = None if horizonByFile: if horizonPointsFileName ==", "exist !' 
self.logger.warning('Horizon points file does not exist') else: try: with open(os.getcwd() +", "= sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def", "p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's native version 3 convertedLine = line.rstrip('\\n').split(':')", "no position of the mount -> therefore we can't calculate the path if", "for i in range(0, numberOfPoints): azp = i * 360 / numberOfPoints +", "as fileHandle: for line in fileHandle: if line.startswith('GRID'): # if grid, then its", "az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: self.celestialEquator.append((az, alt))", "horizonPointsFileName + '.txt'): msg = 'Horizon points file does not exist !' self.logger.warning('Horizon", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if", "from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg def sortPoints(self): if", "west = list() east = list() i = 0 for alt in range(altitudeMin,", "e: self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e)) finally: pass def storeConfig(self):", "############################################################ # -*- coding: utf-8 -*- # # # # # # #", "120 + off, step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt", "def __init__(self, app): self.app = app self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints", "westSide = [] eastSide = [] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in", "if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) 
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east", "== '': msg = 'No Model Points Filename given!' self.logger.warning('No Model Points Filename", "msg = 'Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error", "if dec < 60: step = 10 else: step = 20 if i", "model points filename given!') return p, msg try: with open('config/' + modelPointsFileName +", "file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points from file [{0}] error:", "def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list() for i in range(0, numberOfPoints):", "(*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg", "= list() self.modelPoints = list() self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints)", "in range(-15, 90, 15): if dec < 60: step = 10 else: step", "msg try: with open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle: for line", "in range(-15, 90, 15): if dec < 60: step = 15 else: step", "horizon points: {0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0:", "= ((0, 0), (360, 0)) x = [i[0] for i in hp] y", "= list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints):", "'Save full model points file', '/config', 'Model point files (*.txt)', False) if value", "selected') def saveFullModelPoints(self): filepath = os.getcwd() + 
'/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self):", "selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon", "version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same as", "else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints =", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off = -5 for dec in range(-15,", "files (*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points", "sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1]))", "## # ## ## # # # # # # # #### #", "west.append((az, alt)) else: for az in range(5, 360, int(360 / numberOfColumns)): if alt", "point files (*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model", "point = (float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype == 'Refinement' and number", "list() east = list() off = -5 i = 0 for dec in", "= self.app.selectFile(self.app, 'Save full model points file', '/config', 'Model point files (*.txt)', False)", "def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model", "dec in range(-15, 90, 10): if dec < 30: step = 10 elif", "as e: msg = 'Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName,", "= 0 msg = None if 
modelPointsFileName.strip() == '': msg = 'No model", "len(self.modelPoints) == 0: self.logger.warning('There are no points to sort') return westSide = []", "Points Filename given!') return msg try: fileHandle = open(modelPointsFileName + '.txt', 'w') for", "= logging.getLogger(__name__) def __init__(self, app): self.app = app self.transform = transform.Transform(self.app) self.horizonPoints =", "# # # # # #### # # Python-based Tool for interaction with", "+ self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial model points", "(int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as e: msg = 'Error loading horizon", "(numberOfRows - 1))): if i % 2: for az in range(365 - int(360", "## ## # ## # # # # # # # # #", "-5 i = 0 for dec in range(-15, 90, 15): if dec <", "given!') return msg try: fileHandle = open(modelPointsFileName + '.txt', 'w') for i in", "len(convertedLine) != 2 and modeltype == 'Initial': p.append(point) except Exception as e: msg", "finally: return p, msg def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are no", "= line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and modeltype ==", "= self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model points files (*.txt)', True)", "fileHandle = open(modelPointsFileName + '.txt', 'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1]))", "360: azp -= 360 azp = int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))", "= self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() 
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self,", "point files (*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model", "msg = None if horizonByFile: if horizonPointsFileName == '': msg = 'No horizon", "numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west = list() east = list() i = 0", "= None if horizonByFile: if horizonPointsFileName == '': msg = 'No horizon points", "msg = 'No model points filename given!' self.logger.warning('No model points filename given!') return", "/ numberOfColumns), 0, -int(360 / numberOfColumns)): if alt > 0: if az >", "(azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off = -5", "except Exception as e: self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e)) finally:", "{1}!'.format(modelPointsFileName, e)) finally: return p, msg def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There", "{1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point): x =", "= numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a in zip(x, y)] return", "if alt > 0: if az > 180: east.insert(0, (az, alt)) else: west.append((az,", "for interaction with the 10micron mounts # GUI with PyQT5 for python #", "alt)) else: for ha in range(-120 + off, 120 + off, step): az,", "self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def 
generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = []", "hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no position of the mount -> therefore", "if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1))", "= [i[1] for i in hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None)", "+= 1 self.modelPoints = west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints()", "len(self.horizonPoints)): # saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as", "'No Model Points Filename given!' self.logger.warning('No Model Points Filename given!') return msg try:", "generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west = list() east = list()", "if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'): msg = 'Horizon points", "i in range(0, len(self.horizonPoints)): # saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close()", "off, -step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0:", "180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for az in range(5, 360,", "import time import copy import operator import numpy from astrometry import transform class", "self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: 
self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if", "if len(convertedLine) == 2 and modeltype == 'Full': p.append(point) elif len(convertedLine) != 2", "for i in range(0, numberOfPathPoints): ra = ra - float(i) * hoursPathLength /", "mask file', '/config', 'Horizon mask files (*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value))", "west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self,", "y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a in zip(x, y)]", "self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p", "self.horizonPoints = list() self.modelPoints = list() self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName)", "len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg = 'Error saving modeling", "azp -= 360 azp = int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()", "for ha in range(120 + off, -120 + off, -2): az, alt =", "self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save horizon mask points file',", "altitudeMin) / (numberOfRows - 1))): if i % 2: for az 
in range(365", "# # ## # ## ## # # # # # # #", "if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and", "0 msg = None if modelPointsFileName.strip() == '': msg = 'No model points", "'.txt'): msg = 'Horizon points file does not exist !' self.logger.warning('Horizon points file", "'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength,", "horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile or horizonByAltitude): return hp", "= -5 i = 0 for dec in range(-15, 90, 15): if dec", "file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to file [{0}] error:", "self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial", "try: fileHandle = open(modelPointsFileName + '.txt', 'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0],", "def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no position of the", "True) if value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else:", "self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config", "to file [{0}] error: 
{1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def", "def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value,", "numberOfPathPoints): ra = ra - float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az,", "(c) 2016, 2017, 2018 # # Licence APL2.0 # ########################################################### import logging import", "saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points", "in range(0, numberOfPoints): azp = i * 360 / numberOfPoints + azimuth if", "'': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None fileHandle = None", "!= '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected')", "position of the mount -> therefore we can't calculate the path if 'RaJNow'", "filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext =", "# if mountwizzard3, it's native version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else:", "else: del self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints,", "PyQt5 import time import copy import operator import numpy from astrometry import transform", "[i[0] for i in hp] y = [i[1] for 
i in hp] if", "elif len(convertedLine) != 2 and modeltype == 'Initial': p.append(point) except Exception as e:", "= numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None,", "self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None fileHandle = None if modelPointsFileName.strip()", "= os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app,", "filename given!' self.logger.warning('No model points filename given!') return p, msg try: with open('config/'", "format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg = 'Error saving horizon", "points file', '/config', 'Model points files (*.txt)', True) if value != '': value", "== '': msg = 'No horizon points filename given !' return msg if", "elif dec < 70: step = 10 else: step = 30 if i", "# # # ### # # ## # ## ## # # #", "msg def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self):", "== 'Full': p.append(point) elif len(convertedLine) != 2 and modeltype == 'Initial': p.append(point) except", "points filename given!' 
self.logger.warning('No model points filename given!') return p, msg try: with", "for line in fileHandle: if line.startswith('GRID'): # if grid, then its a TSX", "range(0, len(self.horizonPoints)): # saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception", "left=None, right=None, period=None) if point[1] > y[int(point[0])]: return True else: return False def", "key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints", "for i in range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0],", "= -5 i = 0 for dec in range(-15, 90, 10): if dec", "(*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file", "given!') return p, msg try: with open('config/' + modelPointsFileName + '.txt', 'r') as", "a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints", "saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model point", "filename given!' 
self.logger.warning('No Model Points Filename given!') return msg try: fileHandle = open(horizonPointsFileName", "if i % 2: for ha in range(120 + off, -120 + off,", "i in range(0, numberOfPoints): azp = i * 360 / numberOfPoints + azimuth", "/ skychart format m = line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point) f.close()", "p.append(point) elif len(convertedLine) != 2 and modeltype == 'Initial': p.append(point) except Exception as", "time import copy import operator import numpy from astrometry import transform class ModelPoints:", "off = -5 for dec in range(-15, 90, 15): for ha in range(120", "(*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked())", "if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints):", "len(hp) == 0: hp = ((0, 0), (360, 0)) x = [i[0] for", "in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in config.cfg", "## # # # # # # # #### # # Python-based Tool", "dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra = ra - float(i)", "range(120 + off, -120 + off, -2): az, alt = self.transform.topocentricToAzAlt(ha / 10,", "eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile", "limitByHorizonMask: self.deleteBelowHorizonLine() if 
doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview):", "if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints = list()", "saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg", "None) self.horizonPoints = [list(a) for a in zip(x, y)] return msg def saveHorizonPoints(self,", "2: for ha in range(120 + off, -120 + off, -step): az, alt", "/ numberOfColumns)): if alt > 0: if az > 180: east.insert(0, (az, alt))", "initial model points file', '/config', 'Model points files (*.txt)', True) if value !=", "modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg def", "1 if modeltype == 'Refinement' and number > 3: p.append(point) elif modeltype ==", "> y[int(point[0])]: return True else: return False def deleteBelowHorizonLine(self): i = 0 while", "os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save", "alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: if az >", "numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None,", "horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile or horizonByAltitude): return hp =", "= list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) 
self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask)", "a TSX file (the sky x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3]))", "points: {0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp", "'Initial': p.append(point) except Exception as e: msg = 'Error loading modeling points from", "in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine'])", "(az, alt)) else: west.append((az, alt)) else: for az in range(5, 360, int(360 /", "self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in config.cfg could not be", "def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine'])", "import numpy from astrometry import transform class ModelPoints: logger = logging.getLogger(__name__) def __init__(self,", "sort') return westSide = [] eastSide = [] a = 
sorted(self.modelPoints, key=operator.itemgetter(0)) for", "def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask", "/ numberOfPoints + azimuth if azp > 360: azp -= 360 azp =", "if horizonPointsFileName == '': msg = 'No horizon points filename given !' return", "'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config", "(horizonByFile or horizonByAltitude): return hp = [] msg = None if horizonByFile: if", "def saveFullModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value,", "if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def", "self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try:", "doSortingPoints: 
self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list() for", "self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e))", "Exception as e: msg = 'Error loading modeling points from file [{0}] error:", "return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in", "# format is same as Per's Model Maker convertedLine = line.rstrip('\\n').split(':') point =", "+ east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask,", "given!' 
self.logger.warning('No Model Points Filename given!') return msg try: fileHandle = open(modelPointsFileName +", "its a TSX file (the sky x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]),", "v3.6.4 # # <NAME> # (c) 2016, 2017, 2018 # # Licence APL2.0", "else: step = 30 if i % 2: for ha in range(120 +", "period=None) if point[1] > y[int(point[0])]: return True else: return False def deleteBelowHorizonLine(self): i", "step = 20 if i % 2: for ha in range(120 + off,", "initial model points file', '/config', 'Model point files (*.txt)', False) if value !=", "+ self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save horizon mask points", "GUI with PyQT5 for python # Python v3.6.4 # # <NAME> # (c)", "- altitudeMin) / (numberOfRows - 1))): if i % 2: for az in", "azp = int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator", "if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in", "self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no", "def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off = -5", "0 for alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin) / (numberOfRows", "mounts # GUI with PyQT5 for python 
# Python v3.6.4 # # <NAME>", "{0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0)) if", "if modelPointsFileName.strip() == '': msg = 'No model points filename given!' self.logger.warning('No model", "0), (360, 0)) x = [i[0] for i in hp] y = [i[1]", "i % 2: for az in range(365 - int(360 / numberOfColumns), 0, -int(360", "given!' self.logger.warning('No model points filename given!') return p, msg try: with open('config/' +", "self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename,", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list() for i in range(0,", "file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self,", "alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))):", "self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if", "os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'): msg = 'Horizon points file does", "alt)) else: west.append((az, alt)) else: for ha in range(-120 + off, 120 +", "-120 + off, -2): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt", "- hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec, 1) if 
alt > 0: self.modelPoints.append((az,", "points file does not exist !' self.logger.warning('Horizon points file does not exist') else:", "None fileHandle = None if modelPointsFileName.strip() == '': msg = 'No Model Points", "mask files (*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self,", "value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self):", "f: for line in f: if ':' in line: # model maker format", "value, ext = self.app.selectFile(self.app, 'Open full model points file', '/config', 'Model points files", "east = list() i = 0 for alt in range(altitudeMin, altitudeMax + 1,", "self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName']", "+ '.txt'): msg = 'Horizon points file does not exist !' 
self.logger.warning('Horizon points", "line.startswith('GRID'): # if grid, then its a TSX file (the sky x) convertedLine", "-*- # # # # # # # #### # ## ## #", "'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No", "import transform class ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app): self.app = app", "list() off = -5 for dec in range(-15, 90, 15): for ha in", "'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e:", "# # # # # ### # # ## # ## ## #", "int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg = 'Error saving horizon points to", "-*- coding: utf-8 -*- # # # # # # # #### #", "> 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for az in range(5,", "msg try: fileHandle = open(modelPointsFileName + '.txt', 'w') for i in range(0, len(self.modelPoints)):", "!= '': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self):", "for line in f: if ':' in line: # model maker format m", "fileHandle.close() except Exception as e: msg = 'Error saving horizon points to file", "dec in range(-15, 90, 15): if dec < 60: step = 10 else:", "file does not exist !' 
self.logger.warning('Horizon points file does not exist') else: try:", "horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return", "if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for ha", "- float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec,", "#### # ## ## # ## # # # # # # #", "self.app.selectFile(self.app, 'Save full model points file', '/config', 'Model point files (*.txt)', False) if", "= [i[0] for i in hp] y = [i[1] for i in hp]", "full model points file', '/config', 'Model point files (*.txt)', False) if value !=", "'/config/' + horizonPointsFileName + '.txt') as f: for line in f: if ':'", "return hp = [] msg = None if horizonByFile: if horizonPointsFileName == '':", "== 'Base' and number <= 3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's", "= self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd()", "'/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else:", "range(-15, 90, 15): if dec < 60: step = 15 else: step =", "1))): if i % 2: for az in range(365 - int(360 / numberOfColumns),", "1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename):", "error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg def sortPoints(self): if len(self.modelPoints) == 0:", "'/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = 
self.app.selectFile(self.app, 'Save horizon mask", "15 else: step = 30 if i % 2: for ha in range(120", "self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName'", "int(m[1])) hp.append(point) f.close() except Exception as e: msg = 'Error loading horizon points:", "dec in range(-15, 90, 15): for ha in range(120 + off, -120 +", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east = list() off =", "= [] eastSide = [] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0,", "initialize, error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine']", "alt > 0: if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt))", "could not be initialize, error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine']", "< 30: step = 10 elif dec < 70: step = 10 else:", "= (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off =", "= self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = 
self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] =", "finally: if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point): x = range(0, 361)", "self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None fileHandle = None if", "list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def", "else: self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open", "to sort') return westSide = [] eastSide = [] a = sorted(self.modelPoints, key=operator.itemgetter(0))", "0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i]", "deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial')", "def deleteBelowHorizonLine(self): i = 0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i +=", "dec < 60: step = 10 else: step = 20 if i %", "import operator import numpy from astrometry import transform class ModelPoints: logger = logging.getLogger(__name__)", "maker format m = line.rstrip('\\n').split(':') else: # carte du ciel / skychart format", "self.modelPoints, msg = 
self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()", "= sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp = ((0, 0), (360, 0))", "'/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save full model", "= [] number = 0 msg = None if modelPointsFileName.strip() == '': msg", "line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype == 'Refinement' and", "def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine()", "self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if", "convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same as Per's Model", "point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and modeltype == 'Full': p.append(point)", "/ numberOfPathPoints - hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec, 1) if alt >", "fileHandle.close() except Exception as e: msg = 'Error saving modeling points to file", "class ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app): self.app = app self.transform =", "else: # format is same as Per's Model Maker convertedLine = 
line.rstrip('\\n').split(':') point", "i = 0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else:", "# Licence APL2.0 # ########################################################### import logging import os import PyQt5 import time", "x = [i[0] for i in hp] y = [i[1] for i in", "= [] off = -5 i = 0 for dec in range(-15, 90,", "model points file selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial model", "len(convertedLine) == 2 and modeltype == 'Full': p.append(point) elif len(convertedLine) != 2 and", "hp = ((0, 0), (360, 0)) x = [i[0] for i in hp]", "full model points file', '/config', 'Model points files (*.txt)', True) if value !=", "file', '/config', 'Horizon mask files (*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode()", "'': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self): value, ext", "if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list()", "+ east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask,", "# if grid, then its a TSX file (the sky x) convertedLine =", "west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) 
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self,", "int(convertedLine[1])) if len(convertedLine) == 2 and modeltype == 'Full': p.append(point) elif len(convertedLine) !=", "of the mount -> therefore we can't calculate the path if 'RaJNow' not", "doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east =", "west.append((az, alt)) i += 1 self.modelPoints = west + east if limitByHorizonMask: self.deleteBelowHorizonLine()", "modeltype == 'Base' and number <= 3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3,", "+ '.txt', 'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception", "loading modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close()", "import copy import operator import numpy from astrometry import transform class ModelPoints: logger", "if not (horizonByFile or horizonByAltitude): return hp = [] msg = None if", "self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model point files (*.txt)', False) if", "azimuth if azp > 360: azp -= 360 azp = int(azp) point =", "saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext", "self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in", 
"{1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath = os.getcwd()", "horizonByFile: if horizonPointsFileName == '': msg = 'No horizon points filename given !'", "2 and modeltype == 'Full': p.append(point) elif len(convertedLine) != 2 and modeltype ==", "self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p = []", "az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: if az", "model points file selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full model", "points to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points to file", "altitudeMax): west = list() east = list() i = 0 for alt in", "1) if alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def", "fileHandle = open(horizonPointsFileName + '.txt', 'w') for i in range(0, len(self.horizonPoints)): # saving", "= 'No model points filename given!' 
self.logger.warning('No model points filename given!') return p,", "Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and", "self.celestialEquator = list() off = -5 for dec in range(-15, 90, 15): for", "= 'Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading", "return msg if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'): msg =", "az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) i += 1 self.modelPoints", "return True else: return False def deleteBelowHorizonLine(self): i = 0 while i <", "p.append(point) elif modeltype == 'Base' and number <= 3: p.append(point) elif line.startswith('MW-3'): #", "limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west =", "> 3: p.append(point) elif modeltype == 'Base' and number <= 3: p.append(point) elif", "'Horizon mask files (*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def", "points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg", "for i in hp] y = [i[1] for i in hp] if horizonByAltitude:", "value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self):", "else: step = 20 if i % 2: for ha in range(120 +", "if 'ModelInitialPointsFileName' in self.app.config: 
self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in", "def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west = list() east =", "copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra = ra - float(i) * hoursPathLength", "1 self.modelPoints = west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))", "% 2: for ha in range(120 + off, -120 + off, -step): az,", "native version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same", "error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath =", "= None if modelPointsFileName.strip() == '': msg = 'No model points filename given!'", "with PyQT5 for python # Python v3.6.4 # # <NAME> # (c) 2016,", "i += 1 self.modelPoints = west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints:", "fileHandle.close() return msg def isAboveHorizonLine(self, point): x = range(0, 361) y = numpy.interp(x,", "in zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle =", "### # # ## # ## ## # # # # # #", "> 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for ha in range(-120", "range(-15, 90, 15): for ha in range(120 + off, -120 + off, -2):", "if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: 
self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in", "range(0, numberOfPoints): azp = i * 360 / numberOfPoints + azimuth if azp", "number += 1 if modeltype == 'Refinement' and number > 3: p.append(point) elif", "if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected') def", "except Exception as e: msg = 'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading", "msg = 'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return", "= list() off = -5 for dec in range(-15, 90, 15): for ha", "self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath = os.getcwd() + '/config/' +", "be initialize, error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked()", "filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext =", "== 'Initial': p.append(point) except Exception as e: msg = 'Error loading modeling points", "msg def isAboveHorizonLine(self, point): x = range(0, 361) y = numpy.interp(x, [i[0] for", "Exception as e: msg = 'Error saving modeling points to file [{0}] error:", "can't calculate the path if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints = list()", "files (*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(),", "[] east = [] off = -5 i = 0 for dec in", "None fileHandle = None if 
horizonPointsFileName.strip() == '': msg = 'No horizon points", "+ '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save horizon", "westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if", "True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg =", "generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off = -5 i", "= copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra = ra - float(i) *", "altitude, numberOfPoints): self.modelPoints = list() for i in range(0, numberOfPoints): azp = i", "file', '/config', 'Model points files (*.txt)', True) if value != '': value =", "'Save initial model points file', '/config', 'Model point files (*.txt)', False) if value", "TSX file (the sky x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number", "':' in line: # model maker format m = line.rstrip('\\n').split(':') else: # carte", "# ## ## # # # # # # # #### # #", "ext = self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model point files (*.txt)',", "utf-8 -*- # # # # # # # #### # ## ##", "= 0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del", "def saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no position of", "p, msg try: with 
open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle: for", "self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = []", "i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg =", "Exception as e: msg = 'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon", "self.modelPoints = list() for i in range(0, numberOfPoints): azp = i * 360", "self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text()", "== '': msg = 'No model points filename given!' self.logger.warning('No model points filename", "e: msg = 'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e))", "line in fileHandle: if line.startswith('GRID'): # if grid, then its a TSX file", "len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide,", "70: step = 10 else: step = 30 if i % 2: for", "limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west = list() east = list() i", "horizonPointsFileName == '': msg = 'No horizon points filename given !' 
return msg", "'': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def", "not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'): msg = 'Horizon points file", "value != '': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def", "# # Licence APL2.0 # ########################################################### import logging import os import PyQt5 import", "= 10 elif dec < 70: step = 10 else: step = 30", "file does not exist') else: try: with open(os.getcwd() + '/config/' + horizonPointsFileName +", "self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName'", "off, -120 + off, -2): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if", "self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon'", "it's native version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is", "= open(horizonPointsFileName + '.txt', 'w') for i in range(0, len(self.horizonPoints)): # saving in", 
"the 10micron mounts # GUI with PyQT5 for python # Python v3.6.4 #", "msg = 'Horizon points file does not exist !' self.logger.warning('Horizon points file does", "for i in self.horizonPoints], left=None, right=None, period=None) if point[1] > y[int(point[0])]: return True", "= None if modelPointsFileName.strip() == '': msg = 'No Model Points Filename given!'", "'': msg = 'No horizon points filename given !' return msg if not", "hp = [] msg = None if horizonByFile: if horizonPointsFileName == '': msg", "if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec", "ext = self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model point files (*.txt)',", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off", "a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide", "'w') for i in range(0, len(self.horizonPoints)): # saving in model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]),", "for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg", "+ eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not", "fileHandle: if line.startswith('GRID'): # if grid, then its a TSX file (the sky", "model maker format fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg = 'Error", "for alt in range(altitudeMin, altitudeMax 
+ 1, int((altitudeMax - altitudeMin) / (numberOfRows -", "altitudeMax + 1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))): if i %", "= list() i = 0 for alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax", "and modeltype == 'Full': p.append(point) elif len(convertedLine) != 2 and modeltype == 'Initial':", "self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0,", "for dec in range(-15, 90, 15): for ha in range(120 + off, -120", "int(360 / numberOfColumns)): if alt > 0: if az > 180: east.insert(0, (az,", "horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a in zip(x,", "west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self,", "self.horizonPoints = [list(a) for a in zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName):", "line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as e: msg", "= int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator =", "= westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = []", "azp > 360: azp -= 360 azp = int(azp) point = (azp, altitude)", "filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def 
showFullPoints(self, filename, limitByHorizonMask, doSortingPoints):", "= os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app,", "# ## # # # # # # # # # # #", "e) self.logger.warning('Error loading horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if", "as e: msg = 'Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName,", "in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as", "= 'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return msg", "= [] east = [] off = -5 i = 0 for dec", "line.startswith('MW-3'): # if mountwizzard3, it's native version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2])))", "= copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra = ra", "in f: if ':' in line: # model maker format m = line.rstrip('\\n').split(':')", "dec, 1) if alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit()", "360 azp = int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self):", "# # # # # # ### # # ## # ## ##", "# # # # # # #### # # Python-based Tool for 
interaction", "du ciel / skychart format m = line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1]))", "generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east = list() off = -5 i", "'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config:", "self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if", "- 1))): if i % 2: for az in range(365 - int(360 /", "'': msg = 'No horizon points filename given!' self.logger.warning('No Model Points Filename given!')", "# # # #### # ## ## # ## # # # #", "y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle = None if", "alt)) else: west.append((az, alt)) i += 1 self.modelPoints = west + east if", "horizon points filename given!' 
self.logger.warning('No Model Points Filename given!') return msg try: fileHandle", "loading horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle: fileHandle.close()", "list() off = -5 i = 0 for dec in range(-15, 90, 15):", "try: with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as f: for line", ">= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide =", "i in self.horizonPoints], left=None, right=None, period=None) if point[1] > y[int(point[0])]: return True else:", "west.append((az, alt)) else: for ha in range(-120 + off, 120 + off, step):", "self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in", "line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and modeltype == 'Full':", "msg = None fileHandle = None if modelPointsFileName.strip() == '': msg = 'No", "+ '.txt', 'w') for i in range(0, len(self.horizonPoints)): # saving in model maker", "'r') as fileHandle: for line in fileHandle: if line.startswith('GRID'): # if grid, then", "10 elif dec < 70: step = 10 else: step = 30 if", "'/config', 'Model points files (*.txt)', True) if value != '': value = os.path.basename(value)", "line: # model maker format m = line.rstrip('\\n').split(':') else: # carte du ciel", "in range(5, 360, int(360 / numberOfColumns)): if alt > 0: if az >", "self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) 
self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if", "file selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config',", "if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east", "point files (*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model", "[{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg def sortPoints(self): if len(self.modelPoints) ==", "else: for ha in range(-120 + off, 120 + off, step): az, alt", "[] msg = None if horizonByFile: if horizonPointsFileName == '': msg = 'No", "does not exist') else: try: with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt')", "'.txt', 'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as", "off, -2): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0:", "transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list() self.celestialEquator = list() # signal slot", "no points to sort') return westSide = [] eastSide = [] a =", "a in zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle", "+ azimuth if azp > 360: azp -= 360 azp = 
int(azp) point", "10, dec) if alt > 0: if az > 180: east.insert(0, (az, alt))", "selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full model points file', '/config',", "east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints):", "for dec in range(-15, 90, 15): if dec < 60: step = 15", "* 360 / numberOfPoints + azimuth if azp > 360: azp -= 360", "east.insert(0, (az, alt)) else: west.append((az, alt)) else: for az in range(5, 360, int(360", "/ 10, dec) if alt > 0: if az > 180: east.insert(0, (az,", "(*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No", "as e: msg = 'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon points:", "-120 + off, -step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt", "self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints =", "model points file selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon mask", "list() i = 0 for alt in range(altitudeMin, altitudeMax + 1, int((altitudeMax -", "self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial model points file', '/config',", "west + east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() 
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self,", "90, 15): for ha in range(120 + off, -120 + off, -2): az,", "points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0))", "list() for i in range(0, numberOfPoints): azp = i * 360 / numberOfPoints", "= (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and modeltype == 'Full': p.append(point) elif", "+ '.txt', 'r') as fileHandle: for line in fileHandle: if line.startswith('GRID'): # if", "horizonPointsFileName.strip() == '': msg = 'No horizon points filename given!' self.logger.warning('No Model Points", "self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p = [] number = 0", "= list() self.celestialEquator = list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints)", "logging import os import PyQt5 import time import copy import operator import numpy", "'Error loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return msg hp", "range(0, 361) y = numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for i", "doSortingPoints): west = [] east = [] off = -5 i = 0", "+= 1 if modeltype == 'Refinement' and number > 3: p.append(point) elif modeltype", "None if horizonPointsFileName.strip() == '': msg = 'No horizon points filename given!' 
self.logger.warning('No", "value, ext = self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model point files", "else: west.append((az, alt)) else: for ha in range(-120 + off, 120 + off,", "'Model point files (*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No", "with the 10micron mounts # GUI with PyQT5 for python # Python v3.6.4", "self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName'", "generateCelestialEquator(self): self.celestialEquator = list() off = -5 for dec in range(-15, 90, 15):", "'No model points filename given!' 
self.logger.warning('No model points filename given!') return p, msg", "value, ext = self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model point files", "msg def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle = None if horizonPointsFileName.strip() ==", "e) self.logger.warning('Error loading modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if", "Model Points Filename given!') return msg try: fileHandle = open(horizonPointsFileName + '.txt', 'w')", "60: step = 10 else: step = 20 if i % 2: for", "self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin,", "< len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints", "self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath)", "3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same as Per's", "and number <= 3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's native version", "return msg def isAboveHorizonLine(self, point): x = range(0, 361) y = numpy.interp(x, [i[0]", "horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points to", "'': msg = 'No Model Points Filename given!' 
self.logger.warning('No Model Points Filename given!')", "self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east = list()", "points file selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial model points", "'Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon", "'.txt') as f: for line in f: if ':' in line: # model", "2018 # # Licence APL2.0 # ########################################################### import logging import os import PyQt5", "and modeltype == 'Initial': p.append(point) except Exception as e: msg = 'Error loading", "points filename given!') return p, msg try: with open('config/' + modelPointsFileName + '.txt',", "'/config/' + horizonPointsFileName + '.txt'): msg = 'Horizon points file does not exist", "filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints:", "msg = 'No horizon points filename given!' 
self.logger.warning('No Model Points Filename given!') return", "+ east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask,", "def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are no points to sort') return", "(int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and modeltype == 'Full': p.append(point) elif len(convertedLine)", "a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)): if a[i][0] >= 180:", "ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra =", "ra - float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt = self.transform.transformERFA(ra,", "3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's native version 3 convertedLine =", "'.txt', 'r') as fileHandle: for line in fileHandle: if line.startswith('GRID'): # if grid,", "from file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points from file [{0}]", "numpy from astrometry import transform class ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app):", "def loadModelPoints(self, modelPointsFileName, modeltype): p = [] number = 0 msg = None", "modeltype): p = [] number = 0 msg = None if modelPointsFileName.strip() ==", "'Full': p.append(point) elif len(convertedLine) != 2 and modeltype == 'Initial': p.append(point) except Exception", "except Exception as e: msg = 'Error saving modeling points to file [{0}]", "loadModelPoints(self, modelPointsFileName, modeltype): p = [] number = 0 msg = None if", "hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt = 
self.transform.transformERFA(ra, dec, 1) if alt", "self.app = app self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list() self.celestialEquator", "m = line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as", "if azp > 360: azp -= 360 azp = int(azp) point = (azp,", "[list(a) for a in zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg =", "= app self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list() self.celestialEquator =", "modelPointsFileName, modeltype): p = [] number = 0 msg = None if modelPointsFileName.strip()", "self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self): value, ext =", "list() # signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs)", "None if modelPointsFileName.strip() == '': msg = 'No Model Points Filename given!' 
self.logger.warning('No", "self.logger.warning('No Model Points Filename given!') return msg try: fileHandle = open(modelPointsFileName + '.txt',", "2017, 2018 # # Licence APL2.0 # ########################################################### import logging import os import", "self.modelPoints = westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints =", "range(-120 + off, 120 + off, step): az, alt = self.transform.topocentricToAzAlt(ha / 10,", "the mount -> therefore we can't calculate the path if 'RaJNow' not in", "astrometry import transform class ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app): self.app =", "files (*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points", "= os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app,", "# # #### # ## ## # ## # # # # #", "not (horizonByFile or horizonByAltitude): return hp = [] msg = None if horizonByFile:", "'Model points files (*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value)", "= os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath = os.getcwd()", "180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide,", "+ off, 120 + off, step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)", "10 else: step = 30 if i % 2: for ha in range(120", "coding: utf-8 -*- # # # # # # # #### # ##", "in self.app.config: 
self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName'])", "copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra = ra -", "!= '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None fileHandle =", "in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName'])", "> 0: if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else:", "off = -5 i = 0 for dec in range(-15, 90, 10): if", "!= 2 and modeltype == 'Initial': p.append(point) except Exception as e: msg =", "if horizonByFile: if horizonPointsFileName == '': msg = 'No horizon points filename given", "self.logger.warning('There are no points to sort') return westSide = [] eastSide = []", "e)) finally: return p, msg def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are", "p = [] number = 0 msg = None if modelPointsFileName.strip() == '':", "0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) 
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west", "self.app.workerMountDispatcher.data: return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i", "= list() off = -5 i = 0 for dec in range(-15, 90,", "then its a TSX file (the sky x) convertedLine = line.rstrip('\\n').split() point =", "+ modelPointsFileName + '.txt', 'r') as fileHandle: for line in fileHandle: if line.startswith('GRID'):", "int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)): if alt > 0: if az", "exist') else: try: with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as f:", "if ':' in line: # model maker format m = line.rstrip('\\n').split(':') else: #", "numberOfColumns), 0, -int(360 / numberOfColumns)): if alt > 0: if az > 180:", "'/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial model", "self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName']", "self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath =", "10 else: step = 20 if i % 2: for ha in range(120", "{1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to 
file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally:", "elif modeltype == 'Base' and number <= 3: p.append(point) elif line.startswith('MW-3'): # if", "0: hp = ((0, 0), (360, 0)) x = [i[0] for i in", "if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for a in", "ha in range(120 + off, -120 + off, -2): az, alt = self.transform.topocentricToAzAlt(ha", "else: self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open", "if modelPointsFileName.strip() == '': msg = 'No Model Points Filename given!' self.logger.warning('No Model", "[i[1] for i in hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints", "right=None, period=None) if point[1] > y[int(point[0])]: return True else: return False def deleteBelowHorizonLine(self):", "i in hp] y = [i[1] for i in hp] if horizonByAltitude: y", "# GUI with PyQT5 for python # Python v3.6.4 # # <NAME> #", "east.insert(0, (az, alt)) else: west.append((az, alt)) i += 1 self.modelPoints = west +", "self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in", "doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west = list() east = list() i =", "self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list()", "+ self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 
'Save full model points", "Licence APL2.0 # ########################################################### import logging import os import PyQt5 import time import", "= 30 if i % 2: for ha in range(120 + off, -120", "= [list(a) for a in zip(x, y)] return msg def saveHorizonPoints(self, horizonPointsFileName): msg", "in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for", "30: step = 10 elif dec < 70: step = 10 else: step", "in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in", "self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model point files (*.txt)', False) if", "') point = (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as e: msg =", "in hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon, None) self.horizonPoints = [list(a) for", "points from file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points from file", "def generateCelestialEquator(self): self.celestialEquator = list() off = -5 for dec in range(-15, 90,", "self.logger.warning('Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p,", "if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in", "limitByHorizonMask, doSortingPoints): west = list() east = list() off = -5 i =", "else: try: with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as f: for", "if doSortingPoints: self.sortPoints() 
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we", "points file selected') def selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon mask file',", "self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e:", "'.txt', 'w') for i in range(0, len(self.horizonPoints)): # saving in model maker format", "# ## # ## ## # # # # # # # ####", "i in self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None, period=None) if point[1]", "ext = self.app.selectFile(self.app, 'Save full model points file', '/config', 'Model point files (*.txt)',", "msg = self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def", "az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for az in", "== 0: self.logger.warning('There are no points to sort') return westSide = [] eastSide", "if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' +", "operator import numpy from astrometry import transform class ModelPoints: logger = logging.getLogger(__name__) def", "self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) 
self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config:", "in range(365 - int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)): if alt >", "def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east = list() off = -5", "= self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask files (*.txt)', True) if", "# -*- coding: utf-8 -*- # # # # # # # ####", "## # # # # # # # # # # # ###", "logger = logging.getLogger(__name__) def __init__(self, app): self.app = app self.transform = transform.Transform(self.app) self.horizonPoints", "off = -5 i = 0 for dec in range(-15, 90, 15): if", "1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))): if i % 2: for", "3: p.append(point) elif modeltype == 'Base' and number <= 3: p.append(point) elif line.startswith('MW-3'):", "in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'],", "if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west = list() east", "+ off, step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt >", "e)) finally: if fileHandle: fileHandle.close() return msg def isAboveHorizonLine(self, point): x = range(0,", "ha in range(120 + off, -120 + off, -step): az, alt = self.transform.topocentricToAzAlt(ha", "# # # # #### # ## ## # ## # # #", "error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error 
loading horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e))", "= -5 for dec in range(-15, 90, 15): for ha in range(120 +", "for az in range(5, 360, int(360 / numberOfColumns)): if alt > 0: if", "% 2: for az in range(365 - int(360 / numberOfColumns), 0, -int(360 /", "value, ext = self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model points files", "self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName']", "int((altitudeMax - altitudeMin) / (numberOfRows - 1))): if i % 2: for az", "points to sort') return westSide = [] eastSide = [] a = sorted(self.modelPoints,", "horizon mask points file', '/config', 'Model point files (*.txt)', False) if value !=", "< 60: step = 15 else: step = 30 if i % 2:", "for dec in range(-15, 90, 15): if dec < 60: step = 10", "+ '.txt') as f: for line in f: if ':' in line: #", "self.logger.warning('No Model Points Filename given!') return msg try: fileHandle = open(horizonPointsFileName + '.txt',", "x = range(0, 361) y = numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1]", "def saveHorizonPoints(self, horizonPointsFileName): msg = None fileHandle = None if horizonPointsFileName.strip() == '':", "# signal slot self.app.ui.btn_loadInitialModelPoints.clicked.connect(self.selectInitialModelPointsFileName) self.app.ui.btn_saveInitialModelPoints.clicked.connect(self.saveInitialModelPoints) self.app.ui.btn_saveInitialModelPointsAs.clicked.connect(self.saveInitialModelPointsAs) self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) 
self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator)", "selected') def loadModelPoints(self, modelPointsFileName, modeltype): p = [] number = 0 msg =", "with open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle: for line in fileHandle:", "as f: for line in f: if ':' in line: # model maker", "file (the sky x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number +=", "self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg =", "horizonPointsFileName + '.txt') as f: for line in f: if ':' in line:", "numberOfColumns, altitudeMin, altitudeMax): west = list() east = list() i = 0 for", "modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return", "msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp = ((0, 0),", "number <= 3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's native version 3", "360, int(360 / numberOfColumns)): if alt > 0: if az > 180: east.insert(0,", "mountwizzard3, it's native version 3 convertedLine = line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format", "and number > 3: p.append(point) elif modeltype == 'Base' and number <= 3:", "in range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))): if", 
"fileHandle.write('{0:03d}:{1:03d}\\n'.format(int(self.horizonPoints[i][0]), int(int(self.horizonPoints[i][1])))) fileHandle.close() except Exception as e: msg = 'Error saving horizon points", "point[1] > y[int(point[0])]: return True else: return False def deleteBelowHorizonLine(self): i = 0", "horizonPointsFileName, horizonByFile, horizonByAltitude, altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile or horizonByAltitude): return", "modelPointsFileName.strip() == '': msg = 'No Model Points Filename given!' self.logger.warning('No Model Points", "> 180: east.insert(0, (az, alt)) else: west.append((az, alt)) i += 1 self.modelPoints =", "APL2.0 # ########################################################### import logging import os import PyQt5 import time import copy", "carte du ciel / skychart format m = line.rstrip('\\n').split(' ') point = (int(m[0]),", "finally: if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/'", "self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() +", "os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save", "Exception as e: self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e)) finally: pass", "self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list() self.celestialEquator = list() #", "self.logger.warning('No model points filename given!') return p, msg try: with open('config/' + modelPointsFileName", "showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if 
limitByHorizonMask: self.deleteBelowHorizonLine() if", "filename given!') return p, msg try: with open('config/' + modelPointsFileName + '.txt', 'r')", "'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception", "self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask files (*.txt)', True) if value", "and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item", "= self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model point files (*.txt)', False)", "(*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file", "0 for dec in range(-15, 90, 10): if dec < 30: step =", "value, ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask files (*.txt)',", "self.horizonPoints], [i[1] for i in self.horizonPoints], left=None, right=None, period=None) if point[1] > y[int(point[0])]:", "- int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)): if alt > 0: if", "to file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def", "saveFullModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext", "sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, 
horizonByFile, horizonByAltitude, altitudeMinimumHorizon):", "'Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling", "= 'Error saving horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading", "in hp] y = [i[1] for i in hp] if horizonByAltitude: y =", "# ########################################################### import logging import os import PyQt5 import time import copy import", "else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text()", "# model maker format m = line.rstrip('\\n').split(':') else: # carte du ciel /", "files (*.txt)', True) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName):", "= range(0, 361) y = numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for", "'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in", "try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine'", "'': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath", "+ horizonPointsFileName + '.txt'): msg = 'Horizon points file 
does not exist !'", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off =", "[{0}] error: {1}!'.format(horizonPointsFileName, e) self.logger.warning('Error loading horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName,", "self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text()", "numberOfColumns)): if alt > 0: if az > 180: east.insert(0, (az, alt)) else:", "# # # # # # # #### # # Python-based Tool for", "selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model points", "def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename,", "numberOfPathPoints - hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec, 1) if alt > 0:", "self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = []", "# # ### # # ## # ## ## # # # #", "ModelPoints: logger = logging.getLogger(__name__) def __init__(self, app): self.app = app self.transform = transform.Transform(self.app)", "def saveHorizonMask(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value,", "360 / numberOfPoints + azimuth if azp > 360: azp -= 360 azp", "# Python v3.6.4 # # <NAME> # (c) 2016, 2017, 2018 # #", "else: # carte du ciel / skychart format m = line.rstrip('\\n').split(' ') point", 
"step = 10 elif dec < 70: step = 10 else: step =", "off, 120 + off, step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if", "self.logger.warning('Horizon points file does not exist') else: try: with open(os.getcwd() + '/config/' +", "(*.txt)', False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file", "# # <NAME> # (c) 2016, 2017, 2018 # # Licence APL2.0 #", "[{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to file [{0}] error: {1}!'.format(modelPointsFileName,", "self.logger.warning('No file selected') def saveFullModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath)", "float(convertedLine[2]))) else: # format is same as Per's Model Maker convertedLine = line.rstrip('\\n').split(':')", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off = -5 for dec in", "= 'No horizon points filename given!' 
self.logger.warning('No Model Points Filename given!') return msg", "# # # # # # # # # ### # # ##", "'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon'", "horizon mask file', '/config', 'Horizon mask files (*.txt)', True) if value != '':", "False def deleteBelowHorizonLine(self): i = 0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i", "# # # # ### # # ## # ## ## # #", "< 70: step = 10 else: step = 30 if i % 2:", "point = (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as e: msg = 'Error", "model points file', '/config', 'Model point files (*.txt)', False) if value != '':", "point): x = range(0, 361) y = numpy.interp(x, [i[0] for i in self.horizonPoints],", "= open(modelPointsFileName + '.txt', 'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close()", "# # # # # # # # ### # # ## #", "in fileHandle: if line.startswith('GRID'): # if grid, then its a TSX file (the", "given !' 
return msg if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'):", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off", "range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg = 'Error saving", "len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]): i += 1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints =", "= self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model point files (*.txt)', False)", "# # #### # # Python-based Tool for interaction with the 10micron mounts", "self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg = 'Error saving modeling points to", "self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self):", "del self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg", "file selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial model points file',", "given!') return msg try: fileHandle = open(horizonPointsFileName + '.txt', 'w') for i in", "90, 15): if dec < 60: step = 15 else: step = 30", "self.logger.warning('Error loading horizon points to file [{0}] error: {1}!'.format(horizonPointsFileName, e)) finally: if fileHandle:", "<= 3: p.append(point) elif line.startswith('MW-3'): # if mountwizzard3, it's native version 3 convertedLine", 
"self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off =", "self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath = os.getcwd() + '/config/'", "convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) == 2 and modeltype", "key=operator.itemgetter(0)) for i in range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else:", "saveModelPoints(self, modelPointsFileName): msg = None fileHandle = None if modelPointsFileName.strip() == '': msg", "if value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No", "initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if", "if value != '': value = os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected')", "in range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide", "# # # # #### # # Python-based Tool for interaction with the", "msg if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'): msg = 'Horizon", "if len(self.modelPoints) == 0: self.logger.warning('There are no points to sort') return westSide =", "east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() 
self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude,", "= line.rstrip('\\n').split(':') p.append((float(convertedLine[1]), float(convertedLine[2]))) else: # format is same as Per's Model Maker", "+= 1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self,", "!= '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self): value,", "# # Python-based Tool for interaction with the 10micron mounts # GUI with", "open(horizonPointsFileName + '.txt', 'w') for i in range(0, len(self.horizonPoints)): # saving in model", "os.path.basename(value) self.app.ui.le_modelInitialPointsFileName.setText(value) self.showInitialPoints(value) else: self.logger.warning('No file selected') def saveFullModelPoints(self): filepath = os.getcwd() +", "step = 10 else: step = 20 if i % 2: for ha", "altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off = -5 for", "modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to", "logging.getLogger(__name__) def __init__(self, app): self.app = app self.transform = transform.Transform(self.app) self.horizonPoints = list()", "as e: msg = 'Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName,", "Tool for interaction with the 10micron mounts # GUI with PyQT5 for python", "saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save full model points file', 
'/config', 'Model point", "def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model", "range(-15, 90, 10): if dec < 30: step = 10 elif dec <", "= sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def loadHorizonPoints(self, horizonPointsFileName, horizonByFile, horizonByAltitude,", "def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial model points file', '/config', 'Model", "dec < 30: step = 10 elif dec < 70: step = 10", "ra = ra - float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt", "if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in", "== 2 and modeltype == 'Full': p.append(point) elif len(convertedLine) != 2 and modeltype", "15): for ha in range(120 + off, -120 + off, -2): az, alt", "if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine' in self.app.config", "def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] =", "self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], 
self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon'])", "return p, msg try: with open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle:", "are no points to sort') return westSide = [] eastSide = [] a", "list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow']) for i in range(0, numberOfPathPoints): ra", "self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app,", "= self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: if az > 180:", "doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))", "file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self):", "sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp = ((0, 0), (360, 0)) x", "the path if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra =", "PyQT5 for python # Python v3.6.4 # # <NAME> # (c) 2016, 2017,", "doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax): west", "i += 1 else: del self.modelPoints[i] def deletePoints(self): self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def", "= 'No Model Points Filename given!' 
self.logger.warning('No Model Points Filename given!') return msg", "value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self):", "self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if", "for ha in range(-120 + off, 120 + off, step): az, alt =", "0 for dec in range(-15, 90, 15): if dec < 60: step =", "range(0, numberOfPathPoints): ra = ra - float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview", "== 0: hp = ((0, 0), (360, 0)) x = [i[0] for i", "range(altitudeMin, altitudeMax + 1, int((altitudeMax - altitudeMin) / (numberOfRows - 1))): if i", "== '': msg = 'No horizon points filename given!' 
self.logger.warning('No Model Points Filename", "hp] y = [i[1] for i in hp] if horizonByAltitude: y = numpy.clip(y,", "!= '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self): value,", "ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask files (*.txt)', True)", "if line.startswith('GRID'): # if grid, then its a TSX file (the sky x)", "float(i) * hoursPathLength / numberOfPathPoints - hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec, 1)", "alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask,", "# we have no position of the mount -> therefore we can't calculate", "= 10 else: step = 30 if i % 2: for ha in", "range(-15, 90, 15): if dec < 60: step = 10 else: step =", "0: self.logger.warning('There are no points to sort') return westSide = [] eastSide =", "not exist') else: try: with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as", "m = line.rstrip('\\n').split(':') else: # carte du ciel / skychart format m =", "[] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)): if a[i][0] >=", "range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide =", "is same as Per's Model Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1]))", "fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except Exception as e: msg = 'Error saving modeling points", "< 60: step = 10 else: step = 20 if i % 2:", "saveHorizonPoints(self, 
horizonPointsFileName): msg = None fileHandle = None if horizonPointsFileName.strip() == '': msg", "east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints,", "as e: self.logger.error('item in config.cfg could not be initialize, error:{0}'.format(e)) finally: pass def", "if horizonPointsFileName.strip() == '': msg = 'No horizon points filename given!' self.logger.warning('No Model", "e: msg = 'Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e)", "model maker format m = line.rstrip('\\n').split(':') else: # carte du ciel / skychart", "a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide +", "-int(360 / numberOfColumns)): if alt > 0: if az > 180: east.insert(0, (az,", "file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p = [] number = 0 msg", "/ (numberOfRows - 1))): if i % 2: for az in range(365 -", "e: msg = 'Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)", "[i[1] for i in self.horizonPoints], left=None, right=None, period=None) if point[1] > y[int(point[0])]: return", "self.app.ui.btn_loadFullModelPoints.clicked.connect(self.selectFullModelPointsFileName) self.app.ui.btn_saveFullModelPoints.clicked.connect(self.saveFullModelPoints) self.app.ui.btn_saveFullModelPointsAs.clicked.connect(self.saveFullModelPointsAs) self.app.ui.btn_loadHorizonMask.clicked.connect(self.selectHorizonPointsFileName) self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: 
if 'HorizonPointsFileName' in", "90, 15): if dec < 60: step = 10 else: step = 20", "points from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg def sortPoints(self):", "with open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as f: for line in", "hp.append(point) f.close() except Exception as e: msg = 'Error loading horizon points: {0}'.format(e)", "in range(-15, 90, 15): for ha in range(120 + off, -120 + off,", "Per's Model Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if len(convertedLine) ==", "if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.app.hemisphereWindow.selectHorizonPointsMode() self.app.hemisphereWindow.drawHemisphere() def saveModelPoints(self, modelPointsFileName): msg = None", "= None fileHandle = None if modelPointsFileName.strip() == '': msg = 'No Model", "fileHandle: for line in fileHandle: if line.startswith('GRID'): # if grid, then its a", "horizon points filename given !' return msg if not os.path.isfile(os.getcwd() + '/config/' +", "Points Filename given!' 
self.logger.warning('No Model Points Filename given!') return msg try: fileHandle =", "self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list() for i", "loading horizon points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return msg hp =", "> 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints):", "y = numpy.interp(x, [i[0] for i in self.horizonPoints], [i[1] for i in self.horizonPoints],", "ext = self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model points files (*.txt)',", "error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e))", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg = self.loadModelPoints(filename, 'Full') if limitByHorizonMask:", "# # # #### # # Python-based Tool for interaction with the 10micron", "self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath = os.getcwd() + '/config/'", "and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'], self.app.config['CheckUseFileHorizonLine'], self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except", 
"for az in range(365 - int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)): if", "<NAME> # (c) 2016, 2017, 2018 # # Licence APL2.0 # ########################################################### import", "'Horizon points file does not exist !' self.logger.warning('Horizon points file does not exist')", "[] if not (horizonByFile or horizonByAltitude): return hp = [] msg = None", "and 'CheckUseMinimumHorizonLine' in self.app.config and 'CheckUseFileHorizonLine' in self.app.config and 'AltitudeMinimumHorizon' in self.app.config: self.loadHorizonPoints(self.app.config['HorizonPointsFileName'],", "def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save full model points file', '/config', 'Model", "az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) else: for ha in", "10micron mounts # GUI with PyQT5 for python # Python v3.6.4 # #", "= 20 if i % 2: for ha in range(120 + off, -120", "False) if value != '': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected')", "saveHorizonMaskAs(self): value, ext = self.app.selectFile(self.app, 'Save horizon mask points file', '/config', 'Model point", "[{0}] error: {1}!'.format(modelPointsFileName, e)) finally: if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath", "not in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec = copy.copy(self.app.workerMountDispatcher.data['DecJNow'])", "west = [] east = [] off = -5 i = 0 for", "limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west =", "i % 2: for ha in range(120 + off, -120 + off, 
-step):", "sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1)) self.modelPoints = westSide + eastSide def loadHorizonPoints(self,", "hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp = ((0, 0), (360,", "alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west = []", "in self.horizonPoints], left=None, right=None, period=None) if point[1] > y[int(point[0])]: return True else: return", "return msg def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def", "msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showFullPoints(self, filename, limitByHorizonMask, doSortingPoints): self.modelPoints, msg", "self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def", "+ '/config/' + horizonPointsFileName + '.txt') as f: for line in f: if", "'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon']) if 'ModelInitialPointsFileName' in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config:", "modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e) 
self.logger.warning('Error loading modeling points from", "given!' self.logger.warning('No Model Points Filename given!') return msg try: fileHandle = open(horizonPointsFileName +", "60: step = 15 else: step = 30 if i % 2: for", "+ east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth,", "= (float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype == 'Refinement' and number >", "## ## # # # # # # # #### # # Python-based", "[] eastSide = [] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i in range(0, len(a)):", "msg def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are no points to sort')", "None if horizonByFile: if horizonPointsFileName == '': msg = 'No horizon points filename", "'No horizon points filename given!' 
self.logger.warning('No Model Points Filename given!') return msg try:", "else: self.logger.warning('No model points file selected') def selectInitialModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open", "return False def deleteBelowHorizonLine(self): i = 0 while i < len(self.modelPoints): if self.isAboveHorizonLine(self.modelPoints[i]):", "'': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def selectFullModelPointsFileName(self): value, ext", "self.app.ui.btn_saveHorizonMask.clicked.connect(self.saveHorizonMask) self.app.ui.btn_saveHorizonMaskAs.clicked.connect(self.saveHorizonMaskAs) self.app.signalMountSiteData.connect(self.generateCelestialEquator) def initConfig(self): try: if 'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine'", "{1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally:", "loading horizon points: {0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) ==", "self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMaxPoints(self, limitByHorizonMask, doSortingPoints): west =", "if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateMinPoints(self, limitByHorizonMask, doSortingPoints): west", "180: east.insert(0, (az, alt)) else: west.append((az, alt)) i += 1 self.modelPoints = west", "return westSide = [] eastSide 
= [] a = sorted(self.modelPoints, key=operator.itemgetter(0)) for i", "+ off, -120 + off, -step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)", "msg = 'Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error", "0: if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) i +=", "in self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and", "open(modelPointsFileName + '.txt', 'w') for i in range(0, len(self.modelPoints)): fileHandle.write('MW-3:{0:03.2f}:{1:03.2f}\\n'.format(self.modelPoints[i][0], self.modelPoints[i][1])) fileHandle.close() except", "if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west", "(360, 0)) x = [i[0] for i in hp] y = [i[1] for", "self.app.config['CheckUseMinimumHorizonLine'], self.app.config['AltitudeMinimumHorizon']) except Exception as e: self.logger.error('item in config.cfg could not be initialize,", "self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save full model points file', '/config',", "if point[1] > y[int(point[0])]: return True else: return False def deleteBelowHorizonLine(self): i =", "altitudeMin, altitudeMax): west = list() east = list() i = 0 for alt", "points file', '/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value))", "= 15 else: step = 30 if i % 2: for ha in", "Filename given!' 
self.logger.warning('No Model Points Filename given!') return msg try: fileHandle = open(modelPointsFileName", "copy import operator import numpy from astrometry import transform class ModelPoints: logger =", "else: return False def deleteBelowHorizonLine(self): i = 0 while i < len(self.modelPoints): if", "selectHorizonPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open horizon mask file', '/config', 'Horizon mask files", "False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected')", "points to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to file", "modeltype == 'Initial': p.append(point) except Exception as e: msg = 'Error loading modeling", "filename given !' return msg if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName +", "in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config: self.app.ui.altitudeMinimumHorizon.setValue(self.app.config['AltitudeMinimumHorizon'])", "number = 0 msg = None if modelPointsFileName.strip() == '': msg = 'No", "(the sky x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number += 1", "self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0: if az > 180: east.insert(0,", "in line: # model maker format m = line.rstrip('\\n').split(':') else: # carte du", "if grid, then its a TSX file (the sky x) convertedLine = line.rstrip('\\n').split()", "east if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) 
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateNormalPoints(self, limitByHorizonMask, doSortingPoints):", "import PyQt5 import time import copy import operator import numpy from astrometry import", "saveHorizonMask(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext", "number > 3: p.append(point) elif modeltype == 'Base' and number <= 3: p.append(point)", "= None if horizonPointsFileName.strip() == '': msg = 'No horizon points filename given!'", "+ '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath) def saveInitialModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save initial", "= line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point) f.close() except Exception as e:", "'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config: self.app.ui.checkUseFileHorizonLine.setChecked(self.app.config['CheckUseFileHorizonLine']) if 'AltitudeMinimumHorizon' in self.app.config:", "points files (*.txt)', True) if value != '': value = os.path.basename(value) self.app.ui.le_modelFullPointsFileName.setText(value) self.showFullPoints(value,", "+ off, -120 + off, -2): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec)", "'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow']) dec =", "limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no position of the mount ->", "except Exception as e: msg = 'Error saving horizon points to file [{0}]", "point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) 
self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off", "+ off, -2): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt >", "range(365 - int(360 / numberOfColumns), 0, -int(360 / numberOfColumns)): if alt > 0:", "!' return msg if not os.path.isfile(os.getcwd() + '/config/' + horizonPointsFileName + '.txt'): msg", "file selected') def saveFullModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def", "line in f: if ':' in line: # model maker format m =", "not exist !' self.logger.warning('Horizon points file does not exist') else: try: with open(os.getcwd()", "file', '/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value)", "format is same as Per's Model Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]),", "loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points", "limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns,", "step = 30 if i % 2: for ha in range(120 + off,", "{0}'.format(e)) return msg hp = sorted(hp, key=operator.itemgetter(0)) if len(hp) == 0: hp =", "Python v3.6.4 # # <NAME> # (c) 2016, 2017, 2018 # # Licence", "model points file', '/config', 'Model points files (*.txt)', True) if value != '':", "self.app.ui.le_modelFullPointsFileName.text() self.saveModelPoints(filepath) def saveFullModelPointsAs(self): value, ext = self.app.selectFile(self.app, 'Save full model points file',", "same as Per's 
Model Maker convertedLine = line.rstrip('\\n').split(':') point = (int(convertedLine[0]), int(convertedLine[1])) if", "in range(-15, 90, 10): if dec < 30: step = 10 elif dec", "fileHandle.close() return msg def saveInitialModelPoints(self): filepath = os.getcwd() + '/config/' + self.app.ui.le_modelInitialPointsFileName.text() self.saveModelPoints(filepath)", "p, msg def sortPoints(self): if len(self.modelPoints) == 0: self.logger.warning('There are no points to", "self.modelPoints = list() self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints)))", "[{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points from file [{0}] error: {1}!'.format(modelPointsFileName,", "y = [i[1] for i in hp] if horizonByAltitude: y = numpy.clip(y, altitudeMinimumHorizon,", "[] number = 0 msg = None if modelPointsFileName.strip() == '': msg =", "os import PyQt5 import time import copy import operator import numpy from astrometry", "finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked()", "# <NAME> # (c) 2016, 2017, 2018 # # Licence APL2.0 # ###########################################################", "open(os.getcwd() + '/config/' + horizonPointsFileName + '.txt') as f: for line in f:", "in range(-120 + off, 120 + off, step): az, alt = self.transform.topocentricToAzAlt(ha /", "20 if i % 2: for ha in range(120 + off, -120 +", "[] off = -5 i = 0 for dec in range(-15, 90, 10):", "= self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = 
self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self):", "to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to file [{0}]", "limitByHorizonMask, doSortingPoints): west = [] east = [] off = -5 i =", "else: for az in range(5, 360, int(360 / numberOfColumns)): if alt > 0:", "off, step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt > 0:", "modeltype == 'Full': p.append(point) elif len(convertedLine) != 2 and modeltype == 'Initial': p.append(point)", "hoursPathLengthPreview az, alt = self.transform.transformERFA(ra, dec, 1) if alt > 0: self.modelPoints.append((az, alt))", "if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateGridPoints(self, limitByHorizonMask, doSortingPoints, numberOfRows, numberOfColumns, altitudeMin, altitudeMax):", "'Error saving modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling", "'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelFullPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No", "'': msg = 'No model points filename given!' 
self.logger.warning('No model points filename given!')", "else: west.append((az, alt)) else: for az in range(5, 360, int(360 / numberOfColumns)): if", "def saveModelPoints(self, modelPointsFileName): msg = None fileHandle = None if modelPointsFileName.strip() == '':", "15): if dec < 60: step = 15 else: step = 30 if", "alt)) else: for az in range(5, 360, int(360 / numberOfColumns)): if alt >", "e)) finally: if fileHandle: fileHandle.close() return msg def saveInitialModelPoints(self): filepath = os.getcwd() +", "# carte du ciel / skychart format m = line.rstrip('\\n').split(' ') point =", "(float(convertedLine[2]), float(convertedLine[3])) number += 1 if modeltype == 'Refinement' and number > 3:", "= 0 for dec in range(-15, 90, 15): if dec < 60: step", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no position", "app self.transform = transform.Transform(self.app) self.horizonPoints = list() self.modelPoints = list() self.celestialEquator = list()", "'': self.app.ui.le_horizonPointsFileName.setText(os.path.basename(value)) self.saveHorizonPoints(value) else: self.logger.warning('No model points file selected') def selectHorizonPointsFileName(self): value, ext", "i in range(0, len(a)): if a[i][0] >= 180: westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1]))", "# # # # # # # # # # # ### #", "skychart format m = line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point) f.close() except", "self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def showInitialPoints(self, filename): self.modelPoints, msg = self.loadModelPoints(filename, 'Initial') self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def 
showFullPoints(self,", "horizon points: {0}'.format(e) self.logger.error('Error loading horizon points: {0}'.format(e)) return msg hp = sorted(hp,", "= self.transform.transformERFA(ra, dec, 1) if alt > 0: self.modelPoints.append((az, alt)) if limitByHorizonMask: self.deleteBelowHorizonLine()", "self.showFullPoints(value, self.app.ui.checkDeletePointsHorizonMask.isChecked(), self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p =", "0)) x = [i[0] for i in hp] y = [i[1] for i", "> 0: if az > 180: east.insert(0, (az, alt)) else: west.append((az, alt)) i", "= i * 360 / numberOfPoints + azimuth if azp > 360: azp", "# ## ## # ## # # # # # # # #", "self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateInitialPoints(self, azimuth, altitude, numberOfPoints): self.modelPoints = list() for i in", "'No horizon points filename given !' 
return msg if not os.path.isfile(os.getcwd() + '/config/'", "self.loadModelPoints(filename, 'Full') if limitByHorizonMask: self.deleteBelowHorizonLine() if doSortingPoints: self.sortPoints() self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateDSOPoints(self, limitByHorizonMask,", "points file does not exist') else: try: with open(os.getcwd() + '/config/' + horizonPointsFileName", "((0, 0), (360, 0)) x = [i[0] for i in hp] y =", "return msg try: fileHandle = open(horizonPointsFileName + '.txt', 'w') for i in range(0,", "interaction with the 10micron mounts # GUI with PyQT5 for python # Python", "not be initialize, error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] =", "if dec < 30: step = 10 elif dec < 70: step =", "f.close() except Exception as e: msg = 'Error loading horizon points: {0}'.format(e) self.logger.error('Error", "path if 'RaJNow' not in self.app.workerMountDispatcher.data: return self.modelPoints = list() ra = copy.copy(self.app.workerMountDispatcher.data['RaJNow'])", "# #### # # Python-based Tool for interaction with the 10micron mounts #", "'Open full model points file', '/config', 'Model points files (*.txt)', True) if value", "False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected')", "storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value()", "step = 15 else: step 
= 30 if i % 2: for ha", "error: {1}!'.format(modelPointsFileName, e) self.logger.warning('Error loading modeling points to file [{0}] error: {1}!'.format(modelPointsFileName, e))", "in range(0, numberOfPathPoints): ra = ra - float(i) * hoursPathLength / numberOfPathPoints -", "python # Python v3.6.4 # # <NAME> # (c) 2016, 2017, 2018 #", "10): if dec < 30: step = 10 elif dec < 70: step", "== 'Refinement' and number > 3: p.append(point) elif modeltype == 'Base' and number", "Model Points Filename given!') return msg try: fileHandle = open(modelPointsFileName + '.txt', 'w')", "'HorizonPointsFileName' in self.app.config: self.app.ui.le_horizonPointsFileName.setText(self.app.config['HorizonPointsFileName']) if 'CheckUseMinimumHorizonLine' in self.app.config: self.app.ui.checkUseMinimumHorizonLine.setChecked(self.app.config['CheckUseMinimumHorizonLine']) if 'CheckUseFileHorizonLine' in self.app.config:", "def isAboveHorizonLine(self, point): x = range(0, 361) y = numpy.interp(x, [i[0] for i", "if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else: self.logger.warning('No model points file selected') def", "# # # # # # #### # ## ## # ## #", "ciel / skychart format m = line.rstrip('\\n').split(' ') point = (int(m[0]), int(m[1])) hp.append(point)", "dec < 60: step = 15 else: step = 30 if i %", "self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def generateCelestialEquator(self): self.celestialEquator = list() off = -5 for dec", "list() east = list() i = 0 for alt in range(altitudeMin, altitudeMax +", "Points Filename given!') return msg try: fileHandle = open(horizonPointsFileName + '.txt', 'w') for", "sky x) convertedLine = line.rstrip('\\n').split() point = (float(convertedLine[2]), float(convertedLine[3])) number += 1 if", "0: if az > 180: east.insert(0, (az, alt)) else: 
west.append((az, alt)) else: for", "try: fileHandle = open(horizonPointsFileName + '.txt', 'w') for i in range(0, len(self.horizonPoints)): #", "Exception as e: msg = 'Error saving horizon points to file [{0}] error:", "try: with open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle: for line in", "filepath = os.getcwd() + '/config/' + self.app.ui.le_horizonPointsFileName.text() self.saveHorizonPoints(filepath) def saveHorizonMaskAs(self): value, ext =", "-= 360 azp = int(azp) point = (azp, altitude) self.modelPoints.append(point) self.app.messageQueue.put('ToModel>{0:02d}'.format(len(self.modelPoints))) self.app.workerModelingDispatcher.signalModelPointsRedraw.emit() def", "westSide.append((a[i][0], a[i][1])) else: eastSide.append((a[i][0], a[i][1])) westSide = sorted(westSide, key=operator.itemgetter(1)) eastSide = sorted(eastSide, key=operator.itemgetter(1))", "for ha in range(120 + off, -120 + off, -step): az, alt =", "True else: return False def deleteBelowHorizonLine(self): i = 0 while i < len(self.modelPoints):", "'/config', 'Model point files (*.txt)', False) if value != '': self.app.ui.le_modelInitialPointsFileName.setText(os.path.basename(value)) self.saveModelPoints(value) else:", "= list() east = list() off = -5 i = 0 for dec", "have no position of the mount -> therefore we can't calculate the path", "msg = None fileHandle = None if horizonPointsFileName.strip() == '': msg = 'No", "mask points file', '/config', 'Model point files (*.txt)', False) if value != '':", "+ off, -step): az, alt = self.transform.topocentricToAzAlt(ha / 10, dec) if alt >", "horizonPointsFileName): msg = None fileHandle = None if horizonPointsFileName.strip() == '': msg =", "generateDSOPoints(self, limitByHorizonMask, hoursPathLength, numberOfPathPoints, hoursPathLengthPreview): # we have no position of the mount", "msg = 'No horizon points filename given !' return msg if not os.path.isfile(os.getcwd()", "points filename given!' 
self.logger.warning('No Model Points Filename given!') return msg try: fileHandle =", "west = list() east = list() off = -5 i = 0 for", "self.app.ui.checkSortPoints.isChecked()) else: self.logger.warning('No file selected') def loadModelPoints(self, modelPointsFileName, modeltype): p = [] number", "file [{0}] error: {1}!'.format(modelPointsFileName, e)) finally: return p, msg def sortPoints(self): if len(self.modelPoints)", "self.app.config: self.app.ui.le_modelInitialPointsFileName.setText(self.app.config['ModelInitialPointsFileName']) if 'ModelFullPointsFileName' in self.app.config: self.app.ui.le_modelFullPointsFileName.setText(self.app.config['ModelFullPointsFileName']) if 'HorizonPointsFileName' in self.app.config and 'CheckUseMinimumHorizonLine'", "altitudeMinimumHorizon): self.horizonPoints = [] if not (horizonByFile or horizonByAltitude): return hp = []", "import os import PyQt5 import time import copy import operator import numpy from", "self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon'] = self.app.ui.altitudeMinimumHorizon.value() self.app.config['ModelInitialPointsFileName'] = self.app.ui.le_modelInitialPointsFileName.text() self.app.config['ModelFullPointsFileName'] = self.app.ui.le_modelFullPointsFileName.text() def saveHorizonMask(self): filepath", "-5 for dec in range(-15, 90, 15): for ha in range(120 + off,", "range(5, 360, int(360 / numberOfColumns)): if alt > 0: if az > 180:", "points file selected') def selectFullModelPointsFileName(self): value, ext = self.app.selectFile(self.app, 'Open full model points", "2 and modeltype == 'Initial': p.append(point) except Exception as e: msg = 'Error", "generateNormalPoints(self, limitByHorizonMask, doSortingPoints): west = [] east = [] off = -5 i", "grid, then its a TSX file (the sky x) convertedLine = line.rstrip('\\n').split() point", "i in range(0, numberOfPathPoints): ra = ra - float(i) * hoursPathLength / numberOfPathPoints", "= line.rstrip('\\n').split(':') else: 
# carte du ciel / skychart format m = line.rstrip('\\n').split('", "in range(120 + off, -120 + off, -2): az, alt = self.transform.topocentricToAzAlt(ha /", "range(120 + off, -120 + off, -step): az, alt = self.transform.topocentricToAzAlt(ha / 10,", "Python-based Tool for interaction with the 10micron mounts # GUI with PyQT5 for", "model points filename given!' self.logger.warning('No model points filename given!') return p, msg try:", "error:{0}'.format(e)) finally: pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] =", "pass def storeConfig(self): self.app.config['HorizonPointsFileName'] = self.app.ui.le_horizonPointsFileName.text() self.app.config['CheckUseMinimumHorizonLine'] = self.app.ui.checkUseMinimumHorizonLine.isChecked() self.app.config['CheckUseFileHorizonLine'] = self.app.ui.checkUseFileHorizonLine.isChecked() self.app.config['AltitudeMinimumHorizon']", "east = [] off = -5 i = 0 for dec in range(-15,", "self.app.selectFile(self.app, 'Open initial model points file', '/config', 'Model points files (*.txt)', True) if", "30 if i % 2: for ha in range(120 + off, -120 +", "open('config/' + modelPointsFileName + '.txt', 'r') as fileHandle: for line in fileHandle: if", "self.horizonPoints], left=None, right=None, period=None) if point[1] > y[int(point[0])]: return True else: return False" ]
[ "= bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to", "vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print", "* self.bld_num) self.raycast_x = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_y", "= self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts to", "= rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply(", "# convert tree to nested list of sphere pts for i in range(sphere_tree_in.BranchCount):", "self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda r: [None] * self.ray_num, [None] *", "def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance = map(lambda r: [None] * self.ray_num,", "[None] * self.ray_num, [None] * self.bld_num) self.ray_mtx = [] self.header_lst = [] for", "[None] * self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda r: [None] * self.ray_num,", "self.bld_num) self.raycast_y = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_z =", "gh.DataTree[object]() # Add pythonlist sub lists to dataTree for i,l in enumerate(pythonList): for", "raydist = [] for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i]", "= \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i in xrange(self.bld_num): self.bld_lst =", "= len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested = [] 
self.ray_dist_nested", "#self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list to a GH", "ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested = [] for", "i in xrange(self.bld_num): self.bld_lst = [] for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j]", "self.bld_num) self.ray_mtx = [] self.header_lst = [] for ri in xrange(self.ray_num): hstr =", "None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to", "[None] * self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda r: [None] * self.ray_num,", "i in xrange(self.bld_num): raypts = [] raydist = [] for j in xrange(self.ray_num):", "rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested list of sphere pts for i", "__init__(self): self.sphere_nested = [] self.cpt = [] self.bound_nested = [] self.bld_num = None", "= self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def", "self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z = None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt =", "= self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j]", "r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda r: [None] *", "r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda r: [None] *", "[None] * self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda r: [None] * self.ray_num,", "None self.raycast_pt_y = None self.raycast_pt_z = None def 
process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c:", "z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested", "#print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance = map(lambda r: [None]", "= list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def", "Converts a nested Python list to a GH datatree \"\"\" # Create GH", "sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def", "[] self.bound_nested = [] self.bld_num = None self.ray_num = None # Outputs self.raycast_distance", "vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out", "= [] for i in xrange(self.bld_num): raypts = [] raydist = [] for", "for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in)", "= list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num", "gh \"\"\" Calculate View Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested = []", "point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) 
raydist.append(rs.Distance(rpt,r0))", "cpt_lst_in) # convert tree to nested list of sphere pts for i in", "[] for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y =", "rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): #", "datatree \"\"\" # Create GH datatree dataTree = gh.DataTree[object]() # Add pythonlist sub", "self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested = []", "rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt)", "import rhinoscriptsyntax as rs import Rhino as rc import scriptcontext as sc #import", "x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst)", "import scriptcontext as sc #import ghpythonlib as gh import Grasshopper as gh \"\"\"", "= [] for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector", "lists to dataTree for i,l in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return", "for i in xrange(self.bld_num): self.bld_lst = [] for j in xrange(self.ray_num): d =", "in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix()", "as sc #import ghpythonlib as gh import Grasshopper as gh \"\"\" Calculate View", "ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, 
cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree", "[] for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector r1", "j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num", "list of sphere pts for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) #", "to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1)", "for i,l in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf =", "Outputs self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z = None", "list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self):", "header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out =", "self.header_lst for i in xrange(self.bld_num): self.bld_lst = [] for j in xrange(self.ray_num): d", "self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list to a GH datatree", "list to a GH datatree \"\"\" # Create GH datatree dataTree = gh.DataTree[object]()", "[] self.ray_dist_nested = [] for i in xrange(self.bld_num): raypts = [] raydist =", "bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s:", "= rc.Geometry.Vector3d(r1) - 
rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst", "= [] for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y", "map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda r: [None]", "self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda r: [None] * self.ray_num, [None] *", "self.ray_dist_nested = [] for i in xrange(self.bld_num): raypts = [] raydist = []", "* self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda r: [None] * self.ray_num, [None]", "as rc import scriptcontext as sc #import ghpythonlib as gh import Grasshopper as", "self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list to", "None # Outputs self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z", "of bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda", "import Rhino as rc import scriptcontext as sc #import ghpythonlib as gh import", "convert tree to nested list of bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList", "for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested", "pts for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to", "# Outputs self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z =", "Calculate View Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt =", "branchList) self.bound_nested.append(branchList) # convert guids to rc points 
for i in xrange(len(self.sphere_nested)): sphere_per_bld", "in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in,", "hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i in xrange(self.bld_num): self.bld_lst", "vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if", "self.bound_nested = [] self.bld_num = None self.ray_num = None # Outputs self.raycast_distance =", "point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print", "= self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1 =", "map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to rc points for i", "vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out", "Add pythonlist sub lists to dataTree for i,l in enumerate(pythonList): for v in", "dataTree for i,l in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf", "for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested)", "raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance", 
"self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray =", "self.ray_int_nested = [] self.ray_dist_nested = [] for i in xrange(self.bld_num): raypts = []", "def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list to a GH datatree \"\"\"", "None self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z = None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt", "len(self.header_lst) print self.header_lst for i in xrange(self.bld_num): self.bld_lst = [] for j in", "pythonlist sub lists to dataTree for i,l in enumerate(pythonList): for v in l:", "nested list of bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList", "= [] self.bound_nested = [] self.bld_num = None self.ray_num = None # Outputs", "self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts to vectors", "# flip the matrix self.raycast_distance = map(lambda r: [None] * self.ray_num, [None] *", "self.raycast_y = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda", "# convert tree to nested list of bound srfs for i in range(bound_srf_lst_in.BranchCount):", "range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested list of bound", "in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector", "#convert pts to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst", "v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast()", "nested Python list to a GH datatree \"\"\" # Create GH datatree dataTree", "for i in 
xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] =", "xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2]", "self.header_lst = [] for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst)", "gh import Grasshopper as gh \"\"\" Calculate View Factor \"\"\" class ViewFactor(object): def", "#base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1 = rc.Geometry.Vector3d(r1) -", "Rhino as rc import scriptcontext as sc #import ghpythonlib as gh import Grasshopper", "a GH datatree \"\"\" # Create GH datatree dataTree = gh.DataTree[object]() # Add", "tree to nested list of bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList =", "list of bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList =", "[] for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst", "in xrange(self.bld_num): self.bld_lst = [] for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x", "if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts)", "branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to rc points", "bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx)", "def __init__(self): 
self.sphere_nested = [] self.cpt = [] self.bound_nested = [] self.bld_num =", "in xrange(self.bld_num): raypts = [] raydist = [] for j in xrange(self.ray_num): srf2int_lst", "* self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda r: [None] * self.ray_num, [None]", "matrix self.raycast_distance = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_x =", "None self.ray_num = None # Outputs self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y", "len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested =", "self.raycast_distance = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda", "self.sphere_nested = [] self.cpt = [] self.bound_nested = [] self.bld_num = None self.ray_num", "cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print", "pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list to a GH datatree \"\"\" #", "[] self.cpt = [] self.bound_nested = [] self.bld_num = None self.ray_num = None", "ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out =", "rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\"", "= rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self):", "# Add pythonlist sub lists to dataTree for i,l in enumerate(pythonList): for v", "= map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda r:", "vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, 
bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree =", "to nested list of sphere pts for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i)", "return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst", "r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst:", "for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for", "rc points for i in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)):", "sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested list of bound srfs for i", "= gh.DataTree[object]() # Add pythonlist sub lists to dataTree for i,l in enumerate(pythonList):", "- rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst)", "= rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0]", "point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the", "nested list of sphere pts for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) 
self.sphere_nested.append(branchList)", "in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z =", "self.bound_nested.append(branchList) # convert guids to rc points for i in xrange(len(self.sphere_nested)): sphere_per_bld =", "self.bld_num) self.raycast_z = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.ray_mtx =", "as rs import Rhino as rc import scriptcontext as sc #import ghpythonlib as", "self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j]", "print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt ray_out = reduce(lambda x,y: x+y,", "xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] =", "base_vector direction_vector srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested = [] for i in", "rc import scriptcontext as sc #import ghpythonlib as gh import Grasshopper as gh", "= len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested", "self.raycast_pt_z = None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert", "= [] self.cpt = [] self.bound_nested = [] self.bld_num = None self.ray_num =", "ghpythonlib as gh import Grasshopper as gh \"\"\" Calculate View Factor \"\"\" class", "self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList):", "self.raycast_x = map(lambda r: 
[None] * self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda", "self.sphere_nested.append(branchList) # convert tree to nested list of bound srfs for i in", "rs import Rhino as rc import scriptcontext as sc #import ghpythonlib as gh", "#direction_vector #convert pts to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1)", "[] raydist = [] for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 =", "[None] * self.bld_num) self.ray_mtx = [] self.header_lst = [] for ri in xrange(self.ray_num):", "guids to rc points for i in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j", "= map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda r:", "#import ghpythonlib as gh import Grasshopper as gh \"\"\" Calculate View Factor \"\"\"", "ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i", "the matrix self.raycast_distance = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_x", "i,l in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor()", "#header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt ray_out =", "# Create GH datatree dataTree = gh.DataTree[object]() # Add pythonlist sub lists to", "dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst =", "list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num =", "= None self.raycast_pt_y = None self.raycast_pt_z = None def 
process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda", "ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt = [] self.bound_nested = [] self.bld_num", "pts to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst =", "[None] * self.bld_num) self.raycast_z = map(lambda r: [None] * self.ray_num, [None] * self.bld_num)", "= None # Outputs self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y = None", "self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda r: [None] * self.ray_num, [None] *", "print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt ray_out = reduce(lambda", "as gh \"\"\" Calculate View Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested =", "* self.ray_num, [None] * self.bld_num) self.ray_mtx = [] self.header_lst = [] for ri", "s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to rc points for i in", "rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst =", "srfs for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s),", "# convert guids to rc points for i in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i])", "raypts = [] raydist = [] for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i]", "self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested list of", "class ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt = [] self.bound_nested = []", "self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" 
base_vector", "to a GH datatree \"\"\" # Create GH datatree dataTree = gh.DataTree[object]() #", "GH datatree \"\"\" # Create GH datatree dataTree = gh.DataTree[object]() # Add pythonlist", "convert tree to nested list of sphere pts for i in range(sphere_tree_in.BranchCount): branchList", "for i in xrange(self.bld_num): raypts = [] raydist = [] for j in", "dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree", "= self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0) ray", "Python list to a GH datatree \"\"\" # Create GH datatree dataTree =", "points for i in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j]", "= vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt", "self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance = map(lambda", "= [] self.ray_dist_nested = [] for i in xrange(self.bld_num): raypts = [] raydist", "GH datatree dataTree = gh.DataTree[object]() # Add pythonlist sub lists to dataTree for", "= [] self.bld_num = None self.ray_num = None # Outputs self.raycast_distance = None", "[] self.bld_num = None self.ray_num = None # Outputs self.raycast_distance = None self.raycast_pt_x", "sphere pts for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree", "c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested list of sphere pts for", "scriptcontext as sc #import ghpythonlib as gh import Grasshopper as gh \"\"\" Calculate", "convert guids to rc points for i in 
xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for", "= None self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z = None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in):", "= self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts", "= map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda r:", "len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt ray_out = reduce(lambda x,y: x+y, vf.ray_int_nested)", "dataTree = gh.DataTree[object]() # Add pythonlist sub lists to dataTree for i,l in", "j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j]", "= None self.ray_num = None # Outputs self.raycast_distance = None self.raycast_pt_x = None", "in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) #", "= sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested list of bound srfs for", "\"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested = [] for i", "self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i in xrange(self.bld_num): self.bld_lst = [] for", "xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i in xrange(self.bld_num):", "= [] raydist = [] for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0", "as gh import Grasshopper as gh \"\"\" Calculate View Factor \"\"\" class ViewFactor(object):", 
"xrange(self.bld_num): raypts = [] raydist = [] for j in xrange(self.ray_num): srf2int_lst =", "datatree dataTree = gh.DataTree[object]() # Add pythonlist sub lists to dataTree for i,l", "* self.ray_num, [None] * self.bld_num) self.raycast_z = map(lambda r: [None] * self.ray_num, [None]", "self.bld_num = None self.ray_num = None # Outputs self.raycast_distance = None self.raycast_pt_x =", "Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt = [] self.bound_nested", "branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested list of bound srfs", "map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_y = map(lambda r: [None]", "d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z])", "= ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx)", "rc.Geometry.Vector3d(r0) ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt", "a nested Python list to a GH datatree \"\"\" # Create GH datatree", "for j in xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 =", "vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt ray_out", "bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to rc", "in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num =", 
"rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to rc points for i in xrange(len(self.sphere_nested)):", "None self.raycast_pt_z = None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) #", "len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance = map(lambda r: [None] *", "l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in, cpt_lst_in) vf.ray_cast() vf.generate_viewfactor_matrix() header_lst", "point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested)", "self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1 = rc.Geometry.Vector3d(r1)", "r1 = self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1 = rc.Geometry.Vector3d(r1) - rc.Geometry.Vector3d(r0)", "def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested", "self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list", "flip the matrix self.raycast_distance = map(lambda r: [None] * self.ray_num, [None] * self.bld_num)", "i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList)", "Create GH datatree dataTree = gh.DataTree[object]() # Add pythonlist sub lists to dataTree", "= None self.raycast_pt_z = None def 
process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in)", "= self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python", "direction_vector srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested = [] for i in xrange(self.bld_num):", "xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0])", "import Grasshopper as gh \"\"\" Calculate View Factor \"\"\" class ViewFactor(object): def __init__(self):", "for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1]", "= map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested list of sphere", "i in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j])", "to nested list of bound srfs for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i)", "self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y = None self.raycast_pt_z = None def", "in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i in", "enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree vf = ViewFactor() vf.process_raw_inputs(sphere_tree_in, bound_srf_lst_in,", "= map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.ray_mtx = [] self.header_lst", "* self.bld_num) self.ray_mtx = [] self.header_lst = [] for ri in xrange(self.ray_num): hstr", "= 
sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector", "Grasshopper as gh \"\"\" Calculate View Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested", "map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested list of sphere pts", "in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i]", "srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts", "map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda r: [None]", "[None] * self.bld_num) self.raycast_y = map(lambda r: [None] * self.ray_num, [None] * self.bld_num)", "View Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt = []", "\"\"\" Converts a nested Python list to a GH datatree \"\"\" # Create", "self.ray_num = None # Outputs self.raycast_distance = None self.raycast_pt_x = None self.raycast_pt_y =", "i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested list", "len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested #cpt_out = vf.cpt ray_out = reduce(lambda x,y:", "sub lists to dataTree for i,l in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i))", "sphere_per_bld = list(self.sphere_nested[i]) for j in xrange(len(sphere_per_bld)): sphere_per_bld[j] = rs.coerce3dpoint(sphere_per_bld[j]) self.sphere_nested[i] = sphere_per_bld", "= [] self.header_lst = [] for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"])", "generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance = 
map(lambda r: [None] * self.ray_num, [None]", "[] for i in xrange(self.bld_num): raypts = [] raydist = [] for j", "rhinoscriptsyntax as rs import Rhino as rc import scriptcontext as sc #import ghpythonlib", "tree to nested list of sphere pts for i in range(sphere_tree_in.BranchCount): branchList =", "r: [None] * self.ray_num, [None] * self.bld_num) self.ray_mtx = [] self.header_lst = []", "self.ray_mtx = [] self.header_lst = [] for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri)", "raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix", "= point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip", "#rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist) self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance =", "* self.bld_num) self.raycast_z = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.ray_mtx", "self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a", "print self.header_lst for i in xrange(self.bld_num): self.bld_lst = [] for j in xrange(self.ray_num):", "[None] * self.bld_num) self.raycast_x = map(lambda r: [None] * self.ray_num, [None] * self.bld_num)", "range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert", "sc #import 
ghpythonlib as gh import Grasshopper as gh \"\"\" Calculate View Factor", "\"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt = [] self.bound_nested =", "xrange(self.bld_num): self.bld_lst = [] for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x =", "= vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst) #sphere_out = vf.sphere_nested", "to dataTree for i,l in enumerate(pythonList): for v in l: dataTree.Add(v,gh.Kernel.Data.GH_Path(i)) return dataTree", "len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested =", "= [] for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print", "sphere_per_bld self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector srf2int", "rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt = point_intersect_lst[0] raypts.append(rpt) raydist.append(rs.Distance(rpt,r0)) #rc.Geometry.Vector3d.Multiply( self.ray_dist_nested.append(raydist)", "map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.ray_mtx = [] self.header_lst =", "\"\"\" Calculate View Factor \"\"\" class ViewFactor(object): def __init__(self): self.sphere_nested = [] self.cpt", "self.cpt = [] self.bound_nested = [] self.bld_num = None self.ray_num = None #", "self.raycast_z = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.ray_mtx = []", "to rc points for i in xrange(len(self.sphere_nested)): sphere_per_bld = list(self.sphere_nested[i]) for j in", "self.bld_num = len(self.sphere_nested) self.ray_num = len(self.sphere_nested[0]) def ray_cast(self): \"\"\" base_vector direction_vector 
srf2int \"\"\"", "of sphere pts for i in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert", "r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert pts to vectors r1", "\"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print len(self.header_lst) print self.header_lst for i in xrange(self.bld_num): self.bld_lst = []", "self.bld_num) self.raycast_x = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_y =", "self.bld_lst = [] for j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0]", "srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested = [] for i in xrange(self.bld_num): raypts", "for i in range(bound_srf_lst_in.BranchCount): branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList)", "[] self.header_lst = [] for ri in xrange(self.ray_num): hstr = \"RAY_{b}_\".format(b=ri) self.header_lst.extend([hstr+\"dist\",hstr+\"x\",hstr+\"y\",hstr+\"z\"]) print", "xrange(self.ray_num): srf2int_lst = self.bound_nested[i] r0 = self.cpt[i] #base_vector r1 = self.sphere_nested[i][j] #direction_vector #convert", "r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_x = map(lambda r: [None] *", "#self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\" Converts a nested Python list to a", "y = self.ray_int_nested[i][j][1] z = self.ray_int_nested[i][j][2] self.bld_lst.extend([d,x,y,z]) #self.ray_dist_nested[i][j] #self.raycast_distance[i][j] self.ray_mtx.append(self.bld_lst) def pythonListTGhDataTree(self,pythonList): \"\"\"", "process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree to nested list", "self.raycast_pt_y = None self.raycast_pt_z = None def 
process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c),", "* self.bld_num) self.raycast_y = map(lambda r: [None] * self.ray_num, [None] * self.bld_num) self.raycast_z", "\"\"\" # Create GH datatree dataTree = gh.DataTree[object]() # Add pythonlist sub lists", "vf.ray_cast() vf.generate_viewfactor_matrix() header_lst = vf.header_lst ray_tree = vf.pythonListTGhDataTree(vf.ray_mtx) #header_tree print len(vf.ray_mtx) print len(vf.bld_lst)", "= map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids to rc points for", "print len(self.header_lst) print self.header_lst for i in xrange(self.bld_num): self.bld_lst = [] for j", "branchList = bound_srf_lst_in.Branch(i) branchList = map(lambda s: rs.coercebrep(s), branchList) self.bound_nested.append(branchList) # convert guids", "\"\"\" self.ray_int_nested = [] self.ray_dist_nested = [] for i in xrange(self.bld_num): raypts =", "ray = rc.Geometry.Ray3d(r0,r1) point_intersect_lst = rc.Geometry.Intersect.Intersection.RayShoot(ray,srf2int_lst,1) if point_intersect_lst: point_intersect_lst = list(point_intersect_lst) rpt =", "self.ray_int_nested.append(raypts) #print len(self.ray_int_nested) def generate_viewfactor_matrix(self): # flip the matrix self.raycast_distance = map(lambda r:", "def ray_cast(self): \"\"\" base_vector direction_vector srf2int \"\"\" self.ray_int_nested = [] self.ray_dist_nested = []", "in range(sphere_tree_in.BranchCount): branchList = sphere_tree_in.Branch(i) self.sphere_nested.append(branchList) # convert tree to nested list of", "= None def process_raw_inputs(self,sphere_tree_in,bound_srf_lst_in,cpt_lst_in): self.cpt = map(lambda c: rs.coerce3dpoint(c), cpt_lst_in) # convert tree", "j in xrange(self.ray_num): d = self.ray_dist_nested[i][j] x = self.ray_int_nested[i][j][0] y = self.ray_int_nested[i][j][1] z", "self.ray_num, [None] * self.bld_num) self.ray_mtx = [] self.header_lst = [] for ri in" ]
[ "router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view), re_path(r'^', include(router.urls)),", "from snippets import views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view =", "router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns =", "re_path, include from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets import", "import DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets import views router = DefaultRouter()", "router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view),", "import views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API')", "include from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets import views", "views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view), re_path(r'^',", "= DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns = [", "views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view), re_path(r'^', include(router.urls)), re_path(r'^api-auth/',", "from rest_framework_swagger.views import get_swagger_view from snippets import views router = 
DefaultRouter() router.register(r'snippets', views.SnippetViewSet)", "<filename>example_app/tutorial/urls.py from django.urls import re_path, include from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import", "get_swagger_view from snippets import views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view", "import re_path, include from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets", "= get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view), re_path(r'^', include(router.urls)), re_path(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))", "snippets import views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets", "rest_framework_swagger.views import get_swagger_view from snippets import views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users',", "DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets import views router = DefaultRouter() router.register(r'snippets',", "DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$',", "from django.urls import re_path, include from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view", "schema_view = get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view), re_path(r'^', include(router.urls)), re_path(r'^api-auth/', include('rest_framework.urls',", "django.urls import re_path, include from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view 
from", "get_swagger_view(title='Snippets API') urlpatterns = [ re_path('^$', schema_view), re_path(r'^', include(router.urls)), re_path(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) ]", "views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet) schema_view = get_swagger_view(title='Snippets API') urlpatterns", "import get_swagger_view from snippets import views router = DefaultRouter() router.register(r'snippets', views.SnippetViewSet) router.register(r'users', views.UserViewSet)", "from rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets import views router", "rest_framework.routers import DefaultRouter from rest_framework_swagger.views import get_swagger_view from snippets import views router =" ]
[ "16 - len(data) % 16 fix = chr(pad) * pad byte_data = (data", "self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url) -> str: music_ids =", "r.json() if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer =", "str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid = music_ids[0] return mid return", "s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id},", "320000}, } data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r", "j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams", "= binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 - len(data)", "[\"Netease\"] def encode_netease_data(data) -> str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor =", "len(data) % 16 fix = chr(pad) * pad byte_data = (data + fix).encode(\"utf-8\")", "j = r.json() if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"]", "pad = 16 - len(data) % 16 fix = chr(pad) * pad byte_data", "> 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise", "self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if", "s.headers.update({\"referer\": 
\"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j", "pad byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args,", "AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 - len(data) % 16 fix =", "encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 - len(data) % 16", "music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams =", "% 16 fix = chr(pad) * pad byte_data = (data + fix).encode(\"utf-8\") return", "* pad byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self,", "requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise", "import json import binascii from Crypto.Cipher import AES from .base import Music from", "import requests import json import binascii from Crypto.Cipher import AES from .base import", "= [\"Netease\"] def encode_netease_data(data) -> str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor", "{\"ids\": [self.music_id], \"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers)", "from Crypto.Cipher import AES from .base import Music from mozart import config from", "{ \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data =", "{\"eparams\": encode_netease_data(eparams)} s = requests.Session() 
s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if", "requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] /", "- len(data) % 16 fix = chr(pad) * pad byte_data = (data +", "if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() if len(j[\"songs\"]) > 0:", "**kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id()", "_get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\":", "r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate =", "import Music from mozart import config from .exception import MusicDoesnotExists __all__ = [\"Netease\"]", "from .base import Music from mozart import config from .exception import MusicDoesnotExists __all__", "r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url)", "key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 -", "\"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, } data = {\"eparams\":", "fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) #", "from .exception import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) -> str: data =", "\"params\": {\"ids\": 
[self.music_id], \"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session()", "print(self.__repr__()) def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s =", "raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\", \"url\":", "MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) -> str: data = json.dumps(data) key =", "= j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self):", "-> str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) #", "raise Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000)", "chr(pad) * pad byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def", "\"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)} s =", "= s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() if", "{\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j", "= requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\"", "r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = 
r.json()", "\"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id],", "Music from mozart import config from .exception import MusicDoesnotExists __all__ = [\"Netease\"] def", "s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text)", "<filename>mozart/music/netease.py import re import requests import json import binascii from Crypto.Cipher import AES", "0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\")", "-> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid = music_ids[0] return mid", "self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\":", "{ \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, } data =", "self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def", "\"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"})", "str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数", "*args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id = 
self.get_music_id_from_url(self.real_url)", "# music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams", ".exception import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) -> str: data = json.dumps(data)", "从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\":", "= s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url", "url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid = music_ids[0] return", ".base import Music from mozart import config from .exception import MusicDoesnotExists __all__ =", "mozart import config from .exception import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) ->", "16 fix = chr(pad) * pad byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode()", "(data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args,", "def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session()", "__init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id =", "from mozart import config from .exception import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data)", "= r.json() if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer", "encode_netease_data(eparams)} s 
= requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code", "j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url) -> str: music_ids", "binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 - len(data) %", "config from .exception import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) -> str: data", "j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\":", "requests.codes.ok: raise Exception(r.text) j = r.json() if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"]", "= {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data)", "music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid = music_ids[0] return mid return \"\"", "json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16", "= (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs): super(Netease,", "s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code !=", "= chr(pad) * pad byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music):", "\"\"\" eparams = { \"method\": \"POST\", 
\"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000},", "= j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url) -> str:", "binascii from Crypto.Cipher import AES from .base import Music from mozart import config", "s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() if len(j[\"songs\"])", "\"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)} s", "_get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\":", "j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def", "= int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/',", "data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() if len(j[\"songs\"]) >", "= requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok:", "if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\":", "\"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code !=", "not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def 
get_music_from_id(self): if self.music_id: # music_id合法才请求", "= { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data", "encode_netease_data(data) -> str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB)", "= 16 - len(data) % 16 fix = chr(pad) * pad byte_data =", "= AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 - len(data) % 16 fix", "self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载", "len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else:", "\"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)}", "if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate", "} data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok:", "AES.MODE_ECB) # 补足data长度,满足16的倍数 pad = 16 - len(data) % 16 fix = chr(pad)", "self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url()", "self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = {", "def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": 
\"http://music.163.com/\"}) eparams = { \"method\": \"POST\",", "def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id", "eparams = { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, }", "= self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def", "if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"]", "\"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code", "!= requests.codes.ok: raise Exception(r.text) j = r.json() if len(j[\"songs\"]) > 0: self._cover =", "self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url) ->", "} data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r =", "self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams =", "s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\":", "binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if", "r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() if len(j[\"songs\"]) > 0: self._cover", 
"requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" %", "__all__ = [\"Netease\"] def encode_netease_data(data) -> str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\")", "\"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)}", "/ 1000) @classmethod def get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if", "def get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid =", "= json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad =", "Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod", "requests import json import binascii from Crypto.Cipher import AES from .base import Music", "eparams = { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" }", "+ fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs)", "% self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data)", "re import requests import json import binascii from Crypto.Cipher import AES from .base", "encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j =", "\"http://music.163.com/\"}) r = 
s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j =", "@classmethod def get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid", "# 网易音乐的初始化 if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if", "Exception(r.text) j = r.json() if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song =", "\"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\"", "AES from .base import Music from mozart import config from .exception import MusicDoesnotExists", "= r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls,", "byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs):", "data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key, AES.MODE_ECB) # 补足data长度,满足16的倍数 pad", "MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\",", "import AES from .base import Music from mozart import config from .exception import", "def encode_netease_data(data) -> str: data = json.dumps(data) key = binascii.unhexlify(\"7246674226682325323F5E6544673A51\") encryptor = AES.new(key,", "self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"})", "raise Exception(r.text) j = 
r.json() if len(j[\"songs\"]) > 0: self._cover = j[\"songs\"][0][\"al\"][\"picUrl\"] self._song", "fix = chr(pad) * pad byte_data = (data + fix).encode(\"utf-8\") return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class", "data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"]", "补足data长度,满足16的倍数 pad = 16 - len(data) % 16 fix = chr(pad) * pad", "[self.music_id], \"br\": 320000}, } data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\":", "\"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\",", "self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self):", "\"method\": \"POST\", \"params\": {\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\":", "网易音乐的初始化 if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id:", "self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s", "get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info() self._get_download_url() def _get_music_info(self): s = requests.Session() s.headers.update(config.fake_headers)", "s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) eparams = { \"method\": \"POST\", \"params\": {\"c\":", "\"params\": {\"c\": \"[{id:%s}]\" % 
self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r", "**kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self):", "j[\"songs\"][0][\"al\"][\"picUrl\"] self._song = j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\"", "else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\",", "= j[\"songs\"][0][\"al\"][\"name\"] self._singer = j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\"", "import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) -> str: data = json.dumps(data) key", "= { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\": {\"ids\": [self.music_id], \"br\": 320000}, } data", "self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def", "data = {\"eparams\": encode_netease_data(eparams)} s = requests.Session() s.headers.update(config.fake_headers) s.headers.update({\"referer\": \"http://music.163.com/\"}) r = s.post(\"http://music.163.com/api/linux/forward\",", "{\"c\": \"[{id:%s}]\" % self.music_id}, \"url\": \"http://music.163.com/api/v3/song/detail\" } data = {\"eparams\": encode_netease_data(eparams)} r =", "# 补足data长度,满足16的倍数 pad = 16 - len(data) % 16 fix = chr(pad) *", "json import binascii from Crypto.Cipher import AES from .base import Music from mozart", "class Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not", "Crypto.Cipher import AES from .base 
import Music from mozart import config from .exception", "self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id: # music_id合法才请求 self._get_music_info()", "s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url =", "Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id:", "get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids: mid = music_ids[0]", "super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化 if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__())", "if not self.use_id: self.music_id = self.get_music_id_from_url(self.real_url) self.get_music_from_id() print(self.__repr__()) def get_music_from_id(self): if self.music_id: #", "data = {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise", "!= requests.codes.ok: raise Exception(r.text) j = r.json() self._download_url = j[\"data\"][0][\"url\"] self._rate = int(j[\"data\"][0][\"br\"]", "= j[\"songs\"][0][\"ar\"][0][\"name\"] else: raise MusicDoesnotExists(\"音乐不存在,请检查\") def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = {", "import config from .exception import MusicDoesnotExists __all__ = [\"Netease\"] def encode_netease_data(data) -> str:", "int(j[\"data\"][0][\"br\"] / 1000) @classmethod def get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url)", "def _get_download_url(self): \"\"\" 从网易云音乐下载 \"\"\" eparams = { \"method\": \"POST\", \"url\": \"http://music.163.com/api/song/enhance/player/url\", \"params\":", "import re import requests import 
json import binascii from Crypto.Cipher import AES from", "return binascii.hexlify(encryptor.encrypt(byte_data)).upper().decode() class Netease(Music): def __init__(self, *args, **kwargs): super(Netease, self).__init__(*args, **kwargs) # 网易音乐的初始化", "1000) @classmethod def get_music_id_from_url(cls, url) -> str: music_ids = re.findall(r'music.163.com/song/(\\d+)/', url) if music_ids:", "= {\"eparams\": encode_netease_data(eparams)} r = s.post(\"http://music.163.com/api/linux/forward\", data=data) if r.status_code != requests.codes.ok: raise Exception(r.text)", "import binascii from Crypto.Cipher import AES from .base import Music from mozart import" ]
[ "simulate_to_end(temp) return score def monte_carlo(game): scores = {} biggest = 0 best =", "simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for i in xrange(3): c = random.choice(dircts)", "i in xrange(3): c = random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score() def", "in directions: temp = game.clone() score = score_sum(temp, direction) if score > biggest:", "in DIRECTIONS: test = game.clone() if not test.move(d): directions.remove(d) for direction in directions:", "_2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while", "= direction scores[direction] = score print scores if len(set(scores)) == 1: return False", "temp = game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score", "game.clone() score = score_sum(temp, direction) if score > biggest: biggest = score best", "def monte_carlo(game): scores = {} biggest = 0 best = None directions =", "method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT')", "if not test.move(d): directions.remove(d) for direction in directions: temp = game.clone() score =", "('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for i", "directions = list(DIRECTIONS) for d in DIRECTIONS: test = game.clone() if not test.move(d):", "a 2048 game using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS", "1: return False else: return best if __name__ == '__main__': a_game = _2048.Gameplay()", "score_sum(game,direction): score = 0 temp = game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score", "dircts = list(DIRECTIONS) for i in xrange(3): c = random.choice(dircts) if game.move(c): break", "import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT') def", "return False else: return best if __name__ == 
'__main__': a_game = _2048.Gameplay() print", "xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def monte_carlo(game): scores = {} biggest =", "random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game):", "direction in directions: temp = game.clone() score = score_sum(temp, direction) if score >", "break dircts.remove(c) return game.get_score() def score_sum(game,direction): score = 0 temp = game.clone() temp.move(direction)", "game using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP',", "False else: return best if __name__ == '__main__': a_game = _2048.Gameplay() print monte_carlo(a_game)", "score def monte_carlo(game): scores = {} biggest = 0 best = None directions", "'RIGHT') def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for i in xrange(3): c", "for simulating a 2048 game using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES =", "scores[direction] = score print scores if len(set(scores)) == 1: return False else: return", "d in DIRECTIONS: test = game.clone() if not test.move(d): directions.remove(d) for direction in", "random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction): score = 0 temp", "SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state():", "= 0 best = None directions = list(DIRECTIONS) for d in DIRECTIONS: test", "= score_sum(temp, direction) if score > biggest: biggest = score best = direction", "game.clone() if not test.move(d): directions.remove(d) for direction in directions: temp = game.clone() score", "dircts.remove(c) return game.get_score() def score_sum(game,direction): score = 0 temp = game.clone() temp.move(direction) for", "\"\"\"Algorithm for simulating a 2048 game using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES", "biggest: biggest = score 
best = direction scores[direction] = score print scores if", "= random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction): score = 0", "def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for i in xrange(3): c =", "= 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state(): dircts", "temp.move(direction) for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def monte_carlo(game): scores", "= ('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for", "direction scores[direction] = score print scores if len(set(scores)) == 1: return False else:", "= game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def", "simulating a 2048 game using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000", "= 0 temp = game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp)", "score best = direction scores[direction] = score print scores if len(set(scores)) == 1:", "score print scores if len(set(scores)) == 1: return False else: return best if", "score = score_sum(temp, direction) if score > biggest: biggest = score best =", "xrange(3): c = random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction): score", "score > biggest: biggest = score best = direction scores[direction] = score print", "for d in DIRECTIONS: test = game.clone() if not test.move(d): directions.remove(d) for direction", "2048 game using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS =", "score += simulate_to_end(temp) return score def monte_carlo(game): scores = {} biggest = 0", "print scores if len(set(scores)) == 1: return False else: return best if __name__", "DIRECTIONS = ('UP', 'DOWN', 'LEFT', 
'RIGHT') def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS)", "= list(DIRECTIONS) for i in xrange(3): c = random.choice(dircts) if game.move(c): break dircts.remove(c)", "list(DIRECTIONS) for d in DIRECTIONS: test = game.clone() if not test.move(d): directions.remove(d) for", "best = None directions = list(DIRECTIONS) for d in DIRECTIONS: test = game.clone()", "scores = {} biggest = 0 best = None directions = list(DIRECTIONS) for", "temp = game.clone() score = score_sum(temp, direction) if score > biggest: biggest =", "directions.remove(d) for direction in directions: temp = game.clone() score = score_sum(temp, direction) if", "0 best = None directions = list(DIRECTIONS) for d in DIRECTIONS: test =", "scores if len(set(scores)) == 1: return False else: return best if __name__ ==", "== 1: return False else: return best if __name__ == '__main__': a_game =", "{} biggest = 0 best = None directions = list(DIRECTIONS) for d in", "DIRECTIONS: test = game.clone() if not test.move(d): directions.remove(d) for direction in directions: temp", "score_sum(temp, direction) if score > biggest: biggest = score best = direction scores[direction]", "best = direction scores[direction] = score print scores if len(set(scores)) == 1: return", "= game.clone() if not test.move(d): directions.remove(d) for direction in directions: temp = game.clone()", "c = random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction): score =", "for i in xrange(3): c = random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score()", "game.get_state(): dircts = list(DIRECTIONS) for i in xrange(3): c = random.choice(dircts) if game.move(c):", "in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def monte_carlo(game): scores = {} biggest", "= {} biggest = 0 best = None directions = list(DIRECTIONS) for d", "while game.get_state(): dircts = list(DIRECTIONS) for i in xrange(3): c = 
random.choice(dircts) if", "i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def monte_carlo(game): scores = {}", "len(set(scores)) == 1: return False else: return best if __name__ == '__main__': a_game", "biggest = 0 best = None directions = list(DIRECTIONS) for d in DIRECTIONS:", "'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for i in xrange(3):", "if game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction): score = 0 temp =", "return score def monte_carlo(game): scores = {} biggest = 0 best = None", "direction) if score > biggest: biggest = score best = direction scores[direction] =", "test.move(d): directions.remove(d) for direction in directions: temp = game.clone() score = score_sum(temp, direction)", "in xrange(3): c = random.choice(dircts) if game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction):", "if score > biggest: biggest = score best = direction scores[direction] = score", "monte_carlo(game): scores = {} biggest = 0 best = None directions = list(DIRECTIONS)", "= score print scores if len(set(scores)) == 1: return False else: return best", "for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def monte_carlo(game): scores =", "+= simulate_to_end(temp) return score def monte_carlo(game): scores = {} biggest = 0 best", "biggest = score best = direction scores[direction] = score print scores if len(set(scores))", "100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state(): dircts =", "directions: temp = game.clone() score = score_sum(temp, direction) if score > biggest: biggest", "if len(set(scores)) == 1: return False else: return best if __name__ == '__main__':", "None directions = list(DIRECTIONS) for d in DIRECTIONS: test = game.clone() if not", "game.move(c): break dircts.remove(c) return game.get_score() def score_sum(game,direction): score = 
0 temp = game.clone()", "score = 0 temp = game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score +=", "using Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN',", "Monte-Carlo method.\"\"\" import random, _2048 SIMULATE_TIMES = 100000 DIRECTIONS = ('UP', 'DOWN', 'LEFT',", "game.get_score() def score_sum(game,direction): score = 0 temp = game.clone() temp.move(direction) for i in", "= None directions = list(DIRECTIONS) for d in DIRECTIONS: test = game.clone() if", "def score_sum(game,direction): score = 0 temp = game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES):", "game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return score def monte_carlo(game):", "for direction in directions: temp = game.clone() score = score_sum(temp, direction) if score", "not test.move(d): directions.remove(d) for direction in directions: temp = game.clone() score = score_sum(temp,", "return game.get_score() def score_sum(game,direction): score = 0 temp = game.clone() temp.move(direction) for i", "0 temp = game.clone() temp.move(direction) for i in xrange(SIMULATE_TIMES): score += simulate_to_end(temp) return", "test = game.clone() if not test.move(d): directions.remove(d) for direction in directions: temp =", "= list(DIRECTIONS) for d in DIRECTIONS: test = game.clone() if not test.move(d): directions.remove(d)", "list(DIRECTIONS) for i in xrange(3): c = random.choice(dircts) if game.move(c): break dircts.remove(c) return", "'DOWN', 'LEFT', 'RIGHT') def simulate_to_end(game): while game.get_state(): dircts = list(DIRECTIONS) for i in", "> biggest: biggest = score best = direction scores[direction] = score print scores", "= score best = direction scores[direction] = score print scores if len(set(scores)) ==", "= game.clone() score = score_sum(temp, direction) if score > biggest: biggest = score" ]
#   Project:  hardInfo
#   Author:   <NAME>
#   Date Started:   March 18, 2022
#   Copyright:  (c) Copyright 2022 <NAME>
#   Module:   model/LsBlk.py
#   Date Started:   March 23, 2022
#   Purpose:  Store and provide API for Linux lsblk command.
#   Development:
#       Arguments to include in the command line:
#           lsblk --json --all --zoned --output-all --paths
#
from enum import Enum
from subprocess import Popen, PIPE
from sys import stderr
from json import loads
from tkinter import Tk, messagebox, LabelFrame, BOTH, RAISED

from model.Installation import INSTALLATION_FOLDER
from view.Components import JsonTreeView

PROGRAM_TITLE = "lsblk API"
LSBLK_JSON_FILE = 'lsblk.json'


class Action(Enum):
    """Commands the Dispatcher can be asked to carry out.

    Only Generate is implemented so far; the rest are placeholders for
    the planned Load/Store/Search/... operations.
    """
    Generate = 'Generate'
    Help = "Help"
    Load = 'Load'
    Store = 'Store'
    Search = 'Search'
    Update = 'Update'
    Log = 'Log'
    Exit = 'Exit'

    def __str__(self):
        return self.value


class Dispatcher:
    """Static dispatch table mapping an Action onto its implementation."""

    def __init__(self):
        # This class is a namespace of staticmethods only.
        print("Lshw.Dispatcher does not instantiate")

    @staticmethod
    def do(action: Action):
        """Run *action*; returns its result, or None for unimplemented ones."""
        if action == Action.Generate:
            return Dispatcher.__generateLsBlkJsonFile()

    @staticmethod
    def __generateLsBlkJsonFile():
        """Run ``lsblk --json --all --zoned --output-all --paths``, cache
        its stdout in LSBLK_JSON_FILE, and return the JSON text.

        Any stderr output from lsblk is echoed to this process's stderr.
        """
        proc = Popen(['lsblk', '--json', '--all', '--zoned', '--output-all', '--paths'],
                     stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()
        jsonText = proc[0].decode('utf-8')
        errors = proc[1].decode('utf-8')
        if len(errors) > 0:
            print(errors, file=stderr)
        print("Saving output to:\t" + LSBLK_JSON_FILE)
        # FIX: use a context manager so the file handle is closed even if
        # write() raises (the original open/write/close leaked on error).
        with open(LSBLK_JSON_FILE, "w") as file:
            file.write(jsonText)
        return jsonText


def ExitProgram():
    """Ask the user to confirm, then tear down the main window."""
    answer = messagebox.askyesno('Exit program ', "Exit the " + PROGRAM_TITLE + " program?")
    if answer:
        mainView.destroy()


if __name__ == '__main__':
    mainView = Tk()
    mainView.protocol('WM_DELETE_WINDOW', ExitProgram)
    mainView.geometry("700x450+250+50")
    mainView.title(PROGRAM_TITLE)
    # Generate fresh lsblk output, parse it, and show it in a tree view.
    jsonText = Dispatcher.do(Action.Generate)
    lsblkJson = loads(jsonText)
    borderFrame = LabelFrame(mainView, text="Block Devices", border=5, relief=RAISED)
    jsonTreeView = JsonTreeView(borderFrame, lsblkJson, {"openBranches": True, "mode": "strict"})
    jsonTreeView.pack(expand=True, fill=BOTH)
    borderFrame.pack(expand=True, fill=BOTH)
    mainView.mainloop()
def encrypt(message: bytes, key: dict[int, int]) -> bytes:
    """Substitute every byte of *message* through *key* (plain -> cipher).

    *key* maps each possible byte value to its cipher value.  Returns the
    transformed data as ``bytes`` (the original returned a one-shot
    ``map`` object and mis-annotated both the parameter and the return
    as ``str``; callers read the input in ``'rb'`` mode and feed the
    result to ``bytearray``, so ``bytes`` is the accurate, reusable type).

    Raises KeyError if a byte of *message* has no entry in *key*.
    """
    return bytes(key[char] for char in message)


def decrypt(message: bytes, key: dict[int, int]) -> bytes:
    """Inverse of :func:`encrypt`: substitute each byte through the
    reversed key (cipher -> plain).  Same contract as ``encrypt``.
    """
    return bytes(key[char] for char in message)
def main():
    """CLI entry point: substitution-encrypt or -decrypt a whole file.

    Flags: --file (input), --keyFile (one line of space-separated byte
    values), --output (destination), and exactly one of --encrypt /
    --decrypt selecting the direction.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--file')
    parser.add_argument('--keyFile')
    parser.add_argument('--output')
    parser.add_argument('--encrypt', action='store_true')
    parser.add_argument('--decrypt', action='store_true')
    args = parser.parse_args()

    # The key file's first line is a space-separated permutation of byte
    # values: position -> value encrypts, value -> position decrypts.
    with open(args.keyFile) as key_file:
        key_line = key_file.readline()
    tokens = key_line.split(' ')
    encrypting_key = {position: int(token) for position, token in enumerate(tokens)}
    decrypting_key = {int(token): position for position, token in enumerate(tokens)}

    with open(args.file, 'rb') as data_file:
        payload = data_file.read()

    destination = args.output
    if args.encrypt is True:
        transformed = encrypt(payload, encrypting_key)
    elif args.decrypt is True:
        transformed = decrypt(payload, decrypting_key)
    else:
        print('No action type was given')
        return

    with open(destination, "wb") as sink:
        sink.write(bytearray(transformed))


if __name__ == '__main__':
    main()
decrypting_key = {int(value): idx for idx, value in", "True: encrypted = encrypt(file_contents, encrypting_key) with open(output, \"wb\") as out: out.write(bytearray(encrypted)) elif args.decrypt", "\"wb\") as out: out.write(bytearray(decrypted)) else: print('No action type was given') if __name__ ==", "output = args.output if args.encrypt is True: encrypted = encrypt(file_contents, encrypting_key) with open(output,", "enumerate(key.split(' '))} file_contents = None encrypted = None with open(args.file, 'rb') as file:", "encrypting_key) with open(output, \"wb\") as out: out.write(bytearray(encrypted)) elif args.decrypt is True: decrypted =", "'))} decrypting_key = {int(value): idx for idx, value in enumerate(key.split(' '))} file_contents =", "encrypted = None with open(args.file, 'rb') as file: file_contents = file.read() output =", "None encrypted = None with open(args.file, 'rb') as file: file_contents = file.read() output", "args = parser.parse_args() key = None with open(args.keyFile) as file: key = file.readline()", "int]) -> str: decrypted = map(lambda char: key[char], message) return decrypted def main():", "encrypt(message: str, key: dict[int, int]) -> str: encrypted = map(lambda char: key[char], message)", "str: encrypted = map(lambda char: key[char], message) return encrypted def decrypt(message: str, key:", "\"wb\") as out: out.write(bytearray(encrypted)) elif args.decrypt is True: decrypted = decrypt(file_contents, decrypting_key) with", "parser.parse_args() key = None with open(args.keyFile) as file: key = file.readline() encrypting_key =", "as out: out.write(bytearray(encrypted)) elif args.decrypt is True: decrypted = decrypt(file_contents, decrypting_key) with open(output,", "int(value) for idx, value in enumerate(key.split(' '))} decrypting_key = {int(value): idx for idx,", "open(output, \"wb\") as out: out.write(bytearray(decrypted)) else: print('No action type was given') if __name__", "def encrypt(message: str, key: dict[int, int]) -> str: 
encrypted = map(lambda char: key[char],", "elif args.decrypt is True: decrypted = decrypt(file_contents, decrypting_key) with open(output, \"wb\") as out:", "dict[int, int]) -> str: decrypted = map(lambda char: key[char], message) return decrypted def", "= args.output if args.encrypt is True: encrypted = encrypt(file_contents, encrypting_key) with open(output, \"wb\")", "str: decrypted = map(lambda char: key[char], message) return decrypted def main(): parser =", "as out: out.write(bytearray(decrypted)) else: print('No action type was given') if __name__ == '__main__':", "= None encrypted = None with open(args.file, 'rb') as file: file_contents = file.read()", "decrypting_key) with open(output, \"wb\") as out: out.write(bytearray(decrypted)) else: print('No action type was given')", "open(output, \"wb\") as out: out.write(bytearray(encrypted)) elif args.decrypt is True: decrypted = decrypt(file_contents, decrypting_key)", "with open(args.file, 'rb') as file: file_contents = file.read() output = args.output if args.encrypt", "encrypt(file_contents, encrypting_key) with open(output, \"wb\") as out: out.write(bytearray(encrypted)) elif args.decrypt is True: decrypted", "= file.readline() encrypting_key = {idx: int(value) for idx, value in enumerate(key.split(' '))} decrypting_key", "= map(lambda char: key[char], message) return decrypted def main(): parser = argparse.ArgumentParser() parser.add_argument('--file')", "parser.add_argument('--file') parser.add_argument('--keyFile') parser.add_argument('--output') parser.add_argument('--encrypt', action='store_true') parser.add_argument('--decrypt', action='store_true') args = parser.parse_args() key = None", "import argparse def encrypt(message: str, key: dict[int, int]) -> str: encrypted = map(lambda", "if args.encrypt is True: encrypted = encrypt(file_contents, encrypting_key) with open(output, \"wb\") as out:", "for idx, value in enumerate(key.split(' '))} decrypting_key = {int(value): idx for idx, value", "is True: 
decrypted = decrypt(file_contents, decrypting_key) with open(output, \"wb\") as out: out.write(bytearray(decrypted)) else:", "decrypt(message: str, key: dict[int, int]) -> str: decrypted = map(lambda char: key[char], message)", "value in enumerate(key.split(' '))} file_contents = None encrypted = None with open(args.file, 'rb')", "= None with open(args.keyFile) as file: key = file.readline() encrypting_key = {idx: int(value)", "idx, value in enumerate(key.split(' '))} file_contents = None encrypted = None with open(args.file,", "in enumerate(key.split(' '))} file_contents = None encrypted = None with open(args.file, 'rb') as", "def decrypt(message: str, key: dict[int, int]) -> str: decrypted = map(lambda char: key[char],", "decrypt(file_contents, decrypting_key) with open(output, \"wb\") as out: out.write(bytearray(decrypted)) else: print('No action type was", "argparse def encrypt(message: str, key: dict[int, int]) -> str: encrypted = map(lambda char:", "as file: file_contents = file.read() output = args.output if args.encrypt is True: encrypted", "idx for idx, value in enumerate(key.split(' '))} file_contents = None encrypted = None", "char: key[char], message) return encrypted def decrypt(message: str, key: dict[int, int]) -> str:", "= file.read() output = args.output if args.encrypt is True: encrypted = encrypt(file_contents, encrypting_key)", "action='store_true') parser.add_argument('--decrypt', action='store_true') args = parser.parse_args() key = None with open(args.keyFile) as file:", "value in enumerate(key.split(' '))} decrypting_key = {int(value): idx for idx, value in enumerate(key.split('", "file_contents = file.read() output = args.output if args.encrypt is True: encrypted = encrypt(file_contents,", "char: key[char], message) return decrypted def main(): parser = argparse.ArgumentParser() parser.add_argument('--file') parser.add_argument('--keyFile') parser.add_argument('--output')", "args.encrypt is True: encrypted = encrypt(file_contents, 
encrypting_key) with open(output, \"wb\") as out: out.write(bytearray(encrypted))", "file: file_contents = file.read() output = args.output if args.encrypt is True: encrypted =", "argparse.ArgumentParser() parser.add_argument('--file') parser.add_argument('--keyFile') parser.add_argument('--output') parser.add_argument('--encrypt', action='store_true') parser.add_argument('--decrypt', action='store_true') args = parser.parse_args() key =", "message) return encrypted def decrypt(message: str, key: dict[int, int]) -> str: decrypted =", "= {int(value): idx for idx, value in enumerate(key.split(' '))} file_contents = None encrypted", "= encrypt(file_contents, encrypting_key) with open(output, \"wb\") as out: out.write(bytearray(encrypted)) elif args.decrypt is True:", "parser.add_argument('--output') parser.add_argument('--encrypt', action='store_true') parser.add_argument('--decrypt', action='store_true') args = parser.parse_args() key = None with open(args.keyFile)", "None with open(args.keyFile) as file: key = file.readline() encrypting_key = {idx: int(value) for", "{idx: int(value) for idx, value in enumerate(key.split(' '))} decrypting_key = {int(value): idx for", "True: decrypted = decrypt(file_contents, decrypting_key) with open(output, \"wb\") as out: out.write(bytearray(decrypted)) else: print('No", "str, key: dict[int, int]) -> str: encrypted = map(lambda char: key[char], message) return" ]
[ "nxsub, yopt, ytick, nysub, err.me) # end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\"", "PostScript File (color) \"xfig\" Fig file \"png\" PNG file \"jpeg\" JPEG file \"gif\"", "def PContour (plot, label, image, lev, cntfac, err): \"\"\" Contour plot of image", "(X) or horizontal (Y) lines I Invert the tick marks; ie draw them", "2 = dashed, 3=dot dash, 4 = dotted, 5 = dash dot dot", "====================================================================== * xtick = World coordinate interval between major tick marks on X", "cscale = new character size (integer multiple of the default size). * err", "be placed at at COORD. Other values between 0 and 1 give intermediate", "axes or labels; -1 draw box only; 0 draw box and label it", "n = len(y) # How many points? Obit.PlotXYOver (plot.me, symbol, n, x, y,", "(defaults to none), max 120 XLABEL (string) Label for horizontal axis (defaults to", "Plot y in world coordinates * angle = Orientation of the text in", "triangle 10 filled square 11 filled circle 12 filled star == ================= *", "be a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis,", "y2 = world y-coordinate of the new pen position. * err = ObitErr", "# end PGetList def PIsA (plot): \"\"\" Tells if the input is a", "Python Plot object * x = World x-coordinate of center * y =", "xmin, xmax, ymin, ymax, just, axis, err.me) # end PSetPlot def PLabel (plot,", "Obit Error/message stack * output = name and type of output device: ======", "be a lower case alpha. - Subscripts: Characters between a #d and #u", "index * y = Dependent variable * err = ObitErr error stack Optional", "If xtick=0.0 [def], the interval is chosen. 
NXSUB (long) the number of subintervals", "\"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK", "* ylabel = a label for the y-axis (centered to the left of", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub,", "err.me) # end PDrawCurve def PDrawCircle (plot, x, y,radius, err): \"\"\" Draw a", "Python Plot object * color = color index (1-15), symbolic names: BLACK (notreally),", "python interfaces: ======== ======================================= InfoList used to pass instructions to processing Member List", "Simple XY Plot Plot X vs Y using symbol. Plot should be finalized", "10 filled square 11 filled circle 12 filled star == =============== * x", "to the right of the viewport (Y). P extend (\"Project\") major tick marks", "err.me) # end PShow def PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis,", "a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def PSetColor", "vertical subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me)", "* y = Dependent variable * err = ObitErr error stack Optional parameters", "is chosen. * yopt = string of options for Y (vertical) axis of", "# end PRelText def PDrawLine (plot, x1, y1, x2, y2, err): \"\"\" Draw", "and may be in any order: = ====================================================================== A draw Axis (X axis", "symbol, x, y, e, err): \"\"\" Simple XY Plot with error bars Plot", "maximum pixel value [def min in image] PIX_MIN (float) minimum pixel value [def", "may be less than XMIN). * ymin = the world y-coordinate at the", "Left, Top, or Right margin of the viewport. 
If it includes 'LV' or", "# end PShow def PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis, err):", "WHITE = 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create", "but also draw the coordinate axes (X=0, Y=0); 2 same as axis=1, but", "with PShow * plot = plot * symbol = Symbol index to use", "y, symbol, err.me) # end PDrawSymbol def PDrawPoly (plot, x, y, fill, err):", "or nxsub=0, the number is chosen. [def 0] YTICK (float) like xtick for", "* radius = World coordinate radius * err = ObitErr error stack \"\"\"", "InfoList, Image import math class OPlot(Obit.OPlot): \"\"\" Python Obit interface to display server", "30 deg upwards 5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines crossed 7", "interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts with ObitTalk) \"ps\"", "plot = Python Plot object * lstyle = Style of line (integer multiple", "MUST be a Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) # end", "(plot, lstyle, err): \"\"\" Set line style * plot = Python Plot object", ">0 set current subpage to sub numbering starts at the top left at", "BLUEVIOLET = 10 CYAN = 11 TURQUOISE = 12 MAGENTA = 13 SALMON", "marks, etc: == =========================================== -2 draw no box, axes or labels; -1 draw", "<=0 advance page, if >0 set current subpage to sub numbering starts at", "math class OPlot(Obit.OPlot): \"\"\" Python Obit interface to display server This class is", "C draw top (X) or right (Y) edge of frame. G draw Grid", "(string) Label for the plot (defaults to none), max 120 XLABEL (string) Label", "coordinate interval between major tick marks on X axis. If xtick=0.0, the interval", "= name desired for object (labeling purposes) * err = Python Obit Error/message", "y, err.me) # end PXYOver def PXYErr (plot, symbol, x, y, e, err):", "using symbol and error bars. 
Plot should be finalized and displayed with PShow", "plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot (plot, xmin, xmax, ymin,", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetColor(plot.me,", "world x-coordinate of the new pen position. * y2 = world y-coordinate of", "nysub, err): \"\"\" Draw axes for a plot, label * plot = Python", "number of vertices * x = array of world x-coordinates of the vertices", "starts at the top left at 1 and increases along rows and columns", "between contours (def sqrt(2) * err = ObitErr error stack Optional parameters on", "of the viewport. If it includes 'LV' or 'RV', the string is written", "x, y, fill, err): \"\"\" Draw a Polygon, possibly filled * plot =", "If the Obit installation uses PLPlot for plotting the following can be used", "of image Gray Scales plot of image Plot should be finalized and displayed", "err): \"\"\" Set foreground color * plot = Python Plot object * color", "frame, use ObitPlotXYOver * plot = plot * symbol = Symbol index to", "than XMIN). * ymin = the world y-coordinate at the bottom left corner", "right-hand end of the string will be placed at at COORD. Other values", "# end newOPlot def PXYPlot (plot, symbol, x, y, err): \"\"\" Simple XY", "= ObitErr error stack Optional parameters on plot InfoList: ====== ======== ================================================== XMAX", "Set scaling for characters * plot = Python Plot object * cscale =", "# end PSetColor def PSetPage (plot, sub, err): \"\"\" Set or advance sub", "# Correspondence concerning this software should be addressed as follows: # Internet email:", "Label for vertical axis (defaults to none) YOPT (string) Options for vertical axis", "to be the same ====== ======== ================================================== \"\"\" ################################################################ # Checks if not", "prompted. 
Next, the plotting region must be specified using either PSetPlot, one of", "interfaces: ======== ======================================= InfoList used to pass instructions to processing Member List ========", "(plot, image, ra, dec, err, size=5.0): \"\"\" Mark positions on Contour plot of", "# end PMarkCross def PShow (plot, err): \"\"\" Display plot * plot =", "ylabel, title, err): \"\"\" Display plot * plot = Python Plot object *", "20 draw box and label Y-axis logarithmically; 30 draw box and label both", "\",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") n = len(ra) Obit.PlotMarkCross", "old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] =", "Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me)", "Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot (plot, xmin,", "\"\"\" Set or advance sub page Note: some functions such as PContour advance", "* symbol = Symbol index to use for plotting. Values in the range", "[def], the interval is chosen. NXSUB (int) the number of subintervals to divide", "the frame and adds labels, to only overplot data on the same frame,", "if None use index * y = Dependent variable * err = ObitErr", "same ====== ======== =============================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__)", "for the Y axis CSIZE (int) Scaling factor for characters(default = 1) SQRT", "yopt, ytick, nysub, err.me) # end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set", "on X axis. If xtick=0.0 [def], the interval is chosen. NXSUB (long) the", "\"\"\" Draw a line. 
* plot = Python Plot object * x1 =", "value [def max in image] ======= ======== ================================================= \"\"\" ################################################################ # Checks if", "the member InfoList returns InfoList * plot = Python Obit Plot object \"\"\"", "the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name): if not isinstance(self, OPlot):", "be equal, otherwise they will be scaled independently. * axis = controls the", "Plot object * x = world x-coordinate of the center of the symbol", "minor tick marks (Subticks). = ====================================================================== * xtick = World coordinate interval between", "plplot:lines 45 deg downwards 4 plplot:lines 30 deg upwards 5 plplot:lines 30 deg", "axis scaling to be the same ====== ======== =============================================== \"\"\" ################################################################ # Checks", "0 and 1 give intermediate placing, but they are not very useful. *", "== \"me\" : return Obit.OPlot_Get_me(self.this) # Functions to return members if name==\"List\": return", "== ================= 0 line only 1 dot 2 plus 3 \\* 4 open", "Polygon, possibly filled * plot = Python Plot object * n = number", "Display plot * plot = Python Plot object * err = ObitErr error", "len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me) # end PMarkCross def", "fjust, text, err): \"\"\" Write text on plot relative to port * plot", "in PLPlot installations If the Obit installation uses PLPlot for plotting the following", "object * x1 = world x-coordinate of the new pen position. * y1", "on Contour plot of image Place cross at positions. Plot should be finalized", "= the world y-coordinate at the bottom left corner of the viewport. *", "* x1 = world x-coordinate of the new pen position. 
* y1 =", "pixel value [def max in image] ======= ======== ================================================= \"\"\" ################################################################ # Checks", "MUST be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) #", "symbol and error bars. Plot should be finalized and displayed with PShow This", "outwards from the viewport in units of the character height. Use a negative", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel,", "sub = if <=0 advance page, if >0 set current subpage to sub", "concerning this software should be addressed as follows: # Internet email: <EMAIL>. #", "'LV' or 'RV', the string is written perpendicular to the frame rather than", "GNU General Public # License along with this program; if not, write to", "plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def PSetColor (plot, color, err):", "be the same ====== ======== ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot):", "MUST be a Python Obit plot\") # dx = math.cos(angle/57.296) dy = math.sin(angle/57.296)", "chosen. * nxsub = The number of subintervals to divide the major coordinate", "err.me) # end PDrawLine def PDrawCurve (plot, x, y, err): \"\"\" Draw a", "letters, and may be in any order: = ====================================================================== A draw Axis (X", "* n = number of vertices * x = array of world x-coordinates", "it. 
Notes: on text strings in PLPlot installations If the Obit installation uses", "stack \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST", "unconventional location above the viewport (X) or to the right of the viewport", "1 same as axis=0, but also draw the coordinate axes (X=0, Y=0); 2", "symbol, x, y, err): \"\"\" Simple XY Plot Plot X vs Y using", "COORD; if JUST = 0.5, the center of the string will be placed", "(float) minimum X value (defaults to actual value) YMAX (float) maximum Y value", "Ave, Cambridge, # MA 02139, USA. # # Correspondence concerning this software should", "y = Dependent variable * err = ObitErr error stack \"\"\" ################################################################ #", "Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def PText (plot,", "= 2 GREEN = 3 AQUAMARINE = 4 BLACK = 5 WHEAT =", "a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def PSetLineStyle", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me)", "object * lstyle = Style of line (integer multiple of the default size).", "position. * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "of options for X (horizontal) axis of plot. Options are single letters, and", "to write inside the viewport, a positive value to write outside. 
* coord", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # dx =", "if not, write to the Free # Software Foundation, Inc., 675 Massachusetts Ave,", "(Y) lines I Invert the tick marks; ie draw them outside the viewport", "Superscripts: Characters between a #u and #d will be written as superscripts \"\"\"", "# Checks if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\")", "variable * err = ObitErr error stack Optional parameters on plot InfoList ======", "bars Plot X vs Y using symbol and error bars. Plot should be", "Draw axes for a plot, label * plot = Python Plot object *", "of points * y = Array of world y-coordinates of points * err", "# end PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\" Set line width *", "Define plotting area * plot = Python Plot object * xmin = the", "If FJUST = 0.0, the left-hand end of the string will be placed", "plot = plot * label = Label for plot * image = ObitImage", "default size). * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "otherwise they will be scaled independently. 
* axis = controls the plotting of", "area * plot = Python Plot object * xmin = the world x-coordinate", "(centered to the left of the viewport, drawn vertically) * title = a", "XY Plot with error bars Plot X vs Y using symbol and error", "Obit Plot to test \"\"\" ################################################################ # Checks if not isinstance(plot, OPlot): return", "= array of world y-coordinates of the vertices * fill = Fill pattern,", "the same frame, use ObitPlotXYOver * plot = plot * symbol = Symbol", "MUST be a Python Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image", "stack \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST be a", "License as # published by the Free Software Foundation; either version 2 of", "modify it under the terms of the GNU General Public License as #", "MUST be a Python Obit plot\") # n = len(x) Obit.PlotDrawCurve (plot.me, n,", "Obit.PlotDrawCurve (plot.me, n, x, y, err.me) # end PDrawCurve def PDrawCircle (plot, x,", "a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me)", "to it. * disp = The displacement of the character string from the", "angle = Orientation of the text in deg, 0=horizontal * just = Controls", "right corner of the viewport (note XMAX may be less than XMIN). *", "Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end PLabel def PDrawAxes(plot, xopt, xtick, nxsub,", "of line (integer multiple of the default size). 1 = continious, 2 =", "line (integer multiple of the default size). 
* err = ObitErr error stack", "len(x), x, y, fill, scale, err.me) # end PDrawPoly def PGetList (plot): \"\"\"", "Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return out # end newOPlot def PXYPlot", "of image Plot should be finalized and displayed with PShow * plot =", "err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and initialize an ObitPlot *", "8 plplot:vertical lines == =============== * err = ObitErr error stack \"\"\" ################################################################", "end PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err): \"\"\" Draw", "just, axis, err): \"\"\" Define plotting area * plot = Python Plot object", "MUST be a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just,", "minimum X value (defaults to actual value) YMAX (float) maximum Y value (defaults", "software; you can redistribute it and/or # modify it under the terms of", "line (integer multiple of the default size). 1 = continious, 2 = dashed,", "image, lev, cntfac, err): \"\"\" Contour plot of image Contours at lev times", "instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0 RED = 1", "lines at major increments of the coordinates; 10 draw box and label X-axis", "fill, err): \"\"\" Draw a Polygon, possibly filled * plot = Python Plot", "center * y = World y-coordinate of center * radius = World coordinate", "# Software Foundation, Inc., 675 Massachusetts Ave, Cambridge, # MA 02139, USA. #", "major coordinate interval into. If xtick=0.0 or nxsub=0, the number is chosen. *", "the output and background color. If no output is specified this information will", "= Symbol index to use for plotting values in the range [1,12] are", "(string) Label for vertical axis (defaults to none) YOPT (string) Options for vertical", "of the symbol * symbol = Symbol index to use for plotting. Values", "in text strings: - Greek letters, A #g immediately prior to a Latin", "major coordinate interval into. 
If xtick=0.0 or nxsub=0, the number is chosen. [def", "end PetLineStyle def PSetColor (plot, color, err): \"\"\" Set foreground color * plot", "on the same frame, use ObitPlotXYOver * plot = plot * symbol =", "circle. * plot = Python Plot object * x = World x-coordinate of", "the top left at 1 and increases along rows and columns * err", "======== ======================================= InfoList used to pass instructions to processing Member List ======== =======================================", "routine draws the frame and adds labels, to only overplot data on the", "\" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0 RED = 1 YELLOW", "Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def PSetColor (plot, color, err): \"\"\" Set", "subpage to sub numbering starts at the top left at 1 and increases", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title,", "= plot * symbol = Symbol index to use for plotting values in", "true Or false * Plot = Python Obit Plot to test \"\"\" ################################################################", "print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") if not Image.PIsA(image):", "(deg) * dec = list of Declinations (deg) * err = ObitErr error", "6 GRAY = 7 BROWN = 8 BLUE = 9 BLUEVIOLET = 10", "__repr__(self): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \"", "the terms of the GNU General Public License as # published by the", "======= ================================================== XTICK (float) world coordinate interval between major tick marks on X", "(ignored if option I is specified) T draw major Tick marks at the", "0.5, the center of the string will be placed at (x,y); if JUST", "on plot InfoList: ====== ======== ================================================== XMAX (float) maximum X value (defaults to", "Use a negative value to write inside the 
viewport, a positive value to", "__future__ import print_function import Obit, _Obit, InfoList, Image import math class OPlot(Obit.OPlot): \"\"\"", "Python Plot object * x = Array of world x-coordinates of points *", "# # This program is free software; you can redistribute it and/or #", "purposes) * err = Python Obit Error/message stack * output = name and", "Draw a circle. * plot = Python Plot object * x = World", "* y = array of world y-coordinates of the vertices * fill =", "positions on Contour plot of image Place cross at positions. Plot should be", "prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts with ObitTalk) \"ps\" PostScript", "PSetColor (plot, color, err): \"\"\" Set foreground color * plot = Python Plot", "for details. XTICK (float) world coordinate interval between major tick marks on X", "0 no fill 1 hatched 2 crosshatched 3 plplot:lines 45 deg downwards 4", "for plotting the following can be used in text strings: - Greek letters,", "Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def PSetPage (plot,", "the conventional location below the viewport (X) or to the left of the", "y, e, err.me) # end PXYErr def PContour (plot, label, image, lev, cntfac,", "(plot.me, cscale, err.me) # end PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\" Set", "10 CYAN = 11 TURQUOISE = 12 MAGENTA = 13 SALMON = 14", "box (ignored if option I is specified) T draw major Tick marks at", "# Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def PText (plot, x, y, angle,", "should have received a copy of the GNU General Public # License along", "triangle 10 filled square 11 filled circle 12 filled star == =============== *", "* err = ObitErr error stack * size = size of cross in", "# end PDrawPoly def PGetList (plot): \"\"\" Return the member InfoList returns InfoList", "a plot object using newOPlot which allows specifying the output and background color.", "sqrt (pixel_value) INVERT (bool) If present and true 
ionvert colors COLOR (string) Color", "* image = ObitImage to plot * ra = list of RAs (deg)", "x = World x-coordinate of center * y = World y-coordinate of center", "Dude\"+str(self.__class__) return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK =", "NXSUB (long) the number of subintervals to divide the major coordinate interval into.", "label X-axis logarithmically; 20 draw box and label Y-axis logarithmically; 30 draw box", "of the XY plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour. Then", "OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground", "Obit Plot\") n = len(y) # How many points? Obit.PlotXYOver (plot.me, symbol, n,", "plot * plot = Python Plot object * x = Plot x in", "Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") n =", "major increments of the coordinates; 10 draw box and label X-axis logarithmically; 20", "be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub,", "disp = The displacement of the character string from the specified edge of", "out # end PGetList def PIsA (plot): \"\"\" Tells if the input is", "PIX_MAX (float) maximum pixel value [def min in image] PIX_MIN (float) minimum pixel", "Inc. Washington DC, USA. # # This program is free software; you can", "it. 
* disp = The displacement of the character string from the specified", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # out = InfoList.InfoList()", "Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me) # end PDrawPoly def PGetList (plot):", "# You should have received a copy of the GNU General Public #", "ra, dec, size, err.me) # end PMarkCross def PShow (plot, err): \"\"\" Display", "the left-hand end of the string will be placed at COORD; if JUST", "y-coordinate of the center of the symbol * symbol = Symbol index to", "* ny = Number of vertical subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me,", "this program; if not, write to the Free # Software Foundation, Inc., 675", "Correspondence concerning this software should be addressed as follows: # Internet email: <EMAIL>.", "* lev = basic contour level (def 0.1 peak) * cntfac = factor", "below the viewport). * ylabel = a label for the y-axis (centered to", "= World y-coordinate of center * radius = World coordinate radius * err", "a positive value to write outside. * coord = The location of the", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "the viewport (Y). P extend (\"Project\") major tick marks outside the box (ignored", "# Internet email: <EMAIL>. # Postal address: <NAME> # National Radio Astronomy Observatory", "range [1,12] are usable. If negative, use abs value and connect points. 
==", "plot = Python Plot object * cscale = new character size (integer multiple", "CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err = ObitErr error stack \"\"\" ################################################################", "= if JUST=1, the scales of the x and y axes (in world", "5 = dash dot dot dot * err = ObitErr error stack \"\"\"", "################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return out #", "scaling for characters * plot = Python Plot object * cscale = new", "* plot = Python Plot object * xmin = the world x-coordinate at", "star == =============== * err = ObitErr error stack \"\"\" ################################################################ # Checks", "chosen. [def 0] YTICK (float) like xtick for the Y axis. NYSUB (int)", "plot = Python Plot object * x = Array of world x-coordinates of", "published by the Free Software Foundation; either version 2 of # the License,", "will be prompted. Next, the plotting region must be specified using either PSetPlot,", "copy of the GNU General Public # License along with this program; if", "not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me)", "name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\"", "on plot InfoList ====== ======== =============================================== XMAX (float) maximum X value (defaults to", "InfoList ====== ===== ============================================ CSIZE (int) Scaling factor for characters(default = 1) LWIDTH", "at COORD. Other values between 0 and 1 give intermediate placing, but they", "# Functions to return members if name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self):", "X axis. If xtick=0.0 [def], the interval is chosen. 
NXSUB (long) the number", "this information will be prompted. Next, the plotting region must be specified using", "Member List ======== ======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def", "(plot, symbol, x, y, err): \"\"\" Simple XY Plot Plot X vs Y", "x, y, dx, dy, just, text, err.me) # end PText def PRelText (plot,", "1 hatched 2 crosshatched 3 plplot:lines 45 deg downwards 4 plplot:lines 30 deg", "of the string will be placed at COORD; if JUST = 1.0, the", "have received a copy of the GNU General Public # License along with", "=============== 0 line only 1 dot 2 plus 3 \\* 4 open circle", "Road # Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow class to ObitPlot", "================================================= XTICK (float) world coordinate interval between major tick marks on X axis.", "y2, err): \"\"\" Draw a line. * plot = Python Plot object *", "coord, fjust, text, err.me) # end PRelText def PDrawLine (plot, x1, y1, x2,", "= Python Plot object * xlabel = a label for the x-axis (centered", "Set line style * plot = Python Plot object * lstyle = Style", "draw box and label X-axis logarithmically; 20 draw box and label Y-axis logarithmically;", "the right-hand end of the string will be placed at at (x,y). Other", "Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me) #", "Line width (default = 1) ====== ======= ================================================== \"\"\" ################################################################ # Checks if", "written as subscripts - Superscripts: Characters between a #u and #d will be", "multiple of the default size). 1 = continious, 2 = dashed, 3=dot dash,", "PText (plot, x, y, angle, just, text, err): \"\"\" Write text on plot", "xtick=0.0, the interval is chosen. 
* nxsub = The number of subintervals to", "7 BROWN = 8 BLUE = 9 BLUEVIOLET = 10 CYAN = 11", "axis (defaults to none) XOPT (string) Options for horizontal axis (default \"BCNTS\") See", "actual value) YMIN (float) minimum Y value (defaults to actual value) TITLE (string)", "title = a label for the entire plot (centered above the viewport) *", "a line. * plot = Python Plot object * x1 = world x-coordinate", "be a Python Obit plot\") # n = len(x) Obit.PlotDrawCurve (plot.me, n, x,", "the character string along the specified edge of the viewport, as a fraction", "under the terms of the GNU General Public License as # published by", "along with this program; if not, write to the Free # Software Foundation,", "instructions to processing Member List ======== ======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__()", "RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA,", "Options for horizontal axis (default \"BCNTS\") See PDrawAxes for details. YLABEL (string) Label", "err.me) # end PContour def PGrayScale (plot, label, image, err): \"\"\" Gray Scale", "y2, err.me) # end PDrawLine def PDrawCurve (plot, x, y, err): \"\"\" Draw", "name and type of output device: ====== ========================== \"None\" interactive prompt \"xwin\" X-Window", "#----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated Universities, Inc. Washington DC, USA. #", "11 filled circle 12 filled star == =============== * err = ObitErr error", "parameters on plot InfoList ====== ===== ============================================ CSIZE (int) Scaling factor for characters(default", "if JUST = 1.0, the right-hand end of the string will be placed", "pen position. * x2 = world x-coordinate of the new pen position. 
*", "min in image] PIX_MIN (float) minimum pixel value [def max in image] =======", "BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err = ObitErr error", "the edge. * just = Controls justification of the string parallel to the", "Y value (defaults to actual value) TITLE (string) Label for the plot (defaults", "new character size (integer multiple of the default size). * err = ObitErr", "viewport, measured outwards from the viewport in units of the character height. Use", "Symbol * plot = Python Plot object * x = world x-coordinate of", "y in world coordinates * angle = Orientation of the text in deg,", "(int) Line width (default = 1) JUST (int) If !=0 then force X", "location above the viewport (X) or to the right of the viewport (Y).", "end PDrawCircle def PDrawSymbol (plot, x, y, symbol, err): \"\"\" Draw a Symbol", "plot InfoList: ====== ======== ================================================== XMAX (float) maximum X value (defaults to actual", "positions. Plot should be finalized and displayed with PShow * plot = plot", "return self.__dict__[name] = value def __getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__)", "* plot = plot * label = Label for plot * image =", "): \"\"\" Create and initialize an ObitPlot * name = name desired for", "(plot, color, err): \"\"\" Set foreground color * plot = Python Plot object", "0 line only 1 dot 2 plus 3 \\* 4 open circle 5", "# end PDrawCurve def PDrawCircle (plot, x, y,radius, err): \"\"\" Draw a circle.", "and connect points == ================= 0 line only 1 dot 2 plus 3", "= ObitErr error stack * size = size of cross in pixels Optional", "fill 1 hatched 2 crosshatched 3 plplot:lines 45 deg downwards 4 plplot:lines 30", "Numeric labels in the conventional location below the viewport (X) or to the", "to actual value) YMIN (float) minimum Y value (defaults to actual value) TITLE", "the specified edge of the viewport. 
If * just = 0.0, the left-hand", "factor for symbols(default = 1) LWIDTH (int) Line width (default = 1) JUST", "* xmax = the world x-coordinate at the top right corner of the", "points? Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me) # end PXYErr def", "PShow This routine draws the frame and adds labels, to only overplot data", "the string, but leading spaces are significant. * err = ObitErr error stack", "TypeError(\"plot MUST be a Python Obit Plot\") n = len(y) # How many", "plot\") # out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out # end PGetList", "stack * output = name and type of output device: ====== ========================== \"None\"", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotRelText(plot.me, side,", "(default = 1) JUST (int) If !=0 then force X and Y axis", "when justifying the string, but leading spaces are significant. * err = ObitErr", "spaces are significant. * err = ObitErr error stack \"\"\" ################################################################ # Checks", "number is chosen. [def 0] YTICK (float) like xtick for the Y axis.", "character string from the specified edge of the viewport, measured outwards from the", "err): \"\"\" Gray Scale plot of image Gray Scales plot of image Plot", "class is for creating and using the interface to a plot Image Members", "Grid of vertical (X) or horizontal (Y) lines I Invert the tick marks;", "a Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize def", "the string will be placed at (x,y); if JUST = 1.0, the right-hand", "\"\"\" Python Obit interface to display server This class is for creating and", "(int) the number of subintervals to divide the major coordinate interval into. 
If", "lstyle, err.me) # end PetLineStyle def PSetColor (plot, color, err): \"\"\" Set foreground", "14 WHITE = 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\"", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me)", "for more details. # # You should have received a copy of the", "conventional location below the viewport (X) or to the left of the viewport", "plot InfoList: ======= ======== ================================================= XTICK (float) world coordinate interval between major tick", "write to the Free # Software Foundation, Inc., 675 Massachusetts Ave, Cambridge, #", "e = if nonNone, error in y * err = ObitErr error stack", "== =============== * x = Independent variable, if None use index * y", "end PShow def PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis, err): \"\"\"", "it with coordinates; 1 same as axis=0, but also draw the coordinate axes", "* nysub = like nxsub for the Y axis * err = ObitErr", "of the viewport, as a fraction of the length of the edge. *", "======== ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot", "logarithmically. == =========================================== * err = ObitErr error stack \"\"\" ################################################################ # Checks", "Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me) #", "frame and adds labels, to only overplot data on the same frame, use", "Obit plot\") # dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx,", "specified this information will be prompted. Next, the plotting region must be specified", "draw grid lines at major increments of the coordinates; 10 draw box and", "new pen position. 
* err = ObitErr error stack \"\"\" ################################################################ # Checks", "if <=0 advance page, if >0 set current subpage to sub numbering starts", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "Optional parameters on plot InfoList: ======= ======== ================================================= XTICK (float) world coordinate interval", "nxsub for the Y axis CSIZE (int) Scaling factor for characters(default = 1)", "Optional parameters on plot InfoList ====== ===== ============================================ CSIZE (int) Scaling factor for", "vertices * fill = Fill pattern, plot package dependent * values in the", "(X) or right (Y) edge of frame. G draw Grid of vertical (X)", "Python Obit Plot object \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot", "initialize an ObitPlot * name = name desired for object (labeling purposes) *", "be plotted. Trailing spaces are ignored when justifying the string, but leading spaces", "\"\"\" Create and initialize an ObitPlot * name = name desired for object", "lines I Invert the tick marks; ie draw them outside the viewport instead", "frame. C draw top (X) or right (Y) edge of frame. G draw", "* xlabel = a label for the x-axis (centered below the viewport). *", "SALMON = 14 WHITE = 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1", "err.me) # end PSetPlot def PLabel (plot, xlabel, ylabel, title, err): \"\"\" Display", "======================================= InfoList used to pass instructions to processing Member List ======== ======================================= \"\"\"", "the bottom left corner of the viewport. 
* ymax = the world y-coordinate", "Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def PSetPage (plot, sub,", "the Y axis * err = ObitErr error stack \"\"\" ################################################################ # Checks", "spacing between contours (def sqrt(2) * err = ObitErr error stack Optional parameters", "======== ================================================= \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot", "for the entire plot (centered above the viewport) * err = ObitErr error", "etc: == =========================================== -2 draw no box, axes or labels; -1 draw box", "* plot = plot * symbol = Symbol index to use for plotting", "the string parallel to the specified edge of the viewport. If * just", "(bool) If present and true ionvert colors COLOR (string) Color scheme 'GRAY', 'CONTOUR',", "Universities, Inc. Washington DC, USA. # # This program is free software; you", "File (monochrome) \"psc\" PostScript File (color) \"xfig\" Fig file \"png\" PNG file \"jpeg\"", "unBLACK = 0 RED = 1 YELLOW = 2 GREEN = 3 AQUAMARINE", "bgcolor = background color index (1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE,", "in image] ======= ======== ================================================= \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually", "err): \"\"\" Draw axes for a plot, label * plot = Python Plot", "of center * radius = World coordinate radius * err = ObitErr error", "PDrawPoly def PGetList (plot): \"\"\" Return the member InfoList returns InfoList * plot", "outside the viewport instead of inside. 
L label axis Logarithmically N write Numeric", "along rows and columns * err = ObitErr error stack \"\"\" ################################################################ #", "= 11 TURQUOISE = 12 MAGENTA = 13 SALMON = 14 WHITE =", "displacement of the character string from the specified edge of the viewport, measured", "Line width (default = 1) ====== ===== ============================================ \"\"\" ################################################################ # Checks if", "TRC on info member honored * err = ObitErr error stack Optional parameters", "When all has been added to the plot, use PShow to finalize it.", "image, err): \"\"\" Gray Scale plot of image Gray Scales plot of image", "BLC, TRC on info member honored * err = ObitErr error stack Optional", "or PContour. Then additional lines, curves, text or symbols may be added. When", "viewport). * ylabel = a label for the y-axis (centered to the left", "n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me) # end", "plot InfoList: ====== ======= ================================================== XTICK (float) world coordinate interval between major tick", "* y1 = world y-coordinate of the new pen position. * x2 =", "or Right margin of the viewport. If it includes 'LV' or 'RV', the", "* plot = Python Plot object * side = Must include one of", "the world y-coordinate at the top right corner of the viewport (note YMAX", "radius * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "width (default = 1) JUST (int) If !=0 then force X and Y", "of the string parallel to the specified edge of the viewport. If *", "style * plot = Python Plot object * lstyle = Style of line", "returns true Or false * Plot = Python Obit Plot to test \"\"\"", "variable, if None use index * y = Dependent variable * err =", "(Y) edge of frame. 
C draw top (X) or right (Y) edge of", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me)", "= number of vertices * x = array of world x-coordinates of the", "controls the plotting of axes, tick marks, etc: == =========================================== -2 draw no", "MAGENTA, SALMON, WHITE * err = ObitErr error stack \"\"\" ################################################################ # Checks", "if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\" : # Out with", "use index * y = Dependent variable * e = if nonNone, error", "increments of the coordinates; 10 draw box and label X-axis logarithmically; 20 draw", "This routine draws the frame and adds labels, to only overplot data on", "Python Obit plot\") # n = len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me)", "software should be addressed as follows: # Internet email: <EMAIL>. # Postal address:", "ObitImage to plot, BLC, TRC on info member honored * lev = basic", "PDrawLine (plot, x1, y1, x2, y2, err): \"\"\" Draw a line. * plot", "# Associated Universities, Inc. Washington DC, USA. # # This program is free", "= Python Plot object * sub = if <=0 advance page, if >0", "minimum pixel value [def max in image] ======= ======== ================================================= \"\"\" ################################################################ #", "# end PContour def PGrayScale (plot, label, image, err): \"\"\" Gray Scale plot", "as subscripts - Superscripts: Characters between a #u and #d will be written", "to actual value) YMAX (float) maximum Y value (defaults to actual value) YMIN", "increases along rows and columns * err = ObitErr error stack \"\"\" ################################################################", "of the viewport, measured outwards from the viewport in units of the character", "image, ra, dec, err, size=5.0): \"\"\" Mark positions on Contour plot of image", "useful. 
* text = The text string to be plotted. Trailing spaces are", "center of the string will be placed at COORD; if JUST = 1.0,", "= 1 YELLOW = 2 GREEN = 3 AQUAMARINE = 4 BLACK =", "====== ===== ============================================ CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int)", "object * x = Plot x in world coordinates * y = Plot", "the default size). 1 = continious, 2 = dashed, 3=dot dash, 4 =", "to only overplot data on the same frame, use ObitPlotXYOver * plot =", "This program is distributed in the hope that it will be useful, #", "image = ObitImage to plot, BLC, TRC on info member honored * err", "end PRelText def PDrawLine (plot, x1, y1, x2, y2, err): \"\"\" Draw a", "Y using symbol. Plot should be finalized and displayed with PShow This routine", "x, y, symbol, err.me) # end PDrawSymbol def PDrawPoly (plot, x, y, fill,", "\"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return out", "frame rather than parallel to it. * disp = The displacement of the", "a Python ObitPlot returns true Or false * Plot = Python Obit Plot", "YMAX may be less than YMIN) * just = if JUST=1, the scales", "ie draw them outside the viewport instead of inside. L label axis Logarithmically", "lwidth = Width of line (integer multiple of the default size). * err", "y-axis (centered to the left of the viewport, drawn vertically) * title =", "open square 7 open triangle 8 open star 9 filled triangle 10 filled", "\"\"\" Simple XY Plot with error bars Plot X vs Y using symbol", "xtick=0.0 or nxsub=0, the number is chosen. 
[def 0] YTICK (float) like xtick", "value) YMAX (float) maximum Y value (defaults to actual value) YMIN (float) minimum", "ra = list of RAs (deg) * dec = list of Declinations (deg)", "Members with python interfaces: ======== ======================================= InfoList used to pass instructions to processing", "maximum Y value (defaults to actual value) YMIN (float) minimum Y value (defaults", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) #", "Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow class to ObitPlot class from", "PSetPlot, one of the XY plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or", "err): \"\"\" Draw a curve. * plot = Python Plot object * x", "err = Python Obit Error/message stack * output = name and type of", "CYAN = 11 TURQUOISE = 12 MAGENTA = 13 SALMON = 14 WHITE", "added to the plot, use PShow to finalize it. Notes: on text strings", "symbol. Plot should be finalized and displayed with PShow This routine draws the", "(X) or left (Y) edge of frame. C draw top (X) or right", "sub, err.me) # end PSetPage def PText (plot, x, y, angle, just, text,", "(plot): \"\"\" Tells if the input is a Python ObitPlot returns true Or", "filled square 11 filled circle 12 filled star == =============== * x =", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # dx = math.cos(angle/57.296)", "\"ps\" PostScript File (monochrome) \"psc\" PostScript File (color) \"xfig\" Fig file \"png\" PNG", "(labeling purposes) * err = Python Obit Error/message stack * output = name", "len(y) # How many points? Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me) #", "TypeError(\"plot MUST be a Python Obit plot\") # dx = math.cos(angle/57.296) dy =", "a label for the y-axis (centered to the left of the viewport, drawn", "error bars Plot X vs Y using symbol and error bars. 
Plot should", "* plot = Python Plot object * n = number of vertices *", "Plotting class Create a plot object using newOPlot which allows specifying the output", "positive value to write outside. * coord = The location of the character", "World y-coordinate of center * radius = World coordinate radius * err =", "the Bottom, Left, Top, or Right margin of the viewport. If it includes", "tick marks on X axis. If xtick=0.0, the interval is chosen. * nxsub", "array of world y-coordinates of the vertices * fill = Fill pattern, plot", "Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end PDrawSymbol def PDrawPoly", "axis. If xtick=0.0, the interval is chosen. * nxsub = The number of", "of the character string along the specified edge of the viewport, as a", "Dependent variable * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "the range [0,8] are usable == =============== 0 no fill 1 hatched 2", "JPEG file \"gif\" GIF file \"null\" Null device ====== ========================== * bgcolor =", "ytick, nysub, err.me) # end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set scaling", "a Python Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be", "\\* 4 open circle 5 x 6 open square 7 open triangle 8", "draw them outside the viewport instead of inside. L label axis Logarithmically N", "plot = Python Plot object * xlabel = a label for the x-axis", "Create and initialize an ObitPlot * name = name desired for object (labeling", "for details. YLABEL (string) Label for vertical axis (defaults to none) YOPT (string)", "or advance sub page Note: some functions such as PContour advance the page", "(plot.me, label, image.me, err.me) # end PGrayScale def PMarkCross (plot, image, ra, dec,", "is distributed in the hope that it will be useful, # but WITHOUT", "XY plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour. 
Then additional lines,", "= ObitImage to plot, BLC, TRC on info member honored * err =", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public", "symbol, n, x, y, err.me) # end PXYPlot def PXYOver (plot, symbol, x,", "nxsub=0, the number is chosen. [def 0] YTICK (float) like xtick for the", "in the hope that it will be useful, # but WITHOUT ANY WARRANTY;", "======== ================================================== XMAX (float) maximum X value (defaults to actual value) XMIN (float)", "plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end PLabel def PDrawAxes(plot, xopt,", "of vertical subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny,", "y = Plot y in world coordinates * angle = Orientation of the", "powers of cntfac Plot should be finalized and displayed with PShow * plot", "square 11 filled circle 12 filled star == =============== * err = ObitErr", "FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for", "a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end PLabel", "10 draw box and label X-axis logarithmically; 20 draw box and label Y-axis", "draw no box, axes or labels; -1 draw box only; 0 draw box", "filled circle 12 filled star == ================= * x = Independent variable, if", "lev times powers of cntfac Plot should be finalized and displayed with PShow", "* coord = The location of the character string along the specified edge", "device ====== ========================== * bgcolor = background color index (1-15), symbolic names: BLACK,", "axis CSIZE (int) Scaling factor for characters(default = 1) SQRT (bool) If present", "size of cross in pixels Optional parameters on plot InfoList ====== ===== ============================================", "nysub, err.me) # end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set scaling for", "Plot object * xopt = string of options for X (horizontal) axis of", "JUST = 0.5, the center of the string will be placed at (x,y);", "print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotGrayScale (plot.me, label,", "label, image.me, err.me) # end PGrayScale def PMarkCross (plot, image, ra, dec, err,", "name == \"me\" : # Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) #", "X value (defaults to actual value) XMIN (float) minimum X value (defaults to", "left corner of the viewport. 
* xmax = the world x-coordinate at the", "columns * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "====== ======= ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise", "for a plot, label * plot = Python Plot object * xopt =", "plot of image Plot should be finalized and displayed with PShow * plot", "end PXYOver def PXYErr (plot, symbol, x, y, e, err): \"\"\" Simple XY", "of Declinations (deg) * err = ObitErr error stack * size = size", "to return members if name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self): if not", "width * plot = Python Plot object * lwidth = Width of line", "TRC on info member honored * lev = basic contour level (def 0.1", "Plot should be finalized and displayed with PShow This routine draws the frame", "cntfac, err): \"\"\" Contour plot of image Contours at lev times powers of", "follows: # Internet email: <EMAIL>. # Postal address: <NAME> # National Radio Astronomy", "Massachusetts Ave, Cambridge, # MA 02139, USA. # # Correspondence concerning this software", "MUST be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle", "Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot (plot, xmin, xmax,", "JUST (int) If !=0 then force X and Y axis scaling to be", "star == ================= * x = Independent variable, if None use index *", "<EMAIL>. 
# Postal address: <NAME> # National Radio Astronomy Observatory # 520 Edgemont", "plot * label = Label for plot * image = ObitImage to plot,", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax,", "# end PDrawLine def PDrawCurve (plot, x, y, err): \"\"\" Draw a curve.", "of the center of the symbol * y = world y-coordinate of the", "= Controls justification of the string parallel to the specified edge of the", "= if <=0 advance page, if >0 set current subpage to sub numbering", "(deg) * err = ObitErr error stack * size = size of cross", "MUST be a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) #", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me,", "USA. # # Correspondence concerning this software should be addressed as follows: #", "viewport (X) or to the left of the viewport (Y). M write numeric", "axes for a plot, label * plot = Python Plot object * xopt", "position. * y2 = world y-coordinate of the new pen position. * err", "will be placed at (x,y); if JUST = 0.5, the center of the", "x, y, err.me) # end PDrawCurve def PDrawCircle (plot, x, y,radius, err): \"\"\"", "\",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") if not Image.PIsA(image): print(\"Actually", "length of the edge. * just = Controls justification of the string parallel", "= Python Plot object * x = world x-coordinate of the center of", "value (defaults to actual value) TITLE (string) Label for the plot (defaults to", "symbol = Symbol index to use for plotting values in the range [1,12]", "(notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE,", "XY Plot Plot X vs Y using symbol. 
Plot should be finalized and", "================================================== XTICK (float) world coordinate interval between major tick marks on X axis.", "====== ========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts", "end PGetList def PIsA (plot): \"\"\" Tells if the input is a Python", "Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel value [def", "the viewport. * xmax = the world x-coordinate at the top right corner", "string of options for X (horizontal) axis of plot. Options are single letters,", "interval between major tick marks on X axis. If xtick=0.0, the interval is", "plot = plot * symbol = Symbol index to use for plotting. Values", "output = name and type of output device: ====== ========================== \"None\" interactive prompt", "placed at COORD; if JUST = 1.0, the right-hand end of the string", "I Invert the tick marks; ie draw them outside the viewport instead of", "raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev,", "Python Plot object * sub = if <=0 advance page, if >0 set", "title, err.me) # end PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub,", "GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE", "dot dot dot * err = ObitErr error stack \"\"\" ################################################################ # Checks", "def PDrawSymbol (plot, x, y, symbol, err): \"\"\" Draw a Symbol * plot", "(int) Line width (default = 1) ====== ======= ================================================== \"\"\" ################################################################ # Checks", "must be specified using either PSetPlot, one of the XY plotting routines (PXYPlot,", "pattern, plot package dependent * values in the range [0,8] are usable ==", "Plot object * x = World x-coordinate of center * y = World", "of the viewport (Y). 
P extend (\"Project\") major tick marks outside the box", "a negative value to write inside the viewport, a positive value to write", "License for more details. # # You should have received a copy of", "Plot = Python Obit Plot to test \"\"\" ################################################################ # Checks if not", "just, text, err): \"\"\" Write text on plot * plot = Python Plot", "plotted. Trailing spaces are ignored when justifying the string, but leading spaces are", "if JUST = 0.5, the center of the string will be placed at", "xopt, xtick, nxsub, yopt, ytick, nysub, err): \"\"\" Draw axes for a plot,", "string will be placed at COORD; if JUST = 1.0, the right-hand end", "PMarkCross def PShow (plot, err): \"\"\" Display plot * plot = Python Plot", "in the range [1,12] are usable. If negative, use abs value and connect", "the viewport (X) or to the left of the viewport (Y). M write", "# 520 Edgemont Road # Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow", "to the plot, use PShow to finalize it. Notes: on text strings in", "- Superscripts: Characters between a #u and #d will be written as superscripts", "Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me) # end PContour def PGrayScale (plot,", "= Orientation of the text in deg, 0=horizontal * just = Controls justification", "and connect points. == =============== 0 line only 1 dot 2 plus 3", "PRelText (plot, side, disp, coord, fjust, text, err): \"\"\" Write text on plot", "(int) Scaling factor for characters(default = 1) LWIDTH (int) Line width (default =", "Free # Software Foundation, Inc., 675 Massachusetts Ave, Cambridge, # MA 02139, USA.", "only overplot data on the same frame, use ObitPlotXYOver * plot = plot", "top left at 1 and increases along rows and columns * err =", "object using newOPlot which allows specifying the output and background color. 
If no", "* plot = Python Plot object * x = Array of world x-coordinates", "symbol * symbol = Symbol index to use for plotting. Values in the", "plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def PSetLineStyle (plot, lstyle, err):", "to finalize it. Notes: on text strings in PLPlot installations If the Obit", "PXYPlot (plot, symbol, x, y, err): \"\"\" Simple XY Plot Plot X vs", "in units of the character height. Use a negative value to write inside", "should be finalized and displayed with PShow * plot = plot * label", "characters(default = 1) LWIDTH (int) Line width (default = 1) ====== ===== ============================================", "nxsub = The number of subintervals to divide the major coordinate interval into.", "line X=0). B draw bottom (X) or left (Y) edge of frame. C", "= Python Plot object * x1 = world x-coordinate of the new pen", "default size). 1 = continious, 2 = dashed, 3=dot dash, 4 = dotted,", "def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and initialize an", "names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN,", "Foundation, Inc., 675 Massachusetts Ave, Cambridge, # MA 02139, USA. # # Correspondence", "symbol. Plot should be finalized and displayed with PShow * plot = plot", "xtick, nxsub, yopt, ytick, nysub, err): \"\"\" Draw axes for a plot, label", "If xtick=0.0 or nxsub=0, the number is chosen. * yopt = string of", "vertical axis (default \"BCNTS\") See PDrawAxes for details. 
XTICK (float) world coordinate interval", "is free software; you can redistribute it and/or # modify it under the", "2 GREEN = 3 AQUAMARINE = 4 BLACK = 5 WHEAT = 6", "Plot with error bars Plot X vs Y using symbol and error bars.", "x = Independent variable, if None use index * y = Dependent variable", "xlabel, ylabel, title, err): \"\"\" Display plot * plot = Python Plot object", "= dotted, 5 = dash dot dot dot * err = ObitErr error", "the x and y axes (in world coordinates per inch) will be equal,", "(note YMAX may be less than YMIN) * just = if JUST=1, the", "\"\"\" Define plotting area * plot = Python Plot object * xmin =", "bars. Plot should be finalized and displayed with PShow This routine draws the", "Optional parameters on plot InfoList ====== ======== =============================================== XMAX (float) maximum X value", "and background color. If no output is specified this information will be prompted.", "as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated Universities,", "* bgcolor = background color index (1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN,", "label * plot = Python Plot object * xopt = string of options", "to the frame rather than parallel to it. * disp = The displacement", "12 MAGENTA = 13 SALMON = 14 WHITE = 15 def newOPlot(name, err,", "connect points == ================= 0 line only 1 dot 2 plus 3 \\*", "= controls the plotting of axes, tick marks, etc: == =========================================== -2 draw", "size). 
1 = continious, 2 = dashed, 3=dot dash, 4 = dotted, 5", "LWIDTH (int) Line width (default = 1) ====== ======= ================================================== \"\"\" ################################################################ #", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1,", "x-coordinates of points * y = Array of world y-coordinates of points *", "the center of the symbol * y = world y-coordinate of the center", "InfoList ====== ======== =============================================== XMAX (float) maximum X value (defaults to actual value)", "Y=0, Y axis is vertical line X=0). B draw bottom (X) or left", "Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # n =", "Or false * Plot = Python Obit Plot to test \"\"\" ################################################################ #", "dash dot dot dot * err = ObitErr error stack \"\"\" ################################################################ #", "(defaults to none) YOPT (string) Options for vertical axis (default \"BCNTS\") See PDrawAxes", "BLC, TRC on info member honored * lev = basic contour level (def", "INVERT (bool) If present and true ionvert colors COLOR (string) Color scheme 'GRAY',", "xopt. * ytick = like xtick for the Y axis. * nysub =", "ny = Number of vertical subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output,", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt,", "= list of Declinations (deg) * err = ObitErr error stack * size", "values between 0 and 1 give intermediate placing, but they are not very", "X vs Y using symbol. 
Plot should be finalized and displayed with PShow", "be a Python Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST", "label, image.me, lev, cntfac, err.me) # end PContour def PGrayScale (plot, label, image,", "YLABEL (string) Label for vertical axis (defaults to none) YOPT (string) Options for", "inch) will be equal, otherwise they will be scaled independently. * axis =", "to pass instructions to processing Member List ======== ======================================= \"\"\" def __init__(self, name):", "or left (Y) edge of frame. C draw top (X) or right (Y)", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp,", "plot = Python Plot object * err = ObitErr error stack \"\"\" ################################################################", "the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name]", "character will cause the Greek equivalent to be used, e.g. #ga will be", "level (def 0.1 peak) * cntfac = factor for spacing between contours (def", "axis is horizontal line Y=0, Y axis is vertical line X=0). B draw", "Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) # end PDrawCircle def", "the top right corner of the viewport (note XMAX may be less than", "1 = continious, 2 = dashed, 3=dot dash, 4 = dotted, 5 =", "(in world coordinates per inch) will be equal, otherwise they will be scaled", "image.me, n, ra, dec, size, err.me) # end PMarkCross def PShow (plot, err):", "Latin character will cause the Greek equivalent to be used, e.g. 
#ga will", "# end PSetPlot def PLabel (plot, xlabel, ylabel, title, err): \"\"\" Display plot", "absolute_import from __future__ import print_function import Obit, _Obit, InfoList, Image import math class", "x-coordinate of the center of the symbol * y = world y-coordinate of", "PSetPage def PText (plot, x, y, angle, just, text, err): \"\"\" Write text", "both axes logarithmically. == =========================================== * err = ObitErr error stack \"\"\" ################################################################", "vertices * y = array of world y-coordinates of the vertices * fill", "and increases along rows and columns * err = ObitErr error stack \"\"\"", "def PDrawPoly (plot, x, y, fill, err): \"\"\" Draw a Polygon, possibly filled", "at major increments of the coordinates; 10 draw box and label X-axis logarithmically;", "vs Y using symbol and error bars. Plot should be finalized and displayed", "lev, cntfac, err): \"\"\" Contour plot of image Contours at lev times powers", "left-hand end of the string will be placed at COORD; if JUST =", "(plot): \"\"\" Return the member InfoList returns InfoList * plot = Python Obit", "= Python Obit Plot object \"\"\" ################################################################ # Checks if not PIsA(plot): raise", "basic contour level (def 0.1 peak) * cntfac = factor for spacing between", "= Width of line (integer multiple of the default size). 
* err =", "err.me) # end PXYOver def PXYErr (plot, symbol, x, y, e, err): \"\"\"", "Python Plot object * xlabel = a label for the x-axis (centered below", "horizontal axis (defaults to none) XOPT (string) Options for horizontal axis (default \"BCNTS\")", "(plot, xmin, xmax, ymin, ymax, just, axis, err): \"\"\" Define plotting area *", "= 10 CYAN = 11 TURQUOISE = 12 MAGENTA = 13 SALMON =", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotLabel(plot.me,", "a #u and #d will be written as superscripts \"\"\" # $Id$ #-----------------------------------------------------------------------", "= Python Obit Error/message stack * output = name and type of output", "x, y, err): \"\"\" Draw a curve. * plot = Python Plot object", "are usable if negative, use abs value and connect points == ================= 0", "plot, label * plot = Python Plot object * xopt = string of", "a Python Obit plot\") # out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out", "= Array of world x-coordinates of points * y = Array of world", "max in image] ======= ======== ================================================= \"\"\" ################################################################ # Checks if not PIsA(plot):", "= Python Plot object * xopt = string of options for X (horizontal)", "location of the character string along the specified edge of the viewport, as", "variable, if None use index * y = Dependent variable * e =", "Obit Plot\") n = len(y) # How many points? Obit.PlotXYErr (plot.me, symbol, n,", "Trailing spaces are ignored when justifying the string, but leading spaces are significant.", "Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) # end PDrawLine", "= factor for spacing between contours (def sqrt(2) * err = ObitErr error", "\"BCNTS\") See PDrawAxes for details. 
XTICK (float) world coordinate interval between major tick", "at (x,y); if JUST = 1.0, the right-hand end of the string will", "M write numeric labels in the unconventional location above the viewport (X) or", "YTICK (float) like xtick for the Y axis. NYSUB (int) like nxsub for", "WHEAT = 6 GRAY = 7 BROWN = 8 BLUE = 9 BLUEVIOLET", "image Plot should be finalized and displayed with PShow * plot = plot", "Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end PLabel def", "a plot Image Members with python interfaces: ======== ======================================= InfoList used to pass", "the interval is chosen. NXSUB (int) the number of subintervals to divide the", "be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def", "only 1 dot 2 plus 3 \\* 4 open circle 5 x 6", "plot = Python Plot object * x1 = world x-coordinate of the new", "===== ============================================ \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot", "draw the coordinate axes (X=0, Y=0); 2 same as axis=1, but also draw", "character size (integer multiple of the default size). * err = ObitErr error", "======= ======== ================================================= XTICK (float) world coordinate interval between major tick marks on", "plplot:lines 30 deg upwards 5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines crossed", "viewport. * xmax = the world x-coordinate at the top right corner of", "Canvas Widget (interacts with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript File (color)", "err.me) # end PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\" Set line style", "============================================ CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int) Line width", "the left of the viewport (Y). 
M write numeric labels in the unconventional", "chosen. * yopt = string of options for Y (vertical) axis of plot.", "actual value) XMIN (float) minimum X value (defaults to actual value) YMAX (float)", "Symbol index to use for plotting. Values in the range [1,12] are usable.", "def PSetLineStyle (plot, lstyle, err): \"\"\" Set line style * plot = Python", "y-coordinate at the bottom left corner of the viewport. * ymax = the", "N write Numeric labels in the conventional location below the viewport (X) or", "Characters between a #d and #u will be written as subscripts - Superscripts:", "will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty", "for the y-axis (centered to the left of the viewport, drawn vertically) *", "Right margin of the viewport. If it includes 'LV' or 'RV', the string", "9 BLUEVIOLET = 10 CYAN = 11 TURQUOISE = 12 MAGENTA = 13", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x,", "* ymin = the world y-coordinate at the bottom left corner of the", "will be placed at at (x,y). Other values between 0 and 1 give", "to a plot Image Members with python interfaces: ======== ======================================= InfoList used to", "err = ObitErr error stack Optional parameters on plot InfoList ====== ======== ===============================================", "curve. 
* plot = Python Plot object * x = Array of world", "return \"Bogus Dude\"+str(self.__class__) if name == \"me\" : return Obit.OPlot_Get_me(self.this) # Functions to", "Set foreground color * plot = Python Plot object * color = color", "== ================= * x = Independent variable, if None use index * y", "axis=0, but also draw the coordinate axes (X=0, Y=0); 2 same as axis=1,", "G draw Grid of vertical (X) or horizontal (Y) lines I Invert the", "2 crosshatched 3 plplot:lines 45 deg downwards 4 plplot:lines 30 deg upwards 5", "end PXYErr def PContour (plot, label, image, lev, cntfac, err): \"\"\" Contour plot", "print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") n = len(ra)", "axis=1, but also draw grid lines at major increments of the coordinates; 10", "= 13 SALMON = 14 WHITE = 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK,", "strings: - Greek letters, A #g immediately prior to a Latin character will", "plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me) # end PRelText def", "err): \"\"\" Set scaling for characters * plot = Python Plot object *", "be a Python Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y,", "Plot Plot X vs Y using symbol. Plot should be finalized and displayed", "InfoList: ======= ======== ================================================= XTICK (float) world coordinate interval between major tick marks", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) #", "and columns * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "Obit installation uses PLPlot for plotting the following can be used in text", "finalized and displayed with PShow This routine draws the frame and adds labels,", "port * plot = Python Plot object * side = Must include one", "(plot, x1, y1, x2, y2, err): \"\"\" Draw a line. 
* plot =", "30 deg downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8 plplot:vertical lines", "======= ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot", "= world y-coordinate of the center of the symbol * symbol = Symbol", "nx = Number of horizontal subpages * ny = Number of vertical subpages", "Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end PDrawSymbol def PDrawPoly (plot, x, y,", "value (defaults to actual value) YMAX (float) maximum Y value (defaults to actual", "3=dot dash, 4 = dotted, 5 = dash dot dot dot * err", "the plot, use PShow to finalize it. Notes: on text strings in PLPlot", "[def max in image] ======= ======== ================================================= \"\"\" ################################################################ # Checks if not", "object (labeling purposes) * err = Python Obit Error/message stack * output =", "PetLineStyle def PSetColor (plot, color, err): \"\"\" Set foreground color * plot =", "xtick=0.0 [def], the interval is chosen. NXSUB (long) the number of subintervals to", "are single letters, and may be in any order: = ====================================================================== A draw", "color = color index (1-15), symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE,", "= Python Plot object * x = Plot x in world coordinates *", "\"\"\" Mark positions on Contour plot of image Place cross at positions. Plot", "interval into. If xtick=0.0 or nxsub=0, the number is chosen. 
[def 0] YTICK", "======== =============================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot", "PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") n =", "axis Logarithmically N write Numeric labels in the conventional location below the viewport", "* y = world y-coordinate of the center of the symbol * symbol", "circle 12 filled star == =============== * x = Independent variable, if None", "################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a", "plot = Python Obit Plot object \"\"\" ################################################################ # Checks if not PIsA(plot):", "Y axis. NYSUB (int) like nxsub for the Y axis CSIZE (int) Scaling", "Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me) # end PContour def PGrayScale", "x = world x-coordinate of the center of the symbol * y =", "_Obit, InfoList, Image import math class OPlot(Obit.OPlot): \"\"\" Python Obit interface to display", "edge. * just = Controls justification of the string parallel to the specified", "FJUST = 0.0, the left-hand end of the string will be placed at", "(plot, x, y,radius, err): \"\"\" Draw a circle. * plot = Python Plot", "* plot = Python Plot object * x = World x-coordinate of center", "output, bgcolor, nx, ny, err.me) return out # end newOPlot def PXYPlot (plot,", "(plot.me, symbol, n, x, y, e, err.me) # end PXYErr def PContour (plot,", "without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "DC, USA. 
# # This program is free software; you can redistribute it", "PLabel (plot, xlabel, ylabel, title, err): \"\"\" Display plot * plot = Python", "\"\"\" Draw a Symbol * plot = Python Plot object * x =", "used in text strings: - Greek letters, A #g immediately prior to a", "1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me) # end PDrawPoly def PGetList", "World coordinate interval between major tick marks on X axis. If xtick=0.0, the", "open triangle 8 open star 9 filled triangle 10 filled square 11 filled", "Y (vertical) axis of plot. Coding is the same as for xopt. *", "Plot object * side = Must include one of the characters 'B', 'L',", "left (Y) edge of frame. C draw top (X) or right (Y) edge", "draw minor tick marks (Subticks). = ====================================================================== * xtick = World coordinate interval", "plot, use PShow to finalize it. Notes: on text strings in PLPlot installations", "will be placed at at COORD. Other values between 0 and 1 give", "raise TypeError(\"plot MUST be a Python Obit plot\") # dx = math.cos(angle/57.296) dy", "be a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawCircle", "should be finalized and displayed with PShow This routine draws the frame and", "# Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\"", "be used, e.g. #ga will be a lower case alpha. 
- Subscripts: Characters", "# end PetLineStyle def PSetColor (plot, color, err): \"\"\" Set foreground color *", "not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name == \"me\" : return Obit.OPlot_Get_me(self.this)", "(int) If !=0 then force X and Y axis scaling to be the", "================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST", "= 4 BLACK = 5 WHEAT = 6 GRAY = 7 BROWN =", "marks on X axis. If xtick=0.0, the interval is chosen. * nxsub =", "fill = Fill pattern, plot package dependent * values in the range [0,8]", "to use for plotting. Values in the range [1,12] are usable. If negative,", "\"\"\" Display plot * plot = Python Plot object * err = ObitErr", "major tick marks on X axis. If xtick=0.0, the interval is chosen. *", "err): \"\"\" Set line style * plot = Python Plot object * lstyle", "BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN,", "the viewport. If * just = 0.0, the left-hand end of the string", "background color index (1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT,", "advance sub page Note: some functions such as PContour advance the page *", "xmin, xmax, ymin, ymax, just, axis, err): \"\"\" Define plotting area * plot", "x, y, radius, err.me) # end PDrawCircle def PDrawSymbol (plot, x, y, symbol,", "current subpage to sub numbering starts at the top left at 1 and", "JUST=1, the scales of the x and y axes (in world coordinates per", "between a #d and #u will be written as subscripts - Superscripts: Characters", "DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set scaling for characters * plot =", "viewport (X) or to the right of the viewport (Y). 
P extend (\"Project\")", "Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize def PSetLineWidth", "end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set scaling for characters * plot", "info member honored * err = ObitErr error stack Optional parameters on plot", "name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value):", "index * y = Dependent variable * err = ObitErr error stack \"\"\"", "def PLabel (plot, xlabel, ylabel, title, err): \"\"\" Display plot * plot =", "color index (1-15), symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT,", "intermediate placing, but they are not very useful. * text = The text", "contour level (def 0.1 peak) * cntfac = factor for spacing between contours", "use for plotting. Values in the range [1,12] are usable. If negative, use", "output is specified this information will be prompted. Next, the plotting region must", "background color. If no output is specified this information will be prompted. Next,", "22903-2475 USA #----------------------------------------------------------------------- # Python shadow class to ObitPlot class from __future__ import", "text, err): \"\"\" Write text on plot * plot = Python Plot object", "1) LWIDTH (int) Line width (default = 1) ====== ======= ================================================== \"\"\" ################################################################", "+ Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0 RED = 1 YELLOW =", "lower case alpha. 
- Subscripts: Characters between a #d and #u will be", "PSetColor def PSetPage (plot, sub, err): \"\"\" Set or advance sub page Note:", "y-coordinates of points * err = ObitErr error stack \"\"\" ################################################################ # Checks", "\"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts with ObitTalk)", "the viewport, measured outwards from the viewport in units of the character height.", "peak) * cntfac = factor for spacing between contours (def sqrt(2) * err", "colors COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum", "just, axis, err.me) # end PSetPlot def PLabel (plot, xlabel, ylabel, title, err):", "Plot object * xlabel = a label for the x-axis (centered below the", "ObitErr error stack Optional parameters on plot InfoList ====== ======== =============================================== XMAX (float)", "# Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end PLabel def PDrawAxes(plot, xopt, xtick,", "Plot object * color = color index (1-15), symbolic names: BLACK (notreally), RED(default),", "or symbols may be added. When all has been added to the plot,", "be a Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYErr", "true plot sqrt (pixel_value) INVERT (bool) If present and true ionvert colors COLOR", "string from the specified edge of the viewport, measured outwards from the viewport", "plot = Python Plot object * side = Must include one of the", "the Free # Software Foundation, Inc., 675 Massachusetts Ave, Cambridge, # MA 02139,", "n = len(y) # How many points? Obit.PlotXYErr (plot.me, symbol, n, x, y,", "center of the string will be placed at (x,y); if JUST = 1.0,", "no output is specified this information will be prompted. Next, the plotting region", "value to write inside the viewport, a positive value to write outside. 
*", "the left of the viewport, drawn vertically) * title = a label for", "X and Y axis scaling to be the same ====== ======== ================================================== \"\"\"", "CSIZE (int) Scaling factor for characters(default = 1) SSIZE (int) Scaling factor for", "the world y-coordinate at the bottom left corner of the viewport. * ymax", "axis CSIZE (int) Scaling factor for characters(default = 1) SSIZE (int) Scaling factor", "the Y axis CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int)", "actual value) TITLE (string) Label for the plot (defaults to none), max 120", "edge of the viewport, measured outwards from the viewport in units of the", "displayed with PShow * plot = plot * image = ObitImage to plot", "= if nonNone, error in y * err = ObitErr error stack Optional", "XMIN (float) minimum X value (defaults to actual value) YMAX (float) maximum Y", "the viewport). * ylabel = a label for the y-axis (centered to the", "open star 9 filled triangle 10 filled square 11 filled circle 12 filled", "displayed with PShow This routine draws the frame and adds labels, to only", "= len(y) # How many points? Obit.PlotXYErr (plot.me, symbol, n, x, y, e,", "Python Obit plot\") # dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y,", "S draw minor tick marks (Subticks). = ====================================================================== * xtick = World coordinate", "Python Obit plot\") # out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out #", "x2, y2, err.me) # end PDrawLine def PDrawCurve (plot, x, y, err): \"\"\"", "Scale plot of image Gray Scales plot of image Plot should be finalized", "of the edge. * just = Controls justification of the string parallel to", "MUST be a Python Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me)", "= Symbol index to use for plotting. 
Values in the range [1,12] are", "corner of the viewport (note YMAX may be less than YMIN) * just", "YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON,", "end PDrawCurve def PDrawCircle (plot, x, y,radius, err): \"\"\" Draw a circle. *", "InfoList returns InfoList * plot = Python Obit Plot object \"\"\" ################################################################ #", "Draw a curve. * plot = Python Plot object * x = Array", "# How many points? Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me) # end", "BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err", "World coordinate radius * err = ObitErr error stack \"\"\" ################################################################ # Checks", "(1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE,", "xtick for the Y axis. * nysub = like nxsub for the Y", "of the symbol * y = world y-coordinate of the center of the", "placed at (x,y); if JUST = 1.0, the right-hand end of the string", "negative, use abs value and connect points. 
== =============== 0 line only 1", "out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return out # end", "not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotGrayScale", "raise AttributeError(name) def __repr__(self): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C", "BROWN = 8 BLUE = 9 BLUEVIOLET = 10 CYAN = 11 TURQUOISE", "open circle 5 x 6 open square 7 open triangle 8 open star", "plot = Python Plot object * x = World x-coordinate of center *", "value and connect points == ================= 0 line only 1 dot 2 plus", "like nxsub for the Y axis CSIZE (int) Scaling factor for characters(default =", "TypeError(\"image MUST be a Python Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac,", "# published by the Free Software Foundation; either version 2 of # the", "package dependent * values in the range [0,8] are usable == =============== 0", "Plot object \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST be", "PGrayScale (plot, label, image, err): \"\"\" Gray Scale plot of image Gray Scales", "at 1 and increases along rows and columns * err = ObitErr error", "* name = name desired for object (labeling purposes) * err = Python", "yopt, ytick, nysub, err): \"\"\" Draw axes for a plot, label * plot", "Set or advance sub page Note: some functions such as PContour advance the", "Observatory # 520 Edgemont Road # Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python", "= ObitImage to plot * ra = list of RAs (deg) * dec", "the string will be placed at at (x,y). Other values between 0 and", "deg downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8 plplot:vertical lines ==", "be addressed as follows: # Internet email: <EMAIL>. 
# Postal address: <NAME> #", "the viewport. * ymax = the world y-coordinate at the top right corner", "# Python shadow class to ObitPlot class from __future__ import absolute_import from __future__", "= plot * label = Label for plot * image = ObitImage to", "Note: some functions such as PContour advance the page * plot = Python", "x1, y1, x2, y2, err): \"\"\" Draw a line. * plot = Python", "for characters(default = 1) LWIDTH (int) Line width (default = 1) ====== =====", "object * color = color index (1-15), symbolic names: BLACK (notreally), RED(default), YELLOW,", "[def min in image] PIX_MIN (float) minimum pixel value [def max in image]", "\"gif\" GIF file \"null\" Null device ====== ========================== * bgcolor = background color", "(def sqrt(2) * err = ObitErr error stack Optional parameters on plot InfoList:", "the following can be used in text strings: - Greek letters, A #g", "* x = Independent variable, if None use index * y = Dependent", "sub, err): \"\"\" Set or advance sub page Note: some functions such as", "raise TypeError(\"plot MUST be a Python Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me,", "draws the frame and adds labels, to only overplot data on the same", "Plot X vs Y using symbol and error bars. Plot should be finalized", "image.me, lev, cntfac, err.me) # end PContour def PGrayScale (plot, label, image, err):", "and #u will be written as subscripts - Superscripts: Characters between a #u", "err): \"\"\" Draw a line. * plot = Python Plot object * x1", "PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err): \"\"\" Draw axes", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius,", "* plot = Python Plot object * lstyle = Style of line (integer", "the viewport instead of inside. L label axis Logarithmically N write Numeric labels", "right (Y) edge of frame. 
G draw Grid of vertical (X) or horizontal", "specified edge of the viewport, as a fraction of the length of the", "Fill pattern, plot package dependent * values in the range [0,8] are usable", "of plot. Coding is the same as for xopt. * ytick = like", "= len(y) # How many points? Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me)", "a Python Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me) # end", "viewport instead of inside. L label axis Logarithmically N write Numeric labels in", "PDrawCircle def PDrawSymbol (plot, x, y, symbol, err): \"\"\" Draw a Symbol *", "the same as for xopt. * ytick = like xtick for the Y", "Greek letters, A #g immediately prior to a Latin character will cause the", "======== ======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot):", "Y axis. * nysub = like nxsub for the Y axis * err", "on plot InfoList: ====== ======= ================================================== XTICK (float) world coordinate interval between major", "value [def min in image] PIX_MIN (float) minimum pixel value [def max in", "(integer multiple of the default size). 1 = continious, 2 = dashed, 3=dot", "= world x-coordinate of the new pen position. * y1 = world y-coordinate", "is specified this information will be prompted. Next, the plotting region must be", "the new pen position. * err = ObitErr error stack \"\"\" ################################################################ #", "be a Python Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra,", "be placed at at (x,y). Other values between 0 and 1 give intermediate", "err): \"\"\" Overplot X vs Y Overplot X vs Y using symbol. Plot", "vertically) * title = a label for the entire plot (centered above the", "Bottom, Left, Top, or Right margin of the viewport. If it includes 'LV'", "be scaled independently. 
* axis = controls the plotting of axes, tick marks,", "the viewport) * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def PSetColor (plot,", "0.1 peak) * cntfac = factor for spacing between contours (def sqrt(2) *", "characters 'B', 'L', 'T', or 'R' signifying the Bottom, Left, Top, or Right", "================================================= \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST", "prior to a Latin character will cause the Greek equivalent to be used,", "placed at (x,y); if JUST = 0.5, the center of the string will", "of the string will be placed at (x,y); if JUST = 1.0, the", "nysub = like nxsub for the Y axis * err = ObitErr error", "of horizontal subpages * ny = Number of vertical subpages \"\"\" ################################################################ out", "Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value)", "may be in any order: = ====================================================================== A draw Axis (X axis is", "* e = if nonNone, error in y * err = ObitErr error", "returns InfoList * plot = Python Obit Plot object \"\"\" ################################################################ # Checks", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin,", "\"\"\" Set line style * plot = Python Plot object * lstyle =", "just = 0.0, the left-hand end of the string will be placed at", "interval is chosen. 
NXSUB (long) the number of subintervals to divide the major", "will be written as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019", "Axis (X axis is horizontal line Y=0, Y axis is vertical line X=0).", "__getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name == \"me\" :", "# end PDrawSymbol def PDrawPoly (plot, x, y, fill, err): \"\"\" Draw a", "# end PDrawCircle def PDrawSymbol (plot, x, y, symbol, err): \"\"\" Draw a", "super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if", "coordinate interval between major tick marks on X axis. If xtick=0.0 [def], the", "characters(default = 1) SSIZE (int) Scaling factor for symbols(default = 1) LWIDTH (int)", "continious, 2 = dashed, 3=dot dash, 4 = dotted, 5 = dash dot", "(x,y). Other values between 0 and 1 give intermediate placing, but they are", "def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this)", "The number of subintervals to divide the major coordinate interval into. If xtick=0.0", "adds labels, to only overplot data on the same frame, use ObitPlotXYOver *", "write outside. 
* coord = The location of the character string along the", "in y * err = ObitErr error stack Optional parameters on plot InfoList:", "Plot should be finalized and displayed with PShow * plot = plot *", "be finalized and displayed with PShow * plot = plot * image =", "nxsub, yopt, ytick, nysub, err): \"\"\" Draw axes for a plot, label *", "Logarithmically N write Numeric labels in the conventional location below the viewport (X)", "and Y axis scaling to be the same ====== ======== =============================================== \"\"\" ################################################################", "XOPT (string) Options for horizontal axis (default \"BCNTS\") See PDrawAxes for details. YLABEL", "on plot InfoList: ======= ======== ================================================= XTICK (float) world coordinate interval between major", "PXYErr) PGrayScale, or PContour. Then additional lines, curves, text or symbols may be", "BLUE = 9 BLUEVIOLET = 10 CYAN = 11 TURQUOISE = 12 MAGENTA", "a Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYErr (plot.me,", "right-hand end of the string will be placed at at (x,y). Other values", "Plot object * cscale = new character size (integer multiple of the default", "ObitImage to plot, BLC, TRC on info member honored * err = ObitErr", "2006,2016,2019 # Associated Universities, Inc. Washington DC, USA. # # This program is", "cross at positions. Plot should be finalized and displayed with PShow * plot", "(int) like nxsub for the Y axis CSIZE (int) Scaling factor for characters(default", "# Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me) # end DrawAxes def", "= Python Plot object * cscale = new character size (integer multiple of", "ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST", "radius, err.me) # end PDrawCircle def PDrawSymbol (plot, x, y, symbol, err): \"\"\"", "\"\"\" Draw a circle. 
* plot = Python Plot object * x =", "x, y, err.me) # end PXYPlot def PXYOver (plot, symbol, x, y, err):", "= The number of subintervals to divide the major coordinate interval into. If", "many points? Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me) # end PXYPlot def", "world y-coordinate at the top right corner of the viewport (note YMAX may", "string will be placed at at (x,y). Other values between 0 and 1", "MA 02139, USA. # # Correspondence concerning this software should be addressed as", "# end PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err): \"\"\"", "= 1) ====== ======= ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually", "very useful. * text = The text string to be plotted. Trailing spaces", "7 plplot:horizontal lines 8 plplot:vertical lines == =============== * err = ObitErr error", "bgcolor, nx, ny, err.me) return out # end newOPlot def PXYPlot (plot, symbol,", "end of the string will be placed at COORD; if JUST = 0.5,", "plot sqrt (pixel_value) INVERT (bool) If present and true ionvert colors COLOR (string)", "the coordinate axes (X=0, Y=0); 2 same as axis=1, but also draw grid", "values in the range [1,12] are usable if negative, use abs value and", "characters(default = 1) LWIDTH (int) Line width (default = 1) ====== ======= ==================================================", "color * plot = Python Plot object * color = color index (1-15),", "PGetList def PIsA (plot): \"\"\" Tells if the input is a Python ObitPlot", "Plot object * lstyle = Style of line (integer multiple of the default", "member honored * err = ObitErr error stack Optional parameters on plot InfoList:", "XMIN). 
* ymin = the world y-coordinate at the bottom left corner of", "tick marks, etc: == =========================================== -2 draw no box, axes or labels; -1", "BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err = ObitErr error stack \"\"\"", "new pen position. * x2 = world x-coordinate of the new pen position.", "marks on X axis. If xtick=0.0 [def], the interval is chosen. NXSUB (int)", "corner of the viewport. * ymax = the world y-coordinate at the top", "PShow * plot = plot * symbol = Symbol index to use for", "spaces are ignored when justifying the string, but leading spaces are significant. *", "x = array of world x-coordinates of the vertices * y = array", "return Obit.OPlot_Get_me(self.this) # Functions to return members if name==\"List\": return PGetList(self) raise AttributeError(name)", "Python Plot object * x1 = world x-coordinate of the new pen position.", "be finalized and displayed with PShow This routine draws the frame and adds", "* yopt = string of options for Y (vertical) axis of plot. Coding", "Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me) # end", "plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me) # end DrawAxes", "between 0 and 1 give intermediate placing, but they are not very useful.", "be a Python Obit plot\") # dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me,", "dec, err, size=5.0): \"\"\" Mark positions on Contour plot of image Place cross", "520 Edgemont Road # Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow class", "interval. S draw minor tick marks (Subticks). = ====================================================================== * xtick = World", "to the left of the viewport (Y). 
M write numeric labels in the", "WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx =", "# end PXYPlot def PXYOver (plot, symbol, x, y, err): \"\"\" Overplot X", "plot of image Contours at lev times powers of cntfac Plot should be", "(plot, label, image, lev, cntfac, err): \"\"\" Contour plot of image Contours at", "input is a Python ObitPlot returns true Or false * Plot = Python", "* ra = list of RAs (deg) * dec = list of Declinations", "(interacts with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript File (color) \"xfig\" Fig", "possibly filled * plot = Python Plot object * n = number of", "dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just, text,", "(plot, sub, err): \"\"\" Set or advance sub page Note: some functions such", "label both axes logarithmically. == =========================================== * err = ObitErr error stack \"\"\"", "* nx = Number of horizontal subpages * ny = Number of vertical", "Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) # end PDrawLine def PDrawCurve (plot, x,", "for the Y axis. * nysub = like nxsub for the Y axis", "symbol, err.me) # end PDrawSymbol def PDrawPoly (plot, x, y, fill, err): \"\"\"", "= world y-coordinate of the new pen position. * err = ObitErr error", "hatched 2 crosshatched 3 plplot:lines 45 deg downwards 4 plplot:lines 30 deg upwards", "image Place cross at positions. Plot should be finalized and displayed with PShow", "same ====== ======== ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__)", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin,", "Free Software Foundation; either version 2 of # the License, or (at your", "= size of cross in pixels Optional parameters on plot InfoList ====== =====", "as for xopt. 
* ytick = like xtick for the Y axis. *", "x in world coordinates * y = Plot y in world coordinates *", "= 0.5, the center of the string will be placed at (x,y); if", "The location of the character string along the specified edge of the viewport,", "ytick, nysub, err): \"\"\" Draw axes for a plot, label * plot =", "it will be useful, # but WITHOUT ANY WARRANTY; without even the implied", "* xtick = World coordinate interval between major tick marks on X axis.", "y-coordinate of center * radius = World coordinate radius * err = ObitErr", "(def 0.1 peak) * cntfac = factor for spacing between contours (def sqrt(2)", "PGetList(self) raise AttributeError(name) def __repr__(self): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return", "will be placed at COORD; if JUST = 0.5, the center of the", "InfoList * plot = Python Obit Plot object \"\"\" ################################################################ # Checks if", "MUST be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow def", "height. Use a negative value to write inside the viewport, a positive value", "end PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\" Set line width * plot", "at at COORD. Other values between 0 and 1 give intermediate placing, but", "Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me) # end", "* plot = Python Plot object * sub = if <=0 advance page,", "(vertical) axis of plot. Coding is the same as for xopt. * ytick", "with error bars Plot X vs Y using symbol and error bars. Plot", "advance page, if >0 set current subpage to sub numbering starts at the", "the Y axis CSIZE (int) Scaling factor for characters(default = 1) SQRT (bool)", "image Contours at lev times powers of cntfac Plot should be finalized and", "square 11 filled circle 12 filled star == =============== * x = Independent", "of the character height. 
Use a negative value to write inside the viewport,", "err.me) # end PGrayScale def PMarkCross (plot, image, ra, dec, err, size=5.0): \"\"\"", "= len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me) # end PMarkCross", "# Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize def PSetLineWidth (plot, lwidth, err):", "inside the viewport, a positive value to write outside. * coord = The", "XMAX may be less than XMIN). * ymin = the world y-coordinate at", "Scaling factor for characters(default = 1) LWIDTH (int) Line width (default = 1)", "* xopt = string of options for X (horizontal) axis of plot. Options", "this software should be addressed as follows: # Internet email: <EMAIL>. # Postal", "name == \"me\" : return Obit.OPlot_Get_me(self.this) # Functions to return members if name==\"List\":", "string, but leading spaces are significant. * err = ObitErr error stack \"\"\"", "coordinate interval into. If xtick=0.0 or nxsub=0, the number is chosen. [def 0]", "filled square 11 filled circle 12 filled star == =============== * err =", "the number of subintervals to divide the major coordinate interval into. If xtick=0.0", "a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me)", "plot = Python Plot object * color = color index (1-15), symbolic names:", "err.me) return out # end newOPlot def PXYPlot (plot, symbol, x, y, err):", "specifying the output and background color. 
If no output is specified this information", "= Python Plot object * color = color index (1-15), symbolic names: BLACK", "coordinate axes (X=0, Y=0); 2 same as axis=1, but also draw grid lines", "# end PSetPage def PText (plot, x, y, angle, just, text, err): \"\"\"", "symbol, x, y, err): \"\"\" Overplot X vs Y Overplot X vs Y", "image = ObitImage to plot, BLC, TRC on info member honored * lev", "(plot, symbol, x, y, e, err): \"\"\" Simple XY Plot with error bars", "coordinate radius * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "Radio Astronomy Observatory # 520 Edgemont Road # Charlottesville, VA 22903-2475 USA #-----------------------------------------------------------------------", "and label Y-axis logarithmically; 30 draw box and label both axes logarithmically. ==", "* axis = controls the plotting of axes, tick marks, etc: == ===========================================", "Coding is the same as for xopt. * ytick = like xtick for", "y = array of world y-coordinates of the vertices * fill = Fill", "the range [1,12] are usable. If negative, use abs value and connect points.", "the # GNU General Public License for more details. # # You should", "x-coordinates of the vertices * y = array of world y-coordinates of the", "for characters(default = 1) SSIZE (int) Scaling factor for symbols(default = 1) LWIDTH", "= Python Plot object * x = Array of world x-coordinates of points", "# Checks if not isinstance(plot, OPlot): return False return Obit.OPlotIsA(plot.me)!=0 # end PIsA", "label Y-axis logarithmically; 30 draw box and label both axes logarithmically. == ===========================================", "B draw bottom (X) or left (Y) edge of frame. 
C draw top", "* values in the range [0,8] are usable == =============== 0 no fill", "6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8 plplot:vertical lines == =============== *", "-1 draw box only; 0 draw box and label it with coordinates; 1", "the unconventional location above the viewport (X) or to the right of the", "Plot\") n = len(y) # How many points? Obit.PlotXYPlot (plot.me, symbol, n, x,", "err.me) # end PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err):", "ymin, ymax, just, axis, err.me) # end PSetPlot def PLabel (plot, xlabel, ylabel,", "* err = Python Obit Error/message stack * output = name and type", "location below the viewport (X) or to the left of the viewport (Y).", "USA #----------------------------------------------------------------------- # Python shadow class to ObitPlot class from __future__ import absolute_import", "return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors", "edge of frame. G draw Grid of vertical (X) or horizontal (Y) lines", "label it with coordinates; 1 same as axis=0, but also draw the coordinate", "interval into. If xtick=0.0 or nxsub=0, the number is chosen. * yopt =", "the viewport (note YMAX may be less than YMIN) * just = if", "The displacement of the character string from the specified edge of the viewport,", "to display server This class is for creating and using the interface to", "dx, dy, just, text, err.me) # end PText def PRelText (plot, side, disp,", "= 0.0, the left-hand end of the string will be placed at (x,y);", "the symbol * symbol = Symbol index to use for plotting. 
Values in", "points * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "for the plot (defaults to none), max 120 XLABEL (string) Label for horizontal", "Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end PGrayScale def PMarkCross", "err.me) # end PRelText def PDrawLine (plot, x1, y1, x2, y2, err): \"\"\"", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me)", "plot, BLC, TRC on info member honored * lev = basic contour level", "text, err.me) # end PText def PRelText (plot, side, disp, coord, fjust, text,", "file \"png\" PNG file \"jpeg\" JPEG file \"gif\" GIF file \"null\" Null device", "3 \\* 4 open circle 5 x 6 open square 7 open triangle", "# In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name): if", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # scale = 1.0", "Notes: on text strings in PLPlot installations If the Obit installation uses PLPlot", "e.g. #ga will be a lower case alpha. 
- Subscripts: Characters between a", "superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated Universities, Inc.", "Dependent variable * e = if nonNone, error in y * err =", "# end PGrayScale def PMarkCross (plot, image, ra, dec, err, size=5.0): \"\"\" Mark", "of vertical (X) or horizontal (Y) lines I Invert the tick marks; ie", "(\"Project\") major tick marks outside the box (ignored if option I is specified)", "of the GNU General Public License as # published by the Free Software", "color, err.me) # end PSetColor def PSetPage (plot, sub, err): \"\"\" Set or", "sqrt(2) * err = ObitErr error stack Optional parameters on plot InfoList: ======", "tick marks outside the box (ignored if option I is specified) T draw", "Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me) # end PXYOver def PXYErr (plot,", "ObitPlot class from __future__ import absolute_import from __future__ import print_function import Obit, _Obit,", "Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def PText (plot, x,", "to write outside. * coord = The location of the character string along", "Plot object * x = Plot x in world coordinates * y =", "e, err.me) # end PXYErr def PContour (plot, label, image, lev, cntfac, err):", "be less than YMIN) * just = if JUST=1, the scales of the", "a Python Obit plot\") # dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x,", "Plot\") n = len(y) # How many points? Obit.PlotXYErr (plot.me, symbol, n, x,", "the interval is chosen. * nxsub = The number of subintervals to divide", "(1-15), symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN,", "Plot object * lwidth = Width of line (integer multiple of the default", "[def], the interval is chosen. 
NXSUB (long) the number of subintervals to divide", "the string will be placed at COORD; if JUST = 1.0, the right-hand", "\"\"\" Overplot X vs Y Overplot X vs Y using symbol. Plot should", "PMarkCross (plot, image, ra, dec, err, size=5.0): \"\"\" Mark positions on Contour plot", "page * plot = Python Plot object * sub = if <=0 advance", "option I is specified) T draw major Tick marks at the major coordinate", "dot dot * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "MUST be a Python Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n,", "Display plot * plot = Python Plot object * xlabel = a label", "\"\"\" ################################################################ # Checks if not isinstance(plot, OPlot): return False return Obit.OPlotIsA(plot.me)!=0 #", "y,radius, err): \"\"\" Draw a circle. * plot = Python Plot object *", "Must include one of the characters 'B', 'L', 'T', or 'R' signifying the", "the top right corner of the viewport (note YMAX may be less than", "ionvert colors COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float)", "x, y, err): \"\"\" Overplot X vs Y Overplot X vs Y using", "interval is chosen. * nxsub = The number of subintervals to divide the", "name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self): if not isinstance(self, OPlot): return \"Bogus", "PARTICULAR PURPOSE. See the # GNU General Public License for more details. #", "\",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotContour (plot.me, label, image.me,", "center of the symbol * symbol = Symbol index to use for plotting.", "plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour. 
Then additional lines, curves,", "world coordinates * angle = Orientation of the text in deg, 0=horizontal *", "triangle 8 open star 9 filled triangle 10 filled square 11 filled circle", "Plot object * n = number of vertices * x = array of", "for vertical axis (default \"BCNTS\") See PDrawAxes for details. XTICK (float) world coordinate", "(float) like xtick for the Y axis. NYSUB (int) like nxsub for the", "all has been added to the plot, use PShow to finalize it. Notes:", "CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx = Number of horizontal subpages *", "scale, err.me) # end PDrawPoly def PGetList (plot): \"\"\" Return the member InfoList", "creating and using the interface to a plot Image Members with python interfaces:", "tick marks; ie draw them outside the viewport instead of inside. L label", "# Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def PSetColor (plot, color, err): \"\"\"", "the entire plot (centered above the viewport) * err = ObitErr error stack", "x, y, fill, scale, err.me) # end PDrawPoly def PGetList (plot): \"\"\" Return", "= 9 BLUEVIOLET = 10 CYAN = 11 TURQUOISE = 12 MAGENTA =", "YELLOW = 2 GREEN = 3 AQUAMARINE = 4 BLACK = 5 WHEAT", "Postal address: <NAME> # National Radio Astronomy Observatory # 520 Edgemont Road #", "either PSetPlot, one of the XY plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale,", "top right corner of the viewport (note YMAX may be less than YMIN)", "should be addressed as follows: # Internet email: <EMAIL>. # Postal address: <NAME>", "plotting area * plot = Python Plot object * xmin = the world", "of subintervals to divide the major coordinate interval into. If xtick=0.0 or nxsub=0,", "Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotContour (plot.me,", "err): \"\"\" Display plot * plot = Python Plot object * err =", "(plot, symbol, x, y, err): \"\"\" Overplot X vs Y Overplot X vs", "# How many points? 
Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me) # end", "PSetPlot def PLabel (plot, xlabel, ylabel, title, err): \"\"\" Display plot * plot", "'L', 'T', or 'R' signifying the Bottom, Left, Top, or Right margin of", "using either PSetPlot, one of the XY plotting routines (PXYPlot, PXYOver, or PXYErr)", "err = ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot): raise", "AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE *", "x, y, symbol, err): \"\"\" Draw a Symbol * plot = Python Plot", "filled triangle 10 filled square 11 filled circle 12 filled star == =================", "the x-axis (centered below the viewport). * ylabel = a label for the", "end PSetPage def PText (plot, x, y, angle, just, text, err): \"\"\" Write", "of the characters 'B', 'L', 'T', or 'R' signifying the Bottom, Left, Top,", "= InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out # end PGetList def PIsA (plot):", "use abs value and connect points == ================= 0 line only 1 dot", "world y-coordinates of points * err = ObitErr error stack \"\"\" ################################################################ #", "Contours at lev times powers of cntfac Plot should be finalized and displayed", "useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of #", "Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\"", "!=0 then force X and Y axis scaling to be the same ======", "Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end PGrayScale def PMarkCross (plot, image,", "13 SALMON = 14 WHITE = 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1,", "xtick=0.0 [def], the interval is chosen. NXSUB (int) the number of subintervals to", "If xtick=0.0 [def], the interval is chosen. 
NXSUB (int) the number of subintervals", "1 and increases along rows and columns * err = ObitErr error stack", "world y-coordinate at the bottom left corner of the viewport. * ymax =", "TypeError(\"plot MUST be a Python Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise", "at the bottom left corner of the viewport. * ymax = the world", "RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA,", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "for horizontal axis (defaults to none) XOPT (string) Options for horizontal axis (default", "====== ======== ================================================== XMAX (float) maximum X value (defaults to actual value) XMIN", "string will be placed at (x,y); if JUST = 1.0, the right-hand end", "= plot * symbol = Symbol index to use for plotting. Values in", "plplot:vertical lines == =============== * err = ObitErr error stack \"\"\" ################################################################ #", "(plot, lwidth, err): \"\"\" Set line width * plot = Python Plot object", "class to ObitPlot class from __future__ import absolute_import from __future__ import print_function import", "= 7 BROWN = 8 BLUE = 9 BLUEVIOLET = 10 CYAN =", "text or symbols may be added. 
When all has been added to the", "object * x = Array of world x-coordinates of points * y =", "of cntfac Plot should be finalized and displayed with PShow * plot =", "= the world x-coordinate at the top right corner of the viewport (note", "label, image, err): \"\"\" Gray Scale plot of image Gray Scales plot of", "x, y, err): \"\"\" Simple XY Plot Plot X vs Y using symbol.", "SALMON, WHITE * nx = Number of horizontal subpages * ny = Number", "# # This program is distributed in the hope that it will be", "include one of the characters 'B', 'L', 'T', or 'R' signifying the Bottom,", "def PXYPlot (plot, symbol, x, y, err): \"\"\" Simple XY Plot Plot X", "\"\"\" Display plot * plot = Python Plot object * xlabel = a", "if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\")", "to processing Member List ======== ======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this,", "plot of image Gray Scales plot of image Plot should be finalized and", "Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0 RED = 1 YELLOW = 2", "by the Free Software Foundation; either version 2 of # the License, or", "Label for horizontal axis (defaults to none) XOPT (string) Options for horizontal axis", "* output = name and type of output device: ====== ========================== \"None\" interactive", "be finalized and displayed with PShow * plot = plot * label =", "ymax, just, axis, err.me) # end PSetPlot def PLabel (plot, xlabel, ylabel, title,", "(defaults to actual value) TITLE (string) Label for the plot (defaults to none),", "for plotting values in the range [1,12] are usable if negative, use abs", "Obit Plot\") n = len(y) # How many points? Obit.PlotXYPlot (plot.me, symbol, n,", "the character height. 
Use a negative value to write inside the viewport, a", "will be written as subscripts - Superscripts: Characters between a #u and #d", "world coordinates * y = Plot y in world coordinates * angle =", "Checks if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") #", "Python Plot object * cscale = new character size (integer multiple of the", "than parallel to it. * disp = The displacement of the character string", "not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") n", "SSIZE (int) Scaling factor for symbols(default = 1) LWIDTH (int) Line width (default", "at the top left at 1 and increases along rows and columns *", "number is chosen. * yopt = string of options for Y (vertical) axis", "filled star == =============== * err = ObitErr error stack \"\"\" ################################################################ #", "for the Y axis CSIZE (int) Scaling factor for characters(default = 1) SSIZE", "to none), max 120 XLABEL (string) Label for horizontal axis (defaults to none)", "MAGENTA = 13 SALMON = 14 WHITE = 15 def newOPlot(name, err, output=\"None\",", "(C) 2006,2016,2019 # Associated Universities, Inc. Washington DC, USA. # # This program", "# # Correspondence concerning this software should be addressed as follows: # Internet", "interface to a plot Image Members with python interfaces: ======== ======================================= InfoList used", "x-coordinate of the new pen position. 
* y2 = world y-coordinate of the", "y-coordinates of the vertices * fill = Fill pattern, plot package dependent *", "to none) YOPT (string) Options for vertical axis (default \"BCNTS\") See PDrawAxes for", "* err = ObitErr error stack Optional parameters on plot InfoList: ======= ========", "in the unconventional location above the viewport (X) or to the right of", "use for plotting values in the range [1,12] are usable if negative, use", "=============================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST", "to plot * ra = list of RAs (deg) * dec = list", "sub numbering starts at the top left at 1 and increases along rows", "for characters * plot = Python Plot object * cscale = new character", "the character string from the specified edge of the viewport, measured outwards from", "a circle. * plot = Python Plot object * x = World x-coordinate", "10 filled square 11 filled circle 12 filled star == =============== * err", "downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8 plplot:vertical lines == ===============", "err): \"\"\" Simple XY Plot Plot X vs Y using symbol. Plot should", "names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET,", "Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me) # end PXYErr def PContour", "nxsub=0, the number is chosen. * yopt = string of options for Y", "at at (x,y). Other values between 0 and 1 give intermediate placing, but", "Inc., 675 Massachusetts Ave, Cambridge, # MA 02139, USA. 
# # Correspondence concerning", "of the text in deg, 0=horizontal * just = Controls justification of the", "radius = World coordinate radius * err = ObitErr error stack \"\"\" ################################################################", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetCharSize (plot.me,", "a copy of the GNU General Public # License along with this program;", "Internet email: <EMAIL>. # Postal address: <NAME> # National Radio Astronomy Observatory #", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me)", "import math class OPlot(Obit.OPlot): \"\"\" Python Obit interface to display server This class", "world x-coordinate at the bottom left corner of the viewport. * xmax =", "= Python Plot object * n = number of vertices * x =", "12 filled star == ================= * x = Independent variable, if None use", "at the top right corner of the viewport (note YMAX may be less", "string along the specified edge of the viewport, as a fraction of the", "err.me) # end PXYPlot def PXYOver (plot, symbol, x, y, err): \"\"\" Overplot", "xmax = the world x-coordinate at the top right corner of the viewport", "* plot = Python Plot object * x1 = world x-coordinate of the", "Plot object * xmin = the world x-coordinate at the bottom left corner", "filled triangle 10 filled square 11 filled circle 12 filled star == ===============", "error in y * err = ObitErr error stack Optional parameters on plot", "lines, curves, text or symbols may be added. When all has been added", "plot InfoList ====== ===== ============================================ CSIZE (int) Scaling factor for characters(default = 1)", "coordinates; 10 draw box and label X-axis logarithmically; 20 draw box and label", "Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me) # end PRelText def PDrawLine (plot,", "alpha. 
- Subscripts: Characters between a #d and #u will be written as", "y-coordinate of the new pen position. * err = ObitErr error stack \"\"\"", "RED = 1 YELLOW = 2 GREEN = 3 AQUAMARINE = 4 BLACK", "be used in text strings: - Greek letters, A #g immediately prior to", "# out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out # end PGetList def", "to the Free # Software Foundation, Inc., 675 Massachusetts Ave, Cambridge, # MA", "Subscripts: Characters between a #d and #u will be written as subscripts -", "disp, coord, fjust, text, err.me) # end PRelText def PDrawLine (plot, x1, y1,", "self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name", "divide the major coordinate interval into. If xtick=0.0 or nxsub=0, the number is", "a Python Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill,", "used, e.g. #ga will be a lower case alpha. - Subscripts: Characters between", "Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYOver (plot.me, symbol,", "stack Optional parameters on plot InfoList: ====== ======== ================================================== XMAX (float) maximum X", "PGrayScale, or PContour. Then additional lines, curves, text or symbols may be added.", "of image Place cross at positions. Plot should be finalized and displayed with", "the world x-coordinate at the top right corner of the viewport (note XMAX", "Python Plot object * lstyle = Style of line (integer multiple of the", "on plot * plot = Python Plot object * x = Plot x", "* x = Plot x in world coordinates * y = Plot y", "X value (defaults to actual value) YMAX (float) maximum Y value (defaults to", "the symbol * y = world y-coordinate of the center of the symbol", "vertical axis (defaults to none) YOPT (string) Options for vertical axis (default \"BCNTS\")", "yopt = string of options for Y (vertical) axis of plot. 
Coding is", "# end PText def PRelText (plot, side, disp, coord, fjust, text, err): \"\"\"", "axis, err): \"\"\" Define plotting area * plot = Python Plot object *", "above the viewport (X) or to the right of the viewport (Y). P", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y,", "Optional parameters on plot InfoList: ====== ======= ================================================== XTICK (float) world coordinate interval", "for plotting. Values in the range [1,12] are usable. If negative, use abs", "Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotGrayScale (plot.me,", "Y=0); 2 same as axis=1, but also draw grid lines at major increments", "plot = Python Plot object * x = Plot x in world coordinates", "(plot.me, symbol, n, x, y, err.me) # end PXYOver def PXYErr (plot, symbol,", "5 WHEAT = 6 GRAY = 7 BROWN = 8 BLUE = 9", "new pen position. * y1 = world y-coordinate of the new pen position.", "====== ======== =============================================== XMAX (float) maximum X value (defaults to actual value) XMIN", "If xtick=0.0 or nxsub=0, the number is chosen. [def 0] YTICK (float) like", "* y = Array of world y-coordinates of points * err = ObitErr", "xmin = the world x-coordinate at the bottom left corner of the viewport.", "value and connect points. == =============== 0 line only 1 dot 2 plus", "world y-coordinate of the new pen position. * x2 = world x-coordinate of", "Edgemont Road # Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow class to", "is chosen. [def 0] YTICK (float) like xtick for the Y axis. NYSUB", "viewport. 
If FJUST = 0.0, the left-hand end of the string will be", "not, write to the Free # Software Foundation, Inc., 675 Massachusetts Ave, Cambridge,", "= Fill pattern, plot package dependent * values in the range [0,8] are", "= 0 RED = 1 YELLOW = 2 GREEN = 3 AQUAMARINE =", "such as PContour advance the page * plot = Python Plot object *", "self.__dict__[name] = value def __getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if", "none) YOPT (string) Options for vertical axis (default \"BCNTS\") See PDrawAxes for details.", "parameters on plot InfoList: ====== ======== ================================================== XMAX (float) maximum X value (defaults", "honored * lev = basic contour level (def 0.1 peak) * cntfac =", "err = ObitErr error stack Optional parameters on plot InfoList: ====== ======= ==================================================", "len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me) # end PDrawCurve def PDrawCircle (plot,", "err.me) # end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set scaling for characters", "y1, x2, y2, err): \"\"\" Draw a line. * plot = Python Plot", "x = Array of world x-coordinates of points * y = Array of", "General Public License as # published by the Free Software Foundation; either version", "has been added to the plot, use PShow to finalize it. Notes: on", "plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) # end PDrawLine def PDrawCurve", "name = name desired for object (labeling purposes) * err = Python Obit", "* just = if JUST=1, the scales of the x and y axes", "def PText (plot, x, y, angle, just, text, err): \"\"\" Write text on", "(x,y); if JUST = 1.0, the right-hand end of the string will be", "as # published by the Free Software Foundation; either version 2 of #", "as axis=1, but also draw grid lines at major increments of the coordinates;", "subintervals to divide the major coordinate interval into. 
If xtick=0.0 or nxsub=0, the", "the plotting region must be specified using either PSetPlot, one of the XY", "isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me) #", "def PSetLineWidth (plot, lwidth, err): \"\"\" Set line width * plot = Python", "width (default = 1) ====== ===== ============================================ \"\"\" ################################################################ # Checks if not", "the world x-coordinate at the bottom left corner of the viewport. * xmax", "y axes (in world coordinates per inch) will be equal, otherwise they will", "any order: = ====================================================================== A draw Axis (X axis is horizontal line Y=0,", "at lev times powers of cntfac Plot should be finalized and displayed with", "raise TypeError(\"image MUST be a Python Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me,", "includes 'LV' or 'RV', the string is written perpendicular to the frame rather", "ymax = the world y-coordinate at the top right corner of the viewport", "world coordinate interval between major tick marks on X axis. If xtick=0.0 [def],", "the viewport. If it includes 'LV' or 'RV', the string is written perpendicular", "right corner of the viewport (note YMAX may be less than YMIN) *", "\"\"\" Simple XY Plot Plot X vs Y using symbol. Plot should be", "installations If the Obit installation uses PLPlot for plotting the following can be", "Greek equivalent to be used, e.g. #ga will be a lower case alpha.", "outside. * coord = The location of the character string along the specified", "corner of the viewport. 
* xmax = the world x-coordinate at the top", "value) XMIN (float) minimum X value (defaults to actual value) YMAX (float) maximum", "box and label X-axis logarithmically; 20 draw box and label Y-axis logarithmically; 30", "member InfoList returns InfoList * plot = Python Obit Plot object \"\"\" ################################################################", "plot * ra = list of RAs (deg) * dec = list of", "string to be plotted. Trailing spaces are ignored when justifying the string, but", "n = len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me) # end PDrawCurve def", "PContour advance the page * plot = Python Plot object * sub =", "NXSUB (int) the number of subintervals to divide the major coordinate interval into.", "label = Label for plot * image = ObitImage to plot, BLC, TRC", "number of subintervals to divide the major coordinate interval into. If xtick=0.0 or", "5 x 6 open square 7 open triangle 8 open star 9 filled", "major coordinate interval. S draw minor tick marks (Subticks). = ====================================================================== * xtick", "PDrawAxes for details. YLABEL (string) Label for vertical axis (defaults to none) YOPT", "(centered above the viewport) * err = ObitErr error stack \"\"\" ################################################################ #", "* y = Plot y in world coordinates * angle = Orientation of", "to test \"\"\" ################################################################ # Checks if not isinstance(plot, OPlot): return False return", "(plot, side, disp, coord, fjust, text, err): \"\"\" Write text on plot relative", "just, text, err.me) # end PText def PRelText (plot, side, disp, coord, fjust,", "and using the interface to a plot Image Members with python interfaces: ========", "left-hand end of the string will be placed at (x,y); if JUST =", "PDrawCircle (plot, x, y,radius, err): \"\"\" Draw a circle. * plot = Python", "the right of the viewport (Y). 
P extend (\"Project\") major tick marks outside", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "you can redistribute it and/or # modify it under the terms of the", "with PShow * plot = plot * label = Label for plot *", "marks outside the box (ignored if option I is specified) T draw major", "drawn vertically) * title = a label for the entire plot (centered above", "* side = Must include one of the characters 'B', 'L', 'T', or", "left corner of the viewport. * ymax = the world y-coordinate at the", "Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick, nysub, err.me) # end DrawAxes def PSetCharSize", "options for X (horizontal) axis of plot. Options are single letters, and may", "WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS", "PDrawCurve def PDrawCircle (plot, x, y,radius, err): \"\"\" Draw a circle. * plot", "dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me) # end", "# # You should have received a copy of the GNU General Public", "same frame, use ObitPlotXYOver * plot = plot * symbol = Symbol index", "PSetLineStyle (plot, lstyle, err): \"\"\" Set line style * plot = Python Plot", "with PShow * plot = plot * image = ObitImage to plot *", "chosen. NXSUB (int) the number of subintervals to divide the major coordinate interval", "y, err.me) # end PDrawCurve def PDrawCircle (plot, x, y,radius, err): \"\"\" Draw", "output device: ====== ========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas", "to ObitPlot class from __future__ import absolute_import from __future__ import print_function import Obit,", "* err = ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot):", "dot 2 plus 3 \\* 4 open circle 5 x 6 open square", "of the new pen position. 
* err = ObitErr error stack \"\"\" ################################################################", "plot = Python Plot object * sub = if <=0 advance page, if", "Y axis CSIZE (int) Scaling factor for characters(default = 1) SSIZE (int) Scaling", "* cscale = new character size (integer multiple of the default size). *", "Public License for more details. # # You should have received a copy", "X axis. If xtick=0.0 [def], the interval is chosen. NXSUB (int) the number", "the tick marks; ie draw them outside the viewport instead of inside. L", "object * sub = if <=0 advance page, if >0 set current subpage", "perpendicular to the frame rather than parallel to it. * disp = The", "=========================================== * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "a Polygon, possibly filled * plot = Python Plot object * n =", "TypeError(\"image MUST be a Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) #", "with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value) return", "the right-hand end of the string will be placed at at COORD. Other", "the scales of the x and y axes (in world coordinates per inch)", "usable if negative, use abs value and connect points == ================= 0 line", "plot = Python Plot object * xmin = the world x-coordinate at the", "pixels Optional parameters on plot InfoList ====== ===== ============================================ CSIZE (int) Scaling factor", "Scaling factor for symbols(default = 1) LWIDTH (int) Line width (default = 1)", "viewport, a positive value to write outside. * coord = The location of", "labels in the unconventional location above the viewport (X) or to the right", "string will be placed at at COORD. 
Other values between 0 and 1", "may be less than YMIN) * just = if JUST=1, the scales of", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPlot(plot.me,", "Colors unBLACK = 0 RED = 1 YELLOW = 2 GREEN = 3", "COORD. Other values between 0 and 1 give intermediate placing, but they are", "(float) world coordinate interval between major tick marks on X axis. If xtick=0.0", "end PText def PRelText (plot, side, disp, coord, fjust, text, err): \"\"\" Write", "of the new pen position. * y2 = world y-coordinate of the new", "(default = 1) ====== ======= ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot):", "x2, y2, err): \"\"\" Draw a line. * plot = Python Plot object", "Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\" Set", "for creating and using the interface to a plot Image Members with python", "$Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated Universities, Inc. Washington DC, USA.", "with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name): if not isinstance(self,", "\"null\" Null device ====== ========================== * bgcolor = background color index (1-15), symbolic", "plus 3 \\* 4 open circle 5 x 6 open square 7 open", "star == =============== * x = Independent variable, if None use index *", "as a fraction of the length of the edge. * just = Controls", "of the vertices * fill = Fill pattern, plot package dependent * values", "General Public # License along with this program; if not, write to the", "\"<C OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0 RED", "marks; ie draw them outside the viewport instead of inside. 
L label axis", "= ====================================================================== A draw Axis (X axis is horizontal line Y=0, Y axis", "# Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def PSetPage (plot, sub, err): \"\"\"", "if name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self): if not isinstance(self, OPlot): return", "be written as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 #", "use ObitPlotXYOver * plot = plot * symbol = Symbol index to use", "the plot (defaults to none), max 120 XLABEL (string) Label for horizontal axis", "print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") n = len(y)", "false * Plot = Python Obit Plot to test \"\"\" ################################################################ # Checks", "if name == \"me\" : return Obit.OPlot_Get_me(self.this) # Functions to return members if", "Washington DC, USA. # # This program is free software; you can redistribute", "x, y, err.me) # end PXYOver def PXYErr (plot, symbol, x, y, e,", "angle, just, text, err): \"\"\" Write text on plot * plot = Python", "(float) maximum Y value (defaults to actual value) YMIN (float) minimum Y value", "connect points. == =============== 0 line only 1 dot 2 plus 3 \\*", "1) LWIDTH (int) Line width (default = 1) JUST (int) If !=0 then", "viewport, drawn vertically) * title = a label for the entire plot (centered", "Software Foundation; either version 2 of # the License, or (at your option)", "* ymax = the world y-coordinate at the top right corner of the", "just = if JUST=1, the scales of the x and y axes (in", "err.me) # end PDrawCircle def PDrawSymbol (plot, x, y, symbol, err): \"\"\" Draw", "Top, or Right margin of the viewport. If it includes 'LV' or 'RV',", "output and background color. 
If no output is specified this information will be", "plot Image Members with python interfaces: ======== ======================================= InfoList used to pass instructions", "page Note: some functions such as PContour advance the page * plot =", "text strings: - Greek letters, A #g immediately prior to a Latin character", "\"\"\" Obit Plotting class Create a plot object using newOPlot which allows specifying", "* plot = Python Plot object * x = Plot x in world", "the viewport in units of the character height. Use a negative value to", "YOPT (string) Options for vertical axis (default \"BCNTS\") See PDrawAxes for details. XTICK", "type of output device: ====== ========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\"", "err): \"\"\" Contour plot of image Contours at lev times powers of cntfac", "= 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me) # end PDrawPoly def", "Contour plot of image Place cross at positions. Plot should be finalized and", "GIF file \"null\" Null device ====== ========================== * bgcolor = background color index", "Optional parameters on plot InfoList: ====== ======== ================================================== XMAX (float) maximum X value", "(plot, x, y, symbol, err): \"\"\" Draw a Symbol * plot = Python", "lines == =============== * err = ObitErr error stack \"\"\" ################################################################ # Checks", "symbols may be added. When all has been added to the plot, use", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow", "label for the entire plot (centered above the viewport) * err = ObitErr", "object * lwidth = Width of line (integer multiple of the default size).", "of plot. 
Options are single letters, and may be in any order: =", "PText def PRelText (plot, side, disp, coord, fjust, text, err): \"\"\" Write text", "GRAY = 7 BROWN = 8 BLUE = 9 BLUEVIOLET = 10 CYAN", "If present and true plot sqrt (pixel_value) INVERT (bool) If present and true", "value (defaults to actual value) XMIN (float) minimum X value (defaults to actual", "2 same as axis=1, but also draw grid lines at major increments of", "left at 1 and increases along rows and columns * err = ObitErr", "1) SSIZE (int) Scaling factor for symbols(default = 1) LWIDTH (int) Line width", "also draw grid lines at major increments of the coordinates; 10 draw box", "== =========================================== * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "xlabel, ylabel, title, err.me) # end PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt,", "labels in the conventional location below the viewport (X) or to the left", "return out # end newOPlot def PXYPlot (plot, symbol, x, y, err): \"\"\"", "line only 1 dot 2 plus 3 \\* 4 open circle 5 x", "OPlot): return \"Bogus Dude\"+str(self.__class__) if name == \"me\" : return Obit.OPlot_Get_me(self.this) # Functions", "the y-axis (centered to the left of the viewport, drawn vertically) * title", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end", "PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\" Set line width * plot =", "Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end PGrayScale def PMarkCross (plot, image, ra,", "'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel value [def min in image] PIX_MIN", "foreground color * plot = Python Plot object * color = color index", "Python Plot object * n = number of vertices * x = array", "def PSetColor (plot, color, err): \"\"\" Set foreground color * plot = Python", "not very useful. * text = The text string to be plotted. 
Trailing", "Python Plot object * x = world x-coordinate of the center of the", "or labels; -1 draw box only; 0 draw box and label it with", "the coordinates; 10 draw box and label X-axis logarithmically; 20 draw box and", "the viewport, a positive value to write outside. * coord = The location", "Y axis CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int) Line", "\"BCNTS\") See PDrawAxes for details. YLABEL (string) Label for vertical axis (defaults to", "= list of RAs (deg) * dec = list of Declinations (deg) *", "same as axis=0, but also draw the coordinate axes (X=0, Y=0); 2 same", "email: <EMAIL>. # Postal address: <NAME> # National Radio Astronomy Observatory # 520", "USA. # # This program is free software; you can redistribute it and/or", "free software; you can redistribute it and/or # modify it under the terms", "of output device: ====== ========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome", "# How many points? Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me) #", "ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript File (color) \"xfig\" Fig file \"png\"", "VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow class to ObitPlot class from __future__", "scaled independently. 
* axis = controls the plotting of axes, tick marks, etc:", "dependent * values in the range [0,8] are usable == =============== 0 no", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # dx", "width (default = 1) ====== ======= ================================================== \"\"\" ################################################################ # Checks if not", "plot * plot = Python Plot object * xlabel = a label for", "plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8 plplot:vertical lines == =============== * err", "= Obit.PlotGetList(plot.me) return out # end PGetList def PIsA (plot): \"\"\" Tells if", "device: ====== ========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget", "axes (X=0, Y=0); 2 same as axis=1, but also draw grid lines at", "end PSetPlot def PLabel (plot, xlabel, ylabel, title, err): \"\"\" Display plot *", "# Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me) # end PSetPlot def", "world y-coordinate of the new pen position. * err = ObitErr error stack", "to the left of the viewport, drawn vertically) * title = a label", "label for the y-axis (centered to the left of the viewport, drawn vertically)", "A draw Axis (X axis is horizontal line Y=0, Y axis is vertical", "xmax, ymin, ymax, just, axis, err): \"\"\" Define plotting area * plot =", "are usable. If negative, use abs value and connect points. 
== =============== 0", "if option I is specified) T draw major Tick marks at the major", "parameters on plot InfoList: ====== ======= ================================================== XTICK (float) world coordinate interval between", "Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def PText (plot, x, y, angle, just,", "stack Optional parameters on plot InfoList: ======= ======== ================================================= XTICK (float) world coordinate", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "# Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end PDrawSymbol def PDrawPoly (plot, x,", "that it will be useful, # but WITHOUT ANY WARRANTY; without even the", "See PDrawAxes for details. YLABEL (string) Label for vertical axis (defaults to none)", "(plot.me, image.me, n, ra, dec, size, err.me) # end PMarkCross def PShow (plot,", "xlabel = a label for the x-axis (centered below the viewport). * ylabel", "dashed, 3=dot dash, 4 = dotted, 5 = dash dot dot dot *", "= Plot y in world coordinates * angle = Orientation of the text", "Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end PDrawSymbol def", "= 6 GRAY = 7 BROWN = 8 BLUE = 9 BLUEVIOLET =", "COORD; if JUST = 1.0, the right-hand end of the string will be", "Obit.OPlot_Get_me(self.this) # Functions to return members if name==\"List\": return PGetList(self) raise AttributeError(name) def", "usable == =============== 0 no fill 1 hatched 2 crosshatched 3 plplot:lines 45", "if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance> \" +", "string parallel to the specified edge of the viewport. 
If * just =", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y,", "Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot (plot, xmin, xmax, ymin, ymax, just,", "InfoList used to pass instructions to processing Member List ======== ======================================= \"\"\" def", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2,", "X=0). B draw bottom (X) or left (Y) edge of frame. C draw", "1 dot 2 plus 3 \\* 4 open circle 5 x 6 open", "for characters(default = 1) LWIDTH (int) Line width (default = 1) ====== =======", "Obit, _Obit, InfoList, Image import math class OPlot(Obit.OPlot): \"\"\" Python Obit interface to", "of inside. L label axis Logarithmically N write Numeric labels in the conventional", "y, angle, just, text, err): \"\"\" Write text on plot * plot =", "Plot object * x1 = world x-coordinate of the new pen position. *", "y = world y-coordinate of the center of the symbol * symbol =", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "error stack Optional parameters on plot InfoList: ====== ======= ================================================== XTICK (float) world", "be a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) #", "err = ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually", "will be equal, otherwise they will be scaled independently. * axis = controls", "Associated Universities, Inc. Washington DC, USA. 
# # This program is free software;", "be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot", "Public # License along with this program; if not, write to the Free", "# end DrawAxes def PSetCharSize (plot,cscale, err): \"\"\" Set scaling for characters *", "TypeError(\"image MUST be a Python Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me,", "GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE", "on X axis. If xtick=0.0 [def], the interval is chosen. NXSUB (int) the", "plot object using newOPlot which allows specifying the output and background color. If", "cntfac = factor for spacing between contours (def sqrt(2) * err = ObitErr", "size (integer multiple of the default size). * err = ObitErr error stack", "the hope that it will be useful, # but WITHOUT ANY WARRANTY; without", "InfoList: ====== ======= ================================================== XTICK (float) world coordinate interval between major tick marks", "error stack Optional parameters on plot InfoList: ====== ======== ================================================== XMAX (float) maximum", "to a Latin character will cause the Greek equivalent to be used, e.g.", "no fill 1 hatched 2 crosshatched 3 plplot:lines 45 deg downwards 4 plplot:lines", "(X) or to the right of the viewport (Y). P extend (\"Project\") major", "PGrayScale def PMarkCross (plot, image, ra, dec, err, size=5.0): \"\"\" Mark positions on", "the box (ignored if option I is specified) T draw major Tick marks", "object * n = number of vertices * x = array of world", "coordinate interval into. If xtick=0.0 or nxsub=0, the number is chosen. 
* yopt", "object * cscale = new character size (integer multiple of the default size).", "Scaling factor for characters(default = 1) SSIZE (int) Scaling factor for symbols(default =", "(int) Line width (default = 1) ====== ===== ============================================ \"\"\" ################################################################ # Checks", "================================================== XMAX (float) maximum X value (defaults to actual value) XMIN (float) minimum", "Y axis scaling to be the same ====== ======== ================================================== \"\"\" ################################################################ #", "one of the characters 'B', 'L', 'T', or 'R' signifying the Bottom, Left,", "* dec = list of Declinations (deg) * err = ObitErr error stack", "Functions to return members if name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self): if", "if negative, use abs value and connect points == ================= 0 line only", "Python Plot object * side = Must include one of the characters 'B',", "Public License as # published by the Free Software Foundation; either version 2", "index (1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN,", "end PGrayScale def PMarkCross (plot, image, ra, dec, err, size=5.0): \"\"\" Mark positions", "Python Plot object * lwidth = Width of line (integer multiple of the", "the new pen position. * x2 = world x-coordinate of the new pen", "Astronomy Observatory # 520 Edgemont Road # Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- #", "=============== * x = Independent variable, if None use index * y =", "PIX_MIN (float) minimum pixel value [def max in image] ======= ======== ================================================= \"\"\"", "the default size). 
* err = ObitErr error stack \"\"\" ################################################################ # Checks", "* plot = Python Plot object * color = color index (1-15), symbolic", "MUST be a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me)", "MUST be a Python Obit plot\") # out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me)", "be prompted. Next, the plotting region must be specified using either PSetPlot, one", "===== ============================================ CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int) Line", "received a copy of the GNU General Public # License along with this", "scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel value [def min", "MUST be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick, nxsub, yopt, ytick,", "be a Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def", "= OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return out # end newOPlot", "= Python Plot object * lstyle = Style of line (integer multiple of", "9 filled triangle 10 filled square 11 filled circle 12 filled star ==", "line style * plot = Python Plot object * lstyle = Style of", "line. * plot = Python Plot object * x1 = world x-coordinate of", "(Xlib) \"gcw\" Gnome Canvas Widget (interacts with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\"", "A PARTICULAR PURPOSE. See the # GNU General Public License for more details.", "= Independent variable, if None use index * y = Dependent variable *", "(string) Options for horizontal axis (default \"BCNTS\") See PDrawAxes for details. YLABEL (string)", "n, x, y, err.me) # end PXYOver def PXYErr (plot, symbol, x, y,", "Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name):", "the string parallel to the specified edge of the viewport. 
If FJUST =", "display server This class is for creating and using the interface to a", "RAs (deg) * dec = list of Declinations (deg) * err = ObitErr", "usable. If negative, use abs value and connect points. == =============== 0 line", "interval is chosen. NXSUB (int) the number of subintervals to divide the major", "3 AQUAMARINE = 4 BLACK = 5 WHEAT = 6 GRAY = 7", "= plot * image = ObitImage to plot * ra = list of", "= Python Plot object * lwidth = Width of line (integer multiple of", "plot * symbol = Symbol index to use for plotting values in the", "def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err): \"\"\" Draw axes for", "symbol, n, x, y, e, err.me) # end PXYErr def PContour (plot, label,", "to sub numbering starts at the top left at 1 and increases along", "of frame. C draw top (X) or right (Y) edge of frame. G", "= a label for the entire plot (centered above the viewport) * err", "and displayed with PShow * plot = plot * symbol = Symbol index", "using newOPlot which allows specifying the output and background color. If no output", "# end PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\" Set line style *", "many points? Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me) # end PXYErr", "position. * x2 = world x-coordinate of the new pen position. * y2", "TITLE (string) Label for the plot (defaults to none), max 120 XLABEL (string)", "a curve. * plot = Python Plot object * x = Array of", "coordinates; 1 same as axis=0, but also draw the coordinate axes (X=0, Y=0);", "option) any later version. 
# # This program is distributed in the hope", "y = Array of world y-coordinates of points * err = ObitErr error", "and displayed with PShow * plot = plot * image = ObitImage to", "(float) maximum pixel value [def min in image] PIX_MIN (float) minimum pixel value", "MUST be a Python Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x,", "value (defaults to actual value) YMIN (float) minimum Y value (defaults to actual", "factor for characters(default = 1) SQRT (bool) If present and true plot sqrt", "(plot, label, image, err): \"\"\" Gray Scale plot of image Gray Scales plot", "0=horizontal * just = Controls justification of the string parallel to the specified", "Plot\") n = len(y) # How many points? Obit.PlotXYOver (plot.me, symbol, n, x,", "= ObitErr error stack Optional parameters on plot InfoList: ======= ======== ================================================= XTICK", "of cross in pixels Optional parameters on plot InfoList ====== ===== ============================================ CSIZE", "if >0 set current subpage to sub numbering starts at the top left", "been added to the plot, use PShow to finalize it. Notes: on text", "to actual value) TITLE (string) Label for the plot (defaults to none), max", "# end PXYOver def PXYErr (plot, symbol, x, y, e, err): \"\"\" Simple", "the number is chosen. * yopt = string of options for Y (vertical)", "How many points? Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me) # end PXYPlot", "bottom (X) or left (Y) edge of frame. C draw top (X) or", "only; 0 draw box and label it with coordinates; 1 same as axis=0,", "box and label both axes logarithmically. == =========================================== * err = ObitErr error", "force X and Y axis scaling to be the same ====== ======== ===============================================", "units of the character height. 
Use a negative value to write inside the", "plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me) # end PSetPlot", "* just = 0.0, the left-hand end of the string will be placed", "X-axis logarithmically; 20 draw box and label Y-axis logarithmically; 30 draw box and", "list of Declinations (deg) * err = ObitErr error stack * size =", "center of the symbol * y = world y-coordinate of the center of", "fraction of the length of the edge. * just = Controls justification of", "can be used in text strings: - Greek letters, A #g immediately prior", "viewport, as a fraction of the length of the edge. * just =", "circle 12 filled star == =============== * err = ObitErr error stack \"\"\"", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me)", "Python Plot object * x = Plot x in world coordinates * y", "and label X-axis logarithmically; 20 draw box and label Y-axis logarithmically; 30 draw", "If no output is specified this information will be prompted. Next, the plotting", "less than XMIN). * ymin = the world y-coordinate at the bottom left", "into. If xtick=0.0 or nxsub=0, the number is chosen. [def 0] YTICK (float)", "downwards 4 plplot:lines 30 deg upwards 5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical", "like xtick for the Y axis. * nysub = like nxsub for the", "coord, fjust, text, err): \"\"\" Write text on plot relative to port *", "0 RED = 1 YELLOW = 2 GREEN = 3 AQUAMARINE = 4", "of the default size). * err = ObitErr error stack \"\"\" ################################################################ #", "PXYOver (plot, symbol, x, y, err): \"\"\" Overplot X vs Y Overplot X", "placed at COORD; if JUST = 0.5, the center of the string will", "Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) # end PDrawCircle", "edge of frame. 
C draw top (X) or right (Y) edge of frame.", "======== ================================================= XTICK (float) world coordinate interval between major tick marks on X", "plot\") # n = len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me) # end", "the Free Software Foundation; either version 2 of # the License, or (at", "or PXYErr) PGrayScale, or PContour. Then additional lines, curves, text or symbols may", "MUST be a Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor", "= like xtick for the Y axis. * nysub = like nxsub for", "the Obit installation uses PLPlot for plotting the following can be used in", "set current subpage to sub numbering starts at the top left at 1", "information will be prompted. Next, the plotting region must be specified using either", "= color index (1-15), symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK,", "with coordinates; 1 same as axis=0, but also draw the coordinate axes (X=0,", "lines 8 plplot:vertical lines == =============== * err = ObitErr error stack \"\"\"", "coordinates * y = Plot y in world coordinates * angle = Orientation", "test \"\"\" ################################################################ # Checks if not isinstance(plot, OPlot): return False return Obit.OPlotIsA(plot.me)!=0", "value def __getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name ==", "the new pen position. * y2 = world y-coordinate of the new pen", "PContour (plot, label, image, lev, cntfac, err): \"\"\" Contour plot of image Contours", "ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise", "'B', 'L', 'T', or 'R' signifying the Bottom, Left, Top, or Right margin", "equivalent to be used, e.g. #ga will be a lower case alpha. 
-", "* y = World y-coordinate of center * radius = World coordinate radius", "world x-coordinate of the new pen position. * y1 = world y-coordinate of", "horizontal line Y=0, Y axis is vertical line X=0). B draw bottom (X)", "= 0.5, the center of the string will be placed at COORD; if", "err, size=5.0): \"\"\" Mark positions on Contour plot of image Place cross at", "the viewport, as a fraction of the length of the edge. * just", "err = ObitErr error stack Optional parameters on plot InfoList: ====== ======== ==================================================", "If xtick=0.0, the interval is chosen. * nxsub = The number of subintervals", "n = number of vertices * x = array of world x-coordinates of", "maximum X value (defaults to actual value) XMIN (float) minimum X value (defaults", "= The displacement of the character string from the specified edge of the", "* fill = Fill pattern, plot package dependent * values in the range", "* symbol = Symbol index to use for plotting values in the range", "n, x, y, err.me) # end PDrawCurve def PDrawCircle (plot, x, y,radius, err):", "err): \"\"\" Set or advance sub page Note: some functions such as PContour", "= world x-coordinate of the center of the symbol * y = world", "======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if", "for vertical axis (defaults to none) YOPT (string) Options for vertical axis (default", "is chosen. NXSUB (int) the number of subintervals to divide the major coordinate", "import absolute_import from __future__ import print_function import Obit, _Obit, InfoList, Image import math", "stack Optional parameters on plot InfoList ====== ======== =============================================== XMAX (float) maximum X", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth,", "of the default size). 
1 = continious, 2 = dashed, 3=dot dash, 4", "y, err): \"\"\" Draw a curve. * plot = Python Plot object *", "Then additional lines, curves, text or symbols may be added. When all has", "Python Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size,", "box and label it with coordinates; 1 same as axis=0, but also draw", "ObitErr error stack Optional parameters on plot InfoList: ====== ======= ================================================== XTICK (float)", "(plot.me, n, x, y, err.me) # end PDrawCurve def PDrawCircle (plot, x, y,radius,", "of the viewport. If FJUST = 0.0, the left-hand end of the string", "Tick marks at the major coordinate interval. S draw minor tick marks (Subticks).", "err.me) # end PXYErr def PContour (plot, label, image, lev, cntfac, err): \"\"\"", "= ObitImage to plot, BLC, TRC on info member honored * lev =", "= Dependent variable * e = if nonNone, error in y * err", "4 = dotted, 5 = dash dot dot dot * err = ObitErr", "= 12 MAGENTA = 13 SALMON = 14 WHITE = 15 def newOPlot(name,", "dash, 4 = dotted, 5 = dash dot dot dot * err =", "* nxsub = The number of subintervals to divide the major coordinate interval", "using symbol. Plot should be finalized and displayed with PShow * plot =", "y, radius, err.me) # end PDrawCircle def PDrawSymbol (plot, x, y, symbol, err):", "(default \"BCNTS\") See PDrawAxes for details. YLABEL (string) Label for vertical axis (defaults", "(pixel_value) INVERT (bool) If present and true ionvert colors COLOR (string) Color scheme", "ObitPlot returns true Or false * Plot = Python Obit Plot to test", "horizontal axis (default \"BCNTS\") See PDrawAxes for details. YLABEL (string) Label for vertical", "points? Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me) # end PXYPlot def PXYOver", "ra, dec, err, size=5.0): \"\"\" Mark positions on Contour plot of image Place", "* plot = Python Plot object * xopt = string of options for", "from the viewport in units of the character height. 
Use a negative value", "Draw a line. * plot = Python Plot object * x1 = world", "ymax, just, axis, err): \"\"\" Define plotting area * plot = Python Plot", "return members if name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self): if not isinstance(self,", "vs Y using symbol. Plot should be finalized and displayed with PShow This", "PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx", "minimum Y value (defaults to actual value) TITLE (string) Label for the plot", "plotting region must be specified using either PSetPlot, one of the XY plotting", "def PSetCharSize (plot,cscale, err): \"\"\" Set scaling for characters * plot = Python", "end PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\" Set line style * plot", "placing, but they are not very useful. * text = The text string", "err): \"\"\" Simple XY Plot with error bars Plot X vs Y using", "'T', or 'R' signifying the Bottom, Left, Top, or Right margin of the", "# modify it under the terms of the GNU General Public License as", "PDrawSymbol def PDrawPoly (plot, x, y, fill, err): \"\"\" Draw a Polygon, possibly", "plot = Python Plot object * x = world x-coordinate of the center", "XLABEL (string) Label for horizontal axis (defaults to none) XOPT (string) Options for", "GREEN = 3 AQUAMARINE = 4 BLACK = 5 WHEAT = 6 GRAY", "* x = world x-coordinate of the center of the symbol * y", "WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err =", "(string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel value", "General Public License for more details. 
# # You should have received a", "one of the XY plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour.", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle,", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPage(plot.me,", "processing Member List ======== ======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name)", "Python Obit interface to display server This class is for creating and using", "details. YLABEL (string) Label for vertical axis (defaults to none) YOPT (string) Options", "will be placed at COORD; if JUST = 1.0, the right-hand end of", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def PText", "List ======== ======================================= \"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self,", "at (x,y); if JUST = 0.5, the center of the string will be", "x, y, e, err): \"\"\" Simple XY Plot with error bars Plot X", "to divide the major coordinate interval into. If xtick=0.0 or nxsub=0, the number", "are not very useful. 
* text = The text string to be plotted.", "= 1) LWIDTH (int) Line width (default = 1) ====== ======= ================================================== \"\"\"", "parameters on plot InfoList ====== ======== =============================================== XMAX (float) maximum X value (defaults", "the center of the string will be placed at COORD; if JUST =", "plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) # end PDrawCircle def PDrawSymbol", "in the range [0,8] are usable == =============== 0 no fill 1 hatched", "symbols(default = 1) LWIDTH (int) Line width (default = 1) JUST (int) If", "PGetList (plot): \"\"\" Return the member InfoList returns InfoList * plot = Python", "National Radio Astronomy Observatory # 520 Edgemont Road # Charlottesville, VA 22903-2475 USA", "out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out # end PGetList def PIsA", "circle 5 x 6 open square 7 open triangle 8 open star 9", "text in deg, 0=horizontal * just = Controls justification of the string parallel", "axis of plot. Coding is the same as for xopt. * ytick =", "OPlot(Obit.OPlot): \"\"\" Python Obit interface to display server This class is for creating", "a Python Obit Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec,", "n, x, y, err.me) # end PXYPlot def PXYOver (plot, symbol, x, y,", "GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx = Number", "y-coordinate of the new pen position. * x2 = world x-coordinate of the", "axes logarithmically. == =========================================== * err = ObitErr error stack \"\"\" ################################################################ #", "a Latin character will cause the Greek equivalent to be used, e.g. #ga", "marks (Subticks). 
= ====================================================================== * xtick = World coordinate interval between major tick", "y, fill, err): \"\"\" Draw a Polygon, possibly filled * plot = Python", "====== ======== =============================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise", "of the viewport (note XMAX may be less than XMIN). * ymin =", "(color) \"xfig\" Fig file \"png\" PNG file \"jpeg\" JPEG file \"gif\" GIF file", "from the specified edge of the viewport, measured outwards from the viewport in", "Gray Scale plot of image Gray Scales plot of image Plot should be", "the specified edge of the viewport, measured outwards from the viewport in units", "object * x = World x-coordinate of center * y = World y-coordinate", "of the center of the symbol * symbol = Symbol index to use", "of the string will be placed at at (x,y). Other values between 0", "================= * x = Independent variable, if None use index * y =", "options for Y (vertical) axis of plot. Coding is the same as for", "raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me)", "= the world x-coordinate at the bottom left corner of the viewport. *", "* x2 = world x-coordinate of the new pen position. * y2 =", "8 BLUE = 9 BLUEVIOLET = 10 CYAN = 11 TURQUOISE = 12", "write numeric labels in the unconventional location above the viewport (X) or to", "15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and initialize", "or (at your option) any later version. # # This program is distributed", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord,", "a plot, label * plot = Python Plot object * xopt = string", "and label both axes logarithmically. 
== =========================================== * err = ObitErr error stack", "err): \"\"\" Write text on plot * plot = Python Plot object *", "(int) Scaling factor for characters(default = 1) SSIZE (int) Scaling factor for symbols(default", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me,", "The text string to be plotted. Trailing spaces are ignored when justifying the", "upwards 5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines", "for characters(default = 1) SQRT (bool) If present and true plot sqrt (pixel_value)", "raise TypeError(\"plot MUST be a Python Obit plot\") # n = len(x) Obit.PlotDrawCurve", "more details. # # You should have received a copy of the GNU", "OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0 RED =", "(plot,cscale, err): \"\"\" Set scaling for characters * plot = Python Plot object", "GNU General Public License for more details. # # You should have received", "sub page Note: some functions such as PContour advance the page * plot", "of the new pen position. * y1 = world y-coordinate of the new", "object * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # n = len(x)", "in any order: = ====================================================================== A draw Axis (X axis is horizontal line", "* plot = plot * symbol = Symbol index to use for plotting.", "Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me) # end PXYPlot def PXYOver (plot,", "4 plplot:lines 30 deg upwards 5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines", "PLPlot installations If the Obit installation uses PLPlot for plotting the following can", "will be scaled independently. * axis = controls the plotting of axes, tick", "index to use for plotting. Values in the range [1,12] are usable. 
If", "(plot, xlabel, ylabel, title, err): \"\"\" Display plot * plot = Python Plot", "# Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetCharSize", "corner of the viewport (note XMAX may be less than XMIN). * ymin", "the characters 'B', 'L', 'T', or 'R' signifying the Bottom, Left, Top, or", "in the range [1,12] are usable if negative, use abs value and connect", "= Dependent variable * err = ObitErr error stack \"\"\" ################################################################ # Checks", "GNU General Public License as # published by the Free Software Foundation; either", "array of world x-coordinates of the vertices * y = array of world", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt,", "1 give intermediate placing, but they are not very useful. * text =", "How many points? Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me) # end PXYOver", "marks on X axis. If xtick=0.0 [def], the interval is chosen. NXSUB (long)", "* ytick = like xtick for the Y axis. * nysub = like", "Overplot X vs Y using symbol. Plot should be finalized and displayed with", "====== ======== ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise", "for the Y axis * err = ObitErr error stack \"\"\" ################################################################ #", "a Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) # end", "not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotContour", "err): \"\"\" Draw a Polygon, possibly filled * plot = Python Plot object", "(Y) edge of frame. 
G draw Grid of vertical (X) or horizontal (Y)", "written perpendicular to the frame rather than parallel to it. * disp =", "of the vertices * y = array of world y-coordinates of the vertices", "of world x-coordinates of the vertices * y = array of world y-coordinates", "= 14 WHITE = 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ):", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub,", "and true ionvert colors COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY'", "be a Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) #", "Place cross at positions. Plot should be finalized and displayed with PShow *", "a label for the x-axis (centered below the viewport). * ylabel = a", "circle 12 filled star == ================= * x = Independent variable, if None", "X (horizontal) axis of plot. Options are single letters, and may be in", "6 open square 7 open triangle 8 open star 9 filled triangle 10", "1) JUST (int) If !=0 then force X and Y axis scaling to", "color index (1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY,", "draw bottom (X) or left (Y) edge of frame. C draw top (X)", "justification of the string parallel to the specified edge of the viewport. If", "points. == =============== 0 line only 1 dot 2 plus 3 \\* 4", "extend (\"Project\") major tick marks outside the box (ignored if option I is", "Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize def PSetLineWidth (plot,", "numbering starts at the top left at 1 and increases along rows and", "finalized and displayed with PShow * plot = plot * symbol = Symbol", "of line (integer multiple of the default size). 
* err = ObitErr error", "Obit plot\") # out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out # end", "horizontal (Y) lines I Invert the tick marks; ie draw them outside the", "return out # end PGetList def PIsA (plot): \"\"\" Tells if the input", "deg, 0=horizontal * just = Controls justification of the string parallel to the", "raise TypeError(\"plot MUST be a Python Obit plot\") # out = InfoList.InfoList() out.me", "nonNone, error in y * err = ObitErr error stack Optional parameters on", "object \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST be a", "err = ObitErr error stack Optional parameters on plot InfoList: ======= ======== =================================================", ": # Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the", "X and Y axis scaling to be the same ====== ======== =============================================== \"\"\"", "max 120 XLABEL (string) Label for horizontal axis (defaults to none) XOPT (string)", "the frame rather than parallel to it. * disp = The displacement of", "2 of # the License, or (at your option) any later version. #", "the viewport. If FJUST = 0.0, the left-hand end of the string will", "* y = Dependent variable * err = ObitErr error stack \"\"\" ################################################################", "hope that it will be useful, # but WITHOUT ANY WARRANTY; without even", "PostScript File (monochrome) \"psc\" PostScript File (color) \"xfig\" Fig file \"png\" PNG file", "len(y) # How many points? Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me)", "server This class is for creating and using the interface to a plot", "axis is vertical line X=0). B draw bottom (X) or left (Y) edge", "frame. 
G draw Grid of vertical (X) or horizontal (Y) lines I Invert", "10 filled square 11 filled circle 12 filled star == ================= * x", "with this program; if not, write to the Free # Software Foundation, Inc.,", "contours (def sqrt(2) * err = ObitErr error stack Optional parameters on plot", "if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\")", "err): \"\"\" Display plot * plot = Python Plot object * xlabel =", "len(y) # How many points? Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me) #", "or to the left of the viewport (Y). M write numeric labels in", "installation uses PLPlot for plotting the following can be used in text strings:", "#ga will be a lower case alpha. - Subscripts: Characters between a #d", "list of RAs (deg) * dec = list of Declinations (deg) * err", "ny=1 ): \"\"\" Create and initialize an ObitPlot * name = name desired", "use abs value and connect points. == =============== 0 line only 1 dot", "def PMarkCross (plot, image, ra, dec, err, size=5.0): \"\"\" Mark positions on Contour", "no box, axes or labels; -1 draw box only; 0 draw box and", "# License along with this program; if not, write to the Free #", "write Numeric labels in the conventional location below the viewport (X) or to", "= 0.0, the left-hand end of the string will be placed at COORD;", "Y axis CSIZE (int) Scaling factor for characters(default = 1) SQRT (bool) If", "between major tick marks on X axis. If xtick=0.0 [def], the interval is", "XTICK (float) world coordinate interval between major tick marks on X axis. 
If", "y, symbol, err): \"\"\" Draw a Symbol * plot = Python Plot object", "file \"jpeg\" JPEG file \"gif\" GIF file \"null\" Null device ====== ========================== *", "image.me, err.me) # end PGrayScale def PMarkCross (plot, image, ra, dec, err, size=5.0):", "value) YMIN (float) minimum Y value (defaults to actual value) TITLE (string) Label", "A #g immediately prior to a Latin character will cause the Greek equivalent", "(at your option) any later version. # # This program is distributed in", "of image Contours at lev times powers of cntfac Plot should be finalized", "per inch) will be equal, otherwise they will be scaled independently. * axis", "plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage def PText (plot, x, y,", "numeric labels in the unconventional location above the viewport (X) or to the", "left of the viewport, drawn vertically) * title = a label for the", "plot. Options are single letters, and may be in any order: = ======================================================================", "# Charlottesville, VA 22903-2475 USA #----------------------------------------------------------------------- # Python shadow class to ObitPlot class", "box and label Y-axis logarithmically; 30 draw box and label both axes logarithmically.", "of the string will be placed at at COORD. 
Other values between 0", "filled circle 12 filled star == =============== * err = ObitErr error stack", "filled square 11 filled circle 12 filled star == ================= * x =", "======= ======== ================================================= \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise", "draw box and label it with coordinates; 1 same as axis=0, but also", "range [1,12] are usable if negative, use abs value and connect points ==", "actual value) YMAX (float) maximum Y value (defaults to actual value) YMIN (float)", "true ionvert colors COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX", "info member honored * lev = basic contour level (def 0.1 peak) *", "coordinates * angle = Orientation of the text in deg, 0=horizontal * just", "measured outwards from the viewport in units of the character height. Use a", "PIsA (plot): \"\"\" Tells if the input is a Python ObitPlot returns true", "* plot = Python Plot object * err = ObitErr error stack \"\"\"", "AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE *", "BLACK = 5 WHEAT = 6 GRAY = 7 BROWN = 8 BLUE", "y-coordinate at the top right corner of the viewport (note YMAX may be", "Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me) # end PText def PRelText", "plotting values in the range [1,12] are usable if negative, use abs value", "error stack Optional parameters on plot InfoList ====== ======== =============================================== XMAX (float) maximum", "* y = Dependent variable * e = if nonNone, error in y", "Tells if the input is a Python ObitPlot returns true Or false *", "PDrawLine def PDrawCurve (plot, x, y, err): \"\"\" Draw a curve. * plot", "Python Obit Plot\") n = len(y) # How many points? 
Obit.PlotXYErr (plot.me, symbol,", "world x-coordinates of the vertices * y = array of world y-coordinates of", "Plot to test \"\"\" ################################################################ # Checks if not isinstance(plot, OPlot): return False", "of the viewport (Y). M write numeric labels in the unconventional location above", "if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name == \"me\" : return", "dot * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "PSetPage (plot, sub, err): \"\"\" Set or advance sub page Note: some functions", "(defaults to none) XOPT (string) Options for horizontal axis (default \"BCNTS\") See PDrawAxes", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) #", "just = Controls justification of the string parallel to the specified edge of", "Symbol index to use for plotting values in the range [1,12] are usable", "viewport (Y). M write numeric labels in the unconventional location above the viewport", "(plot.me, x, y, radius, err.me) # end PDrawCircle def PDrawSymbol (plot, x, y,", "be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of", "stack * size = size of cross in pixels Optional parameters on plot", "30 draw box and label both axes logarithmically. == =========================================== * err =", "can redistribute it and/or # modify it under the terms of the GNU", "be a Python Obit Plot\") n = len(y) # How many points? 
Obit.PlotXYPlot", "label, image, lev, cntfac, err): \"\"\" Contour plot of image Contours at lev", "and type of output device: ====== ========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib)", "====================================================================== A draw Axis (X axis is horizontal line Y=0, Y axis is", "then force X and Y axis scaling to be the same ====== ========", "color, err): \"\"\" Set foreground color * plot = Python Plot object *", "axis scaling to be the same ====== ======== ================================================== \"\"\" ################################################################ # Checks", "TypeError(\"plot MUST be a Python Obit plot\") # n = len(x) Obit.PlotDrawCurve (plot.me,", "def PRelText (plot, side, disp, coord, fjust, text, err): \"\"\" Write text on", "* x = array of world x-coordinates of the vertices * y =", "points == ================= 0 line only 1 dot 2 plus 3 \\* 4", "* plot = Python Plot object * lwidth = Width of line (integer", "use index * y = Dependent variable * err = ObitErr error stack", "for xopt. * ytick = like xtick for the Y axis. * nysub", "cross in pixels Optional parameters on plot InfoList ====== ===== ============================================ CSIZE (int)", "written as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated", "logarithmically; 30 draw box and label both axes logarithmically. == =========================================== * err", "def PDrawCurve (plot, x, y, err): \"\"\" Draw a curve. * plot =", "= string of options for Y (vertical) axis of plot. Coding is the", "TURQUOISE, MAGENTA, SALMON, WHITE * err = ObitErr error stack \"\"\" ################################################################ #", "significant. 
* err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol,", "* Plot = Python Obit Plot to test \"\"\" ################################################################ # Checks if", "end of the string will be placed at at COORD. Other values between", "the GNU General Public # License along with this program; if not, write", "nxsub for the Y axis * err = ObitErr error stack \"\"\" ################################################################", "a Python Obit plot\") # n = len(x) Obit.PlotDrawCurve (plot.me, n, x, y,", "45 deg downwards 4 plplot:lines 30 deg upwards 5 plplot:lines 30 deg downwards", "end of the string will be placed at at (x,y). Other values between", "in image] PIX_MIN (float) minimum pixel value [def max in image] ======= ========", "square 7 open triangle 8 open star 9 filled triangle 10 filled square", "error stack \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST be", "X axis. If xtick=0.0, the interval is chosen. * nxsub = The number", "to use for plotting values in the range [1,12] are usable if negative,", "5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8", "Simple XY Plot with error bars Plot X vs Y using symbol and", "Y using symbol and error bars. Plot should be finalized and displayed with", "(default \"BCNTS\") See PDrawAxes for details. XTICK (float) world coordinate interval between major", "into. If xtick=0.0 or nxsub=0, the number is chosen. * yopt = string", "axis. 
* nysub = like nxsub for the Y axis * err =", "1) ====== ======= ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__)", "MUST be a Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth", "PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick, nysub, err): \"\"\" Draw axes for a", "* lwidth = Width of line (integer multiple of the default size). *", "lstyle, err): \"\"\" Set line style * plot = Python Plot object *", "name desired for object (labeling purposes) * err = Python Obit Error/message stack", "-2 draw no box, axes or labels; -1 draw box only; 0 draw", "range [0,8] are usable == =============== 0 no fill 1 hatched 2 crosshatched", "x, y, angle, just, text, err): \"\"\" Write text on plot * plot", "Return the member InfoList returns InfoList * plot = Python Obit Plot object", "YMAX (float) maximum Y value (defaults to actual value) YMIN (float) minimum Y", "Y Overplot X vs Y using symbol. Plot should be finalized and displayed", "on text strings in PLPlot installations If the Obit installation uses PLPlot for", "TURQUOISE, MAGENTA, SALMON, WHITE * nx = Number of horizontal subpages * ny", "at (x,y). Other values between 0 and 1 give intermediate placing, but they", "world coordinates per inch) will be equal, otherwise they will be scaled independently.", "I is specified) T draw major Tick marks at the major coordinate interval.", "TypeError(\"plot MUST be a Python Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x),", "redistribute it and/or # modify it under the terms of the GNU General", "stack Optional parameters on plot InfoList: ====== ======= ================================================== XTICK (float) world coordinate", "text string to be plotted. 
Trailing spaces are ignored when justifying the string,", "err = ObitErr error stack * size = size of cross in pixels", "a Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me) #", "but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "labels; -1 draw box only; 0 draw box and label it with coordinates;", "'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel value [def min in image]", "Options are single letters, and may be in any order: = ====================================================================== A", "_Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\" : # Out with the", "= basic contour level (def 0.1 peak) * cntfac = factor for spacing", "some functions such as PContour advance the page * plot = Python Plot", "err.me) # end PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\" Set line width", "CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int) Line width (default", "# Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me) # end PRelText def PDrawLine", "__future__ import absolute_import from __future__ import print_function import Obit, _Obit, InfoList, Image import", "ymin, ymax, just, axis, err): \"\"\" Define plotting area * plot = Python", "terms of the GNU General Public License as # published by the Free", "(string) Options for vertical axis (default \"BCNTS\") See PDrawAxes for details. XTICK (float)", "plot = plot * symbol = Symbol index to use for plotting values", "tick marks on X axis. If xtick=0.0 [def], the interval is chosen. 
NXSUB", "axis * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "2 plus 3 \\* 4 open circle 5 x 6 open square 7", "be a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end", "and Y axis scaling to be the same ====== ======== ================================================== \"\"\" ################################################################", "(string) Label for horizontal axis (defaults to none) XOPT (string) Options for horizontal", "Cambridge, # MA 02139, USA. # # Correspondence concerning this software should be", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me)", "= ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__)", "the viewport (X) or to the right of the viewport (Y). P extend", "new pen position. * y2 = world y-coordinate of the new pen position.", "- Greek letters, A #g immediately prior to a Latin character will cause", "plot * plot = Python Plot object * err = ObitErr error stack", "y, e, err): \"\"\" Simple XY Plot with error bars Plot X vs", "1) SQRT (bool) If present and true plot sqrt (pixel_value) INVERT (bool) If", "__init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def", "################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit", "= dash dot dot dot * err = ObitErr error stack \"\"\" ################################################################", "the interface to a plot Image Members with python interfaces: ======== ======================================= InfoList", "to the specified edge of the viewport. If * just = 0.0, the", "the Y axis. 
NYSUB (int) like nxsub for the Y axis CSIZE (int)", "WHITE * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "* x = World x-coordinate of center * y = World y-coordinate of", "ymin = the world y-coordinate at the bottom left corner of the viewport.", "error stack \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot", "axes, tick marks, etc: == =========================================== -2 draw no box, axes or labels;", "bottom left corner of the viewport. * ymax = the world y-coordinate at", "0] YTICK (float) like xtick for the Y axis. NYSUB (int) like nxsub", "be written as subscripts - Superscripts: Characters between a #u and #d will", "= Python Plot object * err = ObitErr error stack \"\"\" ################################################################ #", "def PXYErr (plot, symbol, x, y, e, err): \"\"\" Simple XY Plot with", "and error bars. Plot should be finalized and displayed with PShow This routine", "Label for plot * image = ObitImage to plot, BLC, TRC on info", "box only; 0 draw box and label it with coordinates; 1 same as", "marks at the major coordinate interval. S draw minor tick marks (Subticks). =", "= len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me) # end PDrawCurve def PDrawCircle", "plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end PDrawSymbol def PDrawPoly (plot,", "\"me\" : # Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with", "Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me) # end PRelText", "Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me) # end", "be a Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust, text, err.me)", "* y2 = world y-coordinate of the new pen position. 
* err =", "object * xopt = string of options for X (horizontal) axis of plot.", "(defaults to actual value) YMIN (float) minimum Y value (defaults to actual value)", "times powers of cntfac Plot should be finalized and displayed with PShow *", "PSetCharSize (plot,cscale, err): \"\"\" Set scaling for characters * plot = Python Plot", "plot, BLC, TRC on info member honored * err = ObitErr error stack", "edge of the viewport. If * just = 0.0, the left-hand end of", "import print_function import Obit, _Obit, InfoList, Image import math class OPlot(Obit.OPlot): \"\"\" Python", "relative to port * plot = Python Plot object * side = Must", "def PDrawLine (plot, x1, y1, x2, y2, err): \"\"\" Draw a line. *", "pen position. * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "= a label for the y-axis (centered to the left of the viewport,", "Values in the range [1,12] are usable. If negative, use abs value and", "end PSetColor def PSetPage (plot, sub, err): \"\"\" Set or advance sub page", "end newOPlot def PXYPlot (plot, symbol, x, y, err): \"\"\" Simple XY Plot", "= The text string to be plotted. Trailing spaces are ignored when justifying", "of world y-coordinates of points * err = ObitErr error stack \"\"\" ################################################################", "to be the same ====== ======== =============================================== \"\"\" ################################################################ # Checks if not", "(X=0, Y=0); 2 same as axis=1, but also draw grid lines at major", "addressed as follows: # Internet email: <EMAIL>. 
# Postal address: <NAME> # National", "Controls justification of the string parallel to the specified edge of the viewport.", "are usable == =============== 0 no fill 1 hatched 2 crosshatched 3 plplot:lines", "= Python Obit Plot to test \"\"\" ################################################################ # Checks if not isinstance(plot,", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me,", "object * x = world x-coordinate of the center of the symbol *", "axis (default \"BCNTS\") See PDrawAxes for details. XTICK (float) world coordinate interval between", "a Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end PGrayScale def", "of the viewport. * ymax = the world y-coordinate at the top right", "plot * image = ObitImage to plot * ra = list of RAs", "data on the same frame, use ObitPlotXYOver * plot = plot * symbol", "Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit", "Declinations (deg) * err = ObitErr error stack * size = size of", "GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err = ObitErr", "xtick=0.0 or nxsub=0, the number is chosen. * yopt = string of options", "pass instructions to processing Member List ======== ======================================= \"\"\" def __init__(self, name): super(OPlot,", "lines crossed 7 plplot:horizontal lines 8 plplot:vertical lines == =============== * err =", "plot of image Place cross at positions. Plot should be finalized and displayed", "equal, otherwise they will be scaled independently. * axis = controls the plotting", "Y axis is vertical line X=0). 
B draw bottom (X) or left (Y)", "print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotContour (plot.me, label,", "string will be placed at (x,y); if JUST = 0.5, the center of", "(int) Scaling factor for characters(default = 1) SQRT (bool) If present and true", "force X and Y axis scaling to be the same ====== ======== ==================================================", "PContour. Then additional lines, curves, text or symbols may be added. When all", "= 1) ====== ===== ============================================ \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually", "TURQUOISE = 12 MAGENTA = 13 SALMON = 14 WHITE = 15 def", "displayed with PShow * plot = plot * symbol = Symbol index to", "x-coordinate at the bottom left corner of the viewport. * xmax = the", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # n", "major tick marks outside the box (ignored if option I is specified) T", "Obit Plotting class Create a plot object using newOPlot which allows specifying the", "to none) XOPT (string) Options for horizontal axis (default \"BCNTS\") See PDrawAxes for", "Mark positions on Contour plot of image Place cross at positions. Plot should", "on plot InfoList ====== ===== ============================================ CSIZE (int) Scaling factor for characters(default =", "See PDrawAxes for details. 
XTICK (float) world coordinate interval between major tick marks", "region must be specified using either PSetPlot, one of the XY plotting routines", "120 XLABEL (string) Label for horizontal axis (defaults to none) XOPT (string) Options", "specified edge of the viewport, measured outwards from the viewport in units of", "Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end PGrayScale def PMarkCross (plot,", "COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel", "* text = The text string to be plotted. Trailing spaces are ignored", "x1, y1, x2, y2, err.me) # end PDrawLine def PDrawCurve (plot, x, y,", "size, err.me) # end PMarkCross def PShow (plot, err): \"\"\" Display plot *", "viewport (note XMAX may be less than XMIN). * ymin = the world", "the string will be placed at COORD; if JUST = 0.5, the center", "Create a plot object using newOPlot which allows specifying the output and background", "later version. # # This program is distributed in the hope that it", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetColor(plot.me, color,", "========================== * bgcolor = background color index (1-15), symbolic names: BLACK, RED(default), YELLOW,", "vertical line X=0). B draw bottom (X) or left (Y) edge of frame.", "the center of the string will be placed at (x,y); if JUST =", "Write text on plot relative to port * plot = Python Plot object", "parameters on plot InfoList: ======= ======== ================================================= XTICK (float) world coordinate interval between", "=========================================== -2 draw no box, axes or labels; -1 draw box only; 0", "PNG file \"jpeg\" JPEG file \"gif\" GIF file \"null\" Null device ====== ==========================", "they are not very useful. 
* text = The text string to be", "immediately prior to a Latin character will cause the Greek equivalent to be", "#----------------------------------------------------------------------- # Python shadow class to ObitPlot class from __future__ import absolute_import from", "self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def", "in pixels Optional parameters on plot InfoList ====== ===== ============================================ CSIZE (int) Scaling", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawCircle (plot.me,", "lev = basic contour level (def 0.1 peak) * cntfac = factor for", "as axis=0, but also draw the coordinate axes (X=0, Y=0); 2 same as", "axis = controls the plotting of axes, tick marks, etc: == =========================================== -2", "Python Plot object * xopt = string of options for X (horizontal) axis", "world x-coordinate at the top right corner of the viewport (note XMAX may", "at COORD; if JUST = 0.5, the center of the string will be", "3 plplot:lines 45 deg downwards 4 plplot:lines 30 deg upwards 5 plplot:lines 30", "line width * plot = Python Plot object * lwidth = Width of", "the left-hand end of the string will be placed at (x,y); if JUST", "= 1) SSIZE (int) Scaling factor for symbols(default = 1) LWIDTH (int) Line", "of axes, tick marks, etc: == =========================================== -2 draw no box, axes or", "def PShow (plot, err): \"\"\" Display plot * plot = Python Plot object", "def __getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name == \"me\"", "be less than XMIN). * ymin = the world y-coordinate at the bottom", "scaling to be the same ====== ======== ================================================== \"\"\" ################################################################ # Checks if", "the major coordinate interval. 
S draw minor tick marks (Subticks). = ====================================================================== *", "using the interface to a plot Image Members with python interfaces: ======== =======================================", "JUST = 0.5, the center of the string will be placed at COORD;", "=============== * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python", "finalize it. Notes: on text strings in PLPlot installations If the Obit installation", "PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis, err): \"\"\" Define plotting area", "Image import math class OPlot(Obit.OPlot): \"\"\" Python Obit interface to display server This", "and initialize an ObitPlot * name = name desired for object (labeling purposes)", "(float) minimum pixel value [def max in image] ======= ======== ================================================= \"\"\" ################################################################", "of frame. 
G draw Grid of vertical (X) or horizontal (Y) lines I", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawLine(plot.me,", "end PDrawPoly def PGetList (plot): \"\"\" Return the member InfoList returns InfoList *", "and displayed with PShow This routine draws the frame and adds labels, to", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawAxes(plot.me, xopt, xtick,", "\"\"\" Set scaling for characters * plot = Python Plot object * cscale", "= ObitErr error stack Optional parameters on plot InfoList: ====== ======= ================================================== XTICK", "(plot.me, label, image.me, lev, cntfac, err.me) # end PContour def PGrayScale (plot, label,", "Obit Plot object \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST", "of the length of the edge. * just = Controls justification of the", "If it includes 'LV' or 'RV', the string is written perpendicular to the", "in world coordinates * y = Plot y in world coordinates * angle", "bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and initialize an ObitPlot * name =", "the major coordinate interval into. If xtick=0.0 or nxsub=0, the number is chosen.", "is specified) T draw major Tick marks at the major coordinate interval. 
S", "plot = plot * image = ObitImage to plot * ra = list", "1) LWIDTH (int) Line width (default = 1) ====== ===== ============================================ \"\"\" ################################################################", "less than YMIN) * just = if JUST=1, the scales of the x", "7 open triangle 8 open star 9 filled triangle 10 filled square 11", "y, err.me) # end PXYPlot def PXYOver (plot, symbol, x, y, err): \"\"\"", "(default = 1) ====== ===== ============================================ \"\"\" ################################################################ # Checks if not PIsA(plot):", "plot InfoList ====== ======== =============================================== XMAX (float) maximum X value (defaults to actual", "(plot.me, symbol, n, x, y, err.me) # end PXYPlot def PXYOver (plot, symbol,", "(plot, x, y, err): \"\"\" Draw a curve. * plot = Python Plot", "X vs Y Overplot X vs Y using symbol. Plot should be finalized", "Plot object * x = Array of world x-coordinates of points * y", "characters * plot = Python Plot object * cscale = new character size", "# $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated Universities, Inc. Washington DC,", "=============================================== XMAX (float) maximum X value (defaults to actual value) XMIN (float) minimum", "Plot x in world coordinates * y = Plot y in world coordinates", "11 filled circle 12 filled star == =============== * x = Independent variable,", "of the viewport. * xmax = the world x-coordinate at the top right", "right of the viewport (Y). P extend (\"Project\") major tick marks outside the", "error bars. 
Plot should be finalized and displayed with PShow This routine draws", "====== ========================== * bgcolor = background color index (1-15), symbolic names: BLACK, RED(default),", "plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize def PSetLineWidth (plot, lwidth,", "a fraction of the length of the edge. * just = Controls justification", "from __future__ import absolute_import from __future__ import print_function import Obit, _Obit, InfoList, Image", "1.0, the right-hand end of the string will be placed at at COORD.", "ObitPlot * name = name desired for object (labeling purposes) * err =", "size). * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "PURPOSE. See the # GNU General Public License for more details. # #", "finalized and displayed with PShow * plot = plot * label = Label", "vertical (X) or horizontal (Y) lines I Invert the tick marks; ie draw", "================= 0 line only 1 dot 2 plus 3 \\* 4 open circle", "be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end", "Options for vertical axis (default \"BCNTS\") See PDrawAxes for details. XTICK (float) world", "a label for the entire plot (centered above the viewport) * err =", "for horizontal axis (default \"BCNTS\") See PDrawAxes for details. YLABEL (string) Label for", "* image = ObitImage to plot, BLC, TRC on info member honored *", "details. XTICK (float) world coordinate interval between major tick marks on X axis.", "or to the right of the viewport (Y). P extend (\"Project\") major tick", "of world x-coordinates of points * y = Array of world y-coordinates of", "the License, or (at your option) any later version. 
# # This program", "\"\"\" Return the member InfoList returns InfoList * plot = Python Obit Plot", "X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts with ObitTalk) \"ps\" PostScript File (monochrome)", "Label for the plot (defaults to none), max 120 XLABEL (string) Label for", "for spacing between contours (def sqrt(2) * err = ObitErr error stack Optional", "viewport (Y). P extend (\"Project\") major tick marks outside the box (ignored if", "side, disp, coord, fjust, text, err): \"\"\" Write text on plot relative to", "size = size of cross in pixels Optional parameters on plot InfoList ======", "the same ====== ======== =============================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually", "on info member honored * err = ObitErr error stack Optional parameters on", "(plot, x, y, fill, err): \"\"\" Draw a Polygon, possibly filled * plot", "* xmin = the world x-coordinate at the bottom left corner of the", "values in the range [0,8] are usable == =============== 0 no fill 1", "same as axis=1, but also draw grid lines at major increments of the", "lwidth, err.me) # end PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\" Set line", "plot relative to port * plot = Python Plot object * side =", "Gnome Canvas Widget (interacts with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript File", "symbol * y = world y-coordinate of the center of the symbol *", "the Y axis. * nysub = like nxsub for the Y axis *", "Width of line (integer multiple of the default size). * err = ObitErr", "def PGetList (plot): \"\"\" Return the member InfoList returns InfoList * plot =", "interval between major tick marks on X axis. 
If xtick=0.0 [def], the interval", "Dependent variable * err = ObitErr error stack Optional parameters on plot InfoList", "raise TypeError(\"plot MUST be a Python Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__)", "Gray Scales plot of image Plot should be finalized and displayed with PShow", "dec = list of Declinations (deg) * err = ObitErr error stack *", "to plot, BLC, TRC on info member honored * err = ObitErr error", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2,", "horizontal subpages * ny = Number of vertical subpages \"\"\" ################################################################ out =", "points * y = Array of world y-coordinates of points * err =", "print_function import Obit, _Obit, InfoList, Image import math class OPlot(Obit.OPlot): \"\"\" Python Obit", "# Copyright (C) 2006,2016,2019 # Associated Universities, Inc. Washington DC, USA. # #", "Image Members with python interfaces: ======== ======================================= InfoList used to pass instructions to", "T draw major Tick marks at the major coordinate interval. S draw minor", "PXYErr def PContour (plot, label, image, lev, cntfac, err): \"\"\" Contour plot of", "cntfac, err.me) # end PContour def PGrayScale (plot, label, image, err): \"\"\" Gray", "for object (labeling purposes) * err = Python Obit Error/message stack * output", "label for the x-axis (centered below the viewport). * ylabel = a label", "How many points? Obit.PlotXYErr (plot.me, symbol, n, x, y, e, err.me) # end", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me,", "========================== \"None\" interactive prompt \"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts with", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale,", "like xtick for the Y axis. NYSUB (int) like nxsub for the Y", "the number is chosen. 
[def 0] YTICK (float) like xtick for the Y", "the text in deg, 0=horizontal * just = Controls justification of the string", "== =========================================== -2 draw no box, axes or labels; -1 draw box only;", "text, err.me) # end PRelText def PDrawLine (plot, x1, y1, x2, y2, err):", "Python Obit Error/message stack * output = name and type of output device:", "draw box only; 0 draw box and label it with coordinates; 1 same", "(float) maximum X value (defaults to actual value) XMIN (float) minimum X value", "box, axes or labels; -1 draw box only; 0 draw box and label", "newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and initialize an ObitPlot", "text on plot relative to port * plot = Python Plot object *", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotRelText(plot.me,", "class OPlot(Obit.OPlot): \"\"\" Python Obit interface to display server This class is for", "if name == \"me\" : # Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this))", "Array of world x-coordinates of points * y = Array of world y-coordinates", "# Postal address: <NAME> # National Radio Astronomy Observatory # 520 Edgemont Road", "crossed 7 plplot:horizontal lines 8 plplot:vertical lines == =============== * err = ObitErr", "the viewport (Y). M write numeric labels in the unconventional location above the", "to plot, BLC, TRC on info member honored * lev = basic contour", "them outside the viewport instead of inside. L label axis Logarithmically N write", "bottom left corner of the viewport. 
* xmax = the world x-coordinate at", "plot package dependent * values in the range [0,8] are usable == ===============", "finalized and displayed with PShow * plot = plot * image = ObitImage", "y, fill, scale, err.me) # end PDrawPoly def PGetList (plot): \"\"\" Return the", "coord = The location of the character string along the specified edge of", "deg upwards 5 plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal", "Plot X vs Y using symbol. Plot should be finalized and displayed with", "= len(y) # How many points? Obit.PlotXYPlot (plot.me, symbol, n, x, y, err.me)", "====== ======= ================================================== XTICK (float) world coordinate interval between major tick marks on", "SQRT (bool) If present and true plot sqrt (pixel_value) INVERT (bool) If present", "def __setattr__(self,name,value): if name == \"me\" : # Out with the old if", "with python interfaces: ======== ======================================= InfoList used to pass instructions to processing Member", "x 6 open square 7 open triangle 8 open star 9 filled triangle", "world y-coordinate of the center of the symbol * symbol = Symbol index", "#d will be written as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C)", "a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) # end", "details. 
# # You should have received a copy of the GNU General", "the range [1,12] are usable if negative, use abs value and connect points", "Set line width * plot = Python Plot object * lwidth = Width", "letters, A #g immediately prior to a Latin character will cause the Greek", "(int) Scaling factor for symbols(default = 1) LWIDTH (int) Line width (default =", "on info member honored * lev = basic contour level (def 0.1 peak)", "plot = Python Plot object * n = number of vertices * x", "= math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me)", "(defaults to actual value) YMAX (float) maximum Y value (defaults to actual value)", "x, y, e, err.me) # end PXYErr def PContour (plot, label, image, lev,", "x, y,radius, err): \"\"\" Draw a circle. * plot = Python Plot object", "math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me) # end PText def", "curves, text or symbols may be added. When all has been added to", "is chosen. * nxsub = The number of subintervals to divide the major", "be a Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) # end PSetCharSize", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1,", "= Python Plot object * x = World x-coordinate of center * y", "y = World y-coordinate of center * radius = World coordinate radius *", "specified) T draw major Tick marks at the major coordinate interval. S draw", "= name and type of output device: ====== ========================== \"None\" interactive prompt \"xwin\"", "program is free software; you can redistribute it and/or # modify it under", "will be placed at (x,y); if JUST = 1.0, the right-hand end of", "ObitErr error stack Optional parameters on plot InfoList: ====== ======== ================================================== XMAX (float)", "coordinate interval. S draw minor tick marks (Subticks). 
= ====================================================================== * xtick =", "dec, size, err.me) # end PMarkCross def PShow (plot, err): \"\"\" Display plot", "member honored * lev = basic contour level (def 0.1 peak) * cntfac", "None use index * y = Dependent variable * err = ObitErr error", "= array of world x-coordinates of the vertices * y = array of", "Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def PSetLineStyle (plot, lstyle,", "# National Radio Astronomy Observatory # 520 Edgemont Road # Charlottesville, VA 22903-2475", "File (color) \"xfig\" Fig file \"png\" PNG file \"jpeg\" JPEG file \"gif\" GIF", "it and/or # modify it under the terms of the GNU General Public", "ny, err.me) return out # end newOPlot def PXYPlot (plot, symbol, x, y,", "of points * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "Obit.PlotSetPlot(plot.me, xmin, xmax, ymin, ymax, just, axis, err.me) # end PSetPlot def PLabel", "import Obit, _Obit, InfoList, Image import math class OPlot(Obit.OPlot): \"\"\" Python Obit interface", "Independent variable, if None use index * y = Dependent variable * err", "plotting. Values in the range [1,12] are usable. If negative, use abs value", "PShow * plot = plot * label = Label for plot * image", "Image\") n = len(ra) Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me) #", "= Style of line (integer multiple of the default size). 1 = continious,", "the vertices * fill = Fill pattern, plot package dependent * values in", "newOPlot which allows specifying the output and background color. 
If no output is", "# Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python", "* err = ObitErr error stack Optional parameters on plot InfoList: ====== =======", "a Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def PSetPage", "12 filled star == =============== * x = Independent variable, if None use", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel,", "the same ====== ======== ================================================== \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually", "Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me) # end PContour def", "BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx = Number of horizontal", "plot * image = ObitImage to plot, BLC, TRC on info member honored", "= new character size (integer multiple of the default size). * err =", "the length of the edge. * just = Controls justification of the string", "to be used, e.g. #ga will be a lower case alpha. - Subscripts:", "YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON,", "\"\"\" Write text on plot relative to port * plot = Python Plot", "4 open circle 5 x 6 open square 7 open triangle 8 open", "at positions. Plot should be finalized and displayed with PShow * plot =", "object * side = Must include one of the characters 'B', 'L', 'T',", "if the input is a Python ObitPlot returns true Or false * Plot", "will be a lower case alpha. - Subscripts: Characters between a #d and", "== \"me\" : # Out with the old if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In", "the viewport (note XMAX may be less than XMIN). 
* ymin = the", "shadow class to ObitPlot class from __future__ import absolute_import from __future__ import print_function", "== =============== 0 line only 1 dot 2 plus 3 \\* 4 open", "(horizontal) axis of plot. Options are single letters, and may be in any", "err.me) # end PText def PRelText (plot, side, disp, coord, fjust, text, err):", "like nxsub for the Y axis * err = ObitErr error stack \"\"\"", "top (X) or right (Y) edge of frame. G draw Grid of vertical", "of the character string from the specified edge of the viewport, measured outwards", "version. # # This program is distributed in the hope that it will", "object * xlabel = a label for the x-axis (centered below the viewport).", "* title = a label for the entire plot (centered above the viewport)", "the Greek equivalent to be used, e.g. #ga will be a lower case", "= ObitErr error stack Optional parameters on plot InfoList ====== ======== =============================================== XMAX", "PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") if not", "#g immediately prior to a Latin character will cause the Greek equivalent to", "be specified using either PSetPlot, one of the XY plotting routines (PXYPlot, PXYOver,", "Foundation; either version 2 of # the License, or (at your option) any", "default 'GRAY' PIX_MAX (float) maximum pixel value [def min in image] PIX_MIN (float)", "Python Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale,", "def PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis, err): \"\"\" Define plotting", "from __future__ import print_function import Obit, _Obit, InfoList, Image import math class OPlot(Obit.OPlot):", "Line width (default = 1) JUST (int) If !=0 then force X and", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # out =", "is horizontal line Y=0, Y axis is vertical line X=0). B draw bottom", "for plot * image = ObitImage to plot, BLC, TRC on info member", "independently. 
* axis = controls the plotting of axes, tick marks, etc: ==", "variable * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "disp, coord, fjust, text, err): \"\"\" Write text on plot relative to port", "= Plot x in world coordinates * y = Plot y in world", "variable * e = if nonNone, error in y * err = ObitErr", "vs Y using symbol. Plot should be finalized and displayed with PShow *", "PSetLineWidth (plot, lwidth, err): \"\"\" Set line width * plot = Python Plot", "* sub = if <=0 advance page, if >0 set current subpage to", "Dude\"+str(self.__class__) if name == \"me\" : return Obit.OPlot_Get_me(self.this) # Functions to return members", "if None use index * y = Dependent variable * e = if", "If * just = 0.0, the left-hand end of the string will be", "text = The text string to be plotted. Trailing spaces are ignored when", "of world y-coordinates of the vertices * fill = Fill pattern, plot package", "side, disp, coord, fjust, text, err.me) # end PRelText def PDrawLine (plot, x1,", "# Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot (plot, xmin, xmax, ymin, ymax,", "uses PLPlot for plotting the following can be used in text strings: -", "x and y axes (in world coordinates per inch) will be equal, otherwise", "page, if >0 set current subpage to sub numbering starts at the top", "err.me) # end PDrawPoly def PGetList (plot): \"\"\" Return the member InfoList returns", "MUST be a Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust, text,", "strings in PLPlot installations If the Obit installation uses PLPlot for plotting the", "* cntfac = factor for spacing between contours (def sqrt(2) * err =", "\"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot MUST be a Python", "of the coordinates; 10 draw box and label X-axis logarithmically; 20 draw box", "# GNU General Public License for more details. 
# # You should have", "the viewport, drawn vertically) * title = a label for the entire plot", "(centered below the viewport). * ylabel = a label for the y-axis (centered", "index to use for plotting values in the range [1,12] are usable if", "x2 = world x-coordinate of the new pen position. * y2 = world", "class from __future__ import absolute_import from __future__ import print_function import Obit, _Obit, InfoList,", "nx=1, ny=1 ): \"\"\" Create and initialize an ObitPlot * name = name", "err): \"\"\" Draw a circle. * plot = Python Plot object * x", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # out", "leading spaces are significant. * err = ObitErr error stack \"\"\" ################################################################ #", "text on plot * plot = Python Plot object * x = Plot", "* x = Array of world x-coordinates of points * y = Array", "= Number of horizontal subpages * ny = Number of vertical subpages \"\"\"", "\"\"\" Set line width * plot = Python Plot object * lwidth =", "of the GNU General Public # License along with this program; if not,", "Independent variable, if None use index * y = Dependent variable * e", "PLPlot for plotting the following can be used in text strings: - Greek", "MAGENTA, SALMON, WHITE * nx = Number of horizontal subpages * ny =", "################################################################ # Checks if not isinstance(plot, OPlot): return False return Obit.OPlotIsA(plot.me)!=0 # end", "crosshatched 3 plplot:lines 45 deg downwards 4 plplot:lines 30 deg upwards 5 plplot:lines", "be added. 
When all has been added to the plot, use PShow to", "be placed at COORD; if JUST = 0.5, the center of the string", "image] ======= ======== ================================================= \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__)", "n, ra, dec, size, err.me) # end PMarkCross def PShow (plot, err): \"\"\"", "def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\" :", "is for creating and using the interface to a plot Image Members with", "err): \"\"\" Draw a Symbol * plot = Python Plot object * x", "class Create a plot object using newOPlot which allows specifying the output and", "center * radius = World coordinate radius * err = ObitErr error stack", "square 11 filled circle 12 filled star == ================= * x = Independent", "Python Plot object * err = ObitErr error stack \"\"\" ################################################################ # Checks", "present and true ionvert colors COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME' default", "ObitErr error stack * size = size of cross in pixels Optional parameters", "xtick for the Y axis. NYSUB (int) like nxsub for the Y axis", "factor for spacing between contours (def sqrt(2) * err = ObitErr error stack", "honored * err = ObitErr error stack Optional parameters on plot InfoList: =======", "Scaling factor for characters(default = 1) SQRT (bool) If present and true plot", "vs Y Overplot X vs Y using symbol. Plot should be finalized and", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General", "Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a Python Obit", "1.0, the right-hand end of the string will be placed at at (x,y).", "or 'R' signifying the Bottom, Left, Top, or Right margin of the viewport.", "lwidth, err): \"\"\" Set line width * plot = Python Plot object *", "plplot:lines 30 deg downwards 6 plplot:horizontal/vertical lines crossed 7 plplot:horizontal lines 8 plplot:vertical", "\"\"\" Draw a curve. * plot = Python Plot object * x =", "11 filled circle 12 filled star == ================= * x = Independent variable,", "\"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be", "MUST be a Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end", "output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and initialize an ObitPlot * name", "or 'RV', the string is written perpendicular to the frame rather than parallel", "MUST be a Python Obit plot\") # Obit.PlotDrawCircle (plot.me, x, y, radius, err.me)", "or nxsub=0, the number is chosen. * yopt = string of options for", "= like nxsub for the Y axis * err = ObitErr error stack", "below the viewport (X) or to the left of the viewport (Y). M", "PDrawSymbol (plot, x, y, symbol, err): \"\"\" Draw a Symbol * plot =", "than YMIN) * just = if JUST=1, the scales of the x and", "cause the Greek equivalent to be used, e.g. 
#ga will be a lower", "= value def __getattr__(self,name): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name", "label axis Logarithmically N write Numeric labels in the conventional location below the", "in the conventional location below the viewport (X) or to the left of", "== =============== 0 no fill 1 hatched 2 crosshatched 3 plplot:lines 45 deg", "scaling to be the same ====== ======== =============================================== \"\"\" ################################################################ # Checks if", "(bool) If present and true plot sqrt (pixel_value) INVERT (bool) If present and", "be a Python Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me) #", "raise TypeError(\"plot MUST be a Python Obit Plot\") n = len(y) # How", "ytick = like xtick for the Y axis. * nysub = like nxsub", "end PDrawLine def PDrawCurve (plot, x, y, err): \"\"\" Draw a curve. *", "PContour def PGrayScale (plot, label, image, err): \"\"\" Gray Scale plot of image", "and/or # modify it under the terms of the GNU General Public License", "at the bottom left corner of the viewport. * xmax = the world", "program; if not, write to the Free # Software Foundation, Inc., 675 Massachusetts", "additional lines, curves, text or symbols may be added. When all has been", "a Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYPlot (plot.me,", "__setattr__(self,name,value): if name == \"me\" : # Out with the old if self.this!=None:", "be a Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYOver", "== =============== * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "[def 0] YTICK (float) like xtick for the Y axis. NYSUB (int) like", "Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name ==", "at the major coordinate interval. 
S draw minor tick marks (Subticks). = ======================================================================", "Software Foundation, Inc., 675 Massachusetts Ave, Cambridge, # MA 02139, USA. # #", "\"\"\" def __init__(self, name): super(OPlot, self).__init__() Obit.CreateOPlot(self.this, name) def __del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None:", "newOPlot def PXYPlot (plot, symbol, x, y, err): \"\"\" Simple XY Plot Plot", "at the top right corner of the viewport (note XMAX may be less", "specified edge of the viewport. If FJUST = 0.0, the left-hand end of", "be placed at COORD; if JUST = 1.0, the right-hand end of the", "for the Y axis. NYSUB (int) like nxsub for the Y axis CSIZE", "Copyright (C) 2006,2016,2019 # Associated Universities, Inc. Washington DC, USA. # # This", "\"psc\" PostScript File (color) \"xfig\" Fig file \"png\" PNG file \"jpeg\" JPEG file", "should be finalized and displayed with PShow * plot = plot * image", "#u will be written as subscripts - Superscripts: Characters between a #u and", "plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me) #", "fjust, text, err.me) # end PRelText def PDrawLine (plot, x1, y1, x2, y2,", "text strings in PLPlot installations If the Obit installation uses PLPlot for plotting", "none) XOPT (string) Options for horizontal axis (default \"BCNTS\") See PDrawAxes for details.", "= World x-coordinate of center * y = World y-coordinate of center *", "give intermediate placing, but they are not very useful. * text = The", "of vertices * x = array of world x-coordinates of the vertices *", "= dashed, 3=dot dash, 4 = dotted, 5 = dash dot dot dot", "err): \"\"\" Define plotting area * plot = Python Plot object * xmin", "is written perpendicular to the frame rather than parallel to it. * disp", "plot * symbol = Symbol index to use for plotting. 
Values in the", "be in any order: = ====================================================================== A draw Axis (X axis is horizontal", "# n = len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me) # end PDrawCurve", "= 1) LWIDTH (int) Line width (default = 1) JUST (int) If !=0", "MUST be a Python Obit Plot\") n = len(y) # How many points?", "\"gcw\" Gnome Canvas Widget (interacts with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript", "PXYPlot def PXYOver (plot, symbol, x, y, err): \"\"\" Overplot X vs Y", "\"\"\" Tells if the input is a Python ObitPlot returns true Or false", "xtick, nxsub, yopt, ytick, nysub, err.me) # end DrawAxes def PSetCharSize (plot,cscale, err):", "PetLineWidth def PSetLineStyle (plot, lstyle, err): \"\"\" Set line style * plot =", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # scale", "Widget (interacts with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript File (color) \"xfig\"", "axis (default \"BCNTS\") See PDrawAxes for details. YLABEL (string) Label for vertical axis", "filled star == ================= * x = Independent variable, if None use index", "plot (defaults to none), max 120 XLABEL (string) Label for horizontal axis (defaults", "* err = ObitErr error stack Optional parameters on plot InfoList: ====== ========", "def PSetPage (plot, sub, err): \"\"\" Set or advance sub page Note: some", "math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me) #", "pen position. * y1 = world y-coordinate of the new pen position. *", "draw major Tick marks at the major coordinate interval. S draw minor tick", "= 15 def newOPlot(name, err, output=\"None\", bgcolor=BLACK, nx=1, ny=1 ): \"\"\" Create and", "as follows: # Internet email: <EMAIL>. 
# Postal address: <NAME> # National Radio", "and label it with coordinates; 1 same as axis=0, but also draw the", "* plot = Python Plot object * cscale = new character size (integer", "02139, USA. # # Correspondence concerning this software should be addressed as follows:", "are ignored when justifying the string, but leading spaces are significant. * err", "symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET,", "Y axis * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "#u and #d will be written as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- #", "PDrawAxes for details. XTICK (float) world coordinate interval between major tick marks on", "PShow * plot = plot * image = ObitImage to plot * ra", "If !=0 then force X and Y axis scaling to be the same", "and displayed with PShow * plot = plot * label = Label for", "ignored when justifying the string, but leading spaces are significant. 
* err =", "plot (centered above the viewport) * err = ObitErr error stack \"\"\" ################################################################", "out # end newOPlot def PXYPlot (plot, symbol, x, y, err): \"\"\" Simple", "pixel value [def min in image] PIX_MIN (float) minimum pixel value [def max", "error stack * size = size of cross in pixels Optional parameters on", "(x,y); if JUST = 0.5, the center of the string will be placed", "= continious, 2 = dashed, 3=dot dash, 4 = dotted, 5 = dash", "order: = ====================================================================== A draw Axis (X axis is horizontal line Y=0, Y", "Array of world y-coordinates of points * err = ObitErr error stack \"\"\"", "draw box and label Y-axis logarithmically; 30 draw box and label both axes", "DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\" : # Out", "version 2 of # the License, or (at your option) any later version.", "subscripts - Superscripts: Characters between a #u and #d will be written as", "ylabel = a label for the y-axis (centered to the left of the", "character string along the specified edge of the viewport, as a fraction of", "e, err): \"\"\" Simple XY Plot with error bars Plot X vs Y", "side = Must include one of the characters 'B', 'L', 'T', or 'R'", "the center of the symbol * symbol = Symbol index to use for", "multiple of the default size). * err = ObitErr error stack \"\"\" ################################################################", "rows and columns * err = ObitErr error stack \"\"\" ################################################################ # Checks", "plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def PSetPage (plot, sub, err):", "rather than parallel to it. 
* disp = The displacement of the character", "Python Plot object * xmin = the world x-coordinate at the bottom left", "xmax, ymin, ymax, just, axis, err.me) # end PSetPlot def PLabel (plot, xlabel,", "address: <NAME> # National Radio Astronomy Observatory # 520 Edgemont Road # Charlottesville,", "\"Bogus Dude\"+str(self.__class__) if name == \"me\" : return Obit.OPlot_Get_me(self.this) # Functions to return", "a #d and #u will be written as subscripts - Superscripts: Characters between", "Number of vertical subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx,", "the page * plot = Python Plot object * sub = if <=0", "allows specifying the output and background color. If no output is specified this", "the specified edge of the viewport, as a fraction of the length of", "string will be placed at COORD; if JUST = 0.5, the center of", "y = Dependent variable * e = if nonNone, error in y *", "675 Massachusetts Ave, Cambridge, # MA 02139, USA. # # Correspondence concerning this", "# Foreground Colors unBLACK = 0 RED = 1 YELLOW = 2 GREEN", "axis CSIZE (int) Scaling factor for characters(default = 1) LWIDTH (int) Line width", "BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * err = ObitErr error stack", "Invert the tick marks; ie draw them outside the viewport instead of inside.", "image Gray Scales plot of image Plot should be finalized and displayed with", "string of options for Y (vertical) axis of plot. Coding is the same", "world y-coordinates of the vertices * fill = Fill pattern, plot package dependent", "or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License", "dy, just, text, err.me) # end PText def PRelText (plot, side, disp, coord,", "viewport in units of the character height. Use a negative value to write", "same as for xopt. 
* ytick = like xtick for the Y axis.", "TypeError(\"plot MUST be a Python Obit plot\") # out = InfoList.InfoList() out.me =", "case alpha. - Subscripts: Characters between a #d and #u will be written", "axis. NYSUB (int) like nxsub for the Y axis CSIZE (int) Scaling factor", "are significant. * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "string parallel to the specified edge of the viewport. If FJUST = 0.0,", "\"xwin\" X-Window (Xlib) \"gcw\" Gnome Canvas Widget (interacts with ObitTalk) \"ps\" PostScript File", "top right corner of the viewport (note XMAX may be less than XMIN).", "= 3 AQUAMARINE = 4 BLACK = 5 WHEAT = 6 GRAY =", "None use index * y = Dependent variable * e = if nonNone,", "(X) or to the left of the viewport (Y). M write numeric labels", "lstyle = Style of line (integer multiple of the default size). 1 =", "single letters, and may be in any order: = ====================================================================== A draw Axis", "PShow def PSetPlot (plot, xmin, xmax, ymin, ymax, just, axis, err): \"\"\" Define", "axis of plot. Options are single letters, and may be in any order:", "def PDrawCircle (plot, x, y,radius, err): \"\"\" Draw a circle. * plot =", "using symbol. Plot should be finalized and displayed with PShow This routine draws", "* plot = plot * image = ObitImage to plot * ra =", "# Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) # end PDrawLine def PDrawCurve (plot,", "program is distributed in the hope that it will be useful, # but", "PDrawPoly (plot, x, y, fill, err): \"\"\" Draw a Polygon, possibly filled *", "viewport. 
* ymax = the world y-coordinate at the top right corner of", "of center * y = World y-coordinate of center * radius = World", "distributed in the hope that it will be useful, # but WITHOUT ANY", "y, dx, dy, just, text, err.me) # end PText def PRelText (plot, side,", "plot = Python Plot object * xopt = string of options for X", "major Tick marks at the major coordinate interval. S draw minor tick marks", "= World coordinate interval between major tick marks on X axis. If xtick=0.0,", "= 8 BLUE = 9 BLUEVIOLET = 10 CYAN = 11 TURQUOISE =", "y1 = world y-coordinate of the new pen position. * x2 = world", "factor for characters(default = 1) SSIZE (int) Scaling factor for symbols(default = 1)", "(Y). P extend (\"Project\") major tick marks outside the box (ignored if option", "ObitErr error stack Optional parameters on plot InfoList: ======= ======== ================================================= XTICK (float)", "is the same as for xopt. * ytick = like xtick for the", "X vs Y using symbol and error bars. Plot should be finalized and", "Y axis scaling to be the same ====== ======== =============================================== \"\"\" ################################################################ #", "plot. Coding is the same as for xopt. * ytick = like xtick", "any later version. # # This program is distributed in the hope that", "is a Python ObitPlot returns true Or false * Plot = Python Obit", "which allows specifying the output and background color. If no output is specified", "* angle = Orientation of the text in deg, 0=horizontal * just =", "object * xmin = the world x-coordinate at the bottom left corner of", "Obit plot\") # scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me)", "of the string parallel to the specified edge of the viewport. If FJUST", "viewport. 
If it includes 'LV' or 'RV', the string is written perpendicular to", "\"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright (C) 2006,2016,2019 # Associated Universities, Inc. Washington", "of the viewport, drawn vertically) * title = a label for the entire", "desired for object (labeling purposes) * err = Python Obit Error/message stack *", "\"me\" : return Obit.OPlot_Get_me(self.this) # Functions to return members if name==\"List\": return PGetList(self)", "def PXYOver (plot, symbol, x, y, err): \"\"\" Overplot X vs Y Overplot", "SALMON, WHITE * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me) # end PDrawPoly", "OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return out # end newOPlot def", "if JUST=1, the scales of the x and y axes (in world coordinates", "Obit interface to display server This class is for creating and using the", "size=5.0): \"\"\" Mark positions on Contour plot of image Place cross at positions.", "color. If no output is specified this information will be prompted. Next, the", "# end PXYErr def PContour (plot, label, image, lev, cntfac, err): \"\"\" Contour", "PXYOver, or PXYErr) PGrayScale, or PContour. Then additional lines, curves, text or symbols", "coordinates per inch) will be equal, otherwise they will be scaled independently. *", "Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYPlot (plot.me, symbol,", "on X axis. If xtick=0.0, the interval is chosen. 
* nxsub = The", "= 1.0, the right-hand end of the string will be placed at at", "along the specified edge of the viewport, as a fraction of the length", "This program is free software; you can redistribute it and/or # modify it", "# dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just,", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # scale =", "0 draw box and label it with coordinates; 1 same as axis=0, but", "symbol = Symbol index to use for plotting. Values in the range [1,12]", "instead of inside. L label axis Logarithmically N write Numeric labels in the", "y1, x2, y2, err.me) # end PDrawLine def PDrawCurve (plot, x, y, err):", "axes (in world coordinates per inch) will be equal, otherwise they will be", "JUST = 1.0, the right-hand end of the string will be placed at", "the GNU General Public License as # published by the Free Software Foundation;", "You should have received a copy of the GNU General Public # License", "following can be used in text strings: - Greek letters, A #g immediately", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetCharSize (plot.me, cscale, err.me) #", "of the new pen position. * x2 = world x-coordinate of the new", "to be plotted. Trailing spaces are ignored when justifying the string, but leading", "for the x-axis (centered below the viewport). * ylabel = a label for", "be finalized and displayed with PShow * plot = plot * symbol =", "the interval is chosen. NXSUB (long) the number of subintervals to divide the", "- Subscripts: Characters between a #d and #u will be written as subscripts", "Contour plot of image Contours at lev times powers of cntfac Plot should", "Obit.PlotSetColor(plot.me, color, err.me) # end PSetColor def PSetPage (plot, sub, err): \"\"\" Set", "justifying the string, but leading spaces are significant. * err = ObitErr error", "Python ObitPlot returns true Or false * Plot = Python Obit Plot to", "placed at at (x,y). 
Other values between 0 and 1 give intermediate placing,", "= 1) JUST (int) If !=0 then force X and Y axis scaling", "but also draw grid lines at major increments of the coordinates; 10 draw", "viewport) * err = ObitErr error stack \"\"\" ################################################################ # Checks if not", "__del__(self, DeleteOPlot=_Obit.DeleteOPlot): if _Obit!=None: DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\" : #", "or right (Y) edge of frame. G draw Grid of vertical (X) or", "x-coordinate of center * y = World y-coordinate of center * radius =", "Python shadow class to ObitPlot class from __future__ import absolute_import from __future__ import", "= ====================================================================== * xtick = World coordinate interval between major tick marks on", "= Dependent variable * err = ObitErr error stack Optional parameters on plot", "MUST be a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) # end PSetPage", "= Array of world y-coordinates of points * err = ObitErr error stack", "edge of the viewport. If FJUST = 0.0, the left-hand end of the", "BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK, WHEAT, GRAY, BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE,", "of # the License, or (at your option) any later version. # #", "with PShow This routine draws the frame and adds labels, to only overplot", "err): \"\"\" Write text on plot relative to port * plot = Python", "in deg, 0=horizontal * just = Controls justification of the string parallel to", "in world coordinates * angle = Orientation of the text in deg, 0=horizontal", "be a Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me, err.me) # end PGrayScale", "filled star == =============== * x = Independent variable, if None use index", "the string will be placed at at COORD. 
Other values between 0 and", "LWIDTH (int) Line width (default = 1) JUST (int) If !=0 then force", "isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) if name == \"me\" : return Obit.OPlot_Get_me(self.this) #", "Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def PSetColor (plot, color,", "* plot = Python Plot object * xlabel = a label for the", "Obit plot\") # n = len(x) Obit.PlotDrawCurve (plot.me, n, x, y, err.me) #", "WHITE * nx = Number of horizontal subpages * ny = Number of", "P extend (\"Project\") major tick marks outside the box (ignored if option I", "value to write outside. * coord = The location of the character string", "of the string will be placed at COORD; if JUST = 0.5, the", "none), max 120 XLABEL (string) Label for horizontal axis (defaults to none) XOPT", "write inside the viewport, a positive value to write outside. * coord =", "many points? Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me) # end PXYOver def", "Python Obit plot\") # Obit.PlotSetLineWidth(plot.me, lwidth, err.me) # end PetLineWidth def PSetLineStyle (plot,", "to actual value) XMIN (float) minimum X value (defaults to actual value) YMAX", "should be finalized and displayed with PShow * plot = plot * symbol", "not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x,", "(plot, err): \"\"\" Display plot * plot = Python Plot object * err", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) # end", "parallel to the specified edge of the viewport. 
If FJUST = 0.0, the", "Other values between 0 and 1 give intermediate placing, but they are not", "World x-coordinate of center * y = World y-coordinate of center * radius", "PShow (plot, err): \"\"\" Display plot * plot = Python Plot object *", "= Must include one of the characters 'B', 'L', 'T', or 'R' signifying", "PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPlot(plot.me, xmin, xmax,", "Obit.PlotGetList(plot.me) return out # end PGetList def PIsA (plot): \"\"\" Tells if the", "======== =============================================== XMAX (float) maximum X value (defaults to actual value) XMIN (float)", "Y-axis logarithmically; 30 draw box and label both axes logarithmically. == =========================================== *", "end PContour def PGrayScale (plot, label, image, err): \"\"\" Gray Scale plot of", "is chosen. NXSUB (long) the number of subintervals to divide the major coordinate", "x-coordinate at the top right corner of the viewport (note XMAX may be", "factor for characters(default = 1) LWIDTH (int) Line width (default = 1) ======", "to port * plot = Python Plot object * side = Must include", "Characters between a #u and #d will be written as superscripts \"\"\" #", "AQUAMARINE = 4 BLACK = 5 WHEAT = 6 GRAY = 7 BROWN", "(X axis is horizontal line Y=0, Y axis is vertical line X=0). B", "outside the box (ignored if option I is specified) T draw major Tick", "out.me = Obit.PlotGetList(plot.me) return out # end PGetList def PIsA (plot): \"\"\" Tells", "xtick = World coordinate interval between major tick marks on X axis. If", "to the specified edge of the viewport. 
If FJUST = 0.0, the left-hand", "Fig file \"png\" PNG file \"jpeg\" JPEG file \"gif\" GIF file \"null\" Null", "or horizontal (Y) lines I Invert the tick marks; ie draw them outside", "= World coordinate radius * err = ObitErr error stack \"\"\" ################################################################ #", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me)", "of the string will be placed at (x,y); if JUST = 0.5, the", "viewport (note YMAX may be less than YMIN) * just = if JUST=1,", "Y using symbol. Plot should be finalized and displayed with PShow * plot", "string is written perpendicular to the frame rather than parallel to it. *", "def PIsA (plot): \"\"\" Tells if the input is a Python ObitPlot returns", "= Python Plot object * side = Must include one of the characters", "filled circle 12 filled star == =============== * x = Independent variable, if", "XMAX (float) maximum X value (defaults to actual value) XMIN (float) minimum X", "of RAs (deg) * dec = list of Declinations (deg) * err =", "* color = color index (1-15), symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN,", "xopt = string of options for X (horizontal) axis of plot. Options are", "but leading spaces are significant. * err = ObitErr error stack \"\"\" ################################################################", "negative value to write inside the viewport, a positive value to write outside.", "end PDrawSymbol def PDrawPoly (plot, x, y, fill, err): \"\"\" Draw a Polygon,", "n, x, y, e, err.me) # end PXYErr def PContour (plot, label, image,", "plot\") # dx = math.cos(angle/57.296) dy = math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy,", "with ObitTalk) \"ps\" PostScript File (monochrome) \"psc\" PostScript File (color) \"xfig\" Fig file", "of the viewport. 
If * just = 0.0, the left-hand end of the", "it under the terms of the GNU General Public License as # published", "1) ====== ===== ============================================ \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__)", "0.0, the left-hand end of the string will be placed at (x,y); if", "= The location of the character string along the specified edge of the", "error stack Optional parameters on plot InfoList: ======= ======== ================================================= XTICK (float) world", "= math.sin(angle/57.296) Obit.PlotText(plot.me, x, y, dx, dy, just, text, err.me) # end PText", "[0,8] are usable == =============== 0 no fill 1 hatched 2 crosshatched 3", "subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor, nx, ny, err.me) return", "dotted, 5 = dash dot dot dot * err = ObitErr error stack", "of options for Y (vertical) axis of plot. Coding is the same as", "x = Plot x in world coordinates * y = Plot y in", "parallel to it. 
* disp = The displacement of the character string from", "= 1) SQRT (bool) If present and true plot sqrt (pixel_value) INVERT (bool)", "InfoList: ====== ======== ================================================== XMAX (float) maximum X value (defaults to actual value)", "lev, cntfac, err.me) # end PContour def PGrayScale (plot, label, image, err): \"\"\"", "characters(default = 1) SQRT (bool) If present and true plot sqrt (pixel_value) INVERT", "8 open star 9 filled triangle 10 filled square 11 filled circle 12", "fill, scale, err.me) # end PDrawPoly def PGetList (plot): \"\"\" Return the member", "if self.this!=None: Obit.OPlotUnref(Obit.OPlot_Get_me(self.this)) # In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value", "= 1) LWIDTH (int) Line width (default = 1) ====== ===== ============================================ \"\"\"", "also draw the coordinate axes (X=0, Y=0); 2 same as axis=1, but also", "character height. Use a negative value to write inside the viewport, a positive", "if not PIsA(plot): raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me,", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end", "Obit.PlotMarkCross (plot.me, image.me, n, ra, dec, size, err.me) # end PMarkCross def PShow", "draw Axis (X axis is horizontal line Y=0, Y axis is vertical line", "<NAME> # National Radio Astronomy Observatory # 520 Edgemont Road # Charlottesville, VA", "\"jpeg\" JPEG file \"gif\" GIF file \"null\" Null device ====== ========================== * bgcolor", "\"\"\" Set foreground color * plot = Python Plot object * color =", "\"\"\" Gray Scale plot of image Gray Scales plot of image Plot should", "PShow to finalize it. Notes: on text strings in PLPlot installations If the", "and true plot sqrt (pixel_value) INVERT (bool) If present and true ionvert colors", "they will be scaled independently. 
* axis = controls the plotting of axes,", "Scales plot of image Plot should be finalized and displayed with PShow *", "tick marks (Subticks). = ====================================================================== * xtick = World coordinate interval between major", "the input is a Python ObitPlot returns true Or false * Plot =", "# scale = 1.0 Obit.PlotDrawPoly(plot.me, len(x), x, y, fill, scale, err.me) # end", "* size = size of cross in pixels Optional parameters on plot InfoList", "overplot data on the same frame, use ObitPlotXYOver * plot = plot *", "the plotting of axes, tick marks, etc: == =========================================== -2 draw no box,", "axis. If xtick=0.0 [def], the interval is chosen. NXSUB (long) the number of", ": return Obit.OPlot_Get_me(self.this) # Functions to return members if name==\"List\": return PGetList(self) raise", "of the viewport (note YMAX may be less than YMIN) * just =", "line Y=0, Y axis is vertical line X=0). B draw bottom (X) or", "added. When all has been added to the plot, use PShow to finalize", "members if name==\"List\": return PGetList(self) raise AttributeError(name) def __repr__(self): if not isinstance(self, OPlot):", "not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") n", "edge of the viewport, as a fraction of the length of the edge.", "# This program is free software; you can redistribute it and/or # modify", "parallel to the specified edge of the viewport. If * just = 0.0,", "the new pen position. * y1 = world y-coordinate of the new pen", "symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY, BROWN, BLUE,", "used to pass instructions to processing Member List ======== ======================================= \"\"\" def __init__(self,", "for Y (vertical) axis of plot. 
Coding is the same as for xopt.", "Draw a Polygon, possibly filled * plot = Python Plot object * n", "it includes 'LV' or 'RV', the string is written perpendicular to the frame", "(monochrome) \"psc\" PostScript File (color) \"xfig\" Fig file \"png\" PNG file \"jpeg\" JPEG", "BROWN, BLUE, BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx = Number of", "end PXYPlot def PXYOver (plot, symbol, x, y, err): \"\"\" Overplot X vs", "0.5, the center of the string will be placed at COORD; if JUST", "value) TITLE (string) Label for the plot (defaults to none), max 120 XLABEL", "L label axis Logarithmically N write Numeric labels in the conventional location below", "axis (defaults to none) YOPT (string) Options for vertical axis (default \"BCNTS\") See", "License along with this program; if not, write to the Free # Software", "index (1-15), symbolic names: BLACK (notreally), RED(default), YELLOW, GREEN, AQUAMARINE, BLACK, WHEAT, GRAY,", "= Python Plot object * xmin = the world x-coordinate at the bottom", "Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) # end PDrawCircle def PDrawSymbol (plot, x,", "points? Obit.PlotXYOver (plot.me, symbol, n, x, y, err.me) # end PXYOver def PXYErr", "# This program is distributed in the hope that it will be useful,", "the string will be placed at (x,y); if JUST = 0.5, the center", "negative, use abs value and connect points == ================= 0 line only 1", "between a #u and #d will be written as superscripts \"\"\" # $Id$", "chosen. NXSUB (long) the number of subintervals to divide the major coordinate interval", "either version 2 of # the License, or (at your option) any later", "the Y axis CSIZE (int) Scaling factor for characters(default = 1) SSIZE (int)", "\"\"\" Write text on plot * plot = Python Plot object * x", "symbol, n, x, y, err.me) # end PXYOver def PXYErr (plot, symbol, x,", "draw box and label both axes logarithmically. 
== =========================================== * err = ObitErr", "TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotRelText(plot.me, side, disp, coord, fjust,", "\"xfig\" Fig file \"png\" PNG file \"jpeg\" JPEG file \"gif\" GIF file \"null\"", "Y value (defaults to actual value) YMIN (float) minimum Y value (defaults to", "ObitImage to plot * ra = list of RAs (deg) * dec =", "Number of horizontal subpages * ny = Number of vertical subpages \"\"\" ################################################################", "be placed at (x,y); if JUST = 0.5, the center of the string", "a lower case alpha. - Subscripts: Characters between a #d and #u will", "Obit plot\") # Obit.PlotLabel(plot.me, xlabel, ylabel, title, err.me) # end PLabel def PDrawAxes(plot,", "signifying the Bottom, Left, Top, or Right margin of the viewport. If it", "11 TURQUOISE = 12 MAGENTA = 13 SALMON = 14 WHITE = 15", "====== ===== ============================================ \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise", "scales of the x and y axes (in world coordinates per inch) will", "'R' signifying the Bottom, Left, Top, or Right margin of the viewport. If", "for the Y axis CSIZE (int) Scaling factor for characters(default = 1) LWIDTH", "specified using either PSetPlot, one of the XY plotting routines (PXYPlot, PXYOver, or", "margin of the viewport. 
If it includes 'LV' or 'RV', the string is", "= 5 WHEAT = 6 GRAY = 7 BROWN = 8 BLUE =", "\"\"\" Contour plot of image Contours at lev times powers of cntfac Plot", "4 BLACK = 5 WHEAT = 6 GRAY = 7 BROWN = 8", "advance the page * plot = Python Plot object * sub = if", "PRelText def PDrawLine (plot, x1, y1, x2, y2, err): \"\"\" Draw a line.", "above the viewport) * err = ObitErr error stack \"\"\" ################################################################ # Checks", "def PGrayScale (plot, label, image, err): \"\"\" Gray Scale plot of image Gray", "functions such as PContour advance the page * plot = Python Plot object", "image = ObitImage to plot * ra = list of RAs (deg) *", "a Python Obit Plot\") n = len(y) # How many points? Obit.PlotXYOver (plot.me,", "PDrawCurve (plot, x, y, err): \"\"\" Draw a curve. * plot = Python", "abs value and connect points == ================= 0 line only 1 dot 2", "the specified edge of the viewport. If FJUST = 0.0, the left-hand end", "text, err): \"\"\" Write text on plot relative to port * plot =", "ObitPlotXYOver * plot = plot * symbol = Symbol index to use for", "* err = ObitErr error stack Optional parameters on plot InfoList ====== ========", "major tick marks on X axis. If xtick=0.0 [def], the interval is chosen.", "return PGetList(self) raise AttributeError(name) def __repr__(self): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__)", "subpages * ny = Number of vertical subpages \"\"\" ################################################################ out = OPlot(name)", "will cause the Greek equivalent to be used, e.g. 
#ga will be a", "end PMarkCross def PShow (plot, err): \"\"\" Display plot * plot = Python", "(plot, x, y, angle, just, text, err): \"\"\" Write text on plot *", "\"\"\" Draw axes for a plot, label * plot = Python Plot object", "plot = Python Plot object * lwidth = Width of line (integer multiple", "PXYErr (plot, symbol, x, y, e, err): \"\"\" Simple XY Plot with error", "axis, err.me) # end PSetPlot def PLabel (plot, xlabel, ylabel, title, err): \"\"\"", "file \"null\" Null device ====== ========================== * bgcolor = background color index (1-15),", "Plot object * sub = if <=0 advance page, if >0 set current", "position. * y1 = world y-coordinate of the new pen position. * x2", "'GRAY', 'CONTOUR', 'PHLAME' default 'GRAY' PIX_MAX (float) maximum pixel value [def min in", "Error/message stack * output = name and type of output device: ====== ==========================", "use PShow to finalize it. Notes: on text strings in PLPlot installations If", "logarithmically; 20 draw box and label Y-axis logarithmically; 30 draw box and label", "world x-coordinates of points * y = Array of world y-coordinates of points", "y = Dependent variable * err = ObitErr error stack Optional parameters on", "cscale, err.me) # end PSetCharSize def PSetLineWidth (plot, lwidth, err): \"\"\" Set line", "labels, to only overplot data on the same frame, use ObitPlotXYOver * plot", "(note XMAX may be less than XMIN). * ymin = the world y-coordinate", "viewport. If * just = 0.0, the left-hand end of the string will", "= background color index (1-15), symbolic names: BLACK, RED(default), YELLOW, GREEN, AQUAMARINE, PINK,", "Style of line (integer multiple of the default size). 1 = continious, 2", "be a Python Obit plot\") # out = InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetPage(plot.me, sub, err.me) #", "the bottom left corner of the viewport. * xmax = the world x-coordinate", "axis. 
If xtick=0.0 [def], the interval is chosen. NXSUB (int) the number of", "displayed with PShow * plot = plot * label = Label for plot", "and 1 give intermediate placing, but they are not very useful. * text", "* plot = Python Plot object * x = world x-coordinate of the", "(PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour. Then additional lines, curves, text or", "a Python Obit plot\") # Obit.PlotDrawSymbol(plot.me, x, y, symbol, err.me) # end PDrawSymbol", "x-coordinate of the new pen position. * y1 = world y-coordinate of the", "= world x-coordinate of the new pen position. * y2 = world y-coordinate", "but they are not very useful. * text = The text string to", "Null device ====== ========================== * bgcolor = background color index (1-15), symbolic names:", "\",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") n = len(y) #", "filled * plot = Python Plot object * n = number of vertices", "and adds labels, to only overplot data on the same frame, use ObitPlotXYOver", "=============== 0 no fill 1 hatched 2 crosshatched 3 plplot:lines 45 deg downwards", "# the License, or (at your option) any later version. # # This", "YMIN) * just = if JUST=1, the scales of the x and y", "the string is written perpendicular to the frame rather than parallel to it.", "NYSUB (int) like nxsub for the Y axis CSIZE (int) Scaling factor for", "at COORD; if JUST = 1.0, the right-hand end of the string will", "entire plot (centered above the viewport) * err = ObitErr error stack \"\"\"", "= string of options for X (horizontal) axis of plot. Options are single", "See the # GNU General Public License for more details. # # You", "is vertical line X=0). 
B draw bottom (X) or left (Y) edge of", "= Number of vertical subpages \"\"\" ################################################################ out = OPlot(name) Obit.PlotInitPlot(out.me, output, bgcolor,", "nx, ny, err.me) return out # end newOPlot def PXYPlot (plot, symbol, x,", "draw Grid of vertical (X) or horizontal (Y) lines I Invert the tick", "abs value and connect points. == =============== 0 line only 1 dot 2", "be a Python Obit plot\") # Obit.PlotSetLineStyle(plot.me, lstyle, err.me) # end PetLineStyle def", "#d and #u will be written as subscripts - Superscripts: Characters between a", "for symbols(default = 1) LWIDTH (int) Line width (default = 1) JUST (int)", "between major tick marks on X axis. If xtick=0.0, the interval is chosen.", "* lstyle = Style of line (integer multiple of the default size). 1", "on plot relative to port * plot = Python Plot object * side", "deg downwards 4 plplot:lines 30 deg upwards 5 plplot:lines 30 deg downwards 6", "if nonNone, error in y * err = ObitErr error stack Optional parameters", "end of the string will be placed at (x,y); if JUST = 0.5,", "your option) any later version. # # This program is distributed in the", "(Subticks). = ====================================================================== * xtick = World coordinate interval between major tick marks", "(long) the number of subintervals to divide the major coordinate interval into. If", "err.me) # end PDrawSymbol def PDrawPoly (plot, x, y, fill, err): \"\"\" Draw", "return \"<C OPlot instance> \" + Obit.OPlotGetName(self.me) # Foreground Colors unBLACK = 0", "InfoList.InfoList() out.me = Obit.PlotGetList(plot.me) return out # end PGetList def PIsA (plot): \"\"\"", "for X (horizontal) axis of plot. Options are single letters, and may be", "placed at at COORD. Other values between 0 and 1 give intermediate placing,", "y, err): \"\"\" Simple XY Plot Plot X vs Y using symbol. 
Plot", "This class is for creating and using the interface to a plot Image", "[1,12] are usable. If negative, use abs value and connect points. == ===============", "= a label for the x-axis (centered below the viewport). * ylabel =", "err): \"\"\" Set line width * plot = Python Plot object * lwidth", "err.me) # end PMarkCross def PShow (plot, err): \"\"\" Display plot * plot", "cntfac Plot should be finalized and displayed with PShow * plot = plot", "pen position. * y2 = world y-coordinate of the new pen position. *", "plotting the following can be used in text strings: - Greek letters, A", "* disp = The displacement of the character string from the specified edge", "* plot = Python Obit Plot object \"\"\" ################################################################ # Checks if not", "Overplot X vs Y Overplot X vs Y using symbol. Plot should be", "= ObitErr error stack \"\"\" ################################################################ # Checks if not PIsA(plot): raise TypeError(\"plot", "'RV', the string is written perpendicular to the frame rather than parallel to", "AttributeError(name) def __repr__(self): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot", "'GRAY' PIX_MAX (float) maximum pixel value [def min in image] PIX_MIN (float) minimum", "\",image.__class__) raise TypeError(\"image MUST be a Python Obit Image\") Obit.PlotGrayScale (plot.me, label, image.me,", "plplot:horizontal lines 8 plplot:vertical lines == =============== * err = ObitErr error stack", "(integer multiple of the default size). 
* err = ObitErr error stack \"\"\"", "12 filled star == =============== * err = ObitErr error stack \"\"\" ################################################################", "a Python Obit plot\") # Obit.PlotFinishPlot(plot.me, err.me) # end PShow def PSetPlot (plot,", "world x-coordinate of the center of the symbol * y = world y-coordinate", "Python Obit Plot\") if not Image.PIsA(image): print(\"Actually \",image.__class__) raise TypeError(\"image MUST be a", "\"\"\" Draw a Polygon, possibly filled * plot = Python Plot object *", "draw top (X) or right (Y) edge of frame. G draw Grid of", "YMIN (float) minimum Y value (defaults to actual value) TITLE (string) Label for", "and y axes (in world coordinates per inch) will be equal, otherwise they", "the vertices * y = array of world y-coordinates of the vertices *", "Next, the plotting region must be specified using either PSetPlot, one of the", "FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more", "1 YELLOW = 2 GREEN = 3 AQUAMARINE = 4 BLACK = 5", "left of the viewport (Y). M write numeric labels in the unconventional location", "n = len(y) # How many points? Obit.PlotXYPlot (plot.me, symbol, n, x, y,", "symbol, err): \"\"\" Draw a Symbol * plot = Python Plot object *", "In with the new Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name): if not", "0.0, the left-hand end of the string will be placed at COORD; if", "= world y-coordinate of the new pen position. * x2 = world x-coordinate", "Python Obit Image\") Obit.PlotContour (plot.me, label, image.me, lev, cntfac, err.me) # end PContour", "a Symbol * plot = Python Plot object * x = world x-coordinate", "# MA 02139, USA. # # Correspondence concerning this software should be addressed", "If negative, use abs value and connect points. 
== =============== 0 line only", "plotting of axes, tick marks, etc: == =========================================== -2 draw no box, axes", "= the world y-coordinate at the top right corner of the viewport (note", "of the x and y axes (in world coordinates per inch) will be", "inside. L label axis Logarithmically N write Numeric labels in the conventional location", "* just = Controls justification of the string parallel to the specified edge", "y, err): \"\"\" Overplot X vs Y Overplot X vs Y using symbol.", "x1 = world x-coordinate of the new pen position. * y1 = world", "============================================ \"\"\" ################################################################ # Checks if not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST", "BLUEVIOLET, CYAN, TURQUOISE, MAGENTA, SALMON, WHITE * nx = Number of horizontal subpages", "grid lines at major increments of the coordinates; 10 draw box and label", "License, or (at your option) any later version. # # This program is", "title, err): \"\"\" Display plot * plot = Python Plot object * xlabel", "Python Obit Plot to test \"\"\" ################################################################ # Checks if not isinstance(plot, OPlot):", "the XY plotting routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour. Then additional", "Obit plot\") # Obit.PlotDrawLine(plot.me, x1, y1, x2, y2, err.me) # end PDrawLine def", "be the same ====== ======== =============================================== \"\"\" ################################################################ # Checks if not PIsA(plot):", "\"png\" PNG file \"jpeg\" JPEG file \"gif\" GIF file \"null\" Null device ======", "def __repr__(self): if not isinstance(self, OPlot): return \"Bogus Dude\"+str(self.__class__) return \"<C OPlot instance>", "vertices * x = array of world x-coordinates of the vertices * y", "may be added. 
When all has been added to the plot, use PShow", "raise TypeError(\"plot MUST be a Python Obit plot\") # Obit.PlotSetColor(plot.me, color, err.me) #", "err.me) # end PSetColor def PSetPage (plot, sub, err): \"\"\" Set or advance", "If present and true ionvert colors COLOR (string) Color scheme 'GRAY', 'CONTOUR', 'PHLAME'", "[1,12] are usable if negative, use abs value and connect points == =================", "Foreground Colors unBLACK = 0 RED = 1 YELLOW = 2 GREEN =", "specified edge of the viewport. If * just = 0.0, the left-hand end", "LWIDTH (int) Line width (default = 1) ====== ===== ============================================ \"\"\" ################################################################ #", "Plot object * err = ObitErr error stack \"\"\" ################################################################ # Checks if", "(Y). M write numeric labels in the unconventional location above the viewport (X)", "Draw a Symbol * plot = Python Plot object * x = world", "xopt, xtick, nxsub, yopt, ytick, nysub, err.me) # end DrawAxes def PSetCharSize (plot,cscale,", "(defaults to actual value) XMIN (float) minimum X value (defaults to actual value)", "file \"gif\" GIF file \"null\" Null device ====== ========================== * bgcolor = background", "index * y = Dependent variable * e = if nonNone, error in", "star 9 filled triangle 10 filled square 11 filled circle 12 filled star", "err.me) # end PetLineStyle def PSetColor (plot, color, err): \"\"\" Set foreground color", "err.me) # end PSetPage def PText (plot, x, y, angle, just, text, err):", "Orientation of the text in deg, 0=horizontal * just = Controls justification of", "y * err = ObitErr error stack Optional parameters on plot InfoList: ======", "be placed at (x,y); if JUST = 1.0, the right-hand end of the", "= Label for plot * image = ObitImage to plot, BLC, TRC on", "not PIsA(plot): print(\"Actually \",plot.__class__) raise TypeError(\"plot MUST be a Python Obit Plot\") if", "new 
Obit.OPlot_Set_me(self.this,value) return self.__dict__[name] = value def __getattr__(self,name): if not isinstance(self, OPlot): return", "# Obit.PlotDrawCircle (plot.me, x, y, radius, err.me) # end PDrawCircle def PDrawSymbol (plot,", "Write text on plot * plot = Python Plot object * x =", "interface to display server This class is for creating and using the interface", "CSIZE (int) Scaling factor for characters(default = 1) SQRT (bool) If present and", "x-axis (centered below the viewport). * ylabel = a label for the y-axis", "# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "as PContour advance the page * plot = Python Plot object * sub", "* label = Label for plot * image = ObitImage to plot, BLC,", "and #d will be written as superscripts \"\"\" # $Id$ #----------------------------------------------------------------------- # Copyright", "PXYOver def PXYErr (plot, symbol, x, y, e, err): \"\"\" Simple XY Plot", "DeleteOPlot(self.this) def __setattr__(self,name,value): if name == \"me\" : # Out with the old", "ylabel, title, err.me) # end PLabel def PDrawAxes(plot, xopt, xtick, nxsub, yopt, ytick,", "routines (PXYPlot, PXYOver, or PXYErr) PGrayScale, or PContour. Then additional lines, curves, text", "present and true plot sqrt (pixel_value) INVERT (bool) If present and true ionvert", "an ObitPlot * name = name desired for object (labeling purposes) * err", "image] PIX_MIN (float) minimum pixel value [def max in image] ======= ======== =================================================", "(float) minimum Y value (defaults to actual value) TITLE (string) Label for the" ]
[ "palindrome(num): numstr = str(num) for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i", "here import sys def palindrome(num): numstr = str(num) for i in range(num+1,sys.maxsize): if", "Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1: return True", "here def compress(word): string = word.lower() res = \"\" count = 1 res", "if not list_str2 - list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # --------------", "word.lower() res = \"\" count = 1 res += string[0] for i in", "compress(\"abbs\") # -------------- #Code starts here #Code starts here from collections import Counter", "str(count) return res compress(\"abbs\") # -------------- #Code starts here #Code starts here from", "1): res += str(count) return res compress(\"abbs\") # -------------- #Code starts here #Code", "-------------- #Code starts here #Code starts here from collections import Counter def k_distinct(string,k):", "compress(word): string = word.lower() res = \"\" count = 1 res += string[0]", "str(i)[::-1]: return i palindrome(123) # -------------- #Code starts here from collections import Counter", "list_str2 - list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts", "a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import math def isPerfectSquare(x): s = int(math.sqrt(x))", "count = 1 res += string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else:", "count = 1 if(count >= 1): res += str(count) return res compress(\"abbs\") #", "- list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here", "Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\")", "# -------------- #Code starts here def compress(word): string = word.lower() res = \"\"", "= 
str(num) for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123) #", "+= str(count) return res compress(\"abbs\") # -------------- #Code starts here #Code starts here", "here import math def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s == x def", "1 res += string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >=", "for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res += str(count)", "from collections import Counter def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()): return True", "string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res +=", "4)or isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code starts here def compress(word): string", "str(count) res += string[i+1] count = 1 if(count >= 1): res += str(count)", "+ 4)or isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code starts here def compress(word):", "= 1 res += string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count", "i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res += str(count) res", "else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import math def isPerfectSquare(x):", "import math def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s == x def check_fib(num):", "numstr = str(num) for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123)", "check_fib(377) # -------------- #Code starts here def compress(word): string = word.lower() res =", "\"\" count = 1 res += string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1", "= Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1: return", "- 4) check_fib(377) # -------------- #Code starts here def 
compress(word): string = word.lower()", "list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1:", "string = word.lower() res = \"\" count = 1 res += string[0] for", "= \"\" count = 1 res += string[0] for i in range(len(string)-1): if(string[i]==string[i+1]):", "#Code starts here import sys def palindrome(num): numstr = str(num) for i in", "if(count >= 1): res += str(count) res += string[i+1] count = 1 if(count", "in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123) # -------------- #Code starts here", "= 1 if(count >= 1): res += str(count) return res compress(\"abbs\") # --------------", "+= str(count) res += string[i+1] count = 1 if(count >= 1): res +=", "here from collections import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 =", "res += str(count) return res compress(\"abbs\") # -------------- #Code starts here #Code starts", "str(i)== str(i)[::-1]: return i palindrome(123) # -------------- #Code starts here from collections import", "return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code starts here", "def palindrome(num): numstr = str(num) for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return", ">= 1): res += str(count) res += string[i+1] count = 1 if(count >=", "-------------- #Code starts here from collections import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower())", "range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123) # -------------- #Code starts here from", "# -------------- #Code starts here import math def isPerfectSquare(x): s = int(math.sqrt(x)) return", "isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code starts here def compress(word): string =", "starts here def compress(word): string = word.lower() res = \"\" count = 1", "return False 
a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import math def isPerfectSquare(x): s", "a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2 -", "def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2", "collections import Counter def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()): return True return", "res += string[i+1] count = 1 if(count >= 1): res += str(count) return", "math def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s == x def check_fib(num): return", "starts here from collections import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2", "range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res += str(count) res += string[i+1]", "import sys def palindrome(num): numstr = str(num) for i in range(num+1,sys.maxsize): if str(i)==", "1 if(count >= 1): res += str(count) return res compress(\"abbs\") # -------------- #Code", "def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code", "here #Code starts here from collections import Counter def k_distinct(string,k): c = Counter(string.lower())", "= word.lower() res = \"\" count = 1 res += string[0] for i", "res = \"\" count = 1 res += string[0] for i in range(len(string)-1):", "def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()): return True return False k_distinct('Messoptamia',8) k_distinct('SUBBOOKKEEPER',7)", "from collections import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower())", "#Code starts here from collections import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1)", "#Code starts 
here def compress(word): string = word.lower() res = \"\" count =", "starts here #Code starts here from collections import Counter def k_distinct(string,k): c =", "# -------------- #Code starts here from collections import Counter def a_scramble(str_1,str_2): list_str1 =", "1): res += str(count) res += string[i+1] count = 1 if(count >= 1):", "isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code starts here def", "return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import math", "not list_str2 - list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code", "#Code starts here #Code starts here from collections import Counter def k_distinct(string,k): c", "string[i+1] count = 1 if(count >= 1): res += str(count) return res compress(\"abbs\")", "#Code starts here from collections import Counter def k_distinct(string,k): c = Counter(string.lower()) if", "= int(math.sqrt(x)) return s*s == x def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num", "palindrome(123) # -------------- #Code starts here from collections import Counter def a_scramble(str_1,str_2): list_str1", "True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import math def", "str(num) for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123) # --------------", "starts here from collections import Counter def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()):", "s*s == x def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377)", "= Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1: return True else: return False", "return i palindrome(123) # -------------- #Code starts here from collections import Counter def", "if str(i)== str(i)[::-1]: return i palindrome(123) # -------------- 
#Code starts here from collections", "x def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377) # --------------", "i palindrome(123) # -------------- #Code starts here from collections import Counter def a_scramble(str_1,str_2):", "def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s == x def check_fib(num): return isPerfectSquare(5*num*num", "-------------- #Code starts here def compress(word): string = word.lower() res = \"\" count", "+= string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res", "4) check_fib(377) # -------------- #Code starts here def compress(word): string = word.lower() res", "def compress(word): string = word.lower() res = \"\" count = 1 res +=", "here from collections import Counter def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()): return", "Counter def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()): return True return False k_distinct('Messoptamia',8)", "res += str(count) res += string[i+1] count = 1 if(count >= 1): res", "int(math.sqrt(x)) return s*s == x def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num -", "return s*s == x def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4)", "collections import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2)", "Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not", "import Counter def k_distinct(string,k): c = Counter(string.lower()) if k==len(c.keys()): return True return False", "res compress(\"abbs\") # -------------- #Code starts here #Code starts here from collections import", "-------------- #Code starts here import math def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s", 
"-------------- #Code starts here import sys def palindrome(num): numstr = str(num) for i", "res += string[0] for i in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1):", "if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res += str(count) res += string[i+1] count", "== x def check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377) #", "for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123) # -------------- #Code", "if(count >= 1): res += str(count) return res compress(\"abbs\") # -------------- #Code starts", "#Code starts here import math def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s ==", "import Counter def a_scramble(str_1,str_2): list_str1 = Counter(str_1.lower()) print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if", "starts here import math def isPerfectSquare(x): s = int(math.sqrt(x)) return s*s == x", "else: if(count >= 1): res += str(count) res += string[i+1] count = 1", "check_fib(num): return isPerfectSquare(5*num*num + 4)or isPerfectSquare(5*num*num - 4) check_fib(377) # -------------- #Code starts", "# -------------- #Code starts here import sys def palindrome(num): numstr = str(num) for", "list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import", "count+=1 else: if(count >= 1): res += str(count) res += string[i+1] count =", "list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1: return True else: return", "False a_scramble(\"<NAME>\",\"Voldemort\") # -------------- #Code starts here import math def isPerfectSquare(x): s =", ">= 1): res += str(count) return res compress(\"abbs\") # -------------- #Code starts here", "# -------------- #Code starts here #Code starts here from collections import Counter def", "i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]: return i palindrome(123) # -------------- #Code 
starts", "return res compress(\"abbs\") # -------------- #Code starts here #Code starts here from collections", "+= string[i+1] count = 1 if(count >= 1): res += str(count) return res", "s = int(math.sqrt(x)) return s*s == x def check_fib(num): return isPerfectSquare(5*num*num + 4)or", "in range(len(string)-1): if(string[i]==string[i+1]): count+=1 else: if(count >= 1): res += str(count) res +=", "isPerfectSquare(x): s = int(math.sqrt(x)) return s*s == x def check_fib(num): return isPerfectSquare(5*num*num +", "sys def palindrome(num): numstr = str(num) for i in range(num+1,sys.maxsize): if str(i)== str(i)[::-1]:", "print(list_str1) list_str2 = Counter(str_2.lower()) print(list_str2) if not list_str2 - list_str1: return True else:", "starts here import sys def palindrome(num): numstr = str(num) for i in range(num+1,sys.maxsize):", "print(list_str2) if not list_str2 - list_str1: return True else: return False a_scramble(\"<NAME>\",\"Voldemort\") #" ]
[ "07:09:38 PM # Last modified : 2017-01-06 07:34:29 PM # File Name :", "1 else: tmp += inp[i] i += 1 return ret if __name__ ==", "lengthLongestPath(self, inp): tmp = '' stack = [] k = 0 ret =", "n: if inp[i] == '\\n': i += 1 t = 0 while inp[i]", "+= inp[i] i += 1 return ret if __name__ == \"__main__\": s =", "k: stack.append(tmp) else: if len('/'.join(stack)) > ret: ret = len('/'.join(stack)) print '/'.join(stack) stack.pop()", "ret = 0 n = len(inp) i = 0 while i < n:", "tmp += inp[i] i += 1 return ret if __name__ == \"__main__\": s", "1 return ret if __name__ == \"__main__\": s = Solution() inp = \"dir\\n\\tsubdir1\\n\\tsubdir2\\n\\t\\tfile.ext\"", "t += 1 if t != k: stack.append(tmp) else: if len('/'.join(stack)) > ret:", "# File Name : Longest_Absolute_File_Path.py # Desc : class Solution(object): def lengthLongestPath(self, inp):", "# # Author : TangHanYi # E-mail : <EMAIL> # Create Date :", ": TangHanYi # E-mail : <EMAIL> # Create Date : 2017-01-06 07:09:38 PM", "def lengthLongestPath(self, inp): tmp = '' stack = [] k = 0 ret", "k -= 1 tmp = '' k += 1 else: tmp += inp[i]", "= 0 while inp[i] == '\\t': i += 1 t += 1 if", "'\\t': i += 1 t += 1 if t != k: stack.append(tmp) else:", "+= 1 t = 0 while inp[i] == '\\t': i += 1 t", "Date : 2017-01-06 07:09:38 PM # Last modified : 2017-01-06 07:34:29 PM #", "modified : 2017-01-06 07:34:29 PM # File Name : Longest_Absolute_File_Path.py # Desc :", "== '\\n': i += 1 t = 0 while inp[i] == '\\t': i", "stack.append(tmp) else: if len('/'.join(stack)) > ret: ret = len('/'.join(stack)) print '/'.join(stack) stack.pop() k", "> ret: ret = len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1 tmp =", "File Name : Longest_Absolute_File_Path.py # Desc : class Solution(object): def lengthLongestPath(self, inp): tmp", "coding:utf-8 -*- # # Author : TangHanYi # E-mail : <EMAIL> # Create", "07:34:29 PM # File Name : Longest_Absolute_File_Path.py # Desc : class Solution(object): def", "= '' stack = [] k = 0 ret = 0 n =", "1 t += 1 
if t != k: stack.append(tmp) else: if len('/'.join(stack)) >", "len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1 tmp = '' k += 1", "Solution(object): def lengthLongestPath(self, inp): tmp = '' stack = [] k = 0", "return ret if __name__ == \"__main__\": s = Solution() inp = \"dir\\n\\tsubdir1\\n\\tsubdir2\\n\\t\\tfile.ext\" print", "+= 1 return ret if __name__ == \"__main__\": s = Solution() inp =", "ret if __name__ == \"__main__\": s = Solution() inp = \"dir\\n\\tsubdir1\\n\\tsubdir2\\n\\t\\tfile.ext\" print s.lengthLongestPath(inp)", "k = 0 ret = 0 n = len(inp) i = 0 while", ": <EMAIL> # Create Date : 2017-01-06 07:09:38 PM # Last modified :", "0 n = len(inp) i = 0 while i < n: if inp[i]", "= len(inp) i = 0 while i < n: if inp[i] == '\\n':", "else: if len('/'.join(stack)) > ret: ret = len('/'.join(stack)) print '/'.join(stack) stack.pop() k -=", "n = len(inp) i = 0 while i < n: if inp[i] ==", "-*- # # Author : TangHanYi # E-mail : <EMAIL> # Create Date", "# Create Date : 2017-01-06 07:09:38 PM # Last modified : 2017-01-06 07:34:29", "'/'.join(stack) stack.pop() k -= 1 tmp = '' k += 1 else: tmp", "2017-01-06 07:34:29 PM # File Name : Longest_Absolute_File_Path.py # Desc : class Solution(object):", "0 while inp[i] == '\\t': i += 1 t += 1 if t", "# E-mail : <EMAIL> # Create Date : 2017-01-06 07:09:38 PM # Last", "print '/'.join(stack) stack.pop() k -= 1 tmp = '' k += 1 else:", "inp): tmp = '' stack = [] k = 0 ret = 0", "!= k: stack.append(tmp) else: if len('/'.join(stack)) > ret: ret = len('/'.join(stack)) print '/'.join(stack)", "2017-01-06 07:09:38 PM # Last modified : 2017-01-06 07:34:29 PM # File Name", "-= 1 tmp = '' k += 1 else: tmp += inp[i] i", "while i < n: if inp[i] == '\\n': i += 1 t =", "len(inp) i = 0 while i < n: if inp[i] == '\\n': i", "t = 0 while inp[i] == '\\t': i += 1 t += 1", "i += 1 t += 1 if t != k: stack.append(tmp) else: if", "+= 1 t += 1 if t != k: stack.append(tmp) else: if len('/'.join(stack))", "Name : 
Longest_Absolute_File_Path.py # Desc : class Solution(object): def lengthLongestPath(self, inp): tmp =", "Desc : class Solution(object): def lengthLongestPath(self, inp): tmp = '' stack = []", "1 tmp = '' k += 1 else: tmp += inp[i] i +=", "'' stack = [] k = 0 ret = 0 n = len(inp)", "PM # File Name : Longest_Absolute_File_Path.py # Desc : class Solution(object): def lengthLongestPath(self,", "'' k += 1 else: tmp += inp[i] i += 1 return ret", "inp[i] == '\\t': i += 1 t += 1 if t != k:", "if inp[i] == '\\n': i += 1 t = 0 while inp[i] ==", "= '' k += 1 else: tmp += inp[i] i += 1 return", ": 2017-01-06 07:09:38 PM # Last modified : 2017-01-06 07:34:29 PM # File", "-*- coding:utf-8 -*- # # Author : TangHanYi # E-mail : <EMAIL> #", "= 0 while i < n: if inp[i] == '\\n': i += 1", "= len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1 tmp = '' k +=", "# Desc : class Solution(object): def lengthLongestPath(self, inp): tmp = '' stack =", "k += 1 else: tmp += inp[i] i += 1 return ret if", "if t != k: stack.append(tmp) else: if len('/'.join(stack)) > ret: ret = len('/'.join(stack))", "<EMAIL> # Create Date : 2017-01-06 07:09:38 PM # Last modified : 2017-01-06", "= 0 n = len(inp) i = 0 while i < n: if", ": Longest_Absolute_File_Path.py # Desc : class Solution(object): def lengthLongestPath(self, inp): tmp = ''", "Longest_Absolute_File_Path.py # Desc : class Solution(object): def lengthLongestPath(self, inp): tmp = '' stack", "= 0 ret = 0 n = len(inp) i = 0 while i", "1 t = 0 while inp[i] == '\\t': i += 1 t +=", "ret: ret = len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1 tmp = ''", "inp[i] == '\\n': i += 1 t = 0 while inp[i] == '\\t':", "# Last modified : 2017-01-06 07:34:29 PM # File Name : Longest_Absolute_File_Path.py #", "while inp[i] == '\\t': i += 1 t += 1 if t !=", "else: tmp += inp[i] i += 1 return ret if __name__ == \"__main__\":", "Create Date : 2017-01-06 07:09:38 PM # Last modified : 2017-01-06 07:34:29 PM", "if len('/'.join(stack)) > ret: ret = 
len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1", "== '\\t': i += 1 t += 1 if t != k: stack.append(tmp)", "# -*- coding:utf-8 -*- # # Author : TangHanYi # E-mail : <EMAIL>", "1 if t != k: stack.append(tmp) else: if len('/'.join(stack)) > ret: ret =", "i += 1 return ret if __name__ == \"__main__\": s = Solution() inp", "E-mail : <EMAIL> # Create Date : 2017-01-06 07:09:38 PM # Last modified", "ret = len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1 tmp = '' k", "= [] k = 0 ret = 0 n = len(inp) i =", "0 while i < n: if inp[i] == '\\n': i += 1 t", "< n: if inp[i] == '\\n': i += 1 t = 0 while", "stack = [] k = 0 ret = 0 n = len(inp) i", "inp[i] i += 1 return ret if __name__ == \"__main__\": s = Solution()", "stack.pop() k -= 1 tmp = '' k += 1 else: tmp +=", ": 2017-01-06 07:34:29 PM # File Name : Longest_Absolute_File_Path.py # Desc : class", "TangHanYi # E-mail : <EMAIL> # Create Date : 2017-01-06 07:09:38 PM #", "# Author : TangHanYi # E-mail : <EMAIL> # Create Date : 2017-01-06", "0 ret = 0 n = len(inp) i = 0 while i <", "i += 1 t = 0 while inp[i] == '\\t': i += 1", "+= 1 else: tmp += inp[i] i += 1 return ret if __name__", "class Solution(object): def lengthLongestPath(self, inp): tmp = '' stack = [] k =", "PM # Last modified : 2017-01-06 07:34:29 PM # File Name : Longest_Absolute_File_Path.py", "'\\n': i += 1 t = 0 while inp[i] == '\\t': i +=", ": class Solution(object): def lengthLongestPath(self, inp): tmp = '' stack = [] k", "i < n: if inp[i] == '\\n': i += 1 t = 0", "+= 1 if t != k: stack.append(tmp) else: if len('/'.join(stack)) > ret: ret", "t != k: stack.append(tmp) else: if len('/'.join(stack)) > ret: ret = len('/'.join(stack)) print", "tmp = '' k += 1 else: tmp += inp[i] i += 1", "[] k = 0 ret = 0 n = len(inp) i = 0", "tmp = '' stack = [] k = 0 ret = 0 n", "Last modified : 2017-01-06 07:34:29 PM # File Name : Longest_Absolute_File_Path.py # Desc", "i = 0 while i < n: if inp[i] == '\\n': i +=", "Author : TangHanYi # E-mail : 
<EMAIL> # Create Date : 2017-01-06 07:09:38", "len('/'.join(stack)) > ret: ret = len('/'.join(stack)) print '/'.join(stack) stack.pop() k -= 1 tmp" ]
[ "self.__view.reset() def set_init_config(self, x, y) -> None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y)", "import WorkspaceCalculator from workspace_view import WorkspaceView class Workspace: def __init__(self, app_page, room_name, robot_name):", "None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def set_goal_config(self, x, y) -> None:", "workspace_view import WorkspaceView class Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name)", "[] # point -> [0] = x , [1] = y def bind_click_callback(self,", "room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator", "x, y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1]) if self.__goal_config_xy: self.__view.draw_robot(self.__goal_config_xy[0], self.__goal_config_xy[1])", "import WorkspaceView class Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp", "open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy =", "= [] # point -> [0] = x , [1] = y self.__goal_config_xy", "self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] #", "class Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name)", "bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy =", "[] self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset() def set_init_config(self, x, y) ->", "return self.__calculator.is_robot_in_collision(x, 
y) def reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy = []", "= [] self.__view.reset() def set_init_config(self, x, y) -> None: self.__init_config_xy = [x, y]", "open_greyscale_bmp from workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView class Workspace: def __init__(self,", "-> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def", "def is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None:", "x , [1] = y self.current_position_xy = [] # point -> [0] =", "# point -> [0] = x , [1] = y def bind_click_callback(self, action_ref)", "-> [0] = x , [1] = y self.__goal_config_xy = [] # point", "set_init_config(self, x, y) -> None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def set_goal_config(self,", "self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def set_goal_config(self, x, y) -> None: self.__goal_config_xy", "# point -> [0] = x , [1] = y self.current_position_xy = []", "y) def set_goal_config(self, x, y) -> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y)", "y self.current_position_xy = [] # point -> [0] = x , [1] =", "[] self.current_position_xy = [] self.__view.reset() def set_init_config(self, x, y) -> None: self.__init_config_xy =", "-> [0] = x , [1] = y self.current_position_xy = [] # point", "y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None: self.__init_config_xy = []", "[1] = y self.__goal_config_xy = [] # point -> [0] = x ,", "robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] # point -> [0]", "= WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] # point", "open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator = 
WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp,", "= x , [1] = y self.current_position_xy = [] # point -> [0]", "bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x,", "self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset() def set_init_config(self, x,", "def __init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png =", "point -> [0] = x , [1] = y def bind_click_callback(self, action_ref) ->", "[0] = x , [1] = y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref)", "from workspace_view import WorkspaceView class Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp =", "open_image, open_greyscale_bmp from workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView class Workspace: def", "y] self.draw_robot_state(x, y) def draw_robot_state(self, x, y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0],", "self.draw_robot_state(x, y) def draw_robot_state(self, x, y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1])", "self.draw_robot_state(x, y) def set_goal_config(self, x, y) -> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x,", "= open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp)", "x, y) -> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def draw_robot_state(self, x,", "def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool: 
return", "y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool:", "y self.__goal_config_xy = [] # point -> [0] = x , [1] =", "[1] = y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y)", "= [] # point -> [0] = x , [1] = y self.current_position_xy", "y) -> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def draw_robot_state(self, x, y)", "self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def draw_robot_state(self, x, y) -> None: self.__view.reset()", "= [x, y] self.draw_robot_state(x, y) def set_goal_config(self, x, y) -> None: self.__goal_config_xy =", "def set_init_config(self, x, y) -> None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def", "import open_image, open_greyscale_bmp from workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView class Workspace:", "self.current_position_xy = [] # point -> [0] = x , [1] = y", "= [x, y] self.draw_robot_state(x, y) def draw_robot_state(self, x, y) -> None: self.__view.reset() if", "-> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1]) if self.__goal_config_xy: self.__view.draw_robot(self.__goal_config_xy[0], self.__goal_config_xy[1]) self.__view.draw_robot(x, y)", "robot_png = open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png)", ", [1] = y self.__goal_config_xy = [] # point -> [0] = x", "y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1]) if self.__goal_config_xy: self.__view.draw_robot(self.__goal_config_xy[0], self.__goal_config_xy[1]) self.__view.draw_robot(x,", "workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView 
class Workspace: def __init__(self, app_page, room_name,", "WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] # point -> [0] = x ,", "draw_robot_state(self, x, y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1]) if self.__goal_config_xy: self.__view.draw_robot(self.__goal_config_xy[0],", "= [] self.current_position_xy = [] self.__view.reset() def set_init_config(self, x, y) -> None: self.__init_config_xy", "WorkspaceView class Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp =", "[] # point -> [0] = x , [1] = y self.__goal_config_xy =", "room_bmp, robot_png) self.__init_config_xy = [] # point -> [0] = x , [1]", "self.current_position_xy = [] self.__view.reset() def set_init_config(self, x, y) -> None: self.__init_config_xy = [x,", "def reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy = []", "None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self)", "y) def reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy =", ", [1] = y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x,", "= [] # point -> [0] = x , [1] = y def", "point -> [0] = x , [1] = y self.__goal_config_xy = [] #", "self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) ->", "reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset()", "= y self.__goal_config_xy = [] # point -> [0] = x , [1]", "x, y) -> None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def set_goal_config(self, x,", "[x, y] 
self.draw_robot_state(x, y) def draw_robot_state(self, x, y) -> None: self.__view.reset() if self.__init_config_xy:", "point -> [0] = x , [1] = y self.current_position_xy = [] #", "robot_png) self.__init_config_xy = [] # point -> [0] = x , [1] =", "is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None: self.__init_config_xy", "= x , [1] = y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def", "open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view", "def draw_robot_state(self, x, y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1]) if self.__goal_config_xy:", "= y self.current_position_xy = [] # point -> [0] = x , [1]", "from utils import open_image, open_greyscale_bmp from workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView", "WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] # point ->", "'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = []", "[] self.__view.reset() def set_init_config(self, x, y) -> None: self.__init_config_xy = [x, y] self.draw_robot_state(x,", "-> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def draw_robot_state(self, x, y) ->", "room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp,", "x , [1] = y self.__goal_config_xy = [] # point -> [0] =", "robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp, 
robot_bmp) self.__view =", "[x, y] self.draw_robot_state(x, y) def set_goal_config(self, x, y) -> None: self.__goal_config_xy = [x,", "self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] # point -> [0] =", "[] # point -> [0] = x , [1] = y self.current_position_xy =", "= y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) ->", "None: self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset() def set_init_config(self,", "self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset() def set_init_config(self, x, y) -> None:", "-> None: self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset() def", "self.__goal_config_xy = [] # point -> [0] = x , [1] = y", "# point -> [0] = x , [1] = y self.__goal_config_xy = []", "y] self.draw_robot_state(x, y) def set_goal_config(self, x, y) -> None: self.__goal_config_xy = [x, y]", "Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png", "app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png')", "= x , [1] = y self.__goal_config_xy = [] # point -> [0]", "from workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView class Workspace: def __init__(self, app_page,", "robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 'png') self.__calculator =", ", [1] = y self.current_position_xy = [] # point -> [0] = x", "utils import open_image, open_greyscale_bmp from workspace_calc import WorkspaceCalculator from workspace_view import WorkspaceView class", "= open_greyscale_bmp(robot_name) robot_png = open_image(robot_name, 
'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page,", "[0] = x , [1] = y self.__goal_config_xy = [] # point ->", "y) -> None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def set_goal_config(self, x, y)", "x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None: self.__init_config_xy =", "[0] = x , [1] = y self.current_position_xy = [] # point ->", "= [] self.__goal_config_xy = [] self.current_position_xy = [] self.__view.reset() def set_init_config(self, x, y)", "-> None: self.__init_config_xy = [x, y] self.draw_robot_state(x, y) def set_goal_config(self, x, y) ->", "[1] = y self.current_position_xy = [] # point -> [0] = x ,", "-> bool: return self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy", "= open_image(robot_name, 'png') self.__calculator = WorkspaceCalculator(room_bmp, robot_bmp) self.__view = WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy", "= WorkspaceView(app_page, room_bmp, robot_png) self.__init_config_xy = [] # point -> [0] = x", "self.__init_config_xy = [] # point -> [0] = x , [1] = y", "set_goal_config(self, x, y) -> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def draw_robot_state(self,", "x , [1] = y def bind_click_callback(self, action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self,", "self.__calculator.is_robot_in_collision(x, y) def reset(self) -> None: self.__init_config_xy = [] self.__goal_config_xy = [] self.current_position_xy", "action_ref) -> None: self.__view.set_click_callback(action_ref) def is_in_collision(self, x, y) -> bool: return self.__calculator.is_robot_in_collision(x, y)", "-> [0] = x , [1] = y def bind_click_callback(self, action_ref) -> None:", "__init__(self, app_page, room_name, robot_name): room_bmp = open_greyscale_bmp(room_name) robot_bmp = open_greyscale_bmp(robot_name) 
robot_png = open_image(robot_name,", "y) def draw_robot_state(self, x, y) -> None: self.__view.reset() if self.__init_config_xy: self.__view.draw_robot(self.__init_config_xy[0], self.__init_config_xy[1]) if", "WorkspaceCalculator from workspace_view import WorkspaceView class Workspace: def __init__(self, app_page, room_name, robot_name): room_bmp", "None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def draw_robot_state(self, x, y) -> None:", "def set_goal_config(self, x, y) -> None: self.__goal_config_xy = [x, y] self.draw_robot_state(x, y) def" ]
[ "from django.db import migrations def create_premier_tenant(apps, schema_editor): # We can't import the Person", "= Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or more domains for", "historical version. Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN')", "Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or more domains for the", "import os from django.db import migrations def create_premier_tenant(apps, schema_editor): # We can't import", "apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet", "os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or more", "= apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0]", "tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ] operations =", "DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one", "schema_editor): # We can't import the Person model directly as it may be", "tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, )", "= Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = 
Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public,", "be a newer # version than this migration expects. We use the historical", "class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ] operations = [ migrations.RunPython(create_premier_tenant, reverse),", "than this migration expects. We use the historical version. Client = apps.get_model('Customers', 'Client')", "create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ]", "this migration expects. We use the historical version. Client = apps.get_model('Customers', 'Client') Domain", "= Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public,", "Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0],", "Add one or more domains for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True,", "newer # version than this migration expects. We use the historical version. Client", "version. 
Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public", ") return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps,", "the Person model directly as it may be a newer # version than", "apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] #", "def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class", "# Generated by Django 2.2.13 on 2021-06-08 10:08 import os from django.db import", "a newer # version than this migration expects. We use the historical version.", "schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ] operations", "on_trial=False)[0] # Add one or more domains for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS,", ") domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps,", "django.db import migrations def create_premier_tenant(apps, schema_editor): # We can't import the Person model", "model directly as it may be a newer # version than this migration", "domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ] operations = [ migrations.RunPython(create_premier_tenant,", "name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or more domains for the tenant", "migrations def create_premier_tenant(apps, schema_editor): # We can't import the Person model directly as", "one or 
more domains for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, )", "= create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'),", "Person model directly as it may be a newer # version than this", "= os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or", "import migrations def create_premier_tenant(apps, schema_editor): # We can't import the Person model directly", "use the historical version. Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS", "or more domains for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www", "tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete()", "tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies =", "create_premier_tenant(apps, schema_editor): # We can't import the Person model directly as it may", "tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www", "'Client') Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public',", "more domains for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www =", "# Add one or more domains for the tenant 
domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public,", "tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or more domains", "it may be a newer # version than this migration expects. We use", "expects. We use the historical version. Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers',", "2.2.13 on 2021-06-08 10:08 import os from django.db import migrations def create_premier_tenant(apps, schema_editor):", "Generated by Django 2.2.13 on 2021-06-08 10:08 import os from django.db import migrations", "We can't import the Person model directly as it may be a newer", "as it may be a newer # version than this migration expects. We", "We use the historical version. Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain')", "os from django.db import migrations def create_premier_tenant(apps, schema_editor): # We can't import the", "is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def", "domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers',", "domains for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}',", "Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ] operations = [ migrations.RunPython(create_premier_tenant, reverse), ]", "= apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public',", "on 2021-06-08 10:08 import os from django.db import migrations def 
create_premier_tenant(apps, schema_editor): #", "import the Person model directly as it may be a newer # version", "domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return", "Public', paid_until='2200-12-05', on_trial=False)[0] # Add one or more domains for the tenant domaine_seul", "can't import the Person model directly as it may be a newer #", "2021-06-08 10:08 import os from django.db import migrations def create_premier_tenant(apps, schema_editor): # We", "domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor):", "schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies", "version than this migration expects. We use the historical version. Client = apps.get_model('Customers',", "domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [", "the historical version. 
Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS =", "the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False,", "directly as it may be a newer # version than this migration expects.", "'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05', on_trial=False)[0] # Add", "for the tenant domaine_seul = Domain.objects.get_or_create(domain=DNS, tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public,", "Client = apps.get_model('Customers', 'Client') Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public =", "# version than this migration expects. We use the historical version. Client =", "tenant=tenant_public, is_primary=True, ) domaine_www = Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0]", "# We can't import the Person model directly as it may be a", "return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor)", "Django 2.2.13 on 2021-06-08 10:08 import os from django.db import migrations def create_premier_tenant(apps,", "by Django 2.2.13 on 2021-06-08 10:08 import os from django.db import migrations def", "reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration):", "domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) 
tenant_public.delete() domaine_seul.delete() domaine_www.delete()", "domaine_seul.delete() domaine_www.delete() class Migration(migrations.Migration): dependencies = [ ('Customers', '0001_initial'), ] operations = [", "def create_premier_tenant(apps, schema_editor): # We can't import the Person model directly as it", "may be a newer # version than this migration expects. We use the", "domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www = create_premier_tenant(apps, schema_editor) tenant_public.delete() domaine_seul.delete()", "migration expects. We use the historical version. Client = apps.get_model('Customers', 'Client') Domain =", "paid_until='2200-12-05', on_trial=False)[0] # Add one or more domains for the tenant domaine_seul =", "is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul, domaine_www =", "Domain = apps.get_model('Customers', 'Domain') DNS = os.getenv('DOMAIN') tenant_public = Client.objects.get_or_create(schema_name='public', name='Tibillet Public', paid_until='2200-12-05',", "10:08 import os from django.db import migrations def create_premier_tenant(apps, schema_editor): # We can't", "Domain.objects.get_or_create(domain=f'www.{DNS}', tenant=tenant_public, is_primary=False, ) return tenant_public, domaine_seul[0], domaine_www[0] def reverse(apps, schema_editor): tenant_public, domaine_seul," ]
[ "number is 510 501 6227') == '15105016227' assert extract_phone_number('My number is (510) 501-6227.')", "( extract_phone_number, ) def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227'", "from common.input_validation import ( extract_phone_number, ) def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert", "assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is 510", "import ( extract_phone_number, ) def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') ==", "'15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is 510 501 6227') ==", "== '15105016227' assert extract_phone_number('My number is 510 501 6227') == '15105016227' assert extract_phone_number('My", "test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227') == '15105016227'", "'15105016227' assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is", "extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227')", "is 510 501 6227') == '15105016227' assert extract_phone_number('My number is (510) 501-6227.') ==", ") def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227')", "None assert extract_phone_number('5105016227') == '15105016227' assert 
extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227'", "== None assert extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') ==", "== '15105016227' assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number", "common.input_validation import ( extract_phone_number, ) def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227')", "510 501 6227') == '15105016227' assert extract_phone_number('My number is (510) 501-6227.') == '15105016227'", "assert extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert", "assert extract_phone_number('My number is 510 501 6227') == '15105016227' assert extract_phone_number('My number is", "assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is 510 501 6227') == '15105016227'", "extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is 510 501", "extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is 510 501 6227') == '15105016227' assert", "extract_phone_number, ) def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227' assert", "'15105016227' assert extract_phone_number('My number is 510 501 6227') == '15105016227' assert extract_phone_number('My number", "def test_extract_phone_number(): assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227' assert 
extract_phone_number('15105016227') ==", "extract_phone_number('My number is 510 501 6227') == '15105016227' assert extract_phone_number('My number is (510)", "== '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My number is 510 501 6227')", "extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227') == '15105016227' assert extract_phone_number('+15105016227') == '15105016227' assert extract_phone_number('My", "assert extract_phone_number('510501622') == None assert extract_phone_number('5105016227') == '15105016227' assert extract_phone_number('15105016227') == '15105016227' assert" ]
[ "in datasets.DATASETS.values(): dataset = dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None: continue", "= doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4], doc.doc_id) if __name__", "pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1) expected_pages", "= annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self):", "doc in documents: if doc.color_images is not None and doc.gray_images is not None:", "pages_annotated = doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4], doc.doc_id) if", "self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1) expected_pages =", "= dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages", ">= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) ==", "import math import datasets from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for", "x in documents])) for doc in documents: if doc.color_images is not None and", "ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset()", "doc, pages in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) 
num_pages = get_num_pages_in_pdf(filename)", "= dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map is not", "for fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4], doc.doc_id) if __name__ == '__main__':", "= dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename", "continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for", "if pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs =", "doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id", "- doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs -", "ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def", "None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map is not None:", "dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <=", "dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None:", "for doc in documents: if doc.color_images is not None and doc.gray_images is not", "min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in annotations: ann = annotations[doc] 
self.assertEqual(set(ann[\"annotated_pages\"]),", "pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations =", "doc_map = dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map", "<= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT)", "doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map", "set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map", "TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated = dataset.get_annotated_pages_map()", "doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents])) for doc", "== expected_pages) if doc in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig", "from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset", "- doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents])) for", "expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in", "def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated = 
dataset.get_annotated_pages_map() if", "in documents])) for doc in documents: if doc.color_images is not None and doc.gray_images", "if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if", "datasets.DATASETS.values(): dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()),", "get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated", "is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page", "doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()),", "for dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is", "annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages)", "max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages)", "doc in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page", "fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset", "import unittest import math import datasets from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def", "1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = 
min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc", "= dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map()", "= dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map", "dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map is not None:", "dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages", "is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs),", "class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated =", "is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map is", "datasets from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values():", "in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset =", "pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages)", "not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map is not", "dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) 
doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map =", "dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages))", "documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents])) for doc in documents:", "doc.color_images is not None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated =", "self.assertTrue(len(pages) == expected_pages) if doc in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for", "self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4],", "- doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs -", "- 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if", "num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages =", "= dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents =", "test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated", "None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in pages_annotated)", "in datasets.DATASETS.values(): dataset = dataset() all_docs = 
set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs -", "pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc,", "in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >=", "= dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages)", "dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs)", "self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs", "import datasets from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in", "documents])) for doc in documents: if doc.color_images is not None and doc.gray_images is", "in pages) def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() all_docs =", "None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig", "dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations", "pages) def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\")))", "annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), 
pages_annotated.keys()) for doc, pages in pages_annotated.items():", "doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if doc_map is", "is not None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated", "in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in", "in documents: if doc.color_images is not None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(),", "not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in", "if doc in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]:", "dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs", "= dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents])) for doc in documents: if", "expected_pages) if doc in annotations: ann = annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in", "pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages", "unittest import math import datasets from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self):", "pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset =", "filename = pdf_file_map[doc] self.assertTrue(len(pages) <= 
dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) -", "set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for dataset in", "def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map", "math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in annotations: ann", "dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in", "pages in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages", "not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x", "docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename = pdf_file_map[doc]", "self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs", "not None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for", "doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4], doc.doc_id)", "dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents])) for doc in documents: if doc.color_images", "= min(expected_pages, 
dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in annotations: ann = annotations[doc]", "for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for dataset in datasets.DATASETS.values():", "set([x.doc_id for x in documents])) for doc in documents: if doc.color_images is not", "documents: if doc.color_images is not None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys())", "self.assertEqual(all_docs, set([x.doc_id for x in documents])) for doc in documents: if doc.color_images is", "self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents]))", "fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4], doc.doc_id) if __name__ == '__main__': unittest.main()", "None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in", "pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\")", "self.assertTrue(num_pages >= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages)", "annotations[doc] self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for", "= math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in annotations:", "self.assertEqual(set(ann[\"annotated_pages\"]), set(pages)) for fig in ann[\"figures\"]: self.assertTrue(fig.page in pages) def test_consistency(self): for 
dataset", "dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename =", "is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for", "dataset in datasets.DATASETS.values(): dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs", "self.assertTrue(fig.page in pages) def test_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() all_docs", "doc_map = dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents", "0) doc_map = dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0)", "import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset()", "None: continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\") docs = dataset.get_doc_ids(\"all\") self.assertEqual(set(docs), pages_annotated.keys())", "self.assertEqual(set(docs), pages_annotated.keys()) for doc, pages in pages_annotated.items(): filename = pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE)", "doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in doc.figures:", "test_consistency(self): for dataset in datasets.DATASETS.values(): dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map =", "0) doc_map = dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0)", "get_num_pages_in_pdf(filename) 
self.assertTrue(num_pages >= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE)", "= dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map = dataset.get_pdf_file_map() annotations = dataset.get_annotations(\"all\")", "dataset = dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map =", "math import datasets from pdffigures_utils import get_num_pages_in_pdf class TestDataset(unittest.TestCase): def test_pages_annotated_consistency(self): for dataset", "doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()),", "and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated = doc.pages_annotated for fig in", "= set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map() if", "= dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map =", "doc.pages_annotated for fig in doc.figures: self.assertTrue(fig.page in pages_annotated) self.assertEqual(doc.pdffile.split(\"/\")[-1][:-4], doc.doc_id) if __name__ ==", "all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_color_image_file_map()", "0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs, set([x.doc_id for x in documents])) for doc in", "datasets.DATASETS.values(): dataset = dataset() pages_annotated = dataset.get_annotated_pages_map() if pages_annotated is None: continue pdf_file_map", "for doc, pages in pages_annotated.items(): filename = pdf_file_map[doc] 
self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages =", "dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map() self.assertEqual(len(all_docs - doc_map.keys()), 0)", "for x in documents])) for doc in documents: if doc.color_images is not None", "if doc.color_images is not None and doc.gray_images is not None: self.assertEqual(doc.gray_images.keys(), doc.color_images.keys()) pages_annotated", "if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) documents = dataset.load_doc_ids(all_docs) self.assertEqual(all_docs,", "expected_pages = min(expected_pages, dataset.MAX_PAGES_TO_ANNOTATE) self.assertTrue(len(pages) == expected_pages) if doc in annotations: ann =", "for dataset in datasets.DATASETS.values(): dataset = dataset() all_docs = set(dataset.get_doc_ids(datasets.DatasetPartition(\"all\"))) doc_map = dataset.get_pdf_file_map()", "= pdf_file_map[doc] self.assertTrue(len(pages) <= dataset.MAX_PAGES_TO_ANNOTATE) num_pages = get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1)", "= get_num_pages_in_pdf(filename) self.assertTrue(num_pages >= max(pages) - 1) expected_pages = math.ceil(num_pages*dataset.PAGE_SAMPLE_PERCENT) expected_pages = min(expected_pages,", "dataset.get_color_image_file_map() if doc_map is not None: self.assertEqual(len(all_docs - doc_map.keys()), 0) doc_map = dataset.get_gray_image_file_map()" ]
[ "self.naked() def _setItem(self, key, value): self.naked()[key] = list(value) def _getItem(self, key): return self.naked()[key]", "RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self):", "= defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return", "from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self): return", "defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items()", "return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self, key):", "def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self, key): return key", "return key in self.naked() def _setItem(self, key, value): self.naked()[key] = list(value) def _getItem(self,", "def _contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] =", "BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self):", "fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): 
wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\")", "wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self):", "key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] = list(value) def", "def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def", "def _setItem(self, key, value): self.naked()[key] = list(value) def _getItem(self, key): return self.naked()[key] def", "key, value): self.naked()[key] = list(value) def _getItem(self, key): return self.naked()[key] def _delItem(self, key):", "self.naked()[key] = list(value) def _getItem(self, key): return self.naked()[key] def _delItem(self, key): del self.naked()[key]", "BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def", "key in self.naked() def _setItem(self, key, value): self.naked()[key] = list(value) def _getItem(self, key):", "import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def", "def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self,", "value): self.naked()[key] = list(value) def _getItem(self, key): return self.naked()[key] def _delItem(self, key): del", "self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def 
_get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self, key): return", "return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked()", "self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked() def", "in self.naked() def _setItem(self, key, value): self.naked()[key] = list(value) def _getItem(self, key): return", "return self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self, key, value):", "_get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self, key): return key in", "_contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] = list(value)", "fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups", "defcon from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass", "from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass =", "_get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\") def _items(self): return self.naked().items() def _contains(self,", "self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key]", "_items(self): return self.naked().items() def 
_contains(self, key): return key in self.naked() def _setItem(self, key,", "_setItem(self, key, value): self.naked()[key] = list(value) def _getItem(self, key): return self.naked()[key] def _delItem(self,", "RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide2Groups\")", "import defcon from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups):", "import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def", "class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_side1KerningGroups(self): return self.naked().getRepresentation(\"defcon.groups.kerningSide1Groups\") def _get_side2KerningGroups(self): return" ]
[ "s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images',", "except botocore.exceptions.ClientError as e: # print('The bucket does not found') # print(e) #", "in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') #", "重複していなければ追加。 # [読み込み] # ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) #", "= s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) # response", "= s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') # response = s3.create_bucket( #", "# print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...')", "# response = None # response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try:", "from datetime import datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3')", "# response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images',", "# print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # )", "# [読み込み] # ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response", "if isExistBucketFor(bucketName): # else: # print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') #", "s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response", "boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新", "# Bucket='bun-chan-bot-images', 
# CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) # response = None", "# # response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as e: #", "response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') #", "bucket in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found')", "import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def main(): # [追加する時]", "= \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg') uploader.upload() if __name__ == '__main__': main()", "else: # print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create", "'ap-northeast-1'} # ) # print(response) # response = None # response = s3.list_buckets()", "import io from datetime import datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3", "s3 = boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック", "s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name')", "Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} #", "response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images'", "bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') # if isExistBucketFor(bucketName): # else: # print('Delet", "# response = s3.list_buckets() # # 
指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images')", "s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def main(): # [追加する時] #", "# if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') # if isExistBucketFor(bucketName): # else:", ") bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg') uploader.upload()", "= s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint':", "[追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み]", "# print('The bucket does not found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') #", "= boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック #", "s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'}", "ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket( #", "# ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket(", "print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') # if isExistBucketFor(bucketName): #", "objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg') uploader.upload() if __name__ == '__main__':", "coding: utf-8 -*- import botocore import boto3 import io from datetime import datetime", "s3.head_bucket(Bucket='bun-chan-bot-images') # # response = 
s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as e:", "botocore.exceptions.ClientError as e: # print('The bucket does not found') # print(e) # response", "print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in response['Buckets']: #", "bucket does not found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) #", "# -*- coding: utf-8 -*- import botocore import boto3 import io from datetime", "s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as e: # print('The bucket does not", "# print(response) # response = None # response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。", "= s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # #", "# ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む # response =", "print(response) # except botocore.exceptions.ClientError as e: # print('The bucket does not found') #", "# バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] #", "print('The bucket does not found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response)", "# print(response) # except botocore.exceptions.ClientError as e: # print('The bucket does not found')", "does not found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for", "e: # print('The bucket does not found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images')", "response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images', #", "for bucket in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': # print('Not", "Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = 
boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成 #", "print(response) # print('Create bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'}", "バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む", "# try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response)", "bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') # response", "response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') # if", "= 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg') uploader.upload() if __name__", "# CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader =", "# print('Not Found') # if isExistBucketFor(bucketName): # else: # print('Delet bucket...') # response", "= s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in response['Buckets']: # print(bucket.get('Name')) # if", "# print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') # if isExistBucketFor(bucketName):", "'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName,", "if bucket.get('Name') != 'bun-chan-bot-images': # print('Not Found') # if isExistBucketFor(bucketName): # else: #", "import datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = 
boto3.client('s3') def main():", "datetime import datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def", "# 重複していなければ追加。 # [読み込み] # ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response)", "Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) # response = None #", "import boto3 import io from datetime import datetime import s3Uploader # Refs :", "# ) # print(response) # response = None # response = s3.list_buckets() #", "response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) #", "def main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 #", "# response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') # response =", "response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError", "Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader", "main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。", ": https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。", "[読み込み] # ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # response =", "-*- import botocore import boto3 import io from datetime import datetime import s3Uploader", "datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def main(): #", "# response = 
s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except", "not found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket", "# response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName =", "Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images', #", "print('Create bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # )", "s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName =", "response = None # response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: #", "botocore import boto3 import io from datetime import datetime import s3Uploader # Refs", "指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java') #", "# print(response) # print('Create bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint':", "# response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as e: # print('The", "utf-8 -*- import botocore import boto3 import io from datetime import datetime import", "import botocore import boto3 import io from datetime import datetime import s3Uploader #", "CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName,", 
"CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) # response = None # response =", "# [追加する時] # バケットがなければ作成 # あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 #", "# print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in response['Buckets']:", "# print('Create bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} #", "as e: # print('The bucket does not found') # print(e) # response =", "# print(response) # for bucket in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') !=", "print('Not Found') # if isExistBucketFor(bucketName): # else: # print('Delet bucket...') # response =", "try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) #", "io from datetime import datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 =", "isExistBucketFor(bucketName): # else: # print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response)", "# response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in response['Buckets']: # print(bucket.get('Name'))", "# 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java')", "print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') #", "# Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成", "# CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) # response = None # response", "bucketName = 'bun-chan-bot-images' objectName = 
\"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg') uploader.upload() if", "response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) # print('Create bucket...') # response = s3.create_bucket(", "重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images') #", "= s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as e: # print('The bucket does", "-*- coding: utf-8 -*- import botocore import boto3 import io from datetime import", "= None # response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response", "# if isExistBucketFor(bucketName): # else: # print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images')", "print(response) # for bucket in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images':", "found') # print(e) # response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in", "= s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName", "# for bucket in response['Buckets']: # print(bucket.get('Name')) # if bucket.get('Name') != 'bun-chan-bot-images': #", "# else: # print('Delet bucket...') # response = s3.delete_bucket( Bucket='bun-chan-bot-images') # print(response) #", "bucket...') # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName", "https://boto3.readthedocs.io/en/latest/reference/services/s3.html s3 = boto3.client('s3') def main(): # [追加する時] # バケットがなければ作成 # あればそれを使う。 #", "# except botocore.exceptions.ClientError as e: # print('The bucket does not found') # print(e)", "'bun-chan-bot-images': # print('Not Found') # if 
isExistBucketFor(bucketName): # else: # print('Delet bucket...') #", "# 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む # response = s3.delete_bucket( Bucket='bun-chan-bot-images')", "# # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response = s3.head_bucket(Bucket='bun-chan-bot-images') # # response =", "# Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\"))", "print(response) # response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) #", "'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg') uploader.upload() if __name__ ==", "print(response) # response = None # response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 #", ") # print(response) # response = None # response = s3.list_buckets() # #", "response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as e: # print('The bucket", "= s3.head_bucket(Bucket='bun-chan-bot-images') # # response = s3.head_bucket(Bucket='test-lambda-on-java') # print(response) # except botocore.exceptions.ClientError as", "あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む # response", "response = s3.head_bucket(Bucket='bun-chan-bot-images') # print(response) # for bucket in response['Buckets']: # print(bucket.get('Name')) #", "# ) bucketName = 'bun-chan-bot-images' objectName = \"image_{name}.jpg\".format(name=datetime.now().strftime(\"%Y%m%d_%H%M%S\")) uploader = s3Uploader.s3Uploader(bucketName, objectName, './image.jpg')", "s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response) # 
response =", "!= 'bun-chan-bot-images': # print('Not Found') # if isExistBucketFor(bucketName): # else: # print('Delet bucket...')", "Found') # if isExistBucketFor(bucketName): # else: # print('Delet bucket...') # response = s3.delete_bucket(", "ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む # response = s3.delete_bucket(", "# あればそれを使う。 # ファイルの重複チェック # 重複していれば、削除し更新 # 重複していなければ追加。 # [読み込み] # ファイルを読み込む #", "boto3 import io from datetime import datetime import s3Uploader # Refs : https://boto3.readthedocs.io/en/latest/reference/services/s3.html", "# response = s3.create_bucket( # Bucket='bun-chan-bot-images', # CreateBucketConfiguration={'LocationConstraint': 'ap-northeast-1'} # ) # print(response)", "None # response = s3.list_buckets() # # 指定したBucketが存在しなければ例外発生する。確認用に使える。 # try: # response =" ]
[ "1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2,", "candidate in ranking] ground_truth = [ 2, 3, 4, 13, 6, 7, 15,", "django.test import TestCase from majority_judgment.tools import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures =", "class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self): def test_ranking(self): election_id = 2", "1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2 ranking = get_ranking(election_id) #", "ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in ranking] ground_truth = [0,", "16, 5, 11, 17, 10, 1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id =", "import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self): def", "for candidate in ranking] ground_truth = [ 2, 3, 4, 13, 6, 7,", "['election.json'] # def setUp(self): def test_ranking(self): election_id = 2 ranking = get_ranking(election_id) ranking", "0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2,", "def test_majority_grade(self): election_id = 2 ranking = get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades", "3, 4, 13, 6, 7, 15, 14, 8, 12, 16, 5, 11, 17,", "MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self): def test_ranking(self): election_id = 2 ranking", "= ['election.json'] # def setUp(self): def test_ranking(self): election_id = 2 ranking = get_ranking(election_id)", "1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2] self.assertEqual(majority_grades, ground_truth)", "ranking] ground_truth = [ 2, 3, 4, 13, 6, 7, 15, 14, 8,", "# ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in ranking] ground_truth =", "10, 1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2 ranking = 
get_ranking(election_id)", "ground_truth = [ 2, 3, 4, 13, 6, 7, 15, 14, 8, 12,", "majority_grades = [majority_grade(candidate.ratings) for candidate in ranking] ground_truth = [0, 0, 0, 1,", "12, 16, 5, 11, 17, 10, 1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id", "TestCase from majority_judgment.tools import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] #", "2 ranking = get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate", "= [candidate.pk for candidate in ranking] ground_truth = [ 2, 3, 4, 13,", "[0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2,", "setUp(self): def test_ranking(self): election_id = 2 ranking = get_ranking(election_id) ranking = [candidate.pk for", "in ranking] ground_truth = [0, 0, 0, 1, 1, 1, 1, 2, 2,", "for candidate in ranking] ground_truth = [0, 0, 0, 1, 1, 1, 1,", "[candidate.pk for candidate in ranking] ground_truth = [ 2, 3, 4, 13, 6,", "from django.test import TestCase from majority_judgment.tools import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures", "get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self): def test_ranking(self):", "2, 3, 4, 13, 6, 7, 15, 14, 8, 12, 16, 5, 11,", "[ 2, 3, 4, 13, 6, 7, 15, 14, 8, 12, 16, 5,", "test_majority_grade(self): election_id = 2 ranking = get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades =", "import TestCase from majority_judgment.tools import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json']", "= [majority_grade(candidate.ratings) for candidate in ranking] ground_truth = [0, 0, 0, 1, 1,", "ranking = get_ranking(election_id) ranking = [candidate.pk for candidate in ranking] ground_truth = [", "ground_truth) def 
test_majority_grade(self): election_id = 2 ranking = get_ranking(election_id) # ratings = get_ratings(election_id)", "ranking] ground_truth = [0, 0, 0, 1, 1, 1, 1, 2, 2, 2,", "get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self): def test_ranking(self): election_id", "= get_ranking(election_id) ranking = [candidate.pk for candidate in ranking] ground_truth = [ 2,", "def test_ranking(self): election_id = 2 ranking = get_ranking(election_id) ranking = [candidate.pk for candidate", "from majority_judgment.tools import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def", "= [ 2, 3, 4, 13, 6, 7, 15, 14, 8, 12, 16,", "= 2 ranking = get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for", "11, 17, 10, 1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2 ranking", "4, 13, 6, 7, 15, 14, 8, 12, 16, 5, 11, 17, 10,", "= [0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2,", "majority_judgment.tools import get_ranking, get_ratings, majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self):", "fixtures = ['election.json'] # def setUp(self): def test_ranking(self): election_id = 2 ranking =", "15, 14, 8, 12, 16, 5, 11, 17, 10, 1, 9] self.assertEqual(ranking, ground_truth)", "= get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in ranking]", "get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in ranking] ground_truth", "5, 11, 17, 10, 1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2", "17, 10, 1, 9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2 ranking =", 
"[majority_grade(candidate.ratings) for candidate in ranking] ground_truth = [0, 0, 0, 1, 1, 1,", "1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]", "ranking = [candidate.pk for candidate in ranking] ground_truth = [ 2, 3, 4,", "# def setUp(self): def test_ranking(self): election_id = 2 ranking = get_ranking(election_id) ranking =", "ranking = get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in", "in ranking] ground_truth = [ 2, 3, 4, 13, 6, 7, 15, 14,", "0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2,", "2 ranking = get_ranking(election_id) ranking = [candidate.pk for candidate in ranking] ground_truth =", "1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2] self.assertEqual(majority_grades,", "get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in ranking] ground_truth = [0, 0, 0,", "candidate in ranking] ground_truth = [0, 0, 0, 1, 1, 1, 1, 2,", "self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2 ranking = get_ranking(election_id) # ratings =", "9] self.assertEqual(ranking, ground_truth) def test_majority_grade(self): election_id = 2 ranking = get_ranking(election_id) # ratings", "election_id = 2 ranking = get_ranking(election_id) ranking = [candidate.pk for candidate in ranking]", "test_ranking(self): election_id = 2 ranking = get_ranking(election_id) ranking = [candidate.pk for candidate in", "= 2 ranking = get_ranking(election_id) ranking = [candidate.pk for candidate in ranking] ground_truth", "election_id = 2 ranking = get_ranking(election_id) # ratings = get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings)", "get_ranking(election_id) ranking = [candidate.pk for candidate in ranking] ground_truth = [ 2, 3,", "def setUp(self): def test_ranking(self): election_id = 2 ranking = get_ranking(election_id) ranking = [candidate.pk", "13, 6, 7, 15, 14, 8, 12, 16, 5, 11, 17, 10, 1,", "8, 12, 16, 5, 11, 17, 10, 1, 9] 
self.assertEqual(ranking, ground_truth) def test_majority_grade(self):", "majority_grade class MajorityJudgmentTestCase(TestCase): fixtures = ['election.json'] # def setUp(self): def test_ranking(self): election_id =", "7, 15, 14, 8, 12, 16, 5, 11, 17, 10, 1, 9] self.assertEqual(ranking,", "14, 8, 12, 16, 5, 11, 17, 10, 1, 9] self.assertEqual(ranking, ground_truth) def", "ground_truth = [0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,", "= get_ratings(election_id) majority_grades = [majority_grade(candidate.ratings) for candidate in ranking] ground_truth = [0, 0,", "6, 7, 15, 14, 8, 12, 16, 5, 11, 17, 10, 1, 9]" ]
[ ".guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator import PointGenerator from .point_target import point_target __all__", "import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator import", "point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", \"ga_loc_target\", \"ga_shape_target\", \"PointGenerator\", \"point_target\", \"images_to_levels\", \"unmap\",", "images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator import PointGenerator from .point_target", "__all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", \"ga_loc_target\", \"ga_shape_target\", \"PointGenerator\", \"point_target\", \"images_to_levels\", \"unmap\", ]", "import PointGenerator from .point_target import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", \"ga_loc_target\",", "from .point_generator import PointGenerator from .point_target import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\",", "AnchorGenerator from .anchor_target import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target", "from .point_target import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", \"ga_loc_target\", \"ga_shape_target\", \"PointGenerator\",", ".point_target import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", \"ga_loc_target\", \"ga_shape_target\", \"PointGenerator\", \"point_target\",", "from .anchor_target import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from", "anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator 
import PointGenerator from", "from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator import PointGenerator from .point_target import point_target", "ga_loc_target, ga_shape_target from .point_generator import PointGenerator from .point_target import point_target __all__ = [", "ga_shape_target from .point_generator import PointGenerator from .point_target import point_target __all__ = [ \"AnchorGenerator\",", "from .anchor_generator import AnchorGenerator from .anchor_target import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target", "anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator import PointGenerator", ".anchor_target import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator", ".point_generator import PointGenerator from .point_target import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\",", "PointGenerator from .point_target import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", \"ga_loc_target\", \"ga_shape_target\",", "unmap from .guided_anchor_target import ga_loc_target, ga_shape_target from .point_generator import PointGenerator from .point_target import", "import ga_loc_target, ga_shape_target from .point_generator import PointGenerator from .point_target import point_target __all__ =", ".anchor_generator import AnchorGenerator from .anchor_target import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import", "import AnchorGenerator from .anchor_target import anchor_inside_flags, anchor_target, images_to_levels, unmap from .guided_anchor_target import ga_loc_target,", "import point_target __all__ = [ \"AnchorGenerator\", \"anchor_target\", \"anchor_inside_flags\", 
\"ga_loc_target\", \"ga_shape_target\", \"PointGenerator\", \"point_target\", \"images_to_levels\"," ]
[ "int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A,", "a column number.\"\"\" if len(colcode) == 0: return 0 else: return (ord(colcode[-1]) -", "rownum = int(row.r) if rownum > lastrownum + 1: # interpolate missing rows", "cell value looked-up from shared strings line.append(str( '' if c.v is None or", "line.append(str( '' if c.v is None or c.v == 'd' else strings[int(c.v)].t ))", "columns line.extend([''] * (colnum - lastcolnum - 1)) lastcolnum = colnum # add", "Excel style column ids (A, BB, XFD, ...) to a column number.\"\"\" if", "extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into lines; cell values are given as", "row values and return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract", "workbook file. .xlsx files are actually zip files containing xml files. Returns a", "= [] lastcolnum = 0 for c in cells: # get column number", "- 1)) lastrownum = rownum cells = row.c line = [] lastcolnum =", "def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into lines; cell values are given", "output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A, BB, XFD, ...) to", "if colnum > lastcolnum + 1: # interpolate missing columns line.extend([''] * (colnum", "'' if c.v is None or c.v == 'd' else strings[int(c.v)].t )) lines.append(line)", "elements, whose string-value # is found in the node's .t element. 
try: stringdata", "lines = [] lastrownum = 0 for row in sheetrows: rownum = int(row.r)", "numbered starting from 1 in xlsx files # Get cell data from specified", "> lastrownum + 1: # interpolate missing rows lines.extend([[]] * (rownum - lastrownum", "file\") # Map strings to row values and return result return extract_xlsx_lines(rows, strings)", "FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified sheet in Excel 2007", "return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A, BB, XFD, ...)", "in the node's .t element. Returns 2d list of strings (cell values). \"\"\"", "> lastcolnum + 1: # interpolate missing columns line.extend([''] * (colnum - lastcolnum", "zipfile from xml2obj import xml2obj from errors import FileError def read_xlsx_file(filename, sheetid): \"\"\"", "in Excel 2007 (.xlsx) workbook file. .xlsx files are actually zip files containing", "= int(row.r) if rownum > lastrownum + 1: # interpolate missing rows lines.extend([[]]", "sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row except: raise", "= row.c line = [] lastcolnum = 0 for c in cells: #", "FileError(\"Could not read xlsx file %s, worksheet id %s\" % ( filename, sheetid", "raise FileError(\"Could not read xlsx file %s, worksheet id %s\" % ( filename,", "files. Returns a 2d list of cell values. 
\"\"\" if sheetid is None:", "= [] lastrownum = 0 for row in sheetrows: rownum = int(row.r) if", "xml.sheetData.row except: raise FileError(\"Could not read xlsx file %s, worksheet id %s\" %", "index # references into sharedStrings.xml:ssi.si elements, whose string-value # is found in the", "try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows", "rows lines.extend([[]] * (rownum - lastrownum - 1)) lastrownum = rownum cells =", "line.extend([''] * (colnum - lastcolnum - 1)) lastcolnum = colnum # add cell", "else: sheetid += 1 # sheets are numbered starting from 1 in xlsx", "of xlsx file\") # Map strings to row values and return result return", "except: raise FileError(\"Could not open '%s' for sheet listing.\" % filename) output =", "files # Get cell data from specified worksheet. try: zf = zipfile.ZipFile(filename) sheetdata", "- 1)) # Get shared strings xml. Cell values are given as ordinal", "column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum > lastcolnum", "= colcode_to_colnum(colcode) if colnum > lastcolnum + 1: # interpolate missing columns line.extend(['']", "of specified sheet in Excel 2007 (.xlsx) workbook file. .xlsx files are actually", "a list of sheets and their ids from xlsx file.\"\"\" try: zf =", "BB, XFD, ...) to a column number.\"\"\" if len(colcode) == 0: return 0", "- lastcolnum - 1)) lastcolnum = colnum # add cell value looked-up from", "output = [] for sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id) if not", "open '%s' for sheet listing.\" % filename) output = [] for sheet in", "ordinal index # references into sharedStrings.xml:ssi.si elements, whose string-value # is found in", "def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and their ids from xlsx file.\"\"\"", "style column ids (A, BB, XFD, ...) 
to a column number.\"\"\" if len(colcode)", "read xlsx file %s, worksheet id %s\" % ( filename, sheetid - 1))", "into sharedStrings.xml:ssi.si elements, whose string-value # is found in the node's .t element.", "id %s\" % ( filename, sheetid - 1)) # Get shared strings xml.", "row.c line = [] lastcolnum = 0 for c in cells: # get", "list of cell values. \"\"\" if sheetid is None: sheetid = 1 else:", "strings): \"\"\" Extract cell values into lines; cell values are given as ordinal", "sheets are numbered starting from 1 in xlsx files # Get cell data", ".xlsx files are actually zip files containing xml files. Returns a 2d list", "= colnum # add cell value looked-up from shared strings line.append(str( '' if", "file. .xlsx files are actually zip files containing xml files. Returns a 2d", "get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum >", "shared strings xml. Cell values are given as ordinal index # references into", "not read xlsx file %s, worksheet id %s\" % ( filename, sheetid -", "stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could not", "extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into lines; cell values", "cell data from specified worksheet. try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' %", "zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could not parse sharedStrings.xml", "def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A, BB, XFD, ...) to a", "row in sheetrows: rownum = int(row.r) if rownum > lastrownum + 1: #", "are given as ordinal index references into sharedStrings.xml:ssi.si elements, whose string-value is found", "is None or c.v == 'd' else strings[int(c.v)].t )) lines.append(line) return lines def", "is found in the node's .t element. 
try: stringdata = zf.read('xl/sharedStrings.xml') xml =", "parsing .xlsx format blueprints.\"\"\" import re import zipfile from xml2obj import xml2obj from", "None or c.v == 'd' else strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename):", "raise FileError(\"Could not read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return", "number.\"\"\" if len(colcode) == 0: return 0 else: return (ord(colcode[-1]) - ord('A') +", "( filename, sheetid - 1)) # Get shared strings xml. Cell values are", "found in the node's .t element. try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata)", "return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into lines; cell", "re import zipfile from xml2obj import xml2obj from errors import FileError def read_xlsx_file(filename,", "add cell value looked-up from shared strings line.append(str( '' if c.v is None", "values into lines; cell values are given as ordinal index references into sharedStrings.xml:ssi.si", "sheet in Excel 2007 (.xlsx) workbook file. .xlsx files are actually zip files", "1)) # Get shared strings xml. Cell values are given as ordinal index", "from shared strings line.append(str( '' if c.v is None or c.v == 'd'", "[] lastrownum = 0 for row in sheetrows: rownum = int(row.r) if rownum", "in sheetrows: rownum = int(row.r) if rownum > lastrownum + 1: # interpolate", "rownum cells = row.c line = [] lastcolnum = 0 for c in", "strings = xml.si except: raise FileError(\"Could not parse sharedStrings.xml of xlsx file\") #", "file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets =", "specified worksheet. 
try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml =", "xml = xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could not read xlsx file", "\"\"\" if sheetid is None: sheetid = 1 else: sheetid += 1 #", "1)) lastrownum = rownum cells = row.c line = [] lastcolnum = 0", "references into sharedStrings.xml:ssi.si elements, whose string-value is found in the node's .t element.", "lastcolnum = colnum # add cell value looked-up from shared strings line.append(str( ''", "re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum > lastcolnum + 1: # interpolate", "or c.v == 'd' else strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get", "rownum > lastrownum + 1: # interpolate missing rows lines.extend([[]] * (rownum -", "contents of specified sheet in Excel 2007 (.xlsx) workbook file. .xlsx files are", "are actually zip files containing xml files. Returns a 2d list of cell", "%s, worksheet id %s\" % ( filename, sheetid - 1)) # Get shared", "Extract cell values into lines; cell values are given as ordinal index references", "given as ordinal index references into sharedStrings.xml:ssi.si elements, whose string-value is found in", "interpolate missing rows lines.extend([[]] * (rownum - lastrownum - 1)) lastrownum = rownum", "'d' else strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of", "into sharedStrings.xml:ssi.si elements, whose string-value is found in the node's .t element. 
Returns", "import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified sheet in Excel", "read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified sheet in Excel 2007 (.xlsx) workbook", "import re import zipfile from xml2obj import xml2obj from errors import FileError def", "return 0 else: return (ord(colcode[-1]) - ord('A') + 1) + \\ (26 *", "node's .t element. try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si", "import zipfile from xml2obj import xml2obj from errors import FileError def read_xlsx_file(filename, sheetid):", "filename, sheetid - 1)) # Get shared strings xml. Cell values are given", "references into sharedStrings.xml:ssi.si elements, whose string-value # is found in the node's .t", "sheetid is None: sheetid = 1 else: sheetid += 1 # sheets are", "= rownum cells = row.c line = [] lastcolnum = 0 for c", "zip files containing xml files. Returns a 2d list of cell values. \"\"\"", "for c in cells: # get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum", "m: raise FileError(\"Could not read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1))", "sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could not", "\"\"\"Convert Excel style column ids (A, BB, XFD, ...) to a column number.\"\"\"", "+= 1 # sheets are numbered starting from 1 in xlsx files #", "from errors import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified sheet", "filename) output = [] for sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id) if", "...) to a column number.\"\"\" if len(colcode) == 0: return 0 else: return", "0: return 0 else: return (ord(colcode[-1]) - ord('A') + 1) + \\ (26", "sheetid - 1)) # Get shared strings xml. 
Cell values are given as", "= zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could not parse", "rows = xml.sheetData.row except: raise FileError(\"Could not read xlsx file %s, worksheet id", "cell values are given as ordinal index references into sharedStrings.xml:ssi.si elements, whose string-value", "in cells: # get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode)", "result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into lines;", "\"\"\" Read contents of specified sheet in Excel 2007 (.xlsx) workbook file. .xlsx", "# get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum", "sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could not read xlsx", "= xml.sheets.sheet except: raise FileError(\"Could not open '%s' for sheet listing.\" % filename)", "(cell values). \"\"\" lines = [] lastrownum = 0 for row in sheetrows:", "re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could not read list of xlsx's worksheets.\")", "sheetid = 1 else: sheetid += 1 # sheets are numbered starting from", "xml.sheets.sheet except: raise FileError(\"Could not open '%s' for sheet listing.\" % filename) output", "xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could not open '%s' for", "read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and their ids from xlsx file.\"\"\" try:", "read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode):", "the node's .t element. Returns 2d list of strings (cell values). 
\"\"\" lines", "zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could not open '%s'", "c.v is None or c.v == 'd' else strings[int(c.v)].t )) lines.append(line) return lines", "# Get cell data from specified worksheet. try: zf = zipfile.ZipFile(filename) sheetdata =", "lines; cell values are given as ordinal index references into sharedStrings.xml:ssi.si elements, whose", "missing columns line.extend([''] * (colnum - lastcolnum - 1)) lastcolnum = colnum #", "try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet", "for sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could", "strings xml. Cell values are given as ordinal index # references into sharedStrings.xml:ssi.si", "lastrownum + 1: # interpolate missing rows lines.extend([[]] * (rownum - lastrownum -", "None: sheetid = 1 else: sheetid += 1 # sheets are numbered starting", "# interpolate missing rows lines.extend([[]] * (rownum - lastrownum - 1)) lastrownum =", "Returns 2d list of strings (cell values). \"\"\" lines = [] lastrownum =", "is found in the node's .t element. Returns 2d list of strings (cell", "# Get shared strings xml. Cell values are given as ordinal index #", "column number.\"\"\" if len(colcode) == 0: return 0 else: return (ord(colcode[-1]) - ord('A')", "list of strings (cell values). 
\"\"\" lines = [] lastrownum = 0 for", "= 0 for c in cells: # get column number colcode = re.match('^([A-Z]+)',", "sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could not", "ordinal index references into sharedStrings.xml:ssi.si elements, whose string-value is found in the node's", "lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and their ids from xlsx", "if sheetid is None: sheetid = 1 else: sheetid += 1 # sheets", "format blueprints.\"\"\" import re import zipfile from xml2obj import xml2obj from errors import", "worksheet. try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata)", "\"\"\" Extract cell values into lines; cell values are given as ordinal index", "from 1 in xlsx files # Get cell data from specified worksheet. try:", "values. \"\"\" if sheetid is None: sheetid = 1 else: sheetid += 1", "colcode_to_colnum(colcode) if colnum > lastcolnum + 1: # interpolate missing columns line.extend([''] *", "starting from 1 in xlsx files # Get cell data from specified worksheet.", "sharedStrings.xml:ssi.si elements, whose string-value is found in the node's .t element. 
Returns 2d", "colnum = colcode_to_colnum(colcode) if colnum > lastcolnum + 1: # interpolate missing columns", "except: raise FileError(\"Could not parse sharedStrings.xml of xlsx file\") # Map strings to", "int(row.r) if rownum > lastrownum + 1: # interpolate missing rows lines.extend([[]] *", "worksheet id %s\" % ( filename, sheetid - 1)) # Get shared strings", "m = re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could not read list of", "xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could not read xlsx file %s, worksheet", "if c.v is None or c.v == 'd' else strings[int(c.v)].t )) lines.append(line) return", "output.append((sheet.name, int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids", "lastrownum = rownum cells = row.c line = [] lastcolnum = 0 for", "line = [] lastcolnum = 0 for c in cells: # get column", "sheet listing.\" % filename) output = [] for sheet in sheets: m =", "XFD, ...) to a column number.\"\"\" if len(colcode) == 0: return 0 else:", "data from specified worksheet. try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid)", "errors import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified sheet in", "xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style", "of strings (cell values). \"\"\" lines = [] lastrownum = 0 for row", "string-value is found in the node's .t element. 
Returns 2d list of strings", "0 for row in sheetrows: rownum = int(row.r) if rownum > lastrownum +", "def read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified sheet in Excel 2007 (.xlsx)", "colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum > lastcolnum + 1:", "sheetid += 1 # sheets are numbered starting from 1 in xlsx files", "and parsing .xlsx format blueprints.\"\"\" import re import zipfile from xml2obj import xml2obj", "+ 1: # interpolate missing columns line.extend([''] * (colnum - lastcolnum - 1))", "= zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise", "files are actually zip files containing xml files. Returns a 2d list of", "looked-up from shared strings line.append(str( '' if c.v is None or c.v ==", "1: # interpolate missing columns line.extend([''] * (colnum - lastcolnum - 1)) lastcolnum", "= xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could not parse sharedStrings.xml of xlsx", "shared strings line.append(str( '' if c.v is None or c.v == 'd' else", "blueprints.\"\"\" import re import zipfile from xml2obj import xml2obj from errors import FileError", "Cell values are given as ordinal index # references into sharedStrings.xml:ssi.si elements, whose", "[] lastcolnum = 0 for c in cells: # get column number colcode", "1 else: sheetid += 1 # sheets are numbered starting from 1 in", "containing xml files. Returns a 2d list of cell values. 
\"\"\" if sheetid", "lastcolnum + 1: # interpolate missing columns line.extend([''] * (colnum - lastcolnum -", "to a column number.\"\"\" if len(colcode) == 0: return 0 else: return (ord(colcode[-1])", "FileError(\"Could not open '%s' for sheet listing.\" % filename) output = [] for", "1)) lastcolnum = colnum # add cell value looked-up from shared strings line.append(str(", "for sheet listing.\" % filename) output = [] for sheet in sheets: m", "strings to row values and return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings):", "from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata)", "from xml2obj import xml2obj from errors import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read", "lastcolnum = 0 for c in cells: # get column number colcode =", "in xlsx files # Get cell data from specified worksheet. try: zf =", "= zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could not open", "xml.si except: raise FileError(\"Could not parse sharedStrings.xml of xlsx file\") # Map strings", "1 # sheets are numbered starting from 1 in xlsx files # Get", "in sheets: m = re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could not read", "Returns a 2d list of cell values. \"\"\" if sheetid is None: sheetid", "= zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could", "import xml2obj from errors import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents of", "cells: # get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if", "Excel 2007 (.xlsx) workbook file. 
.xlsx files are actually zip files containing xml", "are given as ordinal index # references into sharedStrings.xml:ssi.si elements, whose string-value #", "sharedStrings.xml of xlsx file\") # Map strings to row values and return result", "= 0 for row in sheetrows: rownum = int(row.r) if rownum > lastrownum", "of sheets and their ids from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata", "* (colnum - lastcolnum - 1)) lastcolnum = colnum # add cell value", "1: # interpolate missing rows lines.extend([[]] * (rownum - lastrownum - 1)) lastrownum", "list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert", "lastcolnum - 1)) lastcolnum = colnum # add cell value looked-up from shared", "raise FileError(\"Could not parse sharedStrings.xml of xlsx file\") # Map strings to row", ")) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and their", "== 0: return 0 else: return (ord(colcode[-1]) - ord('A') + 1) + \\", "strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into lines; cell values are", "xml2obj from errors import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents of specified", "\"\"\"Get a list of sheets and their ids from xlsx file.\"\"\" try: zf", "values). \"\"\" lines = [] lastrownum = 0 for row in sheetrows: rownum", "[] for sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id) if not m: raise", "string-value # is found in the node's .t element. try: stringdata = zf.read('xl/sharedStrings.xml')", "ids (A, BB, XFD, ...) to a column number.\"\"\" if len(colcode) == 0:", "cell values. \"\"\" if sheetid is None: sheetid = 1 else: sheetid +=", "Read contents of specified sheet in Excel 2007 (.xlsx) workbook file. 
.xlsx files", "as ordinal index references into sharedStrings.xml:ssi.si elements, whose string-value is found in the", "Map strings to row values and return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows,", "sharedStrings.xml:ssi.si elements, whose string-value # is found in the node's .t element. try:", "for row in sheetrows: rownum = int(row.r) if rownum > lastrownum + 1:", "str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum > lastcolnum + 1: # interpolate missing", "and their ids from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml')", "c in cells: # get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum =", "their ids from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml", "zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could", "given as ordinal index # references into sharedStrings.xml:ssi.si elements, whose string-value # is", "raise FileError(\"Could not open '%s' for sheet listing.\" % filename) output = []", "sheets = xml.sheets.sheet except: raise FileError(\"Could not open '%s' for sheet listing.\" %", "xlsx files # Get cell data from specified worksheet. try: zf = zipfile.ZipFile(filename)", "xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could not open '%s' for sheet listing.\"", "= xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could not read xlsx file %s,", "sheetrows: rownum = int(row.r) if rownum > lastrownum + 1: # interpolate missing", "not m: raise FileError(\"Could not read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) -", ".t element. 
try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si except:", "sheets: m = re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could not read list", "FileError(\"Could not read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output", "if len(colcode) == 0: return 0 else: return (ord(colcode[-1]) - ord('A') + 1)", "missing rows lines.extend([[]] * (rownum - lastrownum - 1)) lastrownum = rownum cells", "values are given as ordinal index references into sharedStrings.xml:ssi.si elements, whose string-value is", "values and return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell", "specified sheet in Excel 2007 (.xlsx) workbook file. .xlsx files are actually zip", "whose string-value is found in the node's .t element. Returns 2d list of", "element. Returns 2d list of strings (cell values). \"\"\" lines = [] lastrownum", "if not m: raise FileError(\"Could not read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1))", "index references into sharedStrings.xml:ssi.si elements, whose string-value is found in the node's .t", "into lines; cell values are given as ordinal index references into sharedStrings.xml:ssi.si elements,", "zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row except:", "return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and their ids from", "Get shared strings xml. Cell values are given as ordinal index # references", "a 2d list of cell values. \"\"\" if sheetid is None: sheetid =", "in the node's .t element. try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings", "len(colcode) == 0: return 0 else: return (ord(colcode[-1]) - ord('A') + 1) +", "- lastrownum - 1)) lastrownum = rownum cells = row.c line = []", "element. 
try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si except: raise", "from specified worksheet. try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml", "strings (cell values). \"\"\" lines = [] lastrownum = 0 for row in", "'%s' for sheet listing.\" % filename) output = [] for sheet in sheets:", "if rownum > lastrownum + 1: # interpolate missing rows lines.extend([[]] * (rownum", "file %s, worksheet id %s\" % ( filename, sheetid - 1)) # Get", "xml = xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could not parse sharedStrings.xml of", "cell values into lines; cell values are given as ordinal index references into", "ids from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml =", "else strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets", "lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and their ids", "== 'd' else strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list", "xlsx file\") # Map strings to row values and return result return extract_xlsx_lines(rows,", "to row values and return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\"", "= xml.si except: raise FileError(\"Could not parse sharedStrings.xml of xlsx file\") # Map", "actually zip files containing xml files. Returns a 2d list of cell values.", "Get cell data from specified worksheet. try: zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml'", "0 for c in cells: # get column number colcode = re.match('^([A-Z]+)', str(c.r)).group(1)", "colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A, BB, XFD, ...) 
to a column", "value looked-up from shared strings line.append(str( '' if c.v is None or c.v", "% sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could not read", "xml. Cell values are given as ordinal index # references into sharedStrings.xml:ssi.si elements,", "colnum # add cell value looked-up from shared strings line.append(str( '' if c.v", "% filename) output = [] for sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id)", "xml2obj import xml2obj from errors import FileError def read_xlsx_file(filename, sheetid): \"\"\" Read contents", "found in the node's .t element. Returns 2d list of strings (cell values).", "1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A, BB, XFD,", "= zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row", "+ 1: # interpolate missing rows lines.extend([[]] * (rownum - lastrownum - 1))", "# sheets are numbered starting from 1 in xlsx files # Get cell", "FileError(\"Could not parse sharedStrings.xml of xlsx file\") # Map strings to row values", "xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could not parse sharedStrings.xml of xlsx file\")", "2d list of strings (cell values). \"\"\" lines = [] lastrownum = 0", "zf = zipfile.ZipFile(filename) sheetdata = zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows =", "not read list of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output def", "(.xlsx) workbook file. .xlsx files are actually zip files containing xml files. 
Returns", "number colcode = re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum > lastcolnum +", "= re.match('rId(\\d+)', sheet.r_id) if not m: raise FileError(\"Could not read list of xlsx's", "listing.\" % filename) output = [] for sheet in sheets: m = re.match('rId(\\d+)',", "and return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values", "%s\" % ( filename, sheetid - 1)) # Get shared strings xml. Cell", "xlsx file %s, worksheet id %s\" % ( filename, sheetid - 1)) #", "(A, BB, XFD, ...) to a column number.\"\"\" if len(colcode) == 0: return", "strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a list of sheets and", "c.v == 'd' else strings[int(c.v)].t )) lines.append(line) return lines def read_xlsx_sheet_names(filename): \"\"\"Get a", "of cell values. \"\"\" if sheetid is None: sheetid = 1 else: sheetid", "= 1 else: sheetid += 1 # sheets are numbered starting from 1", "sheetid): \"\"\" Read contents of specified sheet in Excel 2007 (.xlsx) workbook file.", "list of sheets and their ids from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename)", "try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings = xml.si except: raise FileError(\"Could", "zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets = xml.sheets.sheet except:", "# add cell value looked-up from shared strings line.append(str( '' if c.v is", "- 1)) lastcolnum = colnum # add cell value looked-up from shared strings", ".xlsx format blueprints.\"\"\" import re import zipfile from xml2obj import xml2obj from errors", "(colnum - lastcolnum - 1)) lastcolnum = colnum # add cell value looked-up", "colnum > lastcolnum + 1: # interpolate missing columns line.extend([''] * (colnum -", "as ordinal index # references into sharedStrings.xml:ssi.si elements, whose string-value # is 
found", "parse sharedStrings.xml of xlsx file\") # Map strings to row values and return", "values are given as ordinal index # references into sharedStrings.xml:ssi.si elements, whose string-value", "elements, whose string-value is found in the node's .t element. Returns 2d list", "strings line.append(str( '' if c.v is None or c.v == 'd' else strings[int(c.v)].t", "xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata = zf.read('xl/workbook.xml') xml = xml2obj(sheetsdata) sheets", "whose string-value # is found in the node's .t element. try: stringdata =", "\"\"\" lines = [] lastrownum = 0 for row in sheetrows: rownum =", "not open '%s' for sheet listing.\" % filename) output = [] for sheet", "is None: sheetid = 1 else: sheetid += 1 # sheets are numbered", "1 in xlsx files # Get cell data from specified worksheet. try: zf", "interpolate missing columns line.extend([''] * (colnum - lastcolnum - 1)) lastcolnum = colnum", "return result return extract_xlsx_lines(rows, strings) def extract_xlsx_lines(sheetrows, strings): \"\"\" Extract cell values into", "# interpolate missing columns line.extend([''] * (colnum - lastcolnum - 1)) lastcolnum =", "worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column", "% ( filename, sheetid - 1)) # Get shared strings xml. Cell values", "# is found in the node's .t element. try: stringdata = zf.read('xl/sharedStrings.xml') xml", "= xml2obj(sheetsdata) sheets = xml.sheets.sheet except: raise FileError(\"Could not open '%s' for sheet", "\"\"\"Reading and parsing .xlsx format blueprints.\"\"\" import re import zipfile from xml2obj import", ".t element. Returns 2d list of strings (cell values). 
\"\"\" lines = []", "zf.read('xl/worksheets/sheet%s.xml' % sheetid) xml = xml2obj(sheetdata) rows = xml.sheetData.row except: raise FileError(\"Could not", "lines.extend([[]] * (rownum - lastrownum - 1)) lastrownum = rownum cells = row.c", "= [] for sheet in sheets: m = re.match('rId(\\d+)', sheet.r_id) if not m:", "except: raise FileError(\"Could not read xlsx file %s, worksheet id %s\" % (", "* (rownum - lastrownum - 1)) lastrownum = rownum cells = row.c line", "- 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel style column ids (A, BB,", "lastrownum - 1)) lastrownum = rownum cells = row.c line = [] lastcolnum", "cells = row.c line = [] lastcolnum = 0 for c in cells:", "# references into sharedStrings.xml:ssi.si elements, whose string-value # is found in the node's", "of xlsx's worksheets.\") output.append((sheet.name, int(m.group(1)) - 1)) return output def colcode_to_colnum(colcode): \"\"\"Convert Excel", "= re.match('^([A-Z]+)', str(c.r)).group(1) colnum = colcode_to_colnum(colcode) if colnum > lastcolnum + 1: #", "node's .t element. Returns 2d list of strings (cell values). \"\"\" lines =", "<reponame>ML-SolInvictus/modified-MWDF \"\"\"Reading and parsing .xlsx format blueprints.\"\"\" import re import zipfile from xml2obj", "not parse sharedStrings.xml of xlsx file\") # Map strings to row values and", "2007 (.xlsx) workbook file. .xlsx files are actually zip files containing xml files.", "lastrownum = 0 for row in sheetrows: rownum = int(row.r) if rownum >", "(rownum - lastrownum - 1)) lastrownum = rownum cells = row.c line =", "sheets and their ids from xlsx file.\"\"\" try: zf = zipfile.ZipFile(filename) sheetsdata =", "# Map strings to row values and return result return extract_xlsx_lines(rows, strings) def", "0 else: return (ord(colcode[-1]) - ord('A') + 1) + \\ (26 * colcode_to_colnum(colcode[:-1]))", "sheet.r_id) if not m: raise FileError(\"Could not read list of xlsx's worksheets.\") output.append((sheet.name,", "xml files. 
Returns a 2d list of cell values. \"\"\" if sheetid is", "= xml.sheetData.row except: raise FileError(\"Could not read xlsx file %s, worksheet id %s\"", "files containing xml files. Returns a 2d list of cell values. \"\"\" if", "2d list of cell values. \"\"\" if sheetid is None: sheetid = 1", "column ids (A, BB, XFD, ...) to a column number.\"\"\" if len(colcode) ==", "are numbered starting from 1 in xlsx files # Get cell data from", "the node's .t element. try: stringdata = zf.read('xl/sharedStrings.xml') xml = xml2obj(stringdata) strings =" ]
[ "output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1, vlag=1)", ") ''' ''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True,", "elif i < j: matm[i, j] = mas[i, j-1] else: matm[i, j] =", "rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output + '.npy', rh) #print(ii,R)", "action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) * 1 + 5 #h1,h2=5,5 #print(action)", "dism = np.zeros((n, n)) for i in range(n): for j in range(n): if", "mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from", "[] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output", "thr=1.0): return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs =", "= make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL", "xx = xx + 1.0 return w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n,", "+ 1.0 return w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n, n)) for i", "i in range(n): for j in range(n): if i > j: matm[i, j]", "np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) * 1 + 5 #h1,h2=5,5", "from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path from", "observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) **", "+ (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) 
#rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5]))", "| (np.abs(y-p)<l): xx = xx + 1.0 return w*xx*1.0 def matdis(n, obs_x): dism", "return matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum ( ((dism <", "vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = []", "i > j: matm[i, j] = mas[i,j] elif i < j: matm[i, j]", ": 1 }) module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env,", "(uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy", "kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps'", "pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1,", "H3 = main(output='3', speed=3) #S4, H4 = main(output='4', speed=4) #S1, H1 = main(output='1',", "from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions", "= False return matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum (", "5, 5]]) action = {'action_movement': sampleaction} obs, rew, down, _ = env_viewer.step(action) observation", "if vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory =", "obs_x[j, :2])**2)) return dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i in", "rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) 
a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn() else:", "#print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1]))", "True n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders':", "Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[]", "#main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21',", "mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) ''' ''' Hider", "j: matm[i, j] = mas[i,j] elif i < j: matm[i, j] = mas[i,", "def matdis(n, obs_x): dism = np.zeros((n, n)) for i in range(n): for j", "(np.abs(y-p)<l): xx = xx + 1.0 return w*xx*1.0 def matdis(n, obs_x): dism =", "rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output + '.npy', rh) #print(ii,R) return", "H1 = main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close()", "output_graph=True, ) ''' ''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name #", "j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism def matmas(n,mas): matm =", "''' ''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # 
output_graph=True, )", "#h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5],", "#print(ii,R) return Seeker,Hider if __name__ == '__main__': #S2, H2 = main(output='2', speed=2) #S3,", "#print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5], [s1,", "obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) **", "mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle)", "matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr)", "** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider,", "'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' : 1 })", "import PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0", "i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism", "env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy(", "PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | 
(np.abs(x-p)<l): xx", "dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i in range(n): for j", "module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env)", "n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4,", "show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]:", "np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] -", "speed=4) #S1, H1 = main(output='1', speed=1) S1, H1 = main(output='1', speed=4) #import pickle", "0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[]", "import Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import gym #from RL_brain_2 import", "reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) ''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider =", "import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments", "args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[]", "h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) * 1 + 5 #h1,h2=5,5 #print(action) if", "2 + (observation_[5] - observation_[1]) ** 2)*5) 
#else: # rew= 5.0-( np.sqrt((observation_[4] -", "game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:],", "n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01,", "# output_graph=True, ) ''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8],", "xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx + 1.0 elif (np.abs(y-0)<l) |", "obs_x): dism = np.zeros((n, n)) for i in range(n): for j in range(n):", "n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' : 1 }) module = run_path(env_name)", "if i > j: matm[i, j] = mas[i,j] elif i < j: matm[i,", "matm[i, j] = mas[i, j-1] else: matm[i, j] = False return matm def", "return w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n, n)) for i in range(n):", "= {} env_name = 'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1 n_hiders=1 episode=350", "#print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) * 1 +", "MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env,", "module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env =", "n)) for i in range(n): for j in range(n): if i != j:", "env_name = 'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({", "PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient( n_actions=9,", "'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' : 1 }) module = 
run_path(env_name) make_env", "= PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient(", "#pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1)", "#np.save('RMS' + output + '.npy', rh) #print(ii,R) return Seeker,Hider if __name__ == '__main__':", "rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory)", "Seeker.action_memory = [] Hider.reward_memory = [] Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh)", "env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not", "import abspath, dirname, join from gym.spaces import Tuple from mujoco_py import const, MjViewer", "}) module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env", "+ 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx + 1.0 return w*xx*1.0", "( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name = 'mae_envs/envs/mybase.py'", "mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path from mae_envs.modules.util", "matdis(n, obs_x): dism = np.zeros((n, n)) for i in range(n): for j in", "make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] 
def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL =", "extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None,", "''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10)", "= open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close()", "def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30", "2 + (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew)", "JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import", "= np.zeros((n, n)) for i in range(n): for j in range(n): if i", "+ '.npy', rh) #print(ii,R) return Seeker,Hider if __name__ == '__main__': #S2, H2 =", "output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, )", "5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) #else:", "RL = mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) '''", "mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL", "< np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name = 
'mae_envs/envs/mybase.py' display =", "EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import", "s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac) obs_,", "observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) **", "1 }) module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs)", "observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:],", "[] Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs)", "#import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb')", "speed=1) S1, H1 = main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2,", "EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9, n_features=8, #n_features=4,", "action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 =", "click import numpy as np from os.path import abspath, dirname, join from gym.spaces", "pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2,", "(action_Hider%3 - 1) * 1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) 
s1=(action_Seeker//3-1)*speed+5", "#pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41',", "return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs = {}", "output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1, vlag=1) #test() np.save('SGLDS.npy', rslist) np.save('SGLDH.npy', rhlist)", "0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = [] Hider.action_memory =", "layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset()", "plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx + 1.0", "= [] Hider.reward_memory = [] Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs)", "= mas[i,j] elif i < j: matm[i, j] = mas[i, j-1] else: matm[i,", "matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx", "** 2 + (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2", "- observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) #else: # rew=", "n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) ''' ''' Hider =", "(np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx", "Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5,", "main(output='4', speed=4) #S1, H1 = main(output='1', speed=1) S1, H1 = main(output='1', speed=4) #import", "= mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) ''' '''", "action_Hider, rrew) 
#print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs)", "np.empty([n,n],dtype= bool) for i in range(n): for j in range(n): if i >", "matm, thr=1.0): return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs", "rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[]", "h2 = (action_Hider%3 - 1) * 1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95:", "if __name__ == '__main__': #S2, H2 = main(output='2', speed=2) #S3, H3 = main(output='3',", "h2, 5], [s1, s2, 5]])} #print(ac) obs_, reward, done, info = env_viewer.step(ac, show=False)", "range(n): for j in range(n): if i > j: matm[i, j] = mas[i,j]", "observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag == 0:", "(np.abs(x-p)<l): xx = xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx", "= run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass)", "# if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 +", "(observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1]))", "5.0-( np.sqrt((observation_[4] - observation_[0]) ** 
2 + (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4]", "# output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True,", "s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac) obs_, reward,", "= Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3", "PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii", "2 + (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 +", "env = make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): '''", "observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:],", "pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1,", "{} env_name = 'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000", "5], [s1, s2, 5]])} #print(ac) obs_, reward, done, info = env_viewer.step(ac, show=False) observation_", "#print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) 
rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if", "(matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name = 'mae_envs/envs/mybase.py' display = True n_agents= 2", "for ii in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5, 5, 5]])", "= xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx + 1.0", "center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import", "open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1,", "hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1, vlag=1) #test() np.save('SGLDS.npy', rslist) np.save('SGLDH.npy',", "for i in range(n): for j in range(n): if i > j: matm[i,", "!= j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism def", "output + '.npy', rh) #print(ii,R) return Seeker,Hider if __name__ == '__main__': #S2, H2", "if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5], [s1, s2,", "os.path import abspath, dirname, join from gym.spaces import Tuple from mujoco_py import const,", "Tuple from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import", "np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker", "import logging import click import numpy as np from 
os.path import abspath, dirname,", "python3 import logging import click import numpy as np from os.path import abspath,", "RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0):", "< j: matm[i, j] = mas[i, j-1] else: matm[i, j] = False return", "- observation_[1]) ** 2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2", "> j: matm[i, j] = mas[i,j] elif i < j: matm[i, j] =", "observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: #", "speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl',", "done, info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]])", "np.pi, #'n_substeps' : 1 }) module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining", "from gym.spaces import Tuple from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer", "display = True n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers':", "= PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for", "= EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9, n_features=8,", "i < j: matm[i, j] = mas[i, j-1] else: matm[i, j] = False", 
"const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import", "mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from", "Hider.reward_memory = [] Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output +", "** 2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5]", "= np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew=", "main(output='3', speed=3) #S4, H4 = main(output='4', speed=4) #S1, H1 = main(output='1', speed=1) S1,", "if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) * 1 + 5", "xx = xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx +", "#from RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt def", "matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i in range(n): for j in range(n):", "{'action_movement': sampleaction} obs, rew, down, _ = env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1],", "range(n): if i > j: matm[i, j] = mas[i,j] elif i < j:", "import run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete,", "rh) #print(ii,R) return Seeker,Hider if __name__ == 
'__main__': #S2, H2 = main(output='2', speed=2)", "#print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn()", "= main(output='1', speed=1) S1, H1 = main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl',", "as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx +", "in range(n): if i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j,", "n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) ''' ''' Hider = PolicyGradient(", "i in range(n): for j in range(n): if i != j: dism[i, j]", "n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders,", "if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else:", "join from gym.spaces import Tuple from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import", "matm[i, j] = False return matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum(", "mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from", "H1 = main(output='1', speed=1) S1, H1 = main(output='1', speed=4) #import pickle #pickle_file =", "w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n, n)) for i in range(n): for", "[5, 5, 5]]) action = {'action_movement': sampleaction} obs, rew, down, _ = env_viewer.step(action)", "= [] Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy',", "= 'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1 n_hiders=1 
episode=350 n_episode=10000 kwargs.update({ 'n_agents':", "= open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3,", "(observation_[5] - observation_[1]) ** 2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) **", "import PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l):", "learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) ''' ''' Hider = PolicyGradient( n_actions=9, n_features=4,", "= (action_Hider%3 - 1) * 1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9)", "np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool)", "= mas[i, j-1] else: matm[i, j] = False return matm def game_rew(n,n_seekers, dism,", "in range(n): if i > j: matm[i, j] = mas[i,j] elif i <", "= True n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers,", "Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory))", "5, 5], [5, 5, 5]]) action = {'action_movement': sampleaction} obs, rew, down, _", "'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' : 1 }) module =", "- observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1])", "a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output + '.npy', rh)", "run_path(env_name) make_env = 
module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset()", "False return matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum ( ((dism", "Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[]", "1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement':", "elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx + 1.0 return w*xx*1.0 def matdis(n,", "from runpy import run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import", "[8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii in", "observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in", "load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path", "main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 #", "s2, 5]])} #print(ac) obs_, reward, done, info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0],", "print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) 
a.append(Seeker.reward_memory) if vlag == 0: Hider.learn()", "range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2", "1) * 1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac", "#Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_", "import numpy as np from os.path import abspath, dirname, join from gym.spaces import", "__name__ == '__main__': #S2, H2 = main(output='2', speed=2) #S3, H3 = main(output='3', speed=3)", "j: matm[i, j] = mas[i, j-1] else: matm[i, j] = False return matm", "logging import click import numpy as np from os.path import abspath, dirname, join", "+ (observation_[5] - observation_[1]) ** 2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0])", "_ = env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]])", "- obs_x[j, :2])**2)) return dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i", "2 * np.pi, #'n_substeps' : 1 }) module = run_path(env_name) make_env = module[\"make_env\"]", "a=[] rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction 
= np.array([[5, 5, 5],", "np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac) obs_, reward, done, info = env_viewer.step(ac,", "extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path from mae_envs.modules.util import (uniform_placement,", "np.array([[5, 5, 5], [5, 5, 5]]) action = {'action_movement': sampleaction} obs, rew, down,", "observation_[1]) ** 2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 +", "#S3, H3 = main(output='3', speed=3) #S4, H4 = main(output='4', speed=4) #S1, H1 =", "sampleaction = np.array([[5, 5, 5], [5, 5, 5]]) action = {'action_movement': sampleaction} obs,", "= main(output='2', speed=2) #S3, H3 = main(output='3', speed=3) #S4, H4 = main(output='4', speed=4)", "import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy", ") Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) '''", "== 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = [] Hider.action_memory", "obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker =", "# rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) **", "#S4, H4 = main(output='4', speed=4) #S1, H1 = main(output='1', speed=1) S1, H1 =", "j in range(n): if i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] -", "i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = 
Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9)", "** 2 + (observation_[5] - observation_[1]) ** 2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4]", "j in range(n): if i > j: matm[i, j] = mas[i,j] elif i", "= np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation)", "action_Hider = Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1)", "xx + 1.0 return w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n, n)) for", "from os.path import abspath, dirname, join from gym.spaces import Tuple from mujoco_py import", "obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4]", "learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) ''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider", "env_viewer = EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9,", "= main(output='4', speed=4) #S1, H1 = main(output='1', speed=1) S1, H1 = main(output='1', speed=4)", "env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], 
obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i", "[] Hider.reward_memory = [] Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output", "n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2", "= {'action_movement': sampleaction} obs, rew, down, _ = env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0],", "matm[i, j] = mas[i,j] elif i < j: matm[i, j] = mas[i, j-1]", "* 1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac =", "np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output + '.npy',", "obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1])", "return dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i in range(n): for", "#a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = []", "H4 = main(output='4', speed=4) #S1, H1 = main(output='1', speed=1) S1, H1 = main(output='1',", "'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4,", "RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) |", "units=30 # output_graph=True, ) ''' ''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99,", "output='output',speed=1,vlag=0): ''' RL = 
mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True,", "dism, matm, thr=1.0): return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0))", "import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL import", "action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac)", "+ '.npy', rs) #np.save('RMS' + output + '.npy', rh) #print(ii,R) return Seeker,Hider if", "mas[i,j] elif i < j: matm[i, j] = mas[i, j-1] else: matm[i, j]", "#Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201):", "obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2", "range(n): for j in range(n): if i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i,", "** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5-3", "action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii)", "from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete #from", "** 2 + (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) 
#print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker,", "#pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1,", "layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction", "edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx + 1.0 elif (np.abs(y-0)<l)", "run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete", "Hider=hd a=[] rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5,", "reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name", "S1, H1 = main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file)", "rew, down, _ = env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation =", "Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import gym #from RL_brain_2 import PolicyGradient", "(np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx + 1.0 return w*xx*1.0 def matdis(n, obs_x):", "#rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) 
#print(50-edge_punish(observation_[0],observation_[1])) observation", "'.npy', rh) #print(ii,R) return Seeker,Hider if __name__ == '__main__': #S2, H2 = main(output='2',", "1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx + 1.0 return w*xx*1.0 def", "+rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0]))", "= np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode):", "'cone_angle': 2 * np.pi, #'n_substeps' : 1 }) module = run_path(env_name) make_env =", "gym.spaces import Tuple from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from", "simphas.MRL import mpolicy #import gym #from RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent", "in range(n): for j in range(n): if i != j: dism[i, j] =", "#S2, H2 = main(output='2', speed=2) #S3, H3 = main(output='3', speed=3) #S4, H4 =", "if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l):", "########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output +", "main(output='2', speed=2) #S3, H3 = main(output='3', speed=3) #S4, H4 = main(output='4', speed=4) #S1,", "in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) 
#print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5", "def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i in range(n): for j in", "* np.pi, #'n_substeps' : 1 }) module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass,", "n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) ''' if vlag == 0:", "obs_, reward, done, info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1],", "import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path from mae_envs.modules.util import", "= env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for", "#!/usr/bin/env python3 import logging import click import numpy as np from os.path import", "from gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import gym #from", "- 1) * 1 + 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5", "numpy as np from os.path import abspath, dirname, join from gym.spaces import Tuple", "H2 = main(output='2', speed=2) #S3, H3 = main(output='3', speed=3) #S4, H4 = main(output='4',", "gym #from RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt", 
"mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from", "rs) #np.save('RMS' + output + '.npy', rh) #print(ii,R) return Seeker,Hider if __name__ ==", "reward_decay=0.9, units=30 # output_graph=True, ) ''' ''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01,", "import Tuple from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent", "''' RL = mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, )", "np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name = 'mae_envs/envs/mybase.py' display = True", "if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5]", "speed=3) #S4, H4 = main(output='4', speed=4) #S1, H1 = main(output='1', speed=1) S1, H1", "ac = {'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac) obs_, reward, done,", "- observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0])", "import click import numpy as np from os.path import abspath, dirname, join from", "vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk", "# output_graph=True, ) ''' ''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name", "rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5)", "vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1, vlag=1) #test() np.save('SGLDS.npy',", "= extract_matching_arguments(make_env, kwargs) env = 
make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] def", "mpolicy #import gym #from RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot", "xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx = xx + 1.0 return", "= np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism def matmas(n,mas): matm = np.empty([n,n],dtype=", "n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle':", "| (np.abs(x-p)<l): xx = xx + 1.0 elif (np.abs(y-0)<l) | (np.abs(y-p)<l): xx =", "episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 *", "PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if", "Discrete #from simphas.MRL import mpolicy #import gym #from RL_brain_2 import PolicyGradient from RL_brain_3", "Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) * 1", "#main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1, vlag=1) #test() np.save('SGLDS.npy', rslist)", "np from os.path import abspath, dirname, join from gym.spaces import Tuple from mujoco_py", "else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction =", "(np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name =", "else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = [] Hider.action_memory = [] ########## np.save(output+'.npy',", "make_env = module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset() env_viewer", 
"open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31',", "5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2,", "5]]) action = {'action_movement': sampleaction} obs, rew, down, _ = env_viewer.step(action) observation =", "#pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file = open('objH2.pkl', 'wb') #pickle.dump(H2, pickle_file)", "import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx =", "bool) for i in range(n): for j in range(n): if i > j:", "rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01, reward_decay=0.9,", "output_graph=True, ) ''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9,", "from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments", "j] = mas[i,j] elif i < j: matm[i, j] = mas[i, j-1] else:", "= PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) ''' if vlag", "np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker)", "np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name", "((dism < np.ones((n,n))*thr) & (matm))[-n_seekers:], 
axis=0)>0)) kwargs = {} env_name = 'mae_envs/envs/mybase.py' display", "kwargs) env = make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0):", "reward, done, info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4],", "as np from os.path import abspath, dirname, join from gym.spaces import Tuple from", "import const, MjViewer from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs", "dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism def matmas(n,mas): matm", "obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker = Seeker.choose_action(observation)", "return Seeker,Hider if __name__ == '__main__': #S2, H2 = main(output='2', speed=2) #S3, H3", "obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker) if", "for i in range(n): for j in range(n): if i != j: dism[i,", "j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return dism def matmas(n,mas):", "else: matm[i, j] = False return matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return", "= [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' +", "np.array([obs_['observation_self'][0][0], 
obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-(", "np.zeros((n, n)) for i in range(n): for j in range(n): if i !=", "speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1, vlag=1) #test()", "learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99,", "n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' : 1 }) module", "obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0])", "= observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag ==", "#S1, H1 = main(output='1', speed=1) S1, H1 = main(output='1', speed=4) #import pickle #pickle_file", "= main(output='3', speed=3) #S4, H4 = main(output='4', speed=4) #S1, H1 = main(output='1', speed=1)", "axis=0)>0)) kwargs = {} env_name = 'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1", "rs=[] rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5,", "''' Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker", "main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file =", "n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) 
''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10)", "rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1])", "#print(ac) obs_, reward, done, info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0],", "- observation_[1]) ** 2)*5-3 #print(observation_) #rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew)", "(observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] -", "info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) #", "Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = [] Hider.action_memory = []", "#pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2,", "2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5-3 #print(observation_)", "import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types", "down, _ = env_viewer.step(action) observation = 
np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0],", "= module[\"make_env\"] args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset() env_viewer =", ") ''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20,", "obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider", "observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag", "np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])}", "obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] -", "'__main__': #S2, H2 = main(output='2', speed=2) #S3, H3 = main(output='3', speed=3) #S4, H4", "& (matm))[-n_seekers:], axis=0)>0)) kwargs = {} env_name = 'mae_envs/envs/mybase.py' display = True n_agents=", "import mpolicy #import gym #from RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent import", "'wb') #pickle.dump(H2, pickle_file) 
#pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1,", "#import gym #from RL_brain_2 import PolicyGradient from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as", "import parse_arguments from runpy import run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from", "policy_name=Spolicy_name # output_graph=True, ) ''' if vlag == 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001,", "not obs_['mask_aa_obs'][1][0]: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] -", "for j in range(n): if i > j: matm[i, j] = mas[i,j] elif", "#observation = np.array([obs['observation_self'][1][0], obs['observation_self'][1][1],obs['observation_self'][1][4],obs['observation_self'][1][5]]) for i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider =", "2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0,", "{'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac) obs_, reward, done, info =", "if i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2)) return", "2)*5) #else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] -", "from mae_envs.viewer.env_viewer import EnvViewer from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env", "parse_arguments from runpy import run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces", "main(output='1', speed=1) S1, H1 = main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb')", "= Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: 
action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 - 1) *", "= env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4], obs_['observation_self'][0][5],obs_['observation_self'][1][0], obs_['observation_self'][1][1], obs_['observation_self'][1][4], obs_['observation_self'][1][5]]) # if", "PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) ''' if vlag ==", "sampleaction} obs, rew, down, _ = env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]])", "== '__main__': #S2, H2 = main(output='2', speed=2) #S3, H3 = main(output='3', speed=3) #S4,", "runpy import run_path from mae_envs.modules.util import (uniform_placement, center_placement, uniform_placement_middle) from gym.spaces import Box,", "range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5, 5, 5]]) action = {'action_movement':", "rhlist=[] rslist=[] def main(sk=None,hd=None, output='output',speed=1,vlag=0): ''' RL = mpolicy( n_actions=9, n_features=8, #n_features=4, learning_rate=0.01,", "rh=[] for ii in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5, 5,", "Hider.action_memory = [] ########## np.save(output+'.npy', a) rhlist.append(rh) rslist.append(rs) #np.save('SGLDS'+output + '.npy', rs) #np.save('RMS'", "= np.array([[5, 5, 5], [5, 5, 5]]) action = {'action_movement': sampleaction} obs, rew,", "abspath, dirname, join from gym.spaces import Tuple from mujoco_py import const, MjViewer from", "in range(n): for j in range(n): if i > j: matm[i, j] =", "2)*5-3 #print(observation_) 
#rrew=3-edge_punish(observation_[0],observation_[1]) #print(observation_[0],observation_[1]) #Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew)", "policy_name=Hpolicy_name # output_graph=True, ) Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name #", "j] = False return matm def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum", "j] = mas[i, j-1] else: matm[i, j] = False return matm def game_rew(n,n_seekers,", "Hider = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Hpolicy_name # output_graph=True, ) Seeker =", "= main(output='1', speed=4) #import pickle #pickle_file = open('objS2.pkl', 'wb') #pickle.dump(S2, pickle_file) #pickle_file.close() #pickle_file", "#pickle.dump(H2, pickle_file) #pickle_file.close() #main(sk=S1, hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1)", "n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd a=[] rs=[] rh=[] for ii in range(n_episode):", "Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95: action_Hider=np.random.randint(9) h1=(action_Hider//3-1)*1+5 h2 = (action_Hider%3 -", "= np.empty([n,n],dtype= bool) for i in range(n): for j in range(n): if i", "'.npy', rs) #np.save('RMS' + output + '.npy', rh) #print(ii,R) return Seeker,Hider if __name__", "def game_rew(n,n_seekers, dism, matm, thr=1.0): return np.sum( (np.sum ( ((dism < np.ones((n,n))*thr) &", "if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory", 
"Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory))", "def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l) | (np.abs(x-p)<l): xx = xx + 1.0 elif", "uniform_placement_middle) from gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import gym", "= xx + 1.0 return w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n, n))", "+ (observation_[5] - observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5]", "matm = np.empty([n,n],dtype= bool) for i in range(n): for j in range(n): if", "gym.spaces import Box, MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import gym #from RL_brain_2", "#'n_substeps' : 1 }) module = run_path(env_name) make_env = module[\"make_env\"] args_to_pass, args_remaining =", "np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) #else: #", "Seeker,Hider if __name__ == '__main__': #S2, H2 = main(output='2', speed=2) #S3, H3 =", "#print(50-edge_punish(observation_[0],observation_[1])) observation = observation_ print(ii) #print(np.mean(Seeker.reward_memory[0])) rs.append(np.mean(Seeker.reward_memory)) rh.append(np.mean(Hider.reward_memory)) if ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if", "observation_[0]) ** 2 + (observation_[5] - observation_[1]) ** 2)*5) #else: # rew= 5.0-(", "from mae_envs.wrappers.multi_agent import JoinMultiAgentActions from mujoco_worldgen.util.envs import examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments", "Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = [] 
Hider.action_memory = [] ########## np.save(output+'.npy', a)", "from RL_brain_3 import PolicyGradientAgent import matplotlib.pyplot as plt def edge_punish(x,y,l=0.2,p=3.53,w=0): xx=0.0 if (np.abs(x-0)<l)", "[s1, s2, 5]])} #print(ac) obs_, reward, done, info = env_viewer.step(ac, show=False) observation_ =", "+ output + '.npy', rh) #print(ii,R) return Seeker,Hider if __name__ == '__main__': #S2,", "j-1] else: matm[i, j] = False return matm def game_rew(n,n_seekers, dism, matm, thr=1.0):", "in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5, 5, 5]]) action =", "<filename>simphas/play.py #!/usr/bin/env python3 import logging import click import numpy as np from os.path", "for i in range(episode): action_Seeker = Seeker.choose_action(observation) action_Hider = Hider.choose_action(observation) #print(action_Seeker) if np.random.rand()>0.95:", "'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' :", "+ 5 #h1,h2=5,5 #print(action) if np.random.rand()>0.95: action_Seeker=np.random.randint(9) s1=(action_Seeker//3-1)*speed+5 s2=(action_Seeker%3-1)*speed+5 ac = {'action_movement': np.array([[h1,", "examine_env, load_env from mujoco_worldgen.util.types import extract_matching_arguments from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import", "Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory = [] Hider.action_memory = [] ##########", "speed=2) #S3, H3 = main(output='3', speed=3) #S4, H4 = main(output='4', speed=4) #S1, H1", "a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory = [] Hider.reward_memory", "5]])} #print(ac) obs_, reward, done, info = env_viewer.step(ac, show=False) observation_ = np.array([obs_['observation_self'][0][0], obs_['observation_self'][0][1],obs_['observation_self'][0][4],", "#print(observation_[0],observation_[1]) 
#Seeker.store_transition(observation[-4:], action_Seeker, +rew) Seeker.store_rewards(rew-edge_punish(observation_[4],observation_[5])) Hider.store_rewards(-rew-edge_punish(observation_[0],observation_[1])) #print(-rrew) #Hider.store_transition(observation[4:], action_Hider, rrew) #print(50-edge_punish(observation_[0],observation_[1])) observation =", "dirname, join from gym.spaces import Tuple from mujoco_py import const, MjViewer from mae_envs.viewer.env_viewer", ":2])**2)) return dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for i in range(n):", "kwargs = {} env_name = 'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1 n_hiders=1", "- observation_[1]) ** 2)*5) rew=1.0/np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1])", "obs, rew, down, _ = env_viewer.step(action) observation = np.array([obs['observation_self'][0][0], obs['observation_self'][0][1],obs['observation_self'][0][4],obs['observation_self'][0][5],obs['observation_self'][1][0], obs['observation_self'][1][1], obs['observation_self'][1][4],obs['observation_self'][1][5]]) #observation", "hd=H4, output='41', speed=1, vlag=1) #main(sk=S1, hd=H3, output='31', speed=1, vlag=1) #main(sk=S1, hd=H2, output='21', speed=1,", "#n_features=4, learning_rate=0.01, reward_decay=0.9, units=30 # output_graph=True, ) ''' ''' Hider = PolicyGradient( n_actions=9,", "== 0: Seeker=PolicyGradientAgent(0.001,[8],n_actions=9,layer1_size=20,layer2_size=10) Hider = PolicyGradientAgent(0.001, [8], n_actions=9, layer1_size=20, layer2_size=10) else: Seeker=sk Hider=hd", "for j in range(n): if i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2]", "ii in range(n_episode): env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5, 5, 5]]) action", "MultiDiscrete, Discrete #from simphas.MRL import mpolicy #import gym #from RL_brain_2 import PolicyGradient from", "5], [5, 5, 5]]) action = {'action_movement': sampleaction} obs, rew, down, _ =", "n_agents, 'n_seekers': 
n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi, #'n_substeps' : 1", "range(n): if i != j: dism[i, j] = np.sqrt(np.sum((obs_x[i, :2] - obs_x[j, :2])**2))", "1.0 return w*xx*1.0 def matdis(n, obs_x): dism = np.zeros((n, n)) for i in", "'mae_envs/envs/mybase.py' display = True n_agents= 2 n_seekers=1 n_hiders=1 episode=350 n_episode=10000 kwargs.update({ 'n_agents': n_agents,", "#else: # rew= 5.0-( np.sqrt((observation_[4] - observation_[0]) ** 2 + (observation_[5] - observation_[1])", "n_episode=10000 kwargs.update({ 'n_agents': n_agents, 'n_seekers': n_seekers, 'n_hiders': n_hiders, 'n_boxes':0, 'cone_angle': 2 * np.pi,", ":2] - obs_x[j, :2])**2)) return dism def matmas(n,mas): matm = np.empty([n,n],dtype= bool) for", "ii>(n_episode-201): #a.append(Hider.ep_rs) a.append(Seeker.reward_memory) if vlag == 0: Hider.learn() Seeker.learn() else: Seeker.reward_memory=[] Seeker.action_memory =", "env_viewer.env_reset() sampleaction = np.array([[5, 5, 5], [5, 5, 5]]) action = {'action_movement': sampleaction}", "= {'action_movement': np.array([[h1, h2, 5], [s1, s2, 5]])} #print(ac) obs_, reward, done, info", "Seeker = PolicyGradient( n_actions=9, n_features=4, learning_rate=0.01, reward_decay=0.99, policy_name=Spolicy_name # output_graph=True, ) ''' if", "from mujoco_worldgen.util.parse_arguments import parse_arguments from runpy import run_path from mae_envs.modules.util import (uniform_placement, center_placement,", "#np.save('SGLDS'+output + '.npy', rs) #np.save('RMS' + output + '.npy', rh) #print(ii,R) return Seeker,Hider", "args_to_pass, args_remaining = extract_matching_arguments(make_env, kwargs) env = make_env(**args_to_pass) env.reset() env_viewer = EnvViewer(env) rhlist=[]", "mas[i, j-1] else: matm[i, j] = False return matm def game_rew(n,n_seekers, dism, matm,", "#from simphas.MRL import mpolicy #import gym #from RL_brain_2 import PolicyGradient from RL_brain_3 import", "action = {'action_movement': sampleaction} obs, rew, down, _ = 
env_viewer.step(action) observation = np.array([obs['observation_self'][0][0]," ]
[ "tablespoons freshly squeezed lemon juice\", \"1 tablespoon soy sauce or tamari\", \"2 teaspoons", "2 teaspoons Cognac or brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\",", "salt, pepper, and additional cognac, soy sauce, or lemon juice, if it needs", "<NAME> Lentils double in volume when cooked, so 1 cup (160g) of dried", "mushroom mixture and process until completely smooth. Taste, and add salt, pepper, and", "8 minutes. Remove from heat.\\nIn a food processor, combine the cooked lentils, nuts,", "Scrape in the cooked mushroom mixture and process until completely smooth. Taste, and", "to 6 minutes. Add the mushrooms and cook, stirring occasionally, until they’re soft", "additional cognac, soy sauce, or lemon juice, if it needs balancing.\\nScrape the pâté", "if it needs balancing.\\nScrape the pâté into a small serving bowl and refrigerate", "\"2 teaspoons fresh thyme (minced)\", \"2 tablespoons fresh sage or flat leaf parsley\",", "sugar, and cayenne. Scrape in the cooked mushroom mixture and process until completely", "of 1/4 cup (60ml) of olive oil. The cognac or brandy is optional,", "lemon juice, if it needs balancing.\\nScrape the pâté into a small serving bowl", "cook, stirring frequently, until the onions become translucent, 5 to 6 minutes. Add", "Clean, Très Chic by <NAME> Lentils double in volume when cooked, so 1", "combine the cooked lentils, nuts, lemon juice, soy sauce, rosemary, thyme, sage or", "garlic, and cook, stirring frequently, until the onions become translucent, 5 to 6", "correct amount. They usually take about 20 to 30 minutes to cook until", "teaspoons minced fresh rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2 tablespoons fresh sage", "minced)\", \"2 cups (400g) cooked green lentils\", \"1 cup (140g) toasted walnuts or", "mushrooms clean. Slice off a bit of the stem end (the funky parts)", "a skillet or wide saucepan. 
Add the onions and garlic, and cook, stirring", "DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\",", "from Très Green, Très Clean, Très Chic by <NAME> Lentils double in volume", "sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and freshly ground black pepper\", ], self.harvester_class.ingredients(),", "pecans\", \"2 tablespoons freshly squeezed lemon juice\", \"1 tablespoon soy sauce or tamari\",", "walnuts or pecans\", \"2 tablespoons freshly squeezed lemon juice\", \"1 tablespoon soy sauce", "medium-sized (100g, about 1 cup) button mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons", "the pâté into a small serving bowl and refrigerate for a few hours,", "\"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about 1 cup)", "to cook until soft, but check the directions on the package for specific", "def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très Clean, Très Chic by <NAME>", "serving bowl and refrigerate for a few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def", "juice, if it needs balancing.\\nScrape the pâté into a small serving bowl and", "oil. The cognac or brandy is optional, but it does give the faux", "them. Heat the olive oil and butter in a skillet or wide saucepan.", "minced fresh rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2 tablespoons fresh sage or", "and cook, stirring frequently, until the onions become translucent, 5 to 6 minutes.", "the cooked lentils, nuts, lemon juice, soy sauce, rosemary, thyme, sage or parsley,", "and process until completely smooth. 
Taste, and add salt, pepper, and additional cognac,", "scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self):", "heat.\\nIn a food processor, combine the cooked lentils, nuts, lemon juice, soy sauce,", "\"2 teaspoons minced fresh rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2 tablespoons fresh", "\"optional: 2 teaspoons Cognac or brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne", "from heat.\\nIn a food processor, combine the cooked lentils, nuts, lemon juice, soy", "in the cooked mushroom mixture and process until completely smooth. Taste, and add", "or lemon juice, if it needs balancing.\\nScrape the pâté into a small serving", "the mushrooms clean. Slice off a bit of the stem end (the funky", "For a vegan version, replace the butter with the same quantity of olive", "teaspoon cayenne pepper\", \"salt and freshly ground black pepper\", ], self.harvester_class.ingredients(), ) def", "of dried lentils will yield close to the correct amount. They usually take", "mushrooms and cook, stirring occasionally, until they’re soft and cooked through, another 5", "hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très", "black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice", "the olive oil and butter in a skillet or wide saucepan. 
Add the", "it needs balancing.\\nScrape the pâté into a small serving bowl and refrigerate for", "it does give the faux gras a little je ne sais quoi.\"\"\", self.harvester_class.description(),", "tablespoon soy sauce or tamari\", \"2 teaspoons minced fresh rosemary\", \"2 teaspoons fresh", "the onions and garlic, and cook, stirring frequently, until the onions become translucent,", "cayenne pepper\", \"salt and freshly ground black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self):", "oil\", \"2 tablespoons butter (salted or unsalted)\", \"1 small onion (peeled and diced)\",", "oil and butter in a skillet or wide saucepan. Add the onions and", "funky parts) and slice them. Heat the olive oil and butter in a", "and slice them. Heat the olive oil and butter in a skillet or", "sage or flat leaf parsley\", \"optional: 2 teaspoons Cognac or brandy\", \"1 teaspoon", "(minced)\", \"2 tablespoons fresh sage or flat leaf parsley\", \"optional: 2 teaspoons Cognac", "Add the mushrooms and cook, stirring occasionally, until they’re soft and cooked through,", "test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off a bit of the stem", "self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12 medium-sized", "through, another 5 to 8 minutes. Remove from heat.\\nIn a food processor, combine", "become translucent, 5 to 6 minutes. Add the mushrooms and cook, stirring occasionally,", "sauce, rosemary, thyme, sage or parsley, Cognac (if using), brown sugar, and cayenne.", "a small serving bowl and refrigerate for a few hours, until firm.\"\"\", self.harvester_class.instructions(),", "lemon juice, soy sauce, rosemary, thyme, sage or parsley, Cognac (if using), brown", "Heat the olive oil and butter in a skillet or wide saucepan. 
Add", "the package for specific guidelines. If avoiding gluten, use tamari instead of soy", "and garlic, and cook, stirring frequently, until the onions become translucent, 5 to", "a food processor, combine the cooked lentils, nuts, lemon juice, soy sauce, rosemary,", "diced)\", \"2 cloves garlic (peeled and minced)\", \"2 cups (400g) cooked green lentils\",", "a bit of the stem end (the funky parts) and slice them. Heat", "mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons butter (salted or unsalted)\", \"1 small", "does give the faux gras a little je ne sais quoi.\"\"\", self.harvester_class.description(), )", "will yield close to the correct amount. They usually take about 20 to", "The cognac or brandy is optional, but it does give the faux gras", "refrigerate for a few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted", "of the stem end (the funky parts) and slice them. Heat the olive", "and diced)\", \"2 cloves garlic (peeled and minced)\", \"2 cups (400g) cooked green", "def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\",", "test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields())", "Très Clean, Très Chic by <NAME> Lentils double in volume when cooked, so", "(60ml) of olive oil. The cognac or brandy is optional, but it does", "the correct amount. They usually take about 20 to 30 minutes to cook", "tamari instead of soy sauce. For a vegan version, replace the butter with", "until soft, but check the directions on the package for specific guidelines. 
If", "Lentils double in volume when cooked, so 1 cup (160g) of dried lentils", "self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off a bit of the stem end", "the butter with the same quantity of olive oil, for a total of", "def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off a bit of the", "teaspoons fresh thyme (minced)\", \"2 tablespoons fresh sage or flat leaf parsley\", \"optional:", "version, replace the butter with the same quantity of olive oil, for a", "and minced)\", \"2 cups (400g) cooked green lentils\", \"1 cup (140g) toasted walnuts", "small serving bowl and refrigerate for a few hours, until firm.\"\"\", self.harvester_class.instructions(), )", "tablespoons olive oil\", \"2 tablespoons butter (salted or unsalted)\", \"1 small onion (peeled", "or unsalted)\", \"1 small onion (peeled and diced)\", \"2 cloves garlic (peeled and", "few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green,", "self.assertEqual( \"\"\"Adapted from Très Green, Très Clean, Très Chic by <NAME> Lentils double", "import DavidLebovitz from tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self):", "\"2 tablespoons freshly squeezed lemon juice\", \"1 tablespoon soy sauce or tamari\", \"2", "Taste, and add salt, pepper, and additional cognac, soy sauce, or lemon juice,", "from tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host())", "test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très Clean, Très Chic by <NAME> Lentils", "completely smooth. Taste, and add salt, pepper, and additional cognac, soy sauce, or", "translucent, 5 to 6 minutes. 
Add the mushrooms and cook, stirring occasionally, until", "test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(),", "Green, Très Clean, Très Chic by <NAME> Lentils double in volume when cooked,", "(peeled and minced)\", \"2 cups (400g) cooked green lentils\", \"1 cup (140g) toasted", "or wide saucepan. Add the onions and garlic, and cook, stirring frequently, until", "\"1 small onion (peeled and diced)\", \"2 cloves garlic (peeled and minced)\", \"2", "balancing.\\nScrape the pâté into a small serving bowl and refrigerate for a few", "same quantity of olive oil, for a total of 1/4 cup (60ml) of", "Add the onions and garlic, and cook, stirring frequently, until the onions become", "a total of 1/4 cup (60ml) of olive oil. The cognac or brandy", "into a small serving bowl and refrigerate for a few hours, until firm.\"\"\",", "clean. Slice off a bit of the stem end (the funky parts) and", "pepper, and additional cognac, soy sauce, or lemon juice, if it needs balancing.\\nScrape", "ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\",", "self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about 1", "If avoiding gluten, use tamari instead of soy sauce. For a vegan version,", "wide saucepan. Add the onions and garlic, and cook, stirring frequently, until the", "total of 1/4 cup (60ml) of olive oil. The cognac or brandy is", "skillet or wide saucepan. 
Add the onions and garlic, and cook, stirring frequently,", "bowl and refrigerate for a few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self):", "directions on the package for specific guidelines. If avoiding gluten, use tamari instead", "and additional cognac, soy sauce, or lemon juice, if it needs balancing.\\nScrape the", "soy sauce, or lemon juice, if it needs balancing.\\nScrape the pâté into a", "minutes to cook until soft, but check the directions on the package for", "cups (400g) cooked green lentils\", \"1 cup (140g) toasted walnuts or pecans\", \"2", "onions and garlic, and cook, stirring frequently, until the onions become translucent, 5", "olive oil. The cognac or brandy is optional, but it does give the", "DavidLebovitz from tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\",", "\"2 tablespoons fresh sage or flat leaf parsley\", \"optional: 2 teaspoons Cognac or", "a few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from Très", "parts) and slice them. Heat the olive oil and butter in a skillet", "flat leaf parsley\", \"optional: 2 teaspoons Cognac or brandy\", \"1 teaspoon brown sugar\",", "sauce, or lemon juice, if it needs balancing.\\nScrape the pâté into a small", "cognac or brandy is optional, but it does give the faux gras a", "\"2 cloves garlic (peeled and minced)\", \"2 cups (400g) cooked green lentils\", \"1", "cooked, so 1 cup (160g) of dried lentils will yield close to the", "sage or parsley, Cognac (if using), brown sugar, and cayenne. 
Scrape in the", "brandy is optional, but it does give the faux gras a little je", "button mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons butter (salted or unsalted)\", \"1", "green lentils\", \"1 cup (140g) toasted walnuts or pecans\", \"2 tablespoons freshly squeezed", "6 minutes. Add the mushrooms and cook, stirring occasionally, until they’re soft and", "30 minutes to cook until soft, but check the directions on the package", "smooth. Taste, and add salt, pepper, and additional cognac, soy sauce, or lemon", "for a few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from", "lentils will yield close to the correct amount. They usually take about 20", "test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def", "and butter in a skillet or wide saucepan. Add the onions and garlic,", "cognac, soy sauce, or lemon juice, if it needs balancing.\\nScrape the pâté into", "\"2 cups (400g) cooked green lentils\", \"1 cup (140g) toasted walnuts or pecans\",", "nuts, lemon juice, soy sauce, rosemary, thyme, sage or parsley, Cognac (if using),", "Très Green, Très Clean, Très Chic by <NAME> Lentils double in volume when", "(100g, about 1 cup) button mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons butter", "and cooked through, another 5 to 8 minutes. 
Remove from heat.\\nIn a food", "test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def", "sauce or tamari\", \"2 teaspoons minced fresh rosemary\", \"2 teaspoons fresh thyme (minced)\",", "They usually take about 20 to 30 minutes to cook until soft, but", "process until completely smooth. Taste, and add salt, pepper, and additional cognac, soy", "\"\"\"Adapted from Très Green, Très Clean, Très Chic by <NAME> Lentils double in", "test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about 1 cup) button mushrooms\", \"2 tablespoons", "replace the butter with the same quantity of olive oil, for a total", "onions become translucent, 5 to 6 minutes. Add the mushrooms and cook, stirring", "1 cup) button mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons butter (salted or", "cook until soft, but check the directions on the package for specific guidelines.", "parsley\", \"optional: 2 teaspoons Cognac or brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon", "another 5 to 8 minutes. Remove from heat.\\nIn a food processor, combine the", "lentils\", \"1 cup (140g) toasted walnuts or pecans\", \"2 tablespoons freshly squeezed lemon", "teaspoons Cognac or brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt", "squeezed lemon juice\", \"1 tablespoon soy sauce or tamari\", \"2 teaspoons minced fresh", "5 to 8 minutes. Remove from heat.\\nIn a food processor, combine the cooked", "rosemary, thyme, sage or parsley, Cognac (if using), brown sugar, and cayenne. Scrape", "pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off", "the onions become translucent, 5 to 6 minutes. 
Add the mushrooms and cook,", "soy sauce, rosemary, thyme, sage or parsley, Cognac (if using), brown sugar, and", "unsalted)\", \"1 small onion (peeled and diced)\", \"2 cloves garlic (peeled and minced)\",", "<gh_stars>0 from recipe_scrapers.davidlebovitz import DavidLebovitz from tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class =", "minutes. Remove from heat.\\nIn a food processor, combine the cooked lentils, nuts, lemon", "soy sauce or tamari\", \"2 teaspoons minced fresh rosemary\", \"2 teaspoons fresh thyme", "the cooked mushroom mixture and process until completely smooth. Taste, and add salt,", "olive oil\", \"2 tablespoons butter (salted or unsalted)\", \"1 small onion (peeled and", "specific guidelines. If avoiding gluten, use tamari instead of soy sauce. For a", "soft, but check the directions on the package for specific guidelines. If avoiding", ") def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about 1 cup) button mushrooms\",", "tablespoons butter (salted or unsalted)\", \"1 small onion (peeled and diced)\", \"2 cloves", "(salted or unsalted)\", \"1 small onion (peeled and diced)\", \"2 cloves garlic (peeled", "TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def", "butter (salted or unsalted)\", \"1 small onion (peeled and diced)\", \"2 cloves garlic", "olive oil, for a total of 1/4 cup (60ml) of olive oil. The", "in a skillet or wide saucepan. Add the onions and garlic, and cook,", "amount. They usually take about 20 to 30 minutes to cook until soft,", "\"\"\"Wipe the mushrooms clean. 
Slice off a bit of the stem end (the", "tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def", "leaf parsley\", \"optional: 2 teaspoons Cognac or brandy\", \"1 teaspoon brown sugar\", \"1/8", "to 8 minutes. Remove from heat.\\nIn a food processor, combine the cooked lentils,", "firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très Clean, Très", "sauce. For a vegan version, replace the butter with the same quantity of", "freshly ground black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms", "(peeled and diced)\", \"2 cloves garlic (peeled and minced)\", \"2 cups (400g) cooked", "pepper\", \"salt and freshly ground black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual(", "double in volume when cooked, so 1 cup (160g) of dried lentils will", "(if using), brown sugar, and cayenne. Scrape in the cooked mushroom mixture and", "cook, stirring occasionally, until they’re soft and cooked through, another 5 to 8", "gluten, use tamari instead of soy sauce. 
For a vegan version, replace the", "cloves garlic (peeled and minced)\", \"2 cups (400g) cooked green lentils\", \"1 cup", "self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), )", "1 cup (160g) of dried lentils will yield close to the correct amount.", "def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time())", "ground black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean.", "brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and freshly ground", "cooked lentils, nuts, lemon juice, soy sauce, rosemary, thyme, sage or parsley, Cognac", "cup (140g) toasted walnuts or pecans\", \"2 tablespoons freshly squeezed lemon juice\", \"1", "off a bit of the stem end (the funky parts) and slice them.", "add salt, pepper, and additional cognac, soy sauce, or lemon juice, if it", "juice, soy sauce, rosemary, thyme, sage or parsley, Cognac (if using), brown sugar,", ") def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très Clean, Très Chic by", "def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\",", "to 30 minutes to cook until soft, but check the directions on the", "\"12 medium-sized (100g, about 1 cup) button mushrooms\", \"2 tablespoons olive oil\", \"2", 
"cayenne. Scrape in the cooked mushroom mixture and process until completely smooth. Taste,", "for specific guidelines. If avoiding gluten, use tamari instead of soy sauce. For", "by <NAME> Lentils double in volume when cooked, so 1 cup (160g) of", "brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and freshly ground black pepper\", ],", "self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [", "Remove from heat.\\nIn a food processor, combine the cooked lentils, nuts, lemon juice,", "but check the directions on the package for specific guidelines. If avoiding gluten,", ") def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off a bit of", "optional, but it does give the faux gras a little je ne sais", "\"2 tablespoons olive oil\", \"2 tablespoons butter (salted or unsalted)\", \"1 small onion", "thyme (minced)\", \"2 tablespoons fresh sage or flat leaf parsley\", \"optional: 2 teaspoons", "servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12", "take about 20 to 30 minutes to cook until soft, but check the", "\"1/8 teaspoon cayenne pepper\", \"salt and freshly ground black pepper\", ], self.harvester_class.ingredients(), )", "occasionally, until they’re soft and cooked through, another 5 to 8 minutes. Remove", "and add salt, pepper, and additional cognac, soy sauce, or lemon juice, if", "lemon juice\", \"1 tablespoon soy sauce or tamari\", \"2 teaspoons minced fresh rosemary\",", "of olive oil. 
The cognac or brandy is optional, but it does give", "\"salt and freshly ground black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe", "volume when cooked, so 1 cup (160g) of dried lentils will yield close", "toasted walnuts or pecans\", \"2 tablespoons freshly squeezed lemon juice\", \"1 tablespoon soy", "quantity of olive oil, for a total of 1/4 cup (60ml) of olive", "they’re soft and cooked through, another 5 to 8 minutes. Remove from heat.\\nIn", "food processor, combine the cooked lentils, nuts, lemon juice, soy sauce, rosemary, thyme,", "5 to 6 minutes. Add the mushrooms and cook, stirring occasionally, until they’re", "thyme, sage or parsley, Cognac (if using), brown sugar, and cayenne. Scrape in", "soft and cooked through, another 5 to 8 minutes. Remove from heat.\\nIn a", "on the package for specific guidelines. If avoiding gluten, use tamari instead of", "frequently, until the onions become translucent, 5 to 6 minutes. Add the mushrooms", "package for specific guidelines. If avoiding gluten, use tamari instead of soy sauce.", "1/4 cup (60ml) of olive oil. The cognac or brandy is optional, but", "tamari\", \"2 teaspoons minced fresh rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2 tablespoons", "cooked through, another 5 to 8 minutes. Remove from heat.\\nIn a food processor,", "freshly squeezed lemon juice\", \"1 tablespoon soy sauce or tamari\", \"2 teaspoons minced", "from recipe_scrapers.davidlebovitz import DavidLebovitz from tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz", "until they’re soft and cooked through, another 5 to 8 minutes. Remove from", "stirring occasionally, until they’re soft and cooked through, another 5 to 8 minutes.", "pâté into a small serving bowl and refrigerate for a few hours, until", "olive oil and butter in a skillet or wide saucepan. 
Add the onions", "test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about", "self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual(", "(the funky parts) and slice them. Heat the olive oil and butter in", "fresh rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2 tablespoons fresh sage or flat", "of soy sauce. For a vegan version, replace the butter with the same", "def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about 1 cup) button mushrooms\", \"2", "stem end (the funky parts) and slice them. Heat the olive oil and", "or brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and freshly", "in volume when cooked, so 1 cup (160g) of dried lentils will yield", "recipe_scrapers.davidlebovitz import DavidLebovitz from tests import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def", "and refrigerate for a few hours, until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual(", "def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self):", "the same quantity of olive oil, for a total of 1/4 cup (60ml)", "avoiding gluten, use tamari instead of soy sauce. 
For a vegan version, replace", "or brandy is optional, but it does give the faux gras a little", "butter with the same quantity of olive oil, for a total of 1/4", "stirring frequently, until the onions become translucent, 5 to 6 minutes. Add the", "the stem end (the funky parts) and slice them. Heat the olive oil", "and cook, stirring occasionally, until they’re soft and cooked through, another 5 to", "Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self):", "self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def", "rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2 tablespoons fresh sage or flat leaf", "teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and freshly ground black pepper\",", "self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g, about 1 cup) button", "\"1 cup (140g) toasted walnuts or pecans\", \"2 tablespoons freshly squeezed lemon juice\",", "onion (peeled and diced)\", \"2 cloves garlic (peeled and minced)\", \"2 cups (400g)", "with the same quantity of olive oil, for a total of 1/4 cup", "[ \"12 medium-sized (100g, about 1 cup) button mushrooms\", \"2 tablespoons olive oil\",", "juice\", \"1 tablespoon soy sauce or tamari\", \"2 teaspoons minced fresh rosemary\", \"2", "so 1 cup (160g) of dried lentils will yield close to the correct", "brown sugar, and cayenne. Scrape in the cooked mushroom mixture and process until", "mixture and process until completely smooth. 
Taste, and add salt, pepper, and additional", "self.assertEqual( [ \"12 medium-sized (100g, about 1 cup) button mushrooms\", \"2 tablespoons olive", "bit of the stem end (the funky parts) and slice them. Heat the", "until the onions become translucent, 5 to 6 minutes. Add the mushrooms and", "def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual( [ \"12 medium-sized (100g,", "self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self):", "Très Chic by <NAME> Lentils double in volume when cooked, so 1 cup", "self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0", "or pecans\", \"2 tablespoons freshly squeezed lemon juice\", \"1 tablespoon soy sauce or", "and cayenne. Scrape in the cooked mushroom mixture and process until completely smooth.", "is optional, but it does give the faux gras a little je ne", "soy sauce. 
For a vegan version, replace the butter with the same quantity", "usually take about 20 to 30 minutes to cook until soft, but check", "or flat leaf parsley\", \"optional: 2 teaspoons Cognac or brandy\", \"1 teaspoon brown", "= DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux", "processor, combine the cooked lentils, nuts, lemon juice, soy sauce, rosemary, thyme, sage", "cup) button mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons butter (salted or unsalted)\",", "the mushrooms and cook, stirring occasionally, until they’re soft and cooked through, another", "test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def test_image(self): self.assertEqual( \"https://www.davidlebovitz.com/wp-content/uploads/2015/06/Faux-Gras-Lentil-Pate-8.jpg\", self.harvester_class.image(), ) def test_ingredients(self): self.assertEqual(", "fresh thyme (minced)\", \"2 tablespoons fresh sage or flat leaf parsley\", \"optional: 2", "fresh sage or flat leaf parsley\", \"optional: 2 teaspoons Cognac or brandy\", \"1", "or tamari\", \"2 teaspoons minced fresh rosemary\", \"2 teaspoons fresh thyme (minced)\", \"2", "self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très Clean, Très Chic", "Chic by <NAME> Lentils double in volume when cooked, so 1 cup (160g)", "cooked mushroom mixture and process until completely smooth. Taste, and add salt, pepper,", "cup (160g) of dried lentils will yield close to the correct amount. They", "parsley, Cognac (if using), brown sugar, and cayenne. Scrape in the cooked mushroom", "yield close to the correct amount. They usually take about 20 to 30", "needs balancing.\\nScrape the pâté into a small serving bowl and refrigerate for a", "guidelines. 
If avoiding gluten, use tamari instead of soy sauce. For a vegan", "slice them. Heat the olive oil and butter in a skillet or wide", "for a total of 1/4 cup (60ml) of olive oil. The cognac or", "self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off a bit", "Slice off a bit of the stem end (the funky parts) and slice", "when cooked, so 1 cup (160g) of dried lentils will yield close to", "(160g) of dried lentils will yield close to the correct amount. They usually", "small onion (peeled and diced)\", \"2 cloves garlic (peeled and minced)\", \"2 cups", "(400g) cooked green lentils\", \"1 cup (140g) toasted walnuts or pecans\", \"2 tablespoons", "20 to 30 minutes to cook until soft, but check the directions on", "about 1 cup) button mushrooms\", \"2 tablespoons olive oil\", \"2 tablespoons butter (salted", "dried lentils will yield close to the correct amount. They usually take about", "tablespoons fresh sage or flat leaf parsley\", \"optional: 2 teaspoons Cognac or brandy\",", "def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title())", "], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the mushrooms clean. Slice off a", "butter in a skillet or wide saucepan. 
Add the onions and garlic, and", "a vegan version, replace the butter with the same quantity of olive oil,", "\"1 tablespoon soy sauce or tamari\", \"2 teaspoons minced fresh rosemary\", \"2 teaspoons", "self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None,", "cooked green lentils\", \"1 cup (140g) toasted walnuts or pecans\", \"2 tablespoons freshly", "until completely smooth. Taste, and add salt, pepper, and additional cognac, soy sauce,", "garlic (peeled and minced)\", \"2 cups (400g) cooked green lentils\", \"1 cup (140g)", "and freshly ground black pepper\", ], self.harvester_class.ingredients(), ) def test_instructions(self): self.assertEqual( \"\"\"Wipe the", "check the directions on the package for specific guidelines. If avoiding gluten, use", "or parsley, Cognac (if using), brown sugar, and cayenne. Scrape in the cooked", "Cognac (if using), brown sugar, and cayenne. Scrape in the cooked mushroom mixture", "of olive oil, for a total of 1/4 cup (60ml) of olive oil.", "Cognac or brandy\", \"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and", "\"1 teaspoon brown sugar\", \"1/8 teaspoon cayenne pepper\", \"salt and freshly ground black", "import ScraperTest class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self):", "instead of soy sauce. For a vegan version, replace the butter with the", "close to the correct amount. They usually take about 20 to 30 minutes", "to the correct amount. 
They usually take about 20 to 30 minutes to", "vegan version, replace the butter with the same quantity of olive oil, for", "\"2 tablespoons butter (salted or unsalted)\", \"1 small onion (peeled and diced)\", \"2", "the directions on the package for specific guidelines. If avoiding gluten, use tamari", "self.assertEqual(\"David\", self.harvester_class.author()) def test_title(self): self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self):", "until firm.\"\"\", self.harvester_class.instructions(), ) def test_description(self): self.assertEqual( \"\"\"Adapted from Très Green, Très Clean,", "self.assertEqual(\"Faux Gras\", self.harvester_class.title()) def test_total_time(self): self.assertEqual(None, self.harvester_class.total_time()) def test_yields(self): self.assertEqual(\"0 servings\", self.harvester_class.yields()) def", "using), brown sugar, and cayenne. Scrape in the cooked mushroom mixture and process", "saucepan. Add the onions and garlic, and cook, stirring frequently, until the onions", "(140g) toasted walnuts or pecans\", \"2 tablespoons freshly squeezed lemon juice\", \"1 tablespoon", "but it does give the faux gras a little je ne sais quoi.\"\"\",", "lentils, nuts, lemon juice, soy sauce, rosemary, thyme, sage or parsley, Cognac (if", "class TestDavidLebovivtzScraper(ScraperTest): scraper_class = DavidLebovitz def test_host(self): self.assertEqual(\"davidlebovitz.com\", self.harvester_class.host()) def test_author(self): self.assertEqual(\"David\", self.harvester_class.author())", "about 20 to 30 minutes to cook until soft, but check the directions", "use tamari instead of soy sauce. For a vegan version, replace the butter", "cup (60ml) of olive oil. The cognac or brandy is optional, but it", "oil, for a total of 1/4 cup (60ml) of olive oil. The cognac", "minutes. 
Add the mushrooms and cook, stirring occasionally, until they’re soft and cooked", "end (the funky parts) and slice them. Heat the olive oil and butter" ]
[ "['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes,", "end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, 26, False, None, None,", "= None, keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int =", "False) -> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp,", "keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self): result = super().__json__() result['indicator_cols']", "= factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) ==", "assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 assert", "columns: columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids,", "order, limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor,", "str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False, fill_method: str", "__name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer())", "Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None, entity_type: str =", "columns: List = None, filters: List = None, order: object = None, limit:", "category_field, time_field, 26, False, None, None, transformer, None, persist_factor, dry_run) 
def do_compute(self): super().do_compute()", "2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 assert round(macd.loc[('stock_sz_000338', '2019-06-17')], 2)", "'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp,", "macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2)", "pd.Timestamp] = None, columns: List = None, filters: List = None, order: object", "= 'ffill', effective_number: int = 10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator =", "IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import", "'stock', exchanges: List[str] = ['sh', 'sz'], codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp]", "import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None, entity_type:", "None, persist_factor: bool = False, dry_run: bool = True) -> None: self.data_schema =", "dry_run: bool = True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols", "import pandas as pd from zvdata import IntervalLevel from zvt.api.common import get_kdata_schema from", "level, category_field, time_field, 26, False, None, None, transformer, None, persist_factor, dry_run) def do_compute(self):", "bool = True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols =", "start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List =", "int = None, keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int", "Transformer = MacdTransformer(), accumulator: Accumulator = None, 
persist_factor: bool = False, dry_run: bool", "= MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit,", "None, None, transformer, None, persist_factor, dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff'] >", "exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field,", "'entity_id', time_field: str = 'timestamp', persist_factor: bool = False, dry_run: bool = False)", "['sh', 'sz'], codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str,", "for_json = __json__ # supported by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str]", "= None, limit: int = None, provider: str = 'joinquant', level: Union[str, IntervalLevel]", "as pd from zvdata import IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import", "MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider,", "= 10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator = None, persist_factor: bool =", "result = super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json = __json__ # supported", "import List, Union import pandas as pd from zvdata import IntervalLevel from zvt.api.common", "(self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if __name__ ==", "'__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17')", 
"level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd", "# supported by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None, entity_type:", "start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method,", "from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor):", "columns, filters, order, limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer,", "order, limit, provider, level, category_field, time_field, 26, False, None, None, transformer, None, persist_factor,", "TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff']", "import IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor", "10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator = None, persist_factor: bool = False,", "order: object = None, limit: int = None, provider: str = 'joinquant', level:", "from typing import List, Union import pandas as pd from zvdata import IntervalLevel", "None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols if not columns:", "'low'] super().__init__(self.data_schema, entity_ids, entity_type, 
exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit,", "simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock',", "computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd']", "from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import Factor,", "str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool", "return result for_json = __json__ # supported by simplejson class BullFactor(TechnicalFactor): def __init__(self,", "str = 'ffill', effective_number: int = 10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator", "IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', persist_factor: bool = False,", "(self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock',", "'ffill', effective_number: int = 10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator = None,", "TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock', exchanges: List[str]", "transformer: Transformer = MacdTransformer(), accumulator: Accumulator = None, persist_factor: bool = False, dry_run:", "MacdTransformer(), accumulator: Accumulator = None, persist_factor: bool = False, dry_run: bool = True)", "List = None, filters: List = None, order: object = None, limit: int", "Accumulator = None, persist_factor: bool = False, dry_run: bool = True) -> None:", "MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def 
__init__(self, entity_ids:", "'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp,", "category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self): result", "level=level) if transformer: self.indicator_cols = transformer.indicator_cols if not columns: columns = ['id', 'entity_id',", "time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self): result =", "str = 'timestamp', persist_factor: bool = False, dry_run: bool = False) -> None:", "persist_factor: bool = False, dry_run: bool = False) -> None: transformer = MacdTransformer()", "None, transformer, None, persist_factor, dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0)", "start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea =", "print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338',", "= super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json = __json__ # supported by", "= factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) ==", "s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if", "= False) -> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type, 
exchanges, codes, the_timestamp, start_timestamp,", "__json__ # supported by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None,", "BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock', exchanges: List[str]", "= False, fill_method: str = 'ffill', effective_number: int = 10, transformer: Transformer =", "if not columns: columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low']", "transformer, None, persist_factor, dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0) &", "= 'entity_id', time_field: str = 'timestamp', persist_factor: bool = False, dry_run: bool =", "end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea']", "entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field,", "__init__(self, entity_ids: List[str] = None, entity_type: str = 'stock', exchanges: List[str] = ['sh',", "None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str,", "None, order: object = None, limit: int = None, provider: str = 'joinquant',", "import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator", "List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None,", "= None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, columns:", "accumulator, persist_factor, dry_run) def __json__(self): result = super().__json__() result['indicator_cols'] = 
self.indicator_cols return result", "self.indicator_cols return result for_json = __json__ # supported by simplejson class BullFactor(TechnicalFactor): def", "entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level,", "def do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0) self.result_df", "provider: str = 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id',", "bool = False) -> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp,", "26, False, None, None, transformer, None, persist_factor, dry_run) def do_compute(self): super().do_compute() s =", "provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd =", "= factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338',", "super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level,", "= 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False, fill_method: str =", "transformer: self.indicator_cols = transformer.indicator_cols if not columns: columns = ['id', 'entity_id', 'timestamp', 'level',", "= IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', persist_factor: bool =", "List, Union import pandas as pd from zvdata import IntervalLevel from zvt.api.common import", "List = None, order: object = None, limit: int = None, provider: str", "transformer.indicator_cols if not columns: columns = ['id', 'entity_id', 'timestamp', 'level', 
'open', 'close', 'high',", "transformer = MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order,", "False, None, None, transformer, None, persist_factor, dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff']", "'2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 assert round(macd.loc[('stock_sz_000338', '2019-06-17')],", "= 'stock', exchanges: List[str] = ['sh', 'sz'], codes: List[str] = None, the_timestamp: Union[str,", "'sz'], codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp]", "= None, columns: List = None, filters: List = None, order: object =", "if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26,", "time_field: str = 'timestamp', persist_factor: bool = False, dry_run: bool = False) ->", "zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None,", "from zvdata import IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer", "None, entity_type: str = 'stock', exchanges: List[str] = ['sh', 'sz'], codes: List[str] =", "= ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges,", "start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, 26, False, None,", "from zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] =", "columns, filters, order, limit, provider, level, category_field, time_field, 26, False, None, 
None, transformer,", "dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0)", "True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols if", "Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None,", "pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None, filters:", "= 'entity_id', time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool =", "> 0) & (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if __name__ == '__main__':", "transformer, accumulator, persist_factor, dry_run) def __json__(self): result = super().__json__() result['indicator_cols'] = self.indicator_cols return", "None, provider: str = 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str =", "category_field: str = 'entity_id', time_field: str = 'timestamp', persist_factor: bool = False, dry_run:", "provider, level, category_field, time_field, 26, False, None, None, transformer, None, persist_factor, dry_run) def", "Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] =", "= False, dry_run: bool = False) -> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type,", "= ['sh', 'sz'], codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp:", "= None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None, filters: List", "factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03", "limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, 
dry_run)", "None, keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int = 10,", "IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int", "= s.to_frame(name='score') if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY,", "accumulator: Accumulator = None, persist_factor: bool = False, dry_run: bool = True) ->", "category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp:", "== '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail())", "factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')],", "-> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns,", "'entity_id', time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False,", "def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock', exchanges: List[str] =", "str = 'entity_id', time_field: str = 'timestamp', persist_factor: bool = False, dry_run: bool", "List[str] = None, entity_type: str = 'stock', exchanges: List[str] = ['sh', 'sz'], codes:", "if transformer: self.indicator_cols = transformer.indicator_cols if not columns: columns = ['id', 'entity_id', 'timestamp',", "'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, 
order,", "pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None,", "& (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if __name__ == '__main__': factor =", "= TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff =", "dry_run) def __json__(self): result = super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json =", "= None, entity_type: str = 'stock', exchanges: List[str] = ['sh', 'sz'], codes: List[str]", "List[str] = ['sh', 'sz'], codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None,", "supported by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None, entity_type: str", "Union[str, pd.Timestamp] = None, columns: List = None, filters: List = None, order:", "the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp,", "keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number: int = 10, transformer:", "0) & (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if __name__ == '__main__': factor", "super().do_compute() s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score')", "Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', persist_factor:", "by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None, entity_type: str =", "int = None, provider: str = 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field:", 
"effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self): result = super().__json__() result['indicator_cols'] = self.indicator_cols", "typing import List, Union import pandas as pd from zvdata import IntervalLevel from", "persist_factor: bool = False, dry_run: bool = True) -> None: self.data_schema = get_kdata_schema(entity_type,", "= get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols if not columns: columns =", "Union import pandas as pd from zvdata import IntervalLevel from zvt.api.common import get_kdata_schema", "self.result_df = s.to_frame(name='score') if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10',", "import MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self,", "codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] =", "fill_method: str = 'ffill', effective_number: int = 10, transformer: Transformer = MacdTransformer(), accumulator:", "False, fill_method: str = 'ffill', effective_number: int = 10, transformer: Transformer = MacdTransformer(),", "dry_run: bool = False) -> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type, exchanges, codes,", "not columns: columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema,", "'open', 'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns,", "time_field, 26, False, None, None, transformer, None, persist_factor, dry_run) def do_compute(self): super().do_compute() s", "columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close', 'high', 'low'] super().__init__(self.data_schema, 
entity_ids, entity_type,", "transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert", "the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp]", "None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List", "= True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols", "'timestamp', persist_factor: bool = False, dry_run: bool = False) -> None: transformer =", "False, dry_run: bool = True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer:", "= MacdTransformer(), accumulator: Accumulator = None, persist_factor: bool = False, dry_run: bool =", "None, persist_factor, dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0) & (self.factor_df['dea']", "0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 assert round(macd.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.19", "computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self): result = super().__json__()", "factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06", "bool = False, fill_method: str = 'ffill', effective_number: int = 10, transformer: Transformer", "get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator class", "limit: int = None, provider: str = 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,", "class 
TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock', exchanges:", "= None, provider: str = 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str", "codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, computing_window,", "fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self): result = super().__json__() result['indicator_cols'] =", "limit, provider, level, category_field, time_field, 26, False, None, None, transformer, None, persist_factor, dry_run)", "= 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str", "effective_number: int = 10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator = None, persist_factor:", "bool = False, dry_run: bool = False) -> None: transformer = MacdTransformer() super().__init__(entity_ids,", "'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters,", "str = 'stock', exchanges: List[str] = ['sh', 'sz'], codes: List[str] = None, the_timestamp:", "def __json__(self): result = super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json = __json__", "MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str]", "get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols if not columns: columns = ['id',", "self.indicator_cols = transformer.indicator_cols if not columns: columns = ['id', 'entity_id', 'timestamp', 'level', 'open',", "None, columns: List = None, filters: List = None, order: object = None,", "entity_ids: List[str] = None, entity_type: str = 'stock', 
exchanges: List[str] = ['sh', 'sz'],", "level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp',", "__json__(self): result = super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json = __json__ #", "== 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 assert round(macd.loc[('stock_sz_000338', '2019-06-17')], 2) ==", "-> None: self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols if not", "Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock',", "= transformer.indicator_cols if not columns: columns = ['id', 'entity_id', 'timestamp', 'level', 'open', 'close',", "pd from zvdata import IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer,", "super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json = __json__ # supported by simplejson", "= 'timestamp', persist_factor: bool = False, dry_run: bool = False) -> None: transformer", "provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def", "= self.indicator_cols return result for_json = __json__ # supported by simplejson class BullFactor(TechnicalFactor):", "= __json__ # supported by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] =", "zvdata import IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from", "codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) 
factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea", "result['indicator_cols'] = self.indicator_cols return result for_json = __json__ # supported by simplejson class", "the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, 26, False,", "False, dry_run: bool = False) -> None: transformer = MacdTransformer() super().__init__(entity_ids, entity_type, exchanges,", "= False, dry_run: bool = True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level) if", "'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str =", "IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int = None,", "persist_factor, dry_run) def do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] >", "do_compute(self): super().do_compute() s = (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0) self.result_df =", "factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant', computing_window=26, transformer=MacdTransformer()) print(factor.get_factor_df().tail()) factor.move_on(to_timestamp='2019-06-17') diff", "result for_json = __json__ # supported by simplejson class BullFactor(TechnicalFactor): def __init__(self, entity_ids:", "entity_type: str = 'stock', exchanges: List[str] = ['sh', 'sz'], codes: List[str] = None,", "persist_factor, dry_run) def __json__(self): result = super().__json__() result['indicator_cols'] = self.indicator_cols return result for_json", "super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider,", "object = None, limit: int = None, provider: str = 'joinquant', level: 
Union[str,", "filters, order, limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator,", "zvt.api.common import get_kdata_schema from zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer,", "end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None, filters: List = None,", "bool = False, dry_run: bool = True) -> None: self.data_schema = get_kdata_schema(entity_type, level=level)", "dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert", "IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', persist_factor: bool", "diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')], 2)", "factor.move_on(to_timestamp='2019-06-17') diff = factor.get_factor_df()['diff'] dea = factor.get_factor_df()['dea'] macd = factor.get_factor_df()['macd'] assert round(diff.loc[('stock_sz_000338', '2019-06-17')],", "None, filters: List = None, order: object = None, limit: int = None,", "int = 10, transformer: Transformer = MacdTransformer(), accumulator: Accumulator = None, persist_factor: bool", "round(diff.loc[('stock_sz_000338', '2019-06-17')], 2) == 0.06 assert round(dea.loc[('stock_sz_000338', '2019-06-17')], 2) == -0.03 assert round(macd.loc[('stock_sz_000338',", "exchanges: List[str] = ['sh', 'sz'], codes: List[str] = None, the_timestamp: Union[str, pd.Timestamp] =", "Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window:", "filters, order, limit, provider, level, category_field, time_field, 26, False, None, None, transformer, None,", "None: transformer = MacdTransformer() 
super().__init__(entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp, columns, filters,", "'timestamp', computing_window: int = None, keep_all_timestamp: bool = False, fill_method: str = 'ffill',", "= None, order: object = None, limit: int = None, provider: str =", "= None, the_timestamp: Union[str, pd.Timestamp] = None, start_timestamp: Union[str, pd.Timestamp] = None, end_timestamp:", "= None, filters: List = None, order: object = None, limit: int =", "pandas as pd from zvdata import IntervalLevel from zvt.api.common import get_kdata_schema from zvt.factors.algorithm", "computing_window: int = None, keep_all_timestamp: bool = False, fill_method: str = 'ffill', effective_number:", "Factor, Transformer, Accumulator class TechnicalFactor(Factor): def __init__(self, entity_ids: List[str] = None, entity_type: str", "class BullFactor(TechnicalFactor): def __init__(self, entity_ids: List[str] = None, entity_type: str = 'stock', exchanges:", "> 0) self.result_df = s.to_frame(name='score') if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'],", "time_field: str = 'timestamp', computing_window: int = None, keep_all_timestamp: bool = False, fill_method:", "= (self.factor_df['diff'] > 0) & (self.factor_df['dea'] > 0) self.result_df = s.to_frame(name='score') if __name__", "None, end_timestamp: Union[str, pd.Timestamp] = None, columns: List = None, filters: List =", "self.data_schema = get_kdata_schema(entity_type, level=level) if transformer: self.indicator_cols = transformer.indicator_cols if not columns: columns", "zvt.factors.algorithm import MacdTransformer, MaTransformer from zvt.factors.factor import Factor, Transformer, Accumulator class TechnicalFactor(Factor): def", "filters: List = None, order: object = None, limit: int = None, provider:", "str = 'joinquant', level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field:", "'level', 'open', 
'close', 'high', 'low'] super().__init__(self.data_schema, entity_ids, entity_type, exchanges, codes, the_timestamp, start_timestamp, end_timestamp,", "= None, persist_factor: bool = False, dry_run: bool = True) -> None: self.data_schema", "level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number, transformer, accumulator, persist_factor, dry_run) def __json__(self):", "codes, the_timestamp, start_timestamp, end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, 26,", "None, limit: int = None, provider: str = 'joinquant', level: Union[str, IntervalLevel] =", "0) self.result_df = s.to_frame(name='score') if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01',", "s.to_frame(name='score') if __name__ == '__main__': factor = TechnicalFactor(entity_type='stock', codes=['000338'], start_timestamp='2019-01-01', end_timestamp='2019-06-10', level=IntervalLevel.LEVEL_1DAY, provider='joinquant',", "= IntervalLevel.LEVEL_1DAY, category_field: str = 'entity_id', time_field: str = 'timestamp', computing_window: int =", "end_timestamp, columns, filters, order, limit, provider, level, category_field, time_field, computing_window, keep_all_timestamp, fill_method, effective_number," ]
[ "throttled. On success calls `throttle_success`. On failure calls `throttle_failure`. \"\"\" # Bypass CORS", "to see if the request should be throttled. On success calls `throttle_success`. On", "along with the key into the cache. \"\"\" # if type(request.user) is not", "scope = 'baseThrottle' class Meta: abstract = True def get_cache_key(self, request, view): return", "cache. \"\"\" # if type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now)", "} def allow_request(self, request, view): \"\"\" Implement the check to see if the", "True def get_cache_key(self, request, view): return self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request)", "self.key = self.get_cache_key(request, view) if self.key is None: return True self.history = self.cache.get(self.key,", "`throttle_success`. On failure calls `throttle_failure`. \"\"\" # Bypass CORS OPTIONS requests if request.method", "None: return True self.key = self.get_cache_key(request, view) if self.key is None: return True", "None: return True self.history = self.cache.get(self.key, []) self.now = self.timer() while len(self.history) and", "self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the current request's", "allow_request(self, request, view): \"\"\" Implement the check to see if the request should", "should be throttled. On success calls `throttle_success`. On failure calls `throttle_failure`. 
\"\"\" #", "django.contrib.auth.models import AnonymousUser from rest_framework.throttling import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class", "return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the current request's timestamp along with", "request, view): return self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self,", "self.now - self.duration: self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def", "Meta: abstract = True def get_cache_key(self, request, view): return self.cache_format % { 'scope':", "calls `throttle_failure`. \"\"\" # Bypass CORS OPTIONS requests if request.method == \"OPTIONS\": return", "the request should be throttled. On success calls `throttle_success`. On failure calls `throttle_failure`.", "see if the request should be throttled. On success calls `throttle_success`. On failure", "CORS OPTIONS requests if request.method == \"OPTIONS\": return True if self.rate is None:", "request.method == \"OPTIONS\": return True if self.rate is None: return True self.key =", "self.timer() while len(self.history) and self.history[-1] <= self.now - self.duration: self.history.pop() if len(self.history) >=", "type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return", "class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope = 'contact' class EmailValidationRateThrottle(BaseRateThrottle): scope", "self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the current request's timestamp along with the", "On failure calls `throttle_failure`. 
\"\"\" # Bypass CORS OPTIONS requests if request.method ==", "AnonymousUser from rest_framework.throttling import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract", "import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract = True def", "if self.rate is None: return True self.key = self.get_cache_key(request, view) if self.key is", "self.cache.get(self.key, []) self.now = self.timer() while len(self.history) and self.history[-1] <= self.now - self.duration:", "with the key into the cache. \"\"\" # if type(request.user) is not AnonymousUser:", "key into the cache. \"\"\" # if type(request.user) is not AnonymousUser: # self.history.insert(0,", "= self.timer() while len(self.history) and self.history[-1] <= self.now - self.duration: self.history.pop() if len(self.history)", "\"\"\" # if type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key,", "self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the current request's timestamp along", "'ident': self.get_ident(request) } def allow_request(self, request, view): \"\"\" Implement the check to see", "is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True", "self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope", "\"\"\" Implement the check to see if the request should be throttled. 
On", "self.get_cache_key(request, view) if self.key is None: return True self.history = self.cache.get(self.key, []) self.now", "True self.history = self.cache.get(self.key, []) self.now = self.timer() while len(self.history) and self.history[-1] <=", "self.history[-1] <= self.now - self.duration: self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure() return", "return self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self, request, view):", "len(self.history) and self.history[-1] <= self.now - self.duration: self.history.pop() if len(self.history) >= self.num_requests: return", "self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\"", "and self.history[-1] <= self.now - self.duration: self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure()", "self.duration: self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request):", "into the cache. 
\"\"\" # if type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id)", ">= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the current", "self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope =", "True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope = 'contact' class EmailValidationRateThrottle(BaseRateThrottle):", "= self.cache.get(self.key, []) self.now = self.timer() while len(self.history) and self.history[-1] <= self.now -", "def throttle_success(self, request): \"\"\" Inserts the current request's timestamp along with the key", "{ 'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self, request, view): \"\"\" Implement the", "if self.key is None: return True self.history = self.cache.get(self.key, []) self.now = self.timer()", "= 'baseThrottle' class Meta: abstract = True def get_cache_key(self, request, view): return self.cache_format", "if request.method == \"OPTIONS\": return True if self.rate is None: return True self.key", "is None: return True self.key = self.get_cache_key(request, view) if self.key is None: return", "self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self, request, view): \"\"\"", "success calls `throttle_success`. On failure calls `throttle_failure`. \"\"\" # Bypass CORS OPTIONS requests", "the key into the cache. 
\"\"\" # if type(request.user) is not AnonymousUser: #", "# self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope", "get_cache_key(self, request, view): return self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request) } def", "BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract = True def get_cache_key(self, request, view):", "OPTIONS requests if request.method == \"OPTIONS\": return True if self.rate is None: return", "the current request's timestamp along with the key into the cache. \"\"\" #", "\"OPTIONS\": return True if self.rate is None: return True self.key = self.get_cache_key(request, view)", "request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts'", "True if self.rate is None: return True self.key = self.get_cache_key(request, view) if self.key", "self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope =", "len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the", "# if type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history,", "self.history = self.cache.get(self.key, []) self.now = self.timer() while len(self.history) and self.history[-1] <= self.now", "timestamp along with the key into the cache. \"\"\" # if type(request.user) is", "self.rate is None: return True self.key = self.get_cache_key(request, view) if self.key is None:", "be throttled. On success calls `throttle_success`. 
On failure calls `throttle_failure`. \"\"\" # Bypass", "current request's timestamp along with the key into the cache. \"\"\" # if", "return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope = 'contact' class", "request): \"\"\" Inserts the current request's timestamp along with the key into the", "On success calls `throttle_success`. On failure calls `throttle_failure`. \"\"\" # Bypass CORS OPTIONS", "self.scope, 'ident': self.get_ident(request) } def allow_request(self, request, view): \"\"\" Implement the check to", "the cache. \"\"\" # if type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0,", "<= self.now - self.duration: self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request)", "self.now) self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle):", "== \"OPTIONS\": return True if self.rate is None: return True self.key = self.get_cache_key(request,", "view): return self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self, request,", "self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope = 'contact'", "self.key is None: return True self.history = self.cache.get(self.key, []) self.now = self.timer() while", "`throttle_failure`. \"\"\" # Bypass CORS OPTIONS requests if request.method == \"OPTIONS\": return True", "check to see if the request should be throttled. On success calls `throttle_success`.", "request's timestamp along with the key into the cache. \"\"\" # if type(request.user)", "Inserts the current request's timestamp along with the key into the cache. 
\"\"\"", "AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle):", "return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts the current request's timestamp", "throttle_success(self, request): \"\"\" Inserts the current request's timestamp along with the key into", "Bypass CORS OPTIONS requests if request.method == \"OPTIONS\": return True if self.rate is", "class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract = True def get_cache_key(self, request,", "- self.duration: self.history.pop() if len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self,", "\"\"\" # Bypass CORS OPTIONS requests if request.method == \"OPTIONS\": return True if", "def allow_request(self, request, view): \"\"\" Implement the check to see if the request", "= True def get_cache_key(self, request, view): return self.cache_format % { 'scope': self.scope, 'ident':", "if the request should be throttled. On success calls `throttle_success`. On failure calls", "return True self.key = self.get_cache_key(request, view) if self.key is None: return True self.history", "from django.contrib.auth.models import AnonymousUser from rest_framework.throttling import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle'", "self.get_ident(request) } def allow_request(self, request, view): \"\"\" Implement the check to see if", "view) if self.key is None: return True self.history = self.cache.get(self.key, []) self.now =", "SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract = True def get_cache_key(self,", "calls `throttle_success`. On failure calls `throttle_failure`. 
\"\"\" # Bypass CORS OPTIONS requests if", "abstract = True def get_cache_key(self, request, view): return self.cache_format % { 'scope': self.scope,", "= self.get_cache_key(request, view) if self.key is None: return True self.history = self.cache.get(self.key, [])", "self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True class UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class", "view): \"\"\" Implement the check to see if the request should be throttled.", "def get_cache_key(self, request, view): return self.cache_format % { 'scope': self.scope, 'ident': self.get_ident(request) }", "'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self, request, view): \"\"\" Implement the check", "while len(self.history) and self.history[-1] <= self.now - self.duration: self.history.pop() if len(self.history) >= self.num_requests:", "# Bypass CORS OPTIONS requests if request.method == \"OPTIONS\": return True if self.rate", "from rest_framework.throttling import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract =", "\"\"\" Inserts the current request's timestamp along with the key into the cache.", "'baseThrottle' class Meta: abstract = True def get_cache_key(self, request, view): return self.cache_format %", "scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope = 'contact' class EmailValidationRateThrottle(BaseRateThrottle): scope = 'emailValidation'", "if len(self.history) >= self.num_requests: return self.throttle_failure() return self.throttle_success(request) def throttle_success(self, request): \"\"\" Inserts", "[]) self.now = self.timer() while len(self.history) and self.history[-1] <= self.now - self.duration: self.history.pop()", "return True if self.rate is None: return True self.key = self.get_cache_key(request, view) if", "request, view): \"\"\" Implement the check to see if the request should be", "if 
type(request.user) is not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration)", "is None: return True self.history = self.cache.get(self.key, []) self.now = self.timer() while len(self.history)", "self.now = self.timer() while len(self.history) and self.history[-1] <= self.now - self.duration: self.history.pop() if", "requests if request.method == \"OPTIONS\": return True if self.rate is None: return True", "failure calls `throttle_failure`. \"\"\" # Bypass CORS OPTIONS requests if request.method == \"OPTIONS\":", "Implement the check to see if the request should be throttled. On success", "rest_framework.throttling import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta: abstract = True", "class Meta: abstract = True def get_cache_key(self, request, view): return self.cache_format % {", "request should be throttled. On success calls `throttle_success`. On failure calls `throttle_failure`. \"\"\"", "the check to see if the request should be throttled. On success calls", "return True self.history = self.cache.get(self.key, []) self.now = self.timer() while len(self.history) and self.history[-1]", "UserLoginRateThrottle(BaseRateThrottle): scope = 'loginAttempts' class ContactRateThrottle(BaseRateThrottle): scope = 'contact' class EmailValidationRateThrottle(BaseRateThrottle): scope =", "not AnonymousUser: # self.history.insert(0, request.user.id) self.history.insert(0, self.now) self.cache.set(self.key, self.history, self.duration) return True class", "% { 'scope': self.scope, 'ident': self.get_ident(request) } def allow_request(self, request, view): \"\"\" Implement", "True self.key = self.get_cache_key(request, view) if self.key is None: return True self.history =", "import AnonymousUser from rest_framework.throttling import SimpleRateThrottle class BaseRateThrottle(SimpleRateThrottle): scope = 'baseThrottle' class Meta:" ]
[ "up to you to look up documentation and understand different Python modules. ##", "the angle and the missing label ## Remember, the first 60 images are", "im = crop(im, padding, 0, width-padding, height) #make new file else: im =", "pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data,", "two parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_label.pkl', 'wb')", "new file else: im = crop(im, 0, padding, width, height-padding) if width !=", "data loader ## expects the path to a pickle file.) 
## Most code", "should look something like this: # with open(path_to_file, 'rb') as f: # obj", "left, top, right, bottom): image = image.crop((left, top, right, bottom)) return image ##", "image ## We want you to clean the data, and then create a", "pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180))", "pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2)", "the data/train and data/val folders ## To correct rotated images and add missing", "out which pickle operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ##", "load_pickle_file(path_to_file): \"\"\" Loads the data from a pickle file and returns that object", "give you too much starter ## code. It'll be up to you to", "import numpy as np from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads", "pickle file.) ## Most code written in this file will be DIY. It's", "We will let you figure out which pickle operation to use with open(path_to_file,'rb')as", "(so your data folder in a3/ should look like: ) # data/ #", "look up documentation and understand different Python modules. 
## That being said, the", "Running this script should read the input images.pkl and labels.pkl and clean the", "pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270))", "## Inside the train and val folders, you will have to dump the", "your data folder in a3/ should look like: ) # data/ # train/", "be too hard, so we won't send you down any rabbit hole. #", "missing labels, you might want to prompt the terminal ## for input, so", "def resize(image, height, width): newSize = (width, height) image = image.resize(newSize) return image", "data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine and dump two", "bottom): image = image.crop((left, top, right, bottom)) return image ## We want you", "pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315))", "pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) 
pre_data.append(data_list[7].rotate(180))", "important that you get to practice ## cleaning datasets and visualising them, so", "bottom)) return image ## We want you to clean the data, and then", "dump the CLEANED images and ## labels. You can dump images/annotations in a", "so that you can input the angle and the missing label ## Remember,", "pre_data, pre_label = byhand_op(data_list, label_list) #auto process the images after 60 new_data_list =", "## Most code written in this file will be DIY. It's important that", "def load_pickle_file(path_to_file): \"\"\" Loads the data from a pickle file and returns that", "pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3)", "# train/ # val/ ## Inside the train and val folders, you will", "It'll be up to you to look up documentation and understand different Python", "dataObject.append(im) return dataObject # function to clean the data by hand def byhand_op(data_list,", "pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if __name__ == \"__main__\": ## Running this", "our data loader ## expects the path to a pickle file.) 
## Most", "pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90))", "images are rotated, and might contain missing labels. #clean the data for first", "60 images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label =", "angle and the missing label ## Remember, the first 60 images are rotated,", "new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine and dump two parts", "dump two parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_label.pkl',", "pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3)", "train and val folders, you will have to dump the CLEANED images and", "we purposely won't give you too much starter ## code. 
It'll be up", "!= size: im = resize(im, size, size) dataObject.append(im) return dataObject # function to", "the train and val folders, you will have to dump the CLEANED images", "new_data ## You should define functions to resize, rotate and crop images ##", "the task shouldn't be too hard, so we won't send you down any", "= pickle.... ## We will let you figure out which pickle operation to", "im in my_file: width, height = im.size if width != height: padding =", "## code. It'll be up to you to look up documentation and understand", "pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52])", "pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if __name__", "contain missing labels. #clean the data for first 60 images by hand data_list", "written in this file will be DIY. 
It's important that you get to", "dataObject = [] for im in my_file: width, height = im.size if width", "width, height-padding) if width != size: im = resize(im, size, size) dataObject.append(im) return", "pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90))", "down any rabbit hole. # function to clean the data automatically def auto_op(my_file,", "# function to clean the data by hand def byhand_op(data_list, label_list): pre_data =", "<gh_stars>0 import pickle import numpy as np from PIL import Image, ExifTags,ImageOps def", "and then create a train and val folder inside ## the data folder", "pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0)", "as np from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data", "!= height: padding = abs(width-height)/2 if width>height: im = crop(im, padding, 0, width-padding,", "pre_label.append(label_list[59]) return pre_data, pre_label if __name__ == \"__main__\": ## Running this script should", "pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) 
pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90))", "returns that object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The code should look", "perform these operations either on numpy arrays ## or on PIL images (read", "val folders, you will have to dump the CLEANED images and ## labels.", "It's important that you get to practice ## cleaning datasets and visualising them,", "to prompt the terminal ## for input, so that you can input the", "object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The code should look something like", "numpy arrays ## or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height,", "pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9)", "return image ## We want you to clean the data, and then create", "in a pickle file (because our data loader ## expects the path to", "\"__main__\": ## Running this script should read the input images.pkl and labels.pkl and", "rotated, and might contain missing labels. 
#clean the data for first 60 images", "missing label ## Remember, the first 60 images are rotated, and might contain", "label ## Remember, the first 60 images are rotated, and might contain missing", "= data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine and dump", "size: im = resize(im, size, size) dataObject.append(im) return dataObject # function to clean", "pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90))", "code should look something like this: # with open(path_to_file, 'rb') as f: #", "modules. ## That being said, the task shouldn't be too hard, so we", "datasets and visualising them, so we purposely won't give you too much starter", "being said, the task shouldn't be too hard, so we won't send you", "__name__ == \"__main__\": ## Running this script should read the input images.pkl and", "either on numpy arrays ## or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def", "byhand_op(data_list, label_list): pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180))", "pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) 
pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33])", "data ## and store cleaned data into the data/train and data/val folders ##", "DIY. It's important that you get to practice ## cleaning datasets and visualising", "and ## labels. You can dump images/annotations in a pickle file (because our", "to clean the data automatically def auto_op(my_file, size): dataObject = [] for im", "arrays ## or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width):", "so we purposely won't give you too much starter ## code. It'll be", "too much starter ## code. It'll be up to you to look up", "store cleaned data into the data/train and data/val folders ## To correct rotated", "in my_file: width, height = im.size if width != height: padding = abs(width-height)/2", "pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315))", "to clean the data, and then create a train and val folder inside", "crop(im, padding, 0, width-padding, height) #make new file else: im = crop(im, 0,", "pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54])", "the CLEANED images and ## labels. 
You can dump images/annotations in a pickle", "im = resize(im, size, size) dataObject.append(im) return dataObject # function to clean the", "pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180))", "look like: ) # data/ # train/ # val/ ## Inside the train", "pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180))", "\"\"\" Loads the data from a pickle file and returns that object \"\"\"", "pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315))", "should read the input images.pkl and labels.pkl and clean the data ## and", "pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) 
pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50])", "rabbit hole. # function to clean the data automatically def auto_op(my_file, size): dataObject", "pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8)", "images after 60 new_data_list = data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28)", "def auto_op(my_file, size): dataObject = [] for im in my_file: width, height =", "data automatically def auto_op(my_file, size): dataObject = [] for im in my_file: width,", "= load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto process the images after 60", "pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45])", "Remember, the first 60 images are rotated, and might contain missing labels. #clean", "should define functions to resize, rotate and crop images ## below. 
You can", "= byhand_op(data_list, label_list) #auto process the images after 60 new_data_list = data_list[60:] new_label_list", "newSize = (width, height) image = image.resize(newSize) return image def crop(image, left, top,", "## expects the path to a pickle file.) ## Most code written in", "PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data from a pickle", "define functions to resize, rotate and crop images ## below. You can perform", "train/ # val/ ## Inside the train and val folders, you will have", "labels.pkl and clean the data ## and store cleaned data into the data/train", "def byhand_op(data_list, label_list): pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0)", "clean the data by hand def byhand_op(data_list, label_list): pre_data = [] pre_label =", "def crop(image, left, top, right, bottom): image = image.crop((left, top, right, bottom)) return", "you will have to dump the CLEANED images and ## labels. 
You can", "labels, you might want to prompt the terminal ## for input, so that", "pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16])", "then create a train and val folder inside ## the data folder (so", "and visualising them, so we purposely won't give you too much starter ##", "pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if __name__ == \"__main__\": ##", "the data automatically def auto_op(my_file, size): dataObject = [] for im in my_file:", "labels. You can dump images/annotations in a pickle file (because our data loader", "pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3)", "and returns that object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The code should", "pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5)", "You can dump images/annotations in a pickle file (because our data loader ##", "and 
val folders, you will have to dump the CLEANED images and ##", "you to look up documentation and understand different Python modules. ## That being", "the data from a pickle file and returns that object \"\"\" ## Look", "Look up: https://docs.python.org/3/library/pickle.html ## The code should look something like this: # with", "images and ## labels. You can dump images/annotations in a pickle file (because", "to a pickle file.) ## Most code written in this file will be", "data/ # train/ # val/ ## Inside the train and val folders, you", "the first 60 images are rotated, and might contain missing labels. #clean the", "a train and val folder inside ## the data folder (so your data", "and might contain missing labels. #clean the data for first 60 images by", "and dump two parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label =", "= image.resize(newSize) return image def crop(image, left, top, right, bottom): image = image.crop((left,", "pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135))", "pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49])", "look something like this: # with open(path_to_file, 'rb') as f: # obj =", 
"pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56])", "width != size: im = resize(im, size, size) dataObject.append(im) return dataObject # function", "to dump the CLEANED images and ## labels. You can dump images/annotations in", "size) dataObject.append(im) return dataObject # function to clean the data by hand def", "image = image.resize(newSize) return image def crop(image, left, top, right, bottom): image =", "to resize, rotate and crop images ## below. You can perform these operations", "the missing label ## Remember, the first 60 images are rotated, and might", "height) #make new file else: im = crop(im, 0, padding, width, height-padding) if", "= resize(im, size, size) dataObject.append(im) return dataObject # function to clean the data", "resize, rotate and crop images ## below. You can perform these operations either", "You should define functions to resize, rotate and crop images ## below. You", "and clean the data ## and store cleaned data into the data/train and", "= label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine and dump two parts into", "will have to dump the CLEANED images and ## labels. 
You can dump", "pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270))", "Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data from a pickle file and", "pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180))", "into the data/train and data/val folders ## To correct rotated images and add", "pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180))", "input, so that you can input the angle and the missing label ##", "resize(im, size, size) dataObject.append(im) return dataObject # function to clean the data by", "ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data from a pickle file and returns", "with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You should define functions to resize,", "pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270)) 
pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180))", "## Running this script should read the input images.pkl and labels.pkl and clean", "pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315))", "pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10])", "pickle file (because our data loader ## expects the path to a pickle", "# with open(path_to_file, 'rb') as f: # obj = pickle.... 
## We will", "data folder in a3/ should look like: ) # data/ # train/ #", "like: ) # data/ # train/ # val/ ## Inside the train and", "pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180))", "and store cleaned data into the data/train and data/val folders ## To correct", "crop(im, 0, padding, width, height-padding) if width != size: im = resize(im, size,", "= [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180))", "pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315))", "pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180))", 
"as f: # obj = pickle.... ## We will let you figure out", "operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You should define", "pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31])", "crop(image, left, top, right, bottom): image = image.crop((left, top, right, bottom)) return image", "this script should read the input images.pkl and labels.pkl and clean the data", "will be DIY. It's important that you get to practice ## cleaning datasets", "https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize = (width, height) image = image.resize(newSize) return", "# val/ ## Inside the train and val folders, you will have to", "images/annotations in a pickle file (because our data loader ## expects the path", "that you get to practice ## cleaning datasets and visualising them, so we", "== \"__main__\": ## Running this script should read the input images.pkl and labels.pkl", "## and store cleaned data into the data/train and data/val folders ## To", "purposely won't give you too much starter ## code. 
It'll be up to", "for im in my_file: width, height = im.size if width != height: padding", "= im.size if width != height: padding = abs(width-height)/2 if width>height: im =", "if width != height: padding = abs(width-height)/2 if width>height: im = crop(im, padding,", "pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0)", "to you to look up documentation and understand different Python modules. ## That", "## The code should look something like this: # with open(path_to_file, 'rb') as", "## cleaning datasets and visualising them, so we purposely won't give you too", "hole. # function to clean the data automatically def auto_op(my_file, size): dataObject =", "pickle operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You should", "images.pkl and labels.pkl and clean the data ## and store cleaned data into", "import pickle import numpy as np from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file):", "width, height = im.size if width != height: padding = abs(width-height)/2 if width>height:", "height-padding) if width != size: im = resize(im, size, size) dataObject.append(im) return dataObject", "on numpy arrays ## or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image,", "by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list)", "this: # with open(path_to_file, 'rb') as 
f: # obj = pickle.... ## We", "pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90))", "might contain missing labels. #clean the data for first 60 images by hand", "images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize = (width, height) image", "top, right, bottom)) return image ## We want you to clean the data,", "pickle.... ## We will let you figure out which pickle operation to use", "= crop(im, 0, padding, width, height-padding) if width != size: im = resize(im,", "(width, height) image = image.resize(newSize) return image def crop(image, left, top, right, bottom):", "= [] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3)", "data/val folders ## To correct rotated images and add missing labels, you might", "pickle file and returns that object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The", "hand def byhand_op(data_list, label_list): pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90))", "shouldn't be too hard, so we won't send you down any rabbit hole.", "you can input the angle and the missing label ## Remember, the first", "obj = pickle.... ## We will let you figure out which pickle operation", "# obj = pickle.... 
## We will let you figure out which pickle", "pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5)", "60 images are rotated, and might contain missing labels. #clean the data for", "pickle import numpy as np from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\"", "## labels. You can dump images/annotations in a pickle file (because our data", "pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32])", "pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270))", "height, width): newSize = (width, height) image = image.resize(newSize) return image def crop(image,", "val folder inside ## the data folder (so your data folder in a3/", "pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) 
pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90))", "size): dataObject = [] for im in my_file: width, height = im.size if", "pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315))", "file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_label.pkl', 'wb') pickle.dump(pre_data+new_data_list,new_data) pickle.dump(pre_label+new_label_list,new_label) new_data.close() new_label.close()", "width>height: im = crop(im, padding, 0, width-padding, height) #make new file else: im", "or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize =", "pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if __name__ == \"__main__\": ## Running", "hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto", "pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) 
pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if", "parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_label.pkl', 'wb') pickle.dump(pre_data+new_data_list,new_data)", "pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315))", "pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9)", "Loads the data from a pickle file and returns that object \"\"\" ##", "right, bottom)) return image ## We want you to clean the data, and", "corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_label.pkl', 'wb') pickle.dump(pre_data+new_data_list,new_data) pickle.dump(pre_label+new_label_list,new_label) new_data.close()", "use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You should define functions 
to", "might want to prompt the terminal ## for input, so that you can", "pre_label if __name__ == \"__main__\": ## Running this script should read the input", "folders, you will have to dump the CLEANED images and ## labels. You", "CLEANED images and ## labels. You can dump images/annotations in a pickle file", "folder (so your data folder in a3/ should look like: ) # data/", "below. You can perform these operations either on numpy arrays ## or on", "pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315))", "pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180))", "f: # obj = pickle.... 
## We will let you figure out which", "the data, and then create a train and val folder inside ## the", "## To correct rotated images and add missing labels, you might want to", "label_list) #auto process the images after 60 new_data_list = data_list[60:] new_label_list = label_list[60:]", "clean the data automatically def auto_op(my_file, size): dataObject = [] for im in", "clean the data, and then create a train and val folder inside ##", "pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58])", "= auto_op(new_data_list, 28) # combine and dump two parts into corresponding file new_data", "width != height: padding = abs(width-height)/2 if width>height: im = crop(im, padding, 0,", "pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315))", "## Remember, the first 60 images are rotated, and might contain missing labels.", "pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) 
pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19])", "abs(width-height)/2 if width>height: im = crop(im, padding, 0, width-padding, height) #make new file", "practice ## cleaning datasets and visualising them, so we purposely won't give you", "pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7)", "https://docs.python.org/3/library/pickle.html ## The code should look something like this: # with open(path_to_file, 'rb')", "pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315))", "them, so we purposely won't give you too much starter ## code. 
It'll", "pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8)", "terminal ## for input, so that you can input the angle and the", "size, size) dataObject.append(im) return dataObject # function to clean the data by hand", "that object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The code should look something", "code written in this file will be DIY. It's important that you get", "file will be DIY. It's important that you get to practice ## cleaning", "label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto process the images after", "up: https://docs.python.org/3/library/pickle.html ## The code should look something like this: # with open(path_to_file,", "auto_op(new_data_list, 28) # combine and dump two parts into corresponding file new_data =", "To correct rotated images and add missing labels, you might want to prompt", "will let you figure out which pickle operation to use with open(path_to_file,'rb')as f:", "a3/ should look like: ) # data/ # train/ # val/ ## Inside", "something like this: # with open(path_to_file, 'rb') as f: # obj = pickle....", "pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0)", "[] 
pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2)", "pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90))", "width-padding, height) #make new file else: im = crop(im, 0, padding, width, height-padding)", "a pickle file (because our data loader ## expects the path to a", "pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90))", "task shouldn't be too hard, so we won't send you down any rabbit", "pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270))", "add missing 
labels, you might want to prompt the terminal ## for input,", "combine and dump two parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label", "to practice ## cleaning datasets and visualising them, so we purposely won't give", "pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label", "pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9)", "# data/ # train/ # val/ ## Inside the train and val folders,", "want to prompt the terminal ## for input, so that you can input", "pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1)", "pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) 
pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30])", "can perform these operations either on numpy arrays ## or on PIL images", "the data by hand def byhand_op(data_list, label_list): pre_data = [] pre_label = []", "pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9)", "get to practice ## cleaning datasets and visualising them, so we purposely won't", "padding = abs(width-height)/2 if width>height: im = crop(im, padding, 0, width-padding, height) #make", "create a train and val folder inside ## the data folder (so your", "#clean the data for first 60 images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list", "if __name__ == \"__main__\": ## Running this script should read the input images.pkl", "val/ ## Inside the train and val folders, you will have to dump", "byhand_op(data_list, label_list) #auto process the images after 60 new_data_list = data_list[60:] new_label_list =", "these operations either on numpy arrays ## or on PIL images (read docs:", "pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315))", "pre_data.append(data_list[51].rotate(90)) pre_label.append(3) 
pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180))", "into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb') new_label = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_label.pkl', 'wb') pickle.dump(pre_data+new_data_list,new_data) pickle.dump(pre_label+new_label_list,new_label)", "# function to clean the data automatically def auto_op(my_file, size): dataObject = []", "operations either on numpy arrays ## or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html)", "the data ## and store cleaned data into the data/train and data/val folders", "pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315))", "like this: # with open(path_to_file, 'rb') as f: # obj = pickle.... 
##", "pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0)", "pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180))", "height: padding = abs(width-height)/2 if width>height: im = crop(im, padding, 0, width-padding, height)", "## Look up: https://docs.python.org/3/library/pickle.html ## The code should look something like this: #", "numpy as np from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the", "## below. You can perform these operations either on numpy arrays ## or", "## We want you to clean the data, and then create a train", "train and val folder inside ## the data folder (so your data folder", "= crop(im, padding, 0, width-padding, height) #make new file else: im = crop(im,", "said, the task shouldn't be too hard, so we won't send you down", "read the input images.pkl and labels.pkl and clean the data ## and store", "Python modules. ## That being said, the task shouldn't be too hard, so", "file.) ## Most code written in this file will be DIY. 
It's important", "pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2)", "## We will let you figure out which pickle operation to use with", "28) # combine and dump two parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl',", "load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto process the images", "function to clean the data automatically def auto_op(my_file, size): dataObject = [] for", "prompt the terminal ## for input, so that you can input the angle", "pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5)", "data folder (so your data folder in a3/ should look like: ) #", "pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7)", "you 
down any rabbit hole. # function to clean the data automatically def", "label_list): pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2])", "pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315))", "you might want to prompt the terminal ## for input, so that you", "and the missing label ## Remember, the first 60 images are rotated, and", "pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315))", "height = im.size if width != height: padding = abs(width-height)/2 if width>height: im", "pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90))", "## the data folder (so your data folder in a3/ should look like:", "a pickle file and returns that object 
\"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ##", "pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7)", "return dataObject # function to clean the data by hand def byhand_op(data_list, label_list):", "pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34])", "pre_data, pre_label if __name__ == \"__main__\": ## Running this script should read the", "for first 60 images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data,", "we won't send you down any rabbit hole. 
# function to clean the", "pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315))", "pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9)", "after 60 new_data_list = data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28) #", "clean the data ## and store cleaned data into the data/train and data/val", "= (width, height) image = image.resize(newSize) return image def crop(image, left, top, right,", "return new_data ## You should define functions to resize, rotate and crop images", "input the angle and the missing label ## Remember, the first 60 images", "pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180))", "auto_op(my_file, size): dataObject = [] for im in my_file: width, height = im.size", "#auto process the images after 60 new_data_list = 
data_list[60:] new_label_list = label_list[60:] new_data_list", "file and returns that object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The code", "return image def crop(image, left, top, right, bottom): image = image.crop((left, top, right,", "the data folder (so your data folder in a3/ should look like: )", "data/train and data/val folders ## To correct rotated images and add missing labels,", "pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12])", "pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90))", "too hard, so we won't send you down any rabbit hole. 
# function", "first 60 images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label", "file else: im = crop(im, 0, padding, width, height-padding) if width != size:", "pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180))", "rotated images and add missing labels, you might want to prompt the terminal", "resize(image, height, width): newSize = (width, height) image = image.resize(newSize) return image def", "figure out which pickle operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data", "pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90))", "in this file will be DIY. 
It's important that you get to practice", "pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90)) pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315))", "won't give you too much starter ## code. It'll be up to you", "label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine and dump two parts into corresponding", "padding, 0, width-padding, height) #make new file else: im = crop(im, 0, padding,", "the path to a pickle file.) ## Most code written in this file", "pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270))", "visualising them, so we purposely won't give you too much starter ## code.", "up documentation and understand different Python modules. 
## That being said, the task", "should look like: ) # data/ # train/ # val/ ## Inside the", "pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3)", "images and add missing labels, you might want to prompt the terminal ##", "loader ## expects the path to a pickle file.) ## Most code written", "if width != size: im = resize(im, size, size) dataObject.append(im) return dataObject #", "you to clean the data, and then create a train and val folder", "be DIY. It's important that you get to practice ## cleaning datasets and", "automatically def auto_op(my_file, size): dataObject = [] for im in my_file: width, height", "that you can input the angle and the missing label ## Remember, the", "input images.pkl and labels.pkl and clean the data ## and store cleaned data", "much starter ## code. 
It'll be up to you to look up documentation", "pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if __name__ == \"__main__\": ## Running this script", ") # data/ # train/ # val/ ## Inside the train and val", "(read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize = (width, height) image =", "docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize = (width, height) image = image.resize(newSize)", "folders ## To correct rotated images and add missing labels, you might want", "open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You should define functions to resize, rotate", "script should read the input images.pkl and labels.pkl and clean the data ##", "any rabbit hole. # function to clean the data automatically def auto_op(my_file, size):", "np from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data from", "right, bottom): image = image.crop((left, top, right, bottom)) return image ## We want", "[] for im in my_file: width, height = im.size if width != height:", "data by hand def byhand_op(data_list, label_list): pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270))", "folder in a3/ should look like: ) # data/ # train/ # val/", "## for input, so that you can input the angle and the missing", "won't send you down any rabbit hole. 
# function to clean the data", "pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8)", "pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270)) pre_label.append(5) pre_data.append(data_list[44].rotate(270)) pre_label.append(9) pre_data.append(data_list[45].rotate(180)) pre_label.append(label_list[45]) pre_data.append(data_list[46].rotate(180)) pre_label.append(7) pre_data.append(data_list[47].rotate(270)) pre_label.append(label_list[47])", "pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90))", "want you to clean the data, and then create a train and val", "= image.crop((left, top, right, bottom)) return image ## We want you to clean", "'rb') as f: # obj = pickle.... 
## We will let you figure", "We want you to clean the data, and then create a train and", "pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90))", "can input the angle and the missing label ## Remember, the first 60", "im.size if width != height: padding = abs(width-height)/2 if width>height: im = crop(im,", "f: new_data=pickle.load(f) return new_data ## You should define functions to resize, rotate and", "rotate and crop images ## below. You can perform these operations either on", "padding, width, height-padding) if width != size: im = resize(im, size, size) dataObject.append(im)", "new_data_list = data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine and", "understand different Python modules. ## That being said, the task shouldn't be too", "new_data_list = auto_op(new_data_list, 28) # combine and dump two parts into corresponding file", "this file will be DIY. It's important that you get to practice ##", "labels. #clean the data for first 60 images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl')", "(because our data loader ## expects the path to a pickle file.) 
##", "data from a pickle file and returns that object \"\"\" ## Look up:", "PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize = (width, height)", "## That being said, the task shouldn't be too hard, so we won't", "and data/val folders ## To correct rotated images and add missing labels, you", "pre_label = byhand_op(data_list, label_list) #auto process the images after 60 new_data_list = data_list[60:]", "to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You should define functions", "let you figure out which pickle operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f)", "You can perform these operations either on numpy arrays ## or on PIL", "if width>height: im = crop(im, padding, 0, width-padding, height) #make new file else:", "pre_label.append(label_list[31]) pre_data.append(data_list[32].rotate(315)) pre_label.append(label_list[32]) pre_data.append(data_list[33].rotate(315)) pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2)", "expects the path to a pickle file.) ## Most code written in this", "have to dump the CLEANED images and ## labels. You can dump images/annotations", "0, padding, width, height-padding) if width != size: im = resize(im, size, size)", "open(path_to_file, 'rb') as f: # obj = pickle.... ## We will let you", "\"\"\" ## Look up: https://docs.python.org/3/library/pickle.html ## The code should look something like this:", "from a pickle file and returns that object \"\"\" ## Look up: https://docs.python.org/3/library/pickle.html", "with open(path_to_file, 'rb') as f: # obj = pickle.... 
## We will let", "which pickle operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return new_data ## You", "top, right, bottom): image = image.crop((left, top, right, bottom)) return image ## We", "inside ## the data folder (so your data folder in a3/ should look", "starter ## code. It'll be up to you to look up documentation and", "pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return pre_data, pre_label if __name__ == \"__main__\":", "return pre_data, pre_label if __name__ == \"__main__\": ## Running this script should read", "= load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto process the", "pre_label.append(label_list[33]) pre_data.append(data_list[34].rotate(90)) pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6)", "images ## below. You can perform these operations either on numpy arrays ##", "the terminal ## for input, so that you can input the angle and", "image.resize(newSize) return image def crop(image, left, top, right, bottom): image = image.crop((left, top,", "so we won't send you down any rabbit hole. 
# function to clean", "pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7)", "by hand def byhand_op(data_list, label_list): pre_data = [] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0])", "new_data=pickle.load(f) return new_data ## You should define functions to resize, rotate and crop", "dump images/annotations in a pickle file (because our data loader ## expects the", "pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180)) pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7)", "the input images.pkl and labels.pkl and clean the data ## and store cleaned", "and val folder inside ## the data folder (so your data folder in", "image = image.crop((left, top, right, bottom)) return image ## We want you to", "pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90))", "the data for first 60 images by hand data_list = 
load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list =", "be up to you to look up documentation and understand different Python modules.", "pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7]) pre_data.append(data_list[8].rotate(315)) pre_label.append(5) pre_data.append(data_list[9].rotate(315)) pre_label.append(5) pre_data.append(data_list[10].rotate(315)) pre_label.append(label_list[10]) pre_data.append(data_list[11].rotate(90)) pre_label.append(9) pre_data.append(data_list[12].rotate(180))", "pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90))", "cleaning datasets and visualising them, so we purposely won't give you too much", "missing labels. #clean the data for first 60 images by hand data_list =", "load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto process the images after 60 new_data_list", "pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90))", "first 60 images are rotated, and might contain missing labels. 
#clean the data", "different Python modules. ## That being said, the task shouldn't be too hard,", "to clean the data by hand def byhand_op(data_list, label_list): pre_data = [] pre_label", "pre_label.append(2) pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59])", "on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize = (width,", "import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data from a pickle file", "you too much starter ## code. It'll be up to you to look", "# combine and dump two parts into corresponding file new_data = open('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/cleaned_data.pkl', 'wb')", "## You should define functions to resize, rotate and crop images ## below.", "for input, so that you can input the angle and the missing label", "you figure out which pickle operation to use with open(path_to_file,'rb')as f: new_data=pickle.load(f) return", "and understand different Python modules. ## That being said, the task shouldn't be", "pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1) pre_data.append(data_list[22].rotate(180)) pre_label.append(4)", "a pickle file.) 
## Most code written in this file will be DIY.", "functions to resize, rotate and crop images ## below. You can perform these", "dataObject # function to clean the data by hand def byhand_op(data_list, label_list): pre_data", "im = crop(im, 0, padding, width, height-padding) if width != size: im =", "folder inside ## the data folder (so your data folder in a3/ should", "cleaned data into the data/train and data/val folders ## To correct rotated images", "= abs(width-height)/2 if width>height: im = crop(im, padding, 0, width-padding, height) #make new", "60 new_data_list = data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list, 28) # combine", "= [] for im in my_file: width, height = im.size if width !=", "you get to practice ## cleaning datasets and visualising them, so we purposely", "height) image = image.resize(newSize) return image def crop(image, left, top, right, bottom): image", "[] pre_label = [] pre_data.append(data_list[0].rotate(270)) pre_label.append(label_list[0]) pre_data.append(data_list[1].rotate(90)) pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4])", "code. It'll be up to you to look up documentation and understand different", "hard, so we won't send you down any rabbit hole. # function to", "pre_label.append(4) pre_data.append(data_list[23].rotate(135)) pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4)", "process the images after 60 new_data_list = data_list[60:] new_label_list = label_list[60:] new_data_list =", "path to a pickle file.) 
## Most code written in this file will", "pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2) pre_data.append(data_list[19].rotate(90)) pre_label.append(label_list[19]) pre_data.append(data_list[20].rotate(315)) pre_label.append(3) pre_data.append(data_list[21].rotate(315)) pre_label.append(1)", "pre_label.append(8) pre_data.append(data_list[24].rotate(90)) pre_label.append(2) pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2)", "pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40]) pre_data.append(data_list[41].rotate(180)) pre_label.append(7) pre_data.append(data_list[42].rotate(90)) pre_label.append(9) pre_data.append(data_list[43].rotate(270))", "my_file: width, height = im.size if width != height: padding = abs(width-height)/2 if", "and labels.pkl and clean the data ## and store cleaned data into the", "images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list,", "pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return 
pre_data, pre_label if __name__ ==", "else: im = crop(im, 0, padding, width, height-padding) if width != size: im", "function to clean the data by hand def byhand_op(data_list, label_list): pre_data = []", "Inside the train and val folders, you will have to dump the CLEANED", "The code should look something like this: # with open(path_to_file, 'rb') as f:", "file (because our data loader ## expects the path to a pickle file.)", "#make new file else: im = crop(im, 0, padding, width, height-padding) if width", "can dump images/annotations in a pickle file (because our data loader ## expects", "send you down any rabbit hole. # function to clean the data automatically", "to look up documentation and understand different Python modules. ## That being said,", "the images after 60 new_data_list = data_list[60:] new_label_list = label_list[60:] new_data_list = auto_op(new_data_list,", "crop images ## below. You can perform these operations either on numpy arrays", "from PIL import Image, ExifTags,ImageOps def load_pickle_file(path_to_file): \"\"\" Loads the data from a", "## or on PIL images (read docs: https://pillow.readthedocs.io/en/stable/reference/Image.html) def resize(image, height, width): newSize", "0, width-padding, height) #make new file else: im = crop(im, 0, padding, width,", "data for first 60 images by hand data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl')", "are rotated, and might contain missing labels. 
#clean the data for first 60", "data, and then create a train and val folder inside ## the data", "pre_label.append(0) pre_data.append(data_list[2].rotate(180)) pre_label.append(label_list[2]) pre_data.append(data_list[3].rotate(180)) pre_label.append(3) pre_data.append(data_list[4]) pre_label.append(3) pre_data.append(data_list[5].rotate(180)) pre_label.append(2) pre_data.append(data_list[6].rotate(315)) pre_label.append(7) pre_data.append(data_list[7].rotate(180)) pre_label.append(label_list[7])", "data_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/images.pkl') label_list = load_pickle_file('/Users/wendyyyy/Cornell/CDS/IntSys-Education-master/a3/data/data/labels.pkl') pre_data, pre_label = byhand_op(data_list, label_list) #auto process", "correct rotated images and add missing labels, you might want to prompt the", "in a3/ should look like: ) # data/ # train/ # val/ ##", "pre_label.append(label_list[12]) pre_data.append(data_list[13].rotate(90)) pre_label.append(7) pre_data.append(data_list[14].rotate(180)) pre_label.append(7) pre_data.append(data_list[15].rotate(315)) pre_label.append(9) pre_data.append(data_list[16].rotate(315)) pre_label.append(label_list[16]) pre_data.append(data_list[17].rotate(315)) pre_label.append(0) pre_data.append(data_list[18].rotate(90)) pre_label.append(2)", "documentation and understand different Python modules. 
## That being said, the task shouldn't", "image def crop(image, left, top, right, bottom): image = image.crop((left, top, right, bottom))", "pre_label.append(label_list[34]) pre_data.append(data_list[35].rotate(270)) pre_label.append(8) pre_data.append(data_list[36].rotate(315)) pre_label.append(9) pre_data.append(data_list[37].rotate(180)) pre_label.append(2) pre_data.append(data_list[38].rotate(270)) pre_label.append(1) pre_data.append(data_list[39].rotate(270)) pre_label.append(6) pre_data.append(data_list[40].rotate(315)) pre_label.append(label_list[40])", "width): newSize = (width, height) image = image.resize(newSize) return image def crop(image, left,", "and crop images ## below. You can perform these operations either on numpy", "Most code written in this file will be DIY. It's important that you", "and add missing labels, you might want to prompt the terminal ## for", "pre_label.append(label_list[47]) pre_data.append(data_list[48].rotate(180)) pre_label.append(0) pre_data.append(data_list[49].rotate(315)) pre_label.append(label_list[49]) pre_data.append(data_list[50].rotate(90)) pre_label.append(label_list[50]) pre_data.append(data_list[51].rotate(90)) pre_label.append(3) pre_data.append(data_list[52].rotate(180)) pre_label.append(label_list[52]) pre_data.append(data_list[53].rotate(180)) pre_label.append(2)", "That being said, the task shouldn't be too hard, so we won't send", "image.crop((left, top, right, bottom)) return image ## We want you to clean the", "pre_data.append(data_list[25].rotate(90)) pre_label.append(3) pre_data.append(data_list[26].rotate(90)) pre_label.append(0) pre_data.append(data_list[27].rotate(180)) pre_label.append(2) pre_data.append(data_list[28].rotate(315)) pre_label.append(4) pre_data.append(data_list[29].rotate(90)) pre_label.append(2) pre_data.append(data_list[30].rotate(90)) pre_label.append(label_list[30]) pre_data.append(data_list[31].rotate(90))", "data into the data/train and data/val folders ## To correct rotated 
images and", "pre_data.append(data_list[54].rotate(180)) pre_label.append(label_list[54]) pre_data.append(data_list[55].rotate(90)) pre_label.append(0) pre_data.append(data_list[56].rotate(315)) pre_label.append(label_list[56]) pre_data.append(data_list[57].rotate(180)) pre_label.append(8) pre_data.append(data_list[58].rotate(90)) pre_label.append(label_list[58]) pre_data.append(data_list[59].rotate(90)) pre_label.append(label_list[59]) return" ]
[ "from unittest import TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s:", "def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car', False) def test_is_anagram_easy_false_repeats(self): self._base_test_is_anagram('aa',", "t: str, expected: bool): sol = Solution() actual = sol.is_anagram(s, t) assert expected", "TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str,", "(expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car', False) def", "unittest import TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str,", "expected: bool): sol = Solution() actual = sol.is_anagram(s, t) assert expected == actual,", "= sol.is_anagram(s, t) assert expected == actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram',", "assert expected == actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self):", "sol.is_anagram(s, t) assert expected == actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True)", "s: str, t: str, expected: bool): sol = Solution() actual = sol.is_anagram(s, t)", "kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str, expected: bool):", "TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str, expected: bool): sol = Solution() actual", "_base_test_is_anagram(self, s: str, t: str, expected: bool): sol = Solution() actual = sol.is_anagram(s,", "import 
TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t:", "https://leetcode.com/explore/challenge/card/february-leetcoding-challenge-2021/585/week-2-february-8th-february-14th/3636/ \"\"\" from unittest import TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def", "\"\"\" from unittest import TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self,", "actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car', False)", "self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car', False) def test_is_anagram_easy_false_repeats(self): self._base_test_is_anagram('aa', 'a', False)", "from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str, expected:", "import Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str, expected: bool): sol", "test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car', False) def test_is_anagram_easy_false_repeats(self): self._base_test_is_anagram('aa', 'a',", "str, expected: bool): sol = Solution() actual = sol.is_anagram(s, t) assert expected ==", "actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car', False) def test_is_anagram_easy_false_repeats(self):", "Solution class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str, expected: bool): sol =", "== actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 
'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat', 'car',", "<reponame>kalyons11/kevin<filename>kevin/tests/leet/test_is_anagram.py \"\"\" https://leetcode.com/explore/challenge/card/february-leetcoding-challenge-2021/585/week-2-february-8th-february-14th/3636/ \"\"\" from unittest import TestCase from kevin.leet.is_anagram import Solution class", "expected == actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def test_is_anagram_easy_false(self): self._base_test_is_anagram('rat',", "\"\"\" https://leetcode.com/explore/challenge/card/february-leetcoding-challenge-2021/585/week-2-february-8th-february-14th/3636/ \"\"\" from unittest import TestCase from kevin.leet.is_anagram import Solution class TestIsAnagram(TestCase):", "Solution() actual = sol.is_anagram(s, t) assert expected == actual, (expected, actual) def test_is_anagram_easy(self):", "t) assert expected == actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram', 'nagaram', True) def", "def _base_test_is_anagram(self, s: str, t: str, expected: bool): sol = Solution() actual =", "actual = sol.is_anagram(s, t) assert expected == actual, (expected, actual) def test_is_anagram_easy(self): self._base_test_is_anagram('anagram',", "= Solution() actual = sol.is_anagram(s, t) assert expected == actual, (expected, actual) def", "bool): sol = Solution() actual = sol.is_anagram(s, t) assert expected == actual, (expected,", "str, t: str, expected: bool): sol = Solution() actual = sol.is_anagram(s, t) assert", "sol = Solution() actual = sol.is_anagram(s, t) assert expected == actual, (expected, actual)", "class TestIsAnagram(TestCase): def _base_test_is_anagram(self, s: str, t: str, expected: bool): sol = Solution()" ]
[]
[ "# Generated by Django 2.2.12 on 2020-06-16 13:14 from django.db import migrations, models", "on 2020-06-16 13:14 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "[ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status',", "'서버 리소스 다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource', name='task', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_resources',", "Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='task', name='name',", "'0001_initial'), ] operations = [ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True),", "operations = [ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField(", "시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource', name='task',", "2.2.12 on 2020-06-16 13:14 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ] operations = [ migrations.RemoveField(", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ] operations", "Django 2.2.12 on 2020-06-16 13:14 from django.db import migrations, models import django.db.models.deletion class", "('memes', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='task', 
name='name', ), migrations.AddField( model_name='task', name='result_url',", "] operations = [ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ),", "model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버", "model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버", "field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0,", "= [ ('memes', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField(", "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ] operations = [", "migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20,", "리소스 다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource', name='task', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_resources', to='memes.Task'),", "(0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource', name='task', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_resources', to='memes.Task'), ), ]", "(20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')], default=10), ),", "migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', 
field=models.IntegerField(choices=[(10,", "class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='task',", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'),", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ] operations =", "by Django 2.2.12 on 2020-06-16 13:14 from django.db import migrations, models import django.db.models.deletion", "= [ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task',", "'서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField(", "name='name', ), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업", "name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업", "), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30,", "대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')], default=10),", "dependencies = [ ('memes', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='task', name='name', ),", "다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource', name='task', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_resources', to='memes.Task'), ),", "name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'),", "작업 시작'), (30, '서버 리소스 
다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource',", "[ ('memes', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='task', name='name', ), migrations.AddField( model_name='task',", "(30, '서버 리소스 다운로드'), (0, '작업 실패')], default=10), ), migrations.AlterField( model_name='taskresource', name='task', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "13:14 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "'서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업", "작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스 다운로드'), (0, '작업 실패')],", "model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버 리소스", "2020-06-16 13:14 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "), migrations.AddField( model_name='task', name='result_url', field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'),", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes', '0001_initial'), ]", "field=models.URLField(null=True), ), migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'),", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('memes',", "migrations.AlterField( model_name='task', name='status', field=models.IntegerField(choices=[(10, '서버 작업 대기중'), (20, '서버 작업 시작'), (30, '서버", "Generated by Django 2.2.12 on 2020-06-16 13:14 from django.db import migrations, models import" ]
[ "from controller.controller import Controller from repository.repository import Repository with open(\"database.txt\", \"r\") as f:", "from tester.tester import Tester from ui.application import Application from controller.controller import Controller from", "import Repository with open(\"database.txt\", \"r\") as f: t = Tester() repo = Repository()", "import Application from controller.controller import Controller from repository.repository import Repository with open(\"database.txt\", \"r\")", "import Tester from ui.application import Application from controller.controller import Controller from repository.repository import", "controller.controller import Controller from repository.repository import Repository with open(\"database.txt\", \"r\") as f: t", "<gh_stars>0 from tester.tester import Tester from ui.application import Application from controller.controller import Controller", "open(\"database.txt\", \"r\") as f: t = Tester() repo = Repository() controller = Controller(repo,", "\"r\") as f: t = Tester() repo = Repository() controller = Controller(repo, f)", "repository.repository import Repository with open(\"database.txt\", \"r\") as f: t = Tester() repo =", "Tester() repo = Repository() controller = Controller(repo, f) app = Application(controller, repo) app.run()", "tester.tester import Tester from ui.application import Application from controller.controller import Controller from repository.repository", "t = Tester() repo = Repository() controller = Controller(repo, f) app = Application(controller,", "ui.application import Application from controller.controller import Controller from repository.repository import Repository with open(\"database.txt\",", "f: t = Tester() repo = Repository() controller = Controller(repo, f) app =", "from ui.application import Application from controller.controller import Controller from repository.repository import Repository with", "from repository.repository import Repository with open(\"database.txt\", \"r\") as f: t 
= Tester() repo", "as f: t = Tester() repo = Repository() controller = Controller(repo, f) app", "Controller from repository.repository import Repository with open(\"database.txt\", \"r\") as f: t = Tester()", "= Tester() repo = Repository() controller = Controller(repo, f) app = Application(controller, repo)", "import Controller from repository.repository import Repository with open(\"database.txt\", \"r\") as f: t =", "Application from controller.controller import Controller from repository.repository import Repository with open(\"database.txt\", \"r\") as", "with open(\"database.txt\", \"r\") as f: t = Tester() repo = Repository() controller =", "Tester from ui.application import Application from controller.controller import Controller from repository.repository import Repository", "Repository with open(\"database.txt\", \"r\") as f: t = Tester() repo = Repository() controller" ]
[ "pictures_folder = \"\" class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self,", "pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path", "= self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button')", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def close_match(self): match_popup =", "self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1)", "\"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on", "Auto-swipe right def auto_swipe(self): while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try:", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "the Software without restriction, including without limitation the rights # to use, copy,", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "DEALINGS IN THE # SOFTWARE. 
from selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager", "\"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right def", "person obtaining a copy # of this software and associated documentation files (the", "# select_location_accept.click() sleep(2) # Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() #", "def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def close_match(self): match_popup", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in with Facebook", "# Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in", "full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") #", "included in all # copies or substantial portions of the Software. 
# #", "self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport mode and select city # passport_popup_button", "pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url", "is hereby granted, free of charge, to any person obtaining a copy #", "mode and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) #", "persons to whom the Software is # furnished to do so, subject to", "self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click()", "# \"latitude\": lat, # \"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def", "conditions: # # The above copyright notice and this permission notice shall be", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1]", "= False while not logged_in: try: bot.login() logged_in = True break except: bot.driver.quit()", "# 
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button')", "= False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e', 'tell application \"Chrome\" to", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "documentation files (the \"Software\"), to deal # in the Software without restriction, including", "to permit persons to whom the Software is # furnished to do so,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "'//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder try:", "while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError:", "import os from webptools import webplib as webp import urllib.request import datetime import", "back to Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button =", "to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right def auto_swipe(self): while", "of charge, to any person obtaining a copy # of this software and", "password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to", "100} 
# self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login", "path + '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder =", "SOFTWARE. from selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager from time import sleep", "auto_swipe(self): while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except", "import ChromeDriverManager from time import sleep from secrets import username, password import os", "= True break except: bot.driver.quit() bot = TinderBot() logged_in = False sleep(5) #", "so, subject to the following conditions: # # The above copyright notice and", "sleep from secrets import username, password import os from webptools import webplib as", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "passport(self, lat=40.7128, lon=-74.0060): # params = { # \"latitude\": lat, # \"longitude\": lon,", "base window to go back after facebook login window base_window = self.driver.window_handles[0] #", "credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button =", "removed!\") # Auto-swipe right def auto_swipe(self): while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f')", "= self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 =", 
"datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try:", "os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right def auto_swipe(self): while True: sleep(1) try:", "copy # of this software and associated documentation files (the \"Software\"), to deal", "from selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager from time import sleep from", "sleep(3) # Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log", "# Inputs Facebook credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]')", "to the following conditions: # # The above copyright notice and this permission", "'/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5]", "popup_2.click() sleep(5) # Agree to passport mode and select city # passport_popup_button =", "sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action def like(self):", "the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "= self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2)", "and associated documentation files (the \"Software\"), to deal # in the Software without", "webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): # params = { # \"latitude\": lat,", "# Dismiss pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 =", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "break except: bot.driver.quit() bot = TinderBot() logged_in = False sleep(5) # bot.passport() bot.auto_swipe()", "popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport mode and select city", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "window to go back after facebook login window base_window = self.driver.window_handles[0] # Selects", "= self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click()", "sublicense, and/or sell # copies of the Software, and to permit persons to", "Software is # furnished to do so, subject to the following conditions: #", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "logged_in = False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e', 'tell application \"Chrome\"", 
"webdriver_manager.chrome import ChromeDriverManager from time import sleep from secrets import username, password import", "jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right def auto_swipe(self): while True:", "# \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "self.close_popup() except Exception: self.close_match() # Close offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]')", "file removed!\") # Auto-swipe right def auto_swipe(self): while True: sleep(1) try: pic_id =", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "all # copies or substantial portions of the Software. # # THE SOFTWARE", "self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot", "close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in = False while not", "Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport mode and select", "= self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( 
'//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div')", "ChromeDriverManager from time import sleep from secrets import username, password import os from", "{ # \"latitude\": lat, # \"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params)", "webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\")", "self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() #", "not logged_in: try: bot.login() logged_in = True break except: bot.driver.quit() bot = TinderBot()", "False while not logged_in: try: bot.login() logged_in = True break except: bot.driver.quit() bot", "of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "to go back after facebook login window base_window = self.driver.window_handles[0] # Selects Facebook", "# copies of the Software, and to permit persons to whom the Software", "= self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() #", "back after facebook login window base_window = self.driver.window_handles[0] # Selects Facebook login window", "select_location_accept.click() sleep(2) # Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception:", "login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits email_input_field =", "MIT License # # Copyright (c) 2020 <NAME> # # Permission is hereby", "'.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting", "print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match() # Close offer pop-up def close_popup(self):", "permission notice shall be included in all # copies or substantial portions of", "login_button.click() sleep(1) # Log in with Facebook login_with_facebook_button = 
self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks", "webplib as webp import urllib.request import datetime import subprocess pictures_folder = \"\" class", "# def passport(self, lat=40.7128, lon=-74.0060): # params = { # \"latitude\": lat, #", "notice and this permission notice shall be included in all # copies or", "popup_3.click() # Close match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot =", "IN THE # SOFTWARE. from selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager from", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "datetime import subprocess pictures_folder = \"\" class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install())", "software and associated documentation files (the \"Software\"), to deal # in the Software", "as webp import urllib.request import datetime import subprocess pictures_folder = \"\" class TinderBot():", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click()", "\".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right", 
"self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save", "with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window to go back", "Inputs Facebook credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password)", "and to permit persons to whom the Software is # furnished to do", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "OTHER DEALINGS IN THE # SOFTWARE. 
from selenium import webdriver from webdriver_manager.chrome import", "pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath(", "= webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): # params = { # \"latitude\":", "the following conditions: # # The above copyright notice and this permission notice", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "# furnished to do so, subject to the following conditions: # # The", "the Software, and to permit persons to whom the Software is # furnished", "bot = TinderBot() logged_in = False while not logged_in: try: bot.login() logged_in =", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in with Facebook login_with_facebook_button", "def download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url =", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "except Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match() # Close offer pop-up", "params = { # \"latitude\": lat, # \"longitude\": lon, # \"accuracy\": 100} #", "= self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window to go back after facebook login", "merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to", "webp import urllib.request import datetime import 
subprocess pictures_folder = \"\" class TinderBot(): def", "to Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span')", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "Save picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except:", "# Auto-swipe right def auto_swipe(self): while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id)", "IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match() # Close", "try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div')", "= self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in = False while not logged_in: try:", "to do so, subject to the following conditions: # # The above copyright", "# Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def", "dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') 
dislike_btn.click() # Save picture def get_pic_path(self): try: pic_path =", "import webdriver from webdriver_manager.chrome import ChromeDriverManager from time import sleep from secrets import", "= pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path)", "main window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() #", "sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except", "class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): #", "whom the Software is # furnished to do so, subject to the following", "popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport mode", "get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath(", "urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager from time import sleep from secrets", "Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') 
login_with_facebook_button.click() # Marks base window to go back after", "pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in = False", "Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in with", "free of charge, to any person obtaining a copy # of this software", "OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from selenium import", "lat, # \"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window()", "# passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like", "PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' + file_name +", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software,", "Agree to passport mode and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click()", "self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch", "\"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3)", "False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e', 'tell application \"Chrome\" to quit'])", "login_with_facebook_button.click() # Marks base window to go back after facebook login window base_window", "TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): # params", "Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications", "without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense,", "= 
self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to Tinder", "is # furnished to do so, subject to the following conditions: # #", "password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to Tinder main window", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath(", "decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP", "from webptools import webplib as webp import urllib.request import datetime import subprocess pictures_folder", "= \"\" class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128,", "# bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e', 'tell application \"Chrome\" to quit']) # os._exit(0)", "to deal # in the Software without restriction, including without limitation the rights", "to any person obtaining a copy # of this software and associated documentation", "accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click()", "in all # copies or substantial portions of the Software. 
# # THE", "# Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport mode and", "print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right def auto_swipe(self): while True: sleep(1)", "# Log in with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window", "email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8)", "import urllib.request import datetime import subprocess pictures_folder = \"\" class TinderBot(): def __init__(self):", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "# Agree to passport mode and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') #", "= webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "go back after facebook login window base_window = self.driver.window_handles[0] # Selects Facebook login", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "webdriver from webdriver_manager.chrome import ChromeDriverManager from time import sleep from secrets import username,", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "THE # SOFTWARE. from selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager from time", "from secrets import username, password import os from webptools import webplib as webp", "# select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action def like(self): like_btn", "Software, and to permit persons to whom the Software is # furnished to", "# Close match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot()", "Copyright (c) 2020 <NAME> # # Permission is hereby granted, free of charge,", "Close offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up", "interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field", "Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits email_input_field", "sleep(8) # Switch back to Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups #", "this software and associated documentation files (the \"Software\"), to deal # in the", "= { # \"latitude\": lat, # \"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\",", "after facebook login window base_window = self.driver.window_handles[0] # Selects Facebook login window for", "email_input_field.send_keys(username) password_input_field = 
self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back", "pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def close_match(self):", "username, password import os from webptools import webplib as webp import urllib.request import", "= bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url =", "Marks base window to go back after facebook login window base_window = self.driver.window_handles[0]", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "facebook_login_button.click() sleep(8) # Switch back to Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups", "granted, free of charge, to any person obtaining a copy # of this", "in with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window to go", "except: bot.driver.quit() bot = TinderBot() logged_in = False sleep(5) # bot.passport() bot.auto_swipe() #", "match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in =", "= self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) 
# Log in with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click()", "furnished to do so, subject to the following conditions: # # The above", "and this permission notice shall be included in all # copies or substantial", "modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "logged_in: try: bot.login() logged_in = True break except: bot.driver.quit() bot = TinderBot() logged_in", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "lon=-74.0060): # params = { # \"latitude\": lat, # \"longitude\": lon, # \"accuracy\":", "# Permission is hereby granted, free of charge, to any person obtaining a", "login window base_window = self.driver.window_handles[0] # Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1])", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def get_pic_path(self): try: pic_path", "self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field =", "logged_in = False while not logged_in: try: bot.login() logged_in = True break except:", "# Switch back to Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies", "publish, distribute, sublicense, and/or sell # copies of the Software, and to permit", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "login_button = 
self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]')", "time import sleep from secrets import username, password import os from webptools import", "\"\" class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060):", "'//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return", "pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self,", "self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception:", "popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a')", "USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from selenium import webdriver from", "self.driver.window_handles[0] # Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials", "self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1", "License # # Copyright (c) 2020 <NAME> # # Permission is hereby granted,", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "without restriction, including without limitation the rights # to use, copy, modify, merge,", "subprocess pictures_folder = \"\" class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def", "pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath(", "like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button')", "print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr'])", "<filename>tinder_scraper.py # MIT License # # Copyright (c) 2020 <NAME> # # Permission", "in the Software without restriction, including without limitation the rights # to use,", "import datetime import subprocess pictures_folder = \"\" class TinderBot(): def __init__(self): self.driver =", "login_with_facebook_button = 
self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window to go back after facebook", "while not logged_in: try: bot.login() logged_in = True break except: bot.driver.quit() bot =", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location", "copies of the Software, and to permit persons to whom the Software is", "= self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to", "= self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path =", "Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def get_pic_path(self):", "except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/'", "self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in = False while not logged_in: try: bot.login()", "action def like(self): like_btn = 
self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self): dislike_btn", "from time import sleep from secrets import username, password import os from webptools", "<NAME> # # Permission is hereby granted, free of charge, to any person", "+ file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] +", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "secrets import username, password import os from webptools import webplib as webp import", "city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button')", "picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path", "webptools import webplib as webp import urllib.request import datetime import subprocess pictures_folder =", "= pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path", "OR OTHER DEALINGS IN THE # SOFTWARE. 
from selenium import webdriver from webdriver_manager.chrome", "notice shall be included in all # copies or substantial portions of the", "match_popup.click() bot = TinderBot() logged_in = False while not logged_in: try: bot.login() logged_in", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from selenium import webdriver", "# # Copyright (c) 2020 <NAME> # # Permission is hereby granted, free", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "urllib.request import datetime import subprocess pictures_folder = \"\" class TinderBot(): def __init__(self): self.driver", "facebook login window base_window = self.driver.window_handles[0] # Selects Facebook login window for interaction", "Switch back to Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button", "shall be included in all # copies or substantial portions of the Software.", "self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) #", "def auto_swipe(self): while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike()", "Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits", "The above copyright notice and this permission notice shall be included in all", "and/or sell # copies of the Software, and to permit persons to whom", "except: pic_path = self.driver.find_element_by_xpath( 
'//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name):", "pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' +", "Close match pop-up def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in", "+ \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe", "self.close_match() # Close offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close", "dislike_btn.click() # Save picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url =", "sleep(2) # Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action", "# Save picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1]", "path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path", 
"pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path +", "# in the Software without restriction, including without limitation the rights # to", "def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): # params =", "match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in = False while not logged_in:", "Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match() # Close offer pop-up def", "# sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action def", "def close_match(self): match_popup = self.driver.find_element_by_xpath('//*[@id=\"modal-manager-canvas\"]/div/div/div[1]/div/div[3]/a') match_popup.click() bot = TinderBot() logged_in = False while", "self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login button login_button", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "= pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from selenium", "pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url,", "+ '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path,", "accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): # params = { #", "any person obtaining a copy # of this software and associated documentation files", "window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]')", "= TinderBot() logged_in = False while not logged_in: try: bot.login() logged_in = True", "# # The above copyright notice and this permission notice shall be included", "and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]')", "to passport mode and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() #", "sleep(1) # Log in with Facebook login_with_facebook_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') 
login_with_facebook_button.click() # Marks base", "bot.login() logged_in = True break except: bot.driver.quit() bot = TinderBot() logged_in = False", "self.driver.get('https://tinder.com') sleep(3) # Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) #", "\"Software\"), to deal # in the Software without restriction, including without limitation the", "return pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div')", "like_btn.click() # Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture", "sleep(5) # Agree to passport mode and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a')", "print(\"WebP file removed!\") # Auto-swipe right def auto_swipe(self): while True: sleep(1) try: pic_id", "lat=40.7128, lon=-74.0060): # params = { # \"latitude\": lat, # \"longitude\": lon, #", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "a copy # of this software and associated documentation files (the \"Software\"), to", "deal # in the Software without restriction, including without limitation the rights #", "# Close offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match", "pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe", "Exception: self.close_match() # Close offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() #", "= TinderBot() logged_in = False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e', 'tell", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "# Marks base window to go back after facebook login window base_window =", "(the \"Software\"), to deal # in the Software without restriction, including without limitation", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "bot.driver.quit() bot = TinderBot() logged_in = False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript',", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to Tinder main window self.driver.switch_to.window(base_window)", "pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo", "charge, to any person obtaining a copy # of this software and associated", 
"'//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path", "'//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' + file_name + '.webp'", "exception...') try: self.close_popup() except Exception: self.close_match() # Close offer pop-up def close_popup(self): popup_3", "self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window to go back after facebook login window", "passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action", "for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and submits email_input_field = self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username)", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO", "try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup() except", "= pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path =", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "True break except: bot.driver.quit() bot = TinderBot() logged_in = False sleep(5) # bot.passport()", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path)", "to whom the Software is # furnished to do so, subject to the", "limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or", "Dismiss pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span')", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "be included in all # copies or substantial portions of the Software. 
#", "os from webptools import webplib as webp import urllib.request import datetime import subprocess", "pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path =", "TinderBot() logged_in = False while not logged_in: try: bot.login() logged_in = True break", "button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click() sleep(1) # Log in with Facebook login_with_facebook_button =", "# \"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com')", "import webplib as webp import urllib.request import datetime import subprocess pictures_folder = \"\"", "2020 <NAME> # # Permission is hereby granted, free of charge, to any", "(c) 2020 <NAME> # # Permission is hereby granted, free of charge, to", "# Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook credentials and", "True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike()", "copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED", "and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept", "pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url", "portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "do so, subject to the following conditions: # # The above copyright notice", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to Tinder main", "lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) #", "# Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def", "self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder", "permit persons to whom the Software is # furnished to do so, subject", "# Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') 
popup_2.click() sleep(5)", "Permission is hereby granted, free of charge, to any person obtaining a copy", "params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login button login_button =", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "Software without restriction, including without limitation the rights # to use, copy, modify,", "passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click()", "self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match()", "self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url", "import username, password import os from webptools import webplib as webp import urllib.request", "# The above copyright notice and this permission notice shall be included in", "# of this software and associated documentation files (the \"Software\"), to deal #", "window base_window = self.driver.window_handles[0] # Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) #", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "= datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: 
self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe exception...')", "above copyright notice and this permission notice shall be included in all #", "from webdriver_manager.chrome import ChromeDriverManager from time import sleep from secrets import username, password", "download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1]", "sell # copies of the Software, and to permit persons to whom the", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "print(pic_id) try: self.download_tinder_jpeg(pic_id) self.dislike() except IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup()", "TinderBot() logged_in = False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e', 'tell application", "pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url", "= pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path = pictures_folder try: pic_path =", "restriction, including without limitation the rights # to use, copy, modify, merge, publish,", "base_window = self.driver.window_handles[0] # Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs", "def passport(self, lat=40.7128, 
lon=-74.0060): # params = { # \"latitude\": lat, # \"longitude\":", "logged_in = True break except: bot.driver.quit() bot = TinderBot() logged_in = False sleep(5)", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "import subprocess pictures_folder = \"\" class TinderBot(): def __init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) #", "= self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport mode and select city #", "# # Permission is hereby granted, free of charge, to any person obtaining", "Like action def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self):", "= self.driver.window_handles[0] # Selects Facebook login window for interaction self.driver.switch_to.window(self.driver.window_handles[1]) # Inputs Facebook", "# Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() #", "file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\",", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "this permission notice shall be included in all # copies or substantial portions", "pic_path = self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] return pic_url def download_tinder_jpeg(self, file_name): path", "sleep(5) # bot.passport() bot.auto_swipe() # 
subprocess.call(['osascript', '-e', 'tell application \"Chrome\" to quit']) #", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "# copies or substantial portions of the Software. # # THE SOFTWARE IS", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree", "files (the \"Software\"), to deal # in the Software without restriction, including without", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "= self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to Tinder main window self.driver.switch_to.window(base_window) #", "def like(self): like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self): dislike_btn =", "try: bot.login() logged_in = True break except: bot.driver.quit() bot = TinderBot() logged_in =", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) #", "# MIT License # # Copyright (c) 2020 <NAME> # # Permission is", "bot = TinderBot() logged_in = False sleep(5) # bot.passport() bot.auto_swipe() # subprocess.call(['osascript', '-e',", "# 
Copyright (c) 2020 <NAME> # # Permission is hereby granted, free of", "# SOFTWARE. from selenium import webdriver from webdriver_manager.chrome import ChromeDriverManager from time import", "following conditions: # # The above copyright notice and this permission notice shall", "# self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login button", "of the Software, and to permit persons to whom the Software is #", "right def auto_swipe(self): while True: sleep(1) try: pic_id = datetime.datetime.now().strftime('%d%H%M%S%f') print(pic_id) try: self.download_tinder_jpeg(pic_id)", "import sleep from secrets import username, password import os from webptools import webplib", "except IndexError: self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match() #", "login(self): self.driver.maximize_window() self.driver.get('https://tinder.com') sleep(3) # Click on login button login_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/header/div[1]/div[2]/div/button') login_button.click()", "= self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button = self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) #", "offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click() # Close match pop-up def", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "+ '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\")", 
"\"latitude\": lat, # \"longitude\": lon, # \"accuracy\": 100} # self.driver.execute_cdp_cmd(\"Page.setGeolocationOverride\", params) def login(self):", "action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click() # Save picture def get_pic_path(self): try:", "self.dislike() except Exception: print('Auto_swipe exception...') try: self.close_popup() except Exception: self.close_match() # Close offer", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "__init__(self): self.driver = webdriver.Chrome(ChromeDriverManager().install()) # def passport(self, lat=40.7128, lon=-74.0060): # params = {", "def get_pic_path(self): try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except: pic_path =", "including without limitation the rights # to use, copy, modify, merge, publish, distribute,", "except Exception: self.close_match() # Close offer pop-up def close_popup(self): popup_3 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[2]/button[2]') popup_3.click()", "# passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') #", "select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5) # select_location_accept =", "Tinder main window self.driver.switch_to.window(base_window) # Dismiss pop-ups # Cookies accept_cookies_button = 
self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click()", "print(\"Converting to jpeg...\") print(decoder['stderr']) os.remove(full_path) print(\"WebP file removed!\") # Auto-swipe right def auto_swipe(self):", "passport mode and select city # passport_popup_button = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div[1]/a') # passport_popup_button.click() # sleep(5)", "copyright notice and this permission notice shall be included in all # copies", "try: self.close_popup() except Exception: self.close_match() # Close offer pop-up def close_popup(self): popup_3 =", "self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]')", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "like_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[4]/button') like_btn.click() # Dislike action def dislike(self): dislike_btn = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[2]/div[2]/button') dislike_btn.click()", "pop-ups # Cookies accept_cookies_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[2]/div/div/div[1]/button/span') accept_cookies_button.click() # Location popup_1 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click()", "associated documentation files (the \"Software\"), to deal # in the Software without restriction,", "Log in with Facebook login_with_facebook_button = 
self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/span/div[2]/button/span[2]') login_with_facebook_button.click() # Marks base window to", "hereby granted, free of charge, to any person obtaining a copy # of", "of this software and associated documentation files (the \"Software\"), to deal # in", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[1]/span') popup_1.click() # Notifications popup_2 = self.driver.find_element_by_xpath('//*[@id=\"modal-manager\"]/div/div/div/div/div[3]/button[2]') popup_2.click() sleep(5) # Agree to passport", "full_path) print('Photo downloaded...') decoder = webp.dwebp(full_path, full_path[:-5] + \".jpg\", \"-o\") print(\"Converting to jpeg...\")", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from", "full_path = path + '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...')", "= self.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/div/div[1]/div/div') pic_url = pic_path.get_attribute('style')[0].split('\"')[1] full_path = path + '/' + file_name", "self.driver.find_element_by_xpath('//*[@id=\"loginbutton\"]') facebook_login_button.click() sleep(8) # Switch back to Tinder main window self.driver.switch_to.window(base_window) # Dismiss", "file_name): path = pictures_folder try: pic_path = bot.driver.find_element_by_xpath( '//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[1]/div/div[1]/div[3]/div[1]/div[1]/span[1]/div') pic_url = pic_path.get_attribute('style').split('\"')[1] except:", "= path + '/' + file_name + '.webp' urllib.request.urlretrieve(pic_url, full_path) print('Photo downloaded...') decoder", "Facebook credentials and submits email_input_field = 
self.driver.find_element_by_xpath('//*[@id=\"email\"]') email_input_field.send_keys(username) password_input_field = self.driver.find_element_by_xpath('//*[@id=\"pass\"]') password_input_field.send_keys(password) facebook_login_button", "select_location_accept = self.driver.find_element_by_xpath('//*[@id=\"content\"]/div/div[1]/div/main/div[1]/div/div/div[2]/button') # select_location_accept.click() sleep(2) # Like action def like(self): like_btn =", "# params = { # \"latitude\": lat, # \"longitude\": lon, # \"accuracy\": 100}", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "password import os from webptools import webplib as webp import urllib.request import datetime", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of", "the Software is # furnished to do so, subject to the following conditions:", "subject to the following conditions: # # The above copyright notice and this" ]
[ "# Generated by Django 2.1.7 on 2019-05-29 12:46 from django.db import migrations, models", "2019-05-29 12:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('custom_models',", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ]", "by Django 2.1.7 on 2019-05-29 12:46 from django.db import migrations, models class Migration(migrations.Migration):", "[ ('custom_models', '0003_auto_20190528_1156'), ] operations = [ migrations.AlterField( model_name='itemtext', name='text_detail', field=models.CharField(max_length=3000), ), ]", "Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations = [ migrations.AlterField( model_name='itemtext', name='text_detail',", "Generated by Django 2.1.7 on 2019-05-29 12:46 from django.db import migrations, models class", "12:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'),", "on 2019-05-29 12:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "Django 2.1.7 on 2019-05-29 12:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations =", "= [ ('custom_models', '0003_auto_20190528_1156'), ] operations = [ migrations.AlterField( model_name='itemtext', name='text_detail', field=models.CharField(max_length=3000), ),", "models class Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations = [ migrations.AlterField(", "<reponame>code-for-canada/django-nginx-reactjs-docker # Generated by Django 2.1.7 on 2019-05-29 12:46 from django.db import migrations,", "class 
Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations = [ migrations.AlterField( model_name='itemtext',", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations", "migrations, models class Migration(migrations.Migration): dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations = [", "dependencies = [ ('custom_models', '0003_auto_20190528_1156'), ] operations = [ migrations.AlterField( model_name='itemtext', name='text_detail', field=models.CharField(max_length=3000),", "2.1.7 on 2019-05-29 12:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies =" ]
[ "* from .manifest import * from .tally import * from .tally_decrypt import *", "import * from .ballot import * from .base import * from .decrypt import", "* from .base import * from .decrypt import * from .encrypt import *", "import * from .guardian import * from .key_ceremony import * from .key_guardian import", ".key_ceremony import * from .key_guardian import * from .manifest import * from .tally", ".auth import * from .ballot import * from .base import * from .decrypt", "from .key_guardian import * from .manifest import * from .tally import * from", "import * from .key_ceremony import * from .key_guardian import * from .manifest import", ".manifest import * from .tally import * from .tally_decrypt import * from .user", "from .auth import * from .ballot import * from .base import * from", "from .decrypt import * from .encrypt import * from .election import * from", ".encrypt import * from .election import * from .guardian import * from .key_ceremony", "from .manifest import * from .tally import * from .tally_decrypt import * from", "* from .key_guardian import * from .manifest import * from .tally import *", "import * from .election import * from .guardian import * from .key_ceremony import", "from .base import * from .decrypt import * from .encrypt import * from", ".key_guardian import * from .manifest import * from .tally import * from .tally_decrypt", "import * from .decrypt import * from .encrypt import * from .election import", "import * from .manifest import * from .tally import * from .tally_decrypt import", "* from .guardian import * from .key_ceremony import * from .key_guardian import *", ".base import * from .decrypt import * from .encrypt import * from .election", "from .ballot import * from .base import * from .decrypt import * from", "* from .encrypt import * from .election import * from .guardian import *", "from .encrypt import * from .election import * from .guardian import * from", "* from .decrypt import * from .encrypt import * from 
.election import *", "import * from .tally import * from .tally_decrypt import * from .user import", "* from .ballot import * from .base import * from .decrypt import *", ".election import * from .guardian import * from .key_ceremony import * from .key_guardian", "from .key_ceremony import * from .key_guardian import * from .manifest import * from", "import * from .key_guardian import * from .manifest import * from .tally import", ".guardian import * from .key_ceremony import * from .key_guardian import * from .manifest", ".ballot import * from .base import * from .decrypt import * from .encrypt", "* from .tally import * from .tally_decrypt import * from .user import *", ".decrypt import * from .encrypt import * from .election import * from .guardian", "import * from .encrypt import * from .election import * from .guardian import", "* from .election import * from .guardian import * from .key_ceremony import *", "from .election import * from .guardian import * from .key_ceremony import * from", "* from .key_ceremony import * from .key_guardian import * from .manifest import *", "from .guardian import * from .key_ceremony import * from .key_guardian import * from", "import * from .base import * from .decrypt import * from .encrypt import" ]
[ "= argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument(", "import instance_occlsegm if __name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split',", "parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset( args.split,", "action='store_true', help='do augmentation', ) args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset( args.split, augmentation=args.augmentation )", "choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args", "split', ) parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args = parser.parse_args() data =", "import argparse from instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__': parser = argparse.ArgumentParser(", "'__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split',", "parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split', )", "help='do augmentation', ) args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset( args.split, augmentation=args.augmentation ) instance_occlsegm.datasets.view_panoptic_occlusion_segmentation_dataset(", "parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument( 
'--augmentation', action='store_true', help='do augmentation',", "formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument( '--augmentation', action='store_true',", "default='train', help='dataset split', ) parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args = parser.parse_args()", ") args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset( args.split, augmentation=args.augmentation ) instance_occlsegm.datasets.view_panoptic_occlusion_segmentation_dataset( data )", "from instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, )", "instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument(", "instance_occlsegm if __name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train',", "#!/usr/bin/env python import argparse from instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__': parser", "if __name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'],", ") parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument( '--augmentation', action='store_true', help='do", "__name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train',", "<reponame>pazeshun/jsk_apc #!/usr/bin/env python import argparse from 
instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__':", ") parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset(", "python import argparse from instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__': parser =", "augmentation', ) args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset( args.split, augmentation=args.augmentation ) instance_occlsegm.datasets.view_panoptic_occlusion_segmentation_dataset( data", "== '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset", "'--split', choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument( '--augmentation', action='store_true', help='do augmentation', )", "help='dataset split', ) parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args = parser.parse_args() data", "argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( '--split', choices=['train', 'test'], default='train', help='dataset split', ) parser.add_argument( '--augmentation',", "'test'], default='train', help='dataset split', ) parser.add_argument( '--augmentation', action='store_true', help='do augmentation', ) args =", "argparse from instance_occlsegm_lib.contrib import instance_occlsegm if __name__ == '__main__': parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter,", "'--augmentation', action='store_true', help='do augmentation', ) args = parser.parse_args() data = instance_occlsegm.datasets.PanopticOcclusionSegmentationDataset( args.split, augmentation=args.augmentation" ]
[ "(bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break the loop once", "from entities.switch import Switch from forwarding.forwarding import Forwarding class StatHistory(object): history = {}", "= self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id'])", "+= (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total == 0): return 0.0", "loop once we found a single transmission break return load / hypervisor.switch.rate def", "= current return value StatHistory.history[key] = current return current class Topology(object): def __init__(self,", "= 0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_wait()", "self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id in mapping): load", "[] self.hypervisors = [] self.external_switch = None self.stat_history = {} self.total_migrations = 0", "0 for rrh in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return", "is None): raise Exception(\"Target hypervisor not found with the given id\") for hypervisor", "target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster): load = 0 for", "0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and", "None, packet_dev = None): for rrh in self.rrhs: if (rrh.id == rrh_id): if", "for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and", "/ bbu_count def get_lifetime_wait(self): total = 0 bbu_count = 0 for hypervisor in", "def get(key, current): if (key in StatHistory.history): value = current - StatHistory.history[key] 
StatHistory.history[key]", "remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in", "get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors", "total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu in", "(stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total == 0): return 0.0 return", "hypervisor.switch.packets_rec) if (total_received == 0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def", "* rrh.packet_mean) # break the loop once we found a single transmission break", "1 return total / bbu_count def get_lifetime_delay(self): total = 0 bbu_count = 0", "= None self.stat_history = {} self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate", "def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv in self.hypervisors if hv.id", "bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count += 1 return total / bbu_count", "for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count += 1 return total /", "None, packet_mean = None, packet_dev = None): for rrh in self.rrhs: if (rrh.id", "+= 1 return total / bbu_count def get_lifetime_wait(self): total = 0 bbu_count =", "cluster.baseband_units: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break the", "return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0", 
"= hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total ==", "= 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping", "def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return 0 load = 0", "def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total", "self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id in mapping): load += (rrh.arrival_rate *", "in self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop']", "RemoteRadioHead from entities.hypervisor import Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch import Switch", "% hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec',", "def setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']:", "__init__(self, env, configuration): self.env = env self.forwarding = Forwarding(self.env, self) self.rrhs = []", "update_load(self, rrh_id, arrival_rate = None, packet_mean = None, packet_dev = None): for rrh", "+= bbu.get_current_delay() bbu_count += 1 return total / bbu_count def get_lifetime_delay(self): total =", "hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop +=", "rrh_id, arrival_rate = None, packet_mean = None, packet_dev = None): for rrh in", "not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is not None: 
rrh.set_packet_mean(packet_mean) if packet_dev is not", "is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv", "in self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target hypervisor", "setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object", "rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received = 0 for hypervisor in", "= 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None, packet_mean = None, packet_dev", "from entities.hypervisor import Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch import Switch from", "self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target hypervisor not", "= 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] +", "+= 1 return total / bbu_count def get_lifetime_delay(self): total = 0 bbu_count =", "stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env,", "+= (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received = 0", "in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object = BasebandUnit(self.env,", "stats['drop'] if (total == 0): return 0.0 return total_drop / total def get_current_utilization(self,", "self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor 
in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in", "return value StatHistory.history[key] = current return current class Topology(object): def __init__(self, env, configuration):", "return total_drop / total def get_current_drop_rate(self): total = 0 total_drop = 0 for", "bbu_count += 1 return total / bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop", "/ total def get_current_utilization(self, hypervisor): load = 0 for rrh in self.rrhs: mapping", "None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv in self.hypervisors", "in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1 return total / bbu_count def", "return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count", "self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None, packet_mean = None, packet_dev = None):", "/ self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received +=", "return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count = 0", "not found with the given id\") for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id)", "self.hypervisors = [] self.external_switch = None self.stat_history = {} self.total_migrations = 0 self.setup(configuration)", "def get_cluster_load(self, cluster): load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id)", "single transmission break return load def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id):", "load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in self.hypervisors: if", "cluster): load = 0 for rrh in 
self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu", "def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0 for rrh in", "load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self):", "1 return total / bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop = 0", "1 return total / bbu_count def get_current_delay(self): total = 0 bbu_count = 0", "self) self.rrhs = [] self.hypervisors = [] self.external_switch = None self.stat_history = {}", "def get_current_utilization(self, hypervisor): load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id)", "hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object = BasebandUnit(self.env, baseband_unit['id']) hypervisor_object.add_baseband_unit(bbu_object)", "in hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1 return total / bbu_count def", "0 load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id", "= hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total ==", "rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id in", "for bbu in cluster.baseband_units: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean)", "hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for hypervisor in", "in hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1 return total / bbu_count def", "hypervisor not found with the given id\") for hypervisor in self.hypervisors: subject_bbu =", "packet_dev = None): 
for rrh in self.rrhs: if (rrh.id == rrh_id): if arrival_rate", "total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec']", "0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count", "the loop once we found a single transmission break return load / hypervisor.switch.rate", "self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env,", "bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1 return total / bbu_count", "= [] self.hypervisors = [] self.external_switch = None self.stat_history = {} self.total_migrations =", "hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec)", "/ len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head", "current return value StatHistory.history[key] = current return current class Topology(object): def __init__(self, env,", "StatHistory(object): history = {} def get(key, current): if (key in StatHistory.history): value =", "get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0 for rrh in self.rrhs:", "mapping and bbu_y.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) return load def", "bbu_count += 1 return total / bbu_count def get_lifetime_delay(self): total = 0 bbu_count", "self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received", 
"= 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus:", "found with the given id\") for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if", "return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0 for rrh in self.rrhs: total", "import Switch from forwarding.forwarding import Forwarding class StatHistory(object): history = {} def get(key,", "once we found a single transmission break return load def get_common_load(self, bbu_x, bbu_y):", "= 0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1", "1 return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external')", "return 0.0 return total_drop / total def get_current_drop_rate(self): total = 0 total_drop =", "hv in self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target", "hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations", "hypervisor.bbus: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break the", "rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units'])", "return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', 
self.total_migrations) def", "total def get_lifetime_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec", "def get_current_delay(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for", "bbu.get_lifetime_wait() bbu_count += 1 return total / bbu_count def get_current_delay(self): total = 0", "bbu.get_lifetime_delay() bbu_count += 1 return total / bbu_count def get_lifetime_drop_rate(self): total = 0", "is not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def", "total += bbu.get_current_wait() bbu_count += 1 return total / bbu_count def get_lifetime_wait(self): total", "(bbu_x.id in mapping and bbu_y.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) return", "in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count += 1 return total / bbu_count def", "hv.id == target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target hypervisor not found with", "for hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return stopped_hypervisors", "StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0 for", "0.0 return total_drop / total def get_current_drop_rate(self): total = 0 total_drop = 0", "rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv in self.hypervisors if", "def get_current_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' %", "(key in StatHistory.history): value = current - StatHistory.history[key] StatHistory.history[key] = current return value", 
"rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object", "self.rrhs = [] self.hypervisors = [] self.external_switch = None self.stat_history = {} self.total_migrations", "self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id in mapping): load += (rrh.arrival_rate *", "in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors)", "load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in", "+= bbu.get_current_wait() bbu_count += 1 return total / bbu_count def get_lifetime_wait(self): total =", "total += bbu.get_current_delay() bbu_count += 1 return total / bbu_count def get_lifetime_delay(self): total", "= 0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_delay()", "Forwarding class StatHistory(object): history = {} def get(key, current): if (key in StatHistory.history):", "if packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv", "load def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return 0 load =", "total += bbu.get_lifetime_wait() bbu_count += 1 return total / bbu_count def get_current_delay(self): total", "total / bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop = 0 for hypervisor", "= RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) 
self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for", "total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received =", "for rrh in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total", "in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1 return", "bbu_y): if (bbu_x.id == bbu_y.id): return 0 load = 0 for rrh in", "for hv in self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor is None): raise", "rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id in", "import RemoteRadioHead from entities.hypervisor import Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch import", "total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total == 0): return", "== bbu_y.id): return 0 load = 0 for rrh in self.rrhs: mapping =", "self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for", "== target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target hypervisor not found with the", "(target_hypervisor is None): raise Exception(\"Target hypervisor not found with the given id\") for", "in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1 return", "stopped_hypervisors = 0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors +=", "import Forwarding class StatHistory(object): history = {} def get(key, current): if (key in", "StatHistory.history[key] = current return value StatHistory.history[key] = current return current class 
Topology(object): def", "class StatHistory(object): history = {} def get(key, current): if (key in StatHistory.history): value", "hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return stopped_hypervisors /", "+= 1 def get_cluster_load(self, cluster): load = 0 for rrh in self.rrhs: mapping", "Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch import Switch from forwarding.forwarding import Forwarding", "get_lifetime_delay(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu", "hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count += 1", "Forwarding(self.env, self) self.rrhs = [] self.hypervisors = [] self.external_switch = None self.stat_history =", "+= stats['drop'] if (total == 0): return 0.0 return total_drop / total def", "return 0.0 return total_drop / total def get_current_utilization(self, hypervisor): load = 0 for", "get_cluster_load(self, cluster): load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for", "hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count += 1 return total / bbu_count def get_lifetime_drop_rate(self):", "total def get_current_drop_rate(self): total = 0 total_drop = 0 for hypervisor in self.hypervisors:", "found a single transmission break return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors =", "is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is not None: rrh.set_packet_mean(packet_mean) if packet_dev is", "packet_mean = None, packet_dev = None): for rrh in self.rrhs: if (rrh.id ==", "in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1 return", "and bbu_y.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self):", "None): raise Exception(\"Target hypervisor not found with the given id\") for hypervisor in", "with the given 
id\") for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu", "for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out =", "self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if", "in self.rrhs: if (rrh.id == rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if", "if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break the loop", "get_current_load(self): total = 0 for rrh in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean", "RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor", "= self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and bbu_y.id in mapping): load += (rrh.arrival_rate", "total_drop += stats['drop'] if (total == 0): return 0.0 return total_drop / total", "in cluster.baseband_units: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break", "hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster): load =", "Exception(\"Target hypervisor not found with the given id\") for hypervisor in self.hypervisors: subject_bbu", "= Switch(self.env, 'physical', 'external') 
self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id'])", "(rrh.id == rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is not", "get_current_wait(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu", "0 total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total +=", "in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop']", "self.total_migrations) def get_current_load(self): total = 0 for rrh in self.rrhs: total += (rrh.arrival_rate", "get_current_delay(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu", "bbu_count def get_current_delay(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors:", "get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total =", "def get_current_wait(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for", "is not None: rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id,", "0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if", "packet_mean is not None: rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self,", "= None): for rrh in self.rrhs: if (rrh.id == rrh_id): if arrival_rate is", "rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object = 
Hypervisor(self.env,", "if packet_mean is not None: rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev) def", "total_drop / total def get_current_drop_rate(self): total = 0 total_drop = 0 for hypervisor", "= 0 for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec", "/ total def get_current_drop_rate(self): total = 0 total_drop = 0 for hypervisor in", "stats = hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total", "StatHistory.history): value = current - StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key] =", "transmission break return load def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return", "total += bbu.get_lifetime_delay() bbu_count += 1 return total / bbu_count def get_lifetime_drop_rate(self): total", "return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors:", "0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return", "in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object)", "self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0 for rrh", "bbu in hypervisor.bbus: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) #", "return current class Topology(object): def __init__(self, env, configuration): self.env = env self.forwarding =", 
"StatHistory.history[key] = current return current class Topology(object): def __init__(self, env, configuration): self.env =", "total = 0 for rrh in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean *", "== 0): stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch", "None: rrh.set_arrival_rate(arrival_rate) if packet_mean is not None: rrh.set_packet_mean(packet_mean) if packet_dev is not None:", "from entities.baseband_unit import BasebandUnit from entities.switch import Switch from forwarding.forwarding import Forwarding class", "entities.switch import Switch from forwarding.forwarding import Forwarding class StatHistory(object): history = {} def", "0): stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch =", "+= bbu.get_lifetime_wait() bbu_count += 1 return total / bbu_count def get_current_delay(self): total =", "env self.forwarding = Forwarding(self.env, self) self.rrhs = [] self.hypervisors = [] self.external_switch =", "return total / bbu_count def get_lifetime_wait(self): total = 0 bbu_count = 0 for", "if (total == 0): return 0.0 return total_drop / total def get_current_drop_rate(self): total", "hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total == 0):", "stats['drop']) total_drop += stats['drop'] if (total == 0): return 0.0 return total_drop /", "hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1 return total / bbu_count def get_lifetime_wait(self):", "total / bbu_count def get_lifetime_delay(self): total = 0 bbu_count = 0 for hypervisor", "and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster): load", "arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is 
not None: rrh.set_packet_mean(packet_mean) if packet_dev", "target_hypervisor = [hv for hv in self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor", "in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id in mapping):", "hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster): load = 0 for rrh", "history = {} def get(key, current): if (key in StatHistory.history): value = current", "= None, packet_dev = None): for rrh in self.rrhs: if (rrh.id == rrh_id):", "target_hypervisor_id): target_hypervisor = [hv for hv in self.hypervisors if hv.id == target_hypervisor_id][0] if", "in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received =", "hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop +=", "remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']:", "self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec'", "for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and bbu_y.id", "StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return 0.0 return total_received /", "bbu.get_current_delay() bbu_count += 1 return total / bbu_count def get_lifetime_delay(self): total = 0", "(len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration):", "[hv 
for hv in self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor is None):", "bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv in self.hypervisors if hv.id == target_hypervisor_id][0]", "rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count', self.total_migrations)", "None: rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor", "hypervisor): load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu", "we found a single transmission break return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors", "/ bbu_count def get_lifetime_delay(self): total = 0 bbu_count = 0 for hypervisor in", "bbu.get_current_wait() bbu_count += 1 return total / bbu_count def get_lifetime_wait(self): total = 0", "from forwarding.forwarding import Forwarding class StatHistory(object): history = {} def get(key, current): if", "len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received", "if (target_hypervisor is None): raise Exception(\"Target hypervisor not found with the given id\")", "= [hv for hv in self.hypervisors if hv.id == target_hypervisor_id][0] if (target_hypervisor is", "self.rrhs: if (rrh.id == rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean", "hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and hypervisor.id", "for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count +=", "0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += 
bbu.get_current_delay() bbu_count", "BasebandUnit from entities.switch import Switch from forwarding.forwarding import Forwarding class StatHistory(object): history =", "self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if", "return total / bbu_count def get_current_delay(self): total = 0 bbu_count = 0 for", "(rrh.arrival_rate * rrh.packet_mean) # break the loop once we found a single transmission", "0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count", "len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in", "= current - StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key] = current return", "total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec']", "load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in", "= {} self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None, packet_mean", "rrh.set_arrival_rate(arrival_rate) if packet_mean is not None: rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev)", "= 0 for rrh in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id)))", "= env self.forwarding = Forwarding(self.env, self) self.rrhs = [] self.hypervisors = [] self.external_switch", "total / bbu_count def get_lifetime_wait(self): total = 0 bbu_count = 0 for hypervisor", "if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors) def setup(self,", "(total == 0): return 0.0 return total_drop / total def get_current_drop_rate(self): total =", "!= 
target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster): load = 0", "configuration): self.env = env self.forwarding = Forwarding(self.env, self) self.rrhs = [] self.hypervisors =", "* len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors:", "= {} def get(key, current): if (key in StatHistory.history): value = current -", "rrh.packet_mean) # break the loop once we found a single transmission break return", "0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count", "rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor =", "if (bbu_x.id in mapping and bbu_y.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean)", "bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop = 0 for hypervisor in self.hypervisors:", "entities.baseband_unit import BasebandUnit from entities.switch import Switch from forwarding.forwarding import Forwarding class StatHistory(object):", "def get_current_drop_rate(self): total = 0 total_drop = 0 for hypervisor in self.hypervisors: stats", "hypervisor in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object =", "= 0 total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total", "raise Exception(\"Target hypervisor not found with the given id\") for hypervisor in self.hypervisors:", "packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for", "stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop += 
stats['drop'] if (total", "self.forwarding = Forwarding(self.env, self) self.rrhs = [] self.hypervisors = [] self.external_switch = None", "if (subject_bbu is not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations +=", "get_current_drop_rate(self): total = 0 total_drop = 0 for hypervisor in self.hypervisors: stats =", "None self.stat_history = {} self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate =", "0): return 0.0 return total_drop / total def get_current_utilization(self, hypervisor): load = 0", "def get_lifetime_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return", "if (rrh.id == rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is", "in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count += 1 return", "forwarding.forwarding import Forwarding class StatHistory(object): history = {} def get(key, current): if (key", "current - StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key] = current return current", "0 for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def", "0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None, packet_mean = None, packet_dev =", "'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev'])", "if arrival_rate is not None: 
rrh.set_arrival_rate(arrival_rate) if packet_mean is not None: rrh.set_packet_mean(packet_mean) if", "target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster): load = 0 for rrh in", "current return current class Topology(object): def __init__(self, env, configuration): self.env = env self.forwarding", "hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1 return total / bbu_count def get_lifetime_delay(self):", "0 total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total +=", "from entities.remote_radio_head import RemoteRadioHead from entities.hypervisor import Hypervisor from entities.baseband_unit import BasebandUnit from", "rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and bbu_y.id in", "0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop'])", "get_current_utilization(self, hypervisor): load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for", "the loop once we found a single transmission break return load def get_common_load(self,", "load += (rrh.arrival_rate * rrh.packet_mean) # break the loop once we found a", "self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and bbu_y.id in mapping): load", "+ stats['drop']) total_drop += stats['drop'] if (total == 0): return 0.0 return total_drop", "= 0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus:", "rrh in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def", "env, configuration): self.env = env self.forwarding = Forwarding(self.env, self) self.rrhs = [] self.hypervisors", "found a single transmission break return load def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id", "in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in 
mapping and bbu_y.id in mapping):", "in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return", "for bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1 return total /", "None): for rrh in self.rrhs: if (rrh.id == rrh_id): if arrival_rate is not", "a single transmission break return load def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id ==", "/ StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count = 0 for hypervisor", "== 0): return 0.0 return total_drop / total def get_current_utilization(self, hypervisor): load =", "+= 1 return total / bbu_count def get_current_delay(self): total = 0 bbu_count =", "== 0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total =", "import BasebandUnit from entities.switch import Switch from forwarding.forwarding import Forwarding class StatHistory(object): history", "load += (rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def", "self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and bbu_y.id in mapping): load += (rrh.arrival_rate *", "for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop", "hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus) ==", "self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1 return total", "remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) 
rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch", "not None: rrh.set_packet_mean(packet_mean) if packet_dev is not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id):", "in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and hypervisor.id !=", "self.total_migrations += 1 def get_cluster_load(self, cluster): load = 0 for rrh in self.rrhs:", "bbu_count += 1 return total / bbu_count def get_current_delay(self): total = 0 bbu_count", "1 def get_cluster_load(self, cluster): load = 0 for rrh in self.rrhs: mapping =", "id\") for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None", "0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count =", "+= (rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self):", "self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors:", "= 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units:", "= 0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay()", "we found a single transmission break return load def get_common_load(self, bbu_x, bbu_y): if", "for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id", "'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) 
rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean'])", "hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1", "{} self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None, packet_mean =", "return total / bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop = 0 for", "configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object = BasebandUnit(self.env, baseband_unit['id'])", "def get_lifetime_wait(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for", "not None: rrh.set_packet_dev(packet_dev) def migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv in", "total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received", "0 for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received", "= [] self.external_switch = None self.stat_history = {} self.total_migrations = 0 self.setup(configuration) def", "single transmission break return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for", "= self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id in mapping): load += (rrh.arrival_rate", "for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id", "in hypervisor.bbus: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break", "self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0", "return load 
def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return 0 load", "= self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id in mapping): load += (rrh.arrival_rate", "for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received ==", "in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id in mapping):", "def __init__(self, env, configuration): self.env = env self.forwarding = Forwarding(self.env, self) self.rrhs =", "(total_received == 0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total", "- StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key] = current return current class", "get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return 0 load = 0 for", "/ bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop = 0 for hypervisor in", "total / bbu_count def get_current_delay(self): total = 0 bbu_count = 0 for hypervisor", "bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1 return total / bbu_count", "for bbu in hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1 return total /", "current): if (key in StatHistory.history): value = current - StatHistory.history[key] StatHistory.history[key] = current", "configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object =", "get(key, current): if (key in StatHistory.history): value = current - StatHistory.history[key] StatHistory.history[key] =", "bbu_y.id): return 0 load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id)", "in mapping and bbu_y.id in 
mapping): load += (rrh.arrival_rate * rrh.packet_mean) return load", "for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count +=", "/ hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus)", "bbu_count += 1 return total / bbu_count def get_lifetime_wait(self): total = 0 bbu_count", "bbu in hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1 return total / bbu_count", "entities.remote_radio_head import RemoteRadioHead from entities.hypervisor import Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch", "break return load def get_common_load(self, bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return 0", "StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key] = current return current class Topology(object):", "# break the loop once we found a single transmission break return load", "if (total == 0): return 0.0 return total_drop / total def get_current_utilization(self, hypervisor):", "mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break the loop once we found", "total_drop / total def get_current_utilization(self, hypervisor): load = 0 for rrh in self.rrhs:", "for rrh in self.rrhs: if (rrh.id == rrh_id): if arrival_rate is not None:", "rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object =", "once we found a single transmission break return load / hypervisor.switch.rate def get_utilization_gain(self):", "self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and hypervisor.id != target_hypervisor.id):", "0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu in 
hypervisor.bbus: total", "if (key in StatHistory.history): value = current - StatHistory.history[key] StatHistory.history[key] = current return", "total = 0 total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats()", "a single transmission break return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0", "0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0", "(rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return", "return total_drop / total def get_current_utilization(self, hypervisor): load = 0 for rrh in", "def get_current_load(self): total = 0 for rrh in self.rrhs: total += (rrh.arrival_rate *", "total = 0 total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats()", "0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop'])", "[] self.external_switch = None self.stat_history = {} self.total_migrations = 0 self.setup(configuration) def update_load(self,", "hypervisor.switch.get_lifetime_stats() total += (stats['rec'] + stats['drop']) total_drop += stats['drop'] if (total == 0):", "== 0): return 0.0 return total_drop / total def get_current_drop_rate(self): total = 0", "mapping = self.forwarding.get_mapping(rrh.id) if (bbu_x.id in mapping and bbu_y.id in mapping): load +=", "(total == 0): return 0.0 return total_drop / total def get_current_utilization(self, hypervisor): load", "hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0):", "self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_delay() bbu_count 
+= 1 return total", "total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count = 0 for", "/ bbu_count def get_current_delay(self): total = 0 bbu_count = 0 for hypervisor in", "class Topology(object): def __init__(self, env, configuration): self.env = env self.forwarding = Forwarding(self.env, self)", "(subject_bbu is not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1", "loop once we found a single transmission break return load def get_common_load(self, bbu_x,", "current class Topology(object): def __init__(self, env, configuration): self.env = env self.forwarding = Forwarding(self.env,", "+= StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return 0.0 return total_received", "return total / bbu_count def get_lifetime_delay(self): total = 0 bbu_count = 0 for", "arrival_rate = None, packet_mean = None, packet_dev = None): for rrh in self.rrhs:", "= None, packet_mean = None, packet_dev = None): for rrh in self.rrhs: if", "= hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu)", "self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit", "in StatHistory.history): value = current - StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key]", "Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object = BasebandUnit(self.env, baseband_unit['id']) 
hypervisor_object.add_baseband_unit(bbu_object) hypervisor_object.switch.set_forwarding_function(self.forwarding.forwarding_function) self.hypervisors.append(hypervisor_object)", "for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1 return total /", "return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in self.hypervisors:", "= 0 total_drop = 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total", "if hv.id == target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target hypervisor not found", "get_current_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id,", "def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in self.hypervisors: if (len(hypervisor.bbus) == 0):", "def get_lifetime_drop_rate(self): total = 0 total_drop = 0 for hypervisor in self.hypervisors: stats", "StatHistory.get('migration_count', self.total_migrations) def get_current_load(self): total = 0 for rrh in self.rrhs: total +=", "= 0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait()", "return total def get_lifetime_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received +=", "if (total_received == 0): return 0.0 return total_received / StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self):", "self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return 0.0", "None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self, cluster):", "= Forwarding(self.env, self) 
self.rrhs = [] self.hypervisors = [] self.external_switch = None self.stat_history", "self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_wait() bbu_count += 1 return total", "total def get_current_utilization(self, hypervisor): load = 0 for rrh in self.rrhs: mapping =", "for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec'] + stats['drop']) total_drop", "bbu_count = 0 for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total +=", "the given id\") for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is", "for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count +=", "self.stat_history = {} self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None,", "+= (rrh.arrival_rate * rrh.packet_mean) # break the loop once we found a single", "+= 1 return total / bbu_count def get_lifetime_drop_rate(self): total = 0 total_drop =", "1 return total / bbu_count def get_lifetime_wait(self): total = 0 bbu_count = 0", "for hypervisor in configuration['hypervisors']: hypervisor_object = Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object", "bbu_count def get_lifetime_delay(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors:", "bbu in cluster.baseband_units: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) #", "(rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received = 0 for", "break return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor in", "hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received", 
"self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out", "in mapping): load += (rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost',", "value StatHistory.history[key] = current return current class Topology(object): def __init__(self, env, configuration): self.env", "break the loop once we found a single transmission break return load def", "mapping = self.forwarding.get_mapping(rrh.id) for bbu in hypervisor.bbus: if (bbu.id in mapping): load +=", "(bbu_x.id == bbu_y.id): return 0 load = 0 for rrh in self.rrhs: mapping", "return 0 load = 0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) if", "0.0 return total_drop / total def get_current_utilization(self, hypervisor): load = 0 for rrh", "self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id in mapping): load", "self.external_switch = None self.stat_history = {} self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id,", "import Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch import Switch from forwarding.forwarding import", "bbu_x, bbu_y): if (bbu_x.id == bbu_y.id): return 0 load = 0 for rrh", "for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self):", "= 0 for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if", "Topology(object): def __init__(self, 
env, configuration): self.env = env self.forwarding = Forwarding(self.env, self) self.rrhs", "entities.hypervisor import Hypervisor from entities.baseband_unit import BasebandUnit from entities.switch import Switch from forwarding.forwarding", "{} def get(key, current): if (key in StatHistory.history): value = current - StatHistory.history[key]", "mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if (bbu.id in mapping): load +=", "mapping): load += (rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost())", "return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function)", "bbu_count def get_lifetime_wait(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors:", "def get_lifetime_delay(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for", "total_received = 0 for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received /", "break the loop once we found a single transmission break return load /", "rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is not None: rrh.set_packet_mean(packet_mean)", "+= bbu.get_lifetime_delay() bbu_count += 1 return total / bbu_count def get_lifetime_drop_rate(self): total =", "0): return 0.0 return total_drop / total def get_current_drop_rate(self): total = 0 total_drop", "bbu_y.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean) return load def get_transmission_cost(self): return", "target_hypervisor_id][0] if (target_hypervisor is None): raise Exception(\"Target hypervisor not found with the given", "== rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate) if packet_mean is not None:", "hypervisor 
in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1", "stats['drop'] if (total == 0): return 0.0 return total_drop / total def get_current_drop_rate(self):", "StatHistory.get('extswitch.packets_rec', self.external_switch.packets_rec) def get_current_wait(self): total = 0 bbu_count = 0 for hypervisor in", "value = current - StatHistory.history[key] StatHistory.history[key] = current return value StatHistory.history[key] = current", "for hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_delay() bbu_count +=", "def update_load(self, rrh_id, arrival_rate = None, packet_mean = None, packet_dev = None): for", "* rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) return total def get_lifetime_replication_factor(self): total_received = 0 for hypervisor", "hypervisor.bbus: total += bbu.get_lifetime_wait() bbu_count += 1 return total / bbu_count def get_current_delay(self):", "+= 1 return stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env, 'physical',", "Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for remote_radio_head in configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate'])", "given id\") for hypervisor in self.hypervisors: subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not", "stopped_hypervisors / len(self.hypervisors) def setup(self, configuration): self.external_switch = Switch(self.env, 'physical', 'external') self.external_switch.set_forwarding_function(self.forwarding.forwarding_function) for", "hypervisor in self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1", "in self.rrhs: total += (rrh.arrival_rate * rrh.packet_mean * len(self.forwarding.get_mapping(rrh.id))) 
return total def get_lifetime_replication_factor(self):", "rrh in self.rrhs: if (rrh.id == rrh_id): if arrival_rate is not None: rrh.set_arrival_rate(arrival_rate)", "self.hypervisors: for bbu in hypervisor.bbus: total += bbu.get_current_delay() bbu_count += 1 return total", "+= hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for hypervisor", "transmission break return load / hypervisor.switch.rate def get_utilization_gain(self): stopped_hypervisors = 0 for hypervisor", "= current return current class Topology(object): def __init__(self, env, configuration): self.env = env", "if (bbu_x.id == bbu_y.id): return 0 load = 0 for rrh in self.rrhs:", "in mapping): load += (rrh.arrival_rate * rrh.packet_mean) # break the loop once we", "configuration['remote_radio_heads']: rrh_object = RemoteRadioHead(self.env, remote_radio_head['id']) rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'],", "get_lifetime_replication_factor(self): total_received = 0 for hypervisor in self.hypervisors: total_received += hypervisor.switch.packets_rec return total_received", "= Hypervisor(self.env, hypervisor['id']) for baseband_unit in hypervisor['baseband_units']: bbu_object = BasebandUnit(self.env, baseband_unit['id']) hypervisor_object.add_baseband_unit(bbu_object) hypervisor_object.switch.set_forwarding_function(self.forwarding.forwarding_function)", "total_received = 0 for hypervisor in self.hypervisors: total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec)", "for bbu in hypervisor.bbus: if (bbu.id in mapping): load += (rrh.arrival_rate * rrh.packet_mean)", "self.env = env 
self.forwarding = Forwarding(self.env, self) self.rrhs = [] self.hypervisors = []", "* rrh.packet_mean) return load def get_transmission_cost(self): return StatHistory.get('transmission_cost', self.forwarding.get_transmission_cost()) def get_migration_count(self): return StatHistory.get('migration_count',", "total_received += hypervisor.switch.packets_rec return total_received / self.external_switch.packets_rec def get_current_replication_factor(self): total_received = 0 for", "self.total_migrations = 0 self.setup(configuration) def update_load(self, rrh_id, arrival_rate = None, packet_mean = None,", "0 for rrh in self.rrhs: mapping = self.forwarding.get_mapping(rrh.id) for bbu in cluster.baseband_units: if", "get_lifetime_wait(self): total = 0 bbu_count = 0 for hypervisor in self.hypervisors: for bbu", "self.hypervisors: if (len(hypervisor.bbus) == 0): stopped_hypervisors += 1 return stopped_hypervisors / len(self.hypervisors) def", "get_lifetime_drop_rate(self): total = 0 total_drop = 0 for hypervisor in self.hypervisors: stats =", "subject_bbu = hypervisor.find_baseband_unit(bbu_id) if (subject_bbu is not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu)", "= 0 for hypervisor in self.hypervisors: stats = hypervisor.switch.get_current_stats() total += (stats['rec'] +", "migrate(self, bbu_id, target_hypervisor_id): target_hypervisor = [hv for hv in self.hypervisors if hv.id ==", "total_received += StatHistory.get('hypervisor.%d.switch.packets_rec' % hypervisor.id, hypervisor.switch.packets_rec) if (total_received == 0): return 0.0 return", "not None and hypervisor.id != target_hypervisor.id): hypervisor.remove_baseband_unit(subject_bbu) target_hypervisor.add_baseband_unit(subject_bbu) self.total_migrations += 1 def get_cluster_load(self,", "rrh_object.set_arrival_rate(remote_radio_head['arrival_rate']) rrh_object.set_packet_mean(remote_radio_head['packet_mean']) rrh_object.set_packet_dev(remote_radio_head['packet_dev']) 
rrh_object.out = self.external_switch self.rrhs.append(rrh_object) self.forwarding.add_mapping(remote_radio_head['id'], remote_radio_head['baseband_units']) for hypervisor in configuration['hypervisors']:", "Switch from forwarding.forwarding import Forwarding class StatHistory(object): history = {} def get(key, current):" ]
[ "in occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value} is already used.')", "variable_name=variable_name): variables.append(variable) values: [str] = list(map(lambda var: var.value, variables)) for unique_value in set(values):", "test_logs_logger_DEBUG_log_with_error() { // E2E:wip` \"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n') def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name: str):", "if not re.match(regex, monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})') def", "ID starting with `logs_logger_debug_log_with_error`. \"\"\" if monitor_id_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if not monitor_id_variable.value.startswith(tested_method_name):", "is `logs_logger_debug_log_with_error` tested_method_name = __remove_prefix(test_method.method_name.lower(), 'test_') __monitor_id_has_method_name_prefix( monitor=monitor, tested_method_name=tested_method_name ) __method_name_occurs_in_monitor_name_and_query( monitor=monitor, tested_method_name=tested_method_name", "1: for occurrence in occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value}", "Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})') def lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name')", "[MonitorConfiguration], variable_name: str): \"\"\" Checks if $variable_name is unique among all `monitors`. 
\"\"\"", "var: var.value == unique_value, variables)) if len(occurrences) > 1: for occurrence in occurrences:", "from src.linter import Linter, linter_context from src.test_file_parser import TestMethod, MonitorConfiguration, MonitorVariable def lint_test_methods(test_methods:", "linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value} is already used.') def __find_monitor_variable(monitor: MonitorConfiguration,", "method name ({tested_method_name})') if monitor_query_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_query'): if not re.match(regex, monitor_query_variable.value): with", "set(values): occurrences = list(filter(lambda var: var.value == unique_value, variables)) if len(occurrences) > 1:", "__find_monitor_variable(monitor: MonitorConfiguration, variable_name: str): return next((v for v in monitor.variables if v.name ==", "with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include method name ({tested_method_name})') if monitor_query_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_query'):", "test_methods: with linter_context(code_reference=test_method.code_reference): if test_method.monitors: for monitor in test_method.monitors: with linter_context(code_reference=monitor.code_reference): # `tested_method_name`", "def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str): \"\"\" Checks if $variable_name is unique among all", "start with method name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The test", "[MonitorVariable] = [] for monitor in monitors: if variable := __find_monitor_variable(monitor=monitor, variable_name=variable_name): variables.append(variable)", "= __remove_prefix(test_method.method_name.lower(), 'test_') 
__monitor_id_has_method_name_prefix( monitor=monitor, tested_method_name=tested_method_name ) __method_name_occurs_in_monitor_name_and_query( monitor=monitor, tested_method_name=tested_method_name ) elif not", "all `monitors`. \"\"\" variables: [MonitorVariable] = [] for monitor in monitors: if variable", "MonitorConfiguration, tested_method_name: str): \"\"\" $monitor_id must start with the test method name, e.g.", "name, e.g.: # for `test_logs_logger_DEBUG_log_with_error` it is `logs_logger_debug_log_with_error` tested_method_name = __remove_prefix(test_method.method_name.lower(), 'test_') __monitor_id_has_method_name_prefix(", "with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value} is already used.') def __find_monitor_variable(monitor:", "def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The test method name must occur in", "unique_value, variables)) if len(occurrences) > 1: for occurrence in occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name}", "test method name must occur in $monitor_name and $monitor_query. 
\"\"\" regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\")", "not re.match(regex, monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})') def lint_monitors(monitors:", "monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})') def lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors,", "elif not __is_excluded_from_lint(method=test_method): Linter.shared.emit_warning(f'Test method `{test_method.method_name}` defines no E2E monitors.') def __is_excluded_from_lint(method: TestMethod):", "Checks if $variable_name is unique among all `monitors`. \"\"\" variables: [MonitorVariable] = []", ":= __find_monitor_variable(monitor=monitor, variable_name=variable_name): variables.append(variable) values: [str] = list(map(lambda var: var.value, variables)) for unique_value", "\"\"\" $monitor_id must start with the test method name, e.g. method: `func test_logs_logger_DEBUG_log_with_error()", "Version 2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/). 
# Copyright", "for `test_logs_logger_DEBUG_log_with_error` it is `logs_logger_debug_log_with_error` tested_method_name = __remove_prefix(test_method.method_name.lower(), 'test_') __monitor_id_has_method_name_prefix( monitor=monitor, tested_method_name=tested_method_name )", "re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex, monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name", "with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must start with method name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name:", "at Datadog (https://www.datadoghq.com/). # Copyright 2019-2020 Datadog, Inc. # ----------------------------------------------------------- import re from", "src.linter import Linter, linter_context from src.test_file_parser import TestMethod, MonitorConfiguration, MonitorVariable def lint_test_methods(test_methods: [TestMethod]):", "2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/). 
# Copyright 2019-2020", "for test_method in test_methods: with linter_context(code_reference=test_method.code_reference): if test_method.monitors: for monitor in test_method.monitors: with", "__have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str): \"\"\" Checks if $variable_name is unique among all `monitors`.", "linter_context(code_reference=test_method.code_reference): if test_method.monitors: for monitor in test_method.monitors: with linter_context(code_reference=monitor.code_reference): # `tested_method_name` is computed", "monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex, monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include", "lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str):", "excluded its signature is suffixed by `// E2E:wip`, e.g.: ` func test_logs_logger_DEBUG_log_with_error() {", "tested_method_name: str): \"\"\" $monitor_id must start with the test method name, e.g. 
method:", "# for `test_logs_logger_DEBUG_log_with_error` it is `logs_logger_debug_log_with_error` tested_method_name = __remove_prefix(test_method.method_name.lower(), 'test_') __monitor_id_has_method_name_prefix( monitor=monitor, tested_method_name=tested_method_name", "\"\"\" regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex, monitor_name_variable.value):", "name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The test method name must", "({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The test method name must occur", "__monitor_id_has_method_name_prefix( monitor=monitor, tested_method_name=tested_method_name ) __method_name_occurs_in_monitor_name_and_query( monitor=monitor, tested_method_name=tested_method_name ) elif not __is_excluded_from_lint(method=test_method): Linter.shared.emit_warning(f'Test method", "monitors: if variable := __find_monitor_variable(monitor=monitor, variable_name=variable_name): variables.append(variable) values: [str] = list(map(lambda var: var.value,", "lint_test_methods(test_methods: [TestMethod]): for test_method in test_methods: with linter_context(code_reference=test_method.code_reference): if test_method.monitors: for monitor in", "monitor ID starting with `logs_logger_debug_log_with_error`. \"\"\" if monitor_id_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if not", "\"\"\" Checks if $variable_name is unique among all `monitors`. 
\"\"\" variables: [MonitorVariable] =", "Linter.shared.emit_error(f'$monitor_id must start with method name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\"", "Apache License Version 2.0. # This product includes software developed at Datadog (https://www.datadoghq.com/).", "test_method in test_methods: with linter_context(code_reference=test_method.code_reference): if test_method.monitors: for monitor in test_method.monitors: with linter_context(code_reference=monitor.code_reference):", "monitor.variables if v.name == variable_name), None) def __remove_prefix(s, prefix): return s[len(prefix):] if s.startswith(prefix)", "\"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n') def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" $monitor_id must start", "test method name, e.g.: # for `test_logs_logger_DEBUG_log_with_error` it is `logs_logger_debug_log_with_error` tested_method_name = __remove_prefix(test_method.method_name.lower(),", "list(filter(lambda var: var.value == unique_value, variables)) if len(occurrences) > 1: for occurrence in", "unique_value in set(values): occurrences = list(filter(lambda var: var.value == unique_value, variables)) if len(occurrences)", "include method name ({tested_method_name})') def lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query')", "occur in $monitor_name and $monitor_query. 
\"\"\" regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor,", "({tested_method_name})') if monitor_query_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_query'): if not re.match(regex, monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query", "= re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex, monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference):", "def __is_excluded_from_lint(method: TestMethod): \"\"\" Method can be excluded its signature is suffixed by", "$variable_name is unique among all `monitors`. \"\"\" variables: [MonitorVariable] = [] for monitor", "Linter, linter_context from src.test_file_parser import TestMethod, MonitorConfiguration, MonitorVariable def lint_test_methods(test_methods: [TestMethod]): for test_method", "TestMethod): \"\"\" Method can be excluded its signature is suffixed by `// E2E:wip`,", "$monitor_query. \"\"\" regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex,", "[MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str): \"\"\"", "unique among all `monitors`. 
\"\"\" variables: [MonitorVariable] = [] for monitor in monitors:", "Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value} is already used.') def __find_monitor_variable(monitor: MonitorConfiguration, variable_name:", "must include method name ({tested_method_name})') if monitor_query_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_query'): if not re.match(regex,", "with `logs_logger_debug_log_with_error`. \"\"\" if monitor_id_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference):", "str): \"\"\" The test method name must occur in $monitor_name and $monitor_query. \"\"\"", "import re from src.linter import Linter, linter_context from src.test_file_parser import TestMethod, MonitorConfiguration, MonitorVariable", ") __method_name_occurs_in_monitor_name_and_query( monitor=monitor, tested_method_name=tested_method_name ) elif not __is_excluded_from_lint(method=test_method): Linter.shared.emit_warning(f'Test method `{test_method.method_name}` defines no", "(https://www.datadoghq.com/). # Copyright 2019-2020 Datadog, Inc. # ----------------------------------------------------------- import re from src.linter import", "in this repository are licensed under the Apache License Version 2.0. # This", "import Linter, linter_context from src.test_file_parser import TestMethod, MonitorConfiguration, MonitorVariable def lint_test_methods(test_methods: [TestMethod]): for", "Inc. # ----------------------------------------------------------- import re from src.linter import Linter, linter_context from src.test_file_parser import", "This product includes software developed at Datadog (https://www.datadoghq.com/). 
def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name: str):
    """
    Checks that $monitor_id starts with the test method name, e.g.
    method `func test_logs_logger_DEBUG_log_with_error() {` must define a monitor ID
    starting with `logs_logger_debug_log_with_error`.
    """
    id_variable = __find_monitor_variable(monitor=monitor, variable_name='$monitor_id')
    if not id_variable:
        # Monitor defines no $monitor_id - nothing to check here.
        return
    if id_variable.value.startswith(tested_method_name):
        return
    with linter_context(code_reference=id_variable.code_reference):
        Linter.shared.emit_error(f'$monitor_id must start with method name ({tested_method_name})')
def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str):
    """
    Checks that the test method name occurs in both $monitor_name and $monitor_query.
    """
    # NOTE(review): the pattern requires a non-word character on BOTH sides of the
    # method name, so a value that starts or ends with the name is rejected - confirm intended.
    pattern = re.compile(rf"^.*(\W+){tested_method_name}(\W+).*$")
    name_variable = __find_monitor_variable(monitor=monitor, variable_name='$monitor_name')
    if name_variable and not pattern.match(name_variable.value):
        with linter_context(code_reference=name_variable.code_reference):
            Linter.shared.emit_warning(f'$monitor_name must include method name ({tested_method_name})')
    query_variable = __find_monitor_variable(monitor=monitor, variable_name='$monitor_query')
    if query_variable and not pattern.match(query_variable.value):
        with linter_context(code_reference=query_variable.code_reference):
            Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})')
def lint_monitors(monitors: [MonitorConfiguration]):
    """Lints all monitors, checking that their identifying variables are globally unique."""
    for variable_name in ('$monitor_id', '$monitor_name', '$monitor_query'):
        __have_unique_variable_values(monitors=monitors, variable_name=variable_name)
\"\"\" regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if", "monitor_query_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_query'): if not re.match(regex, monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include", "if not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must start with method name ({tested_method_name})') def", "values: [str] = list(map(lambda var: var.value, variables)) for unique_value in set(values): occurrences =", "if v.name == variable_name), None) def __remove_prefix(s, prefix): return s[len(prefix):] if s.startswith(prefix) else", "\"\"\" if monitor_id_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must", "linter_context from src.test_file_parser import TestMethod, MonitorConfiguration, MonitorVariable def lint_test_methods(test_methods: [TestMethod]): for test_method in", "var.value == unique_value, variables)) if len(occurrences) > 1: for occurrence in occurrences: with", "$monitor_id must start with the test method name, e.g. 
method: `func test_logs_logger_DEBUG_log_with_error() {`", "__have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str): \"\"\" Checks if $variable_name is unique", "occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value} is already used.') def", "`monitors`. \"\"\" variables: [MonitorVariable] = [] for monitor in monitors: if variable :=", "__find_monitor_variable(monitor=monitor, variable_name='$monitor_query'): if not re.match(regex, monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include method name", "must start with method name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The", "tested_method_name=tested_method_name ) __method_name_occurs_in_monitor_name_and_query( monitor=monitor, tested_method_name=tested_method_name ) elif not __is_excluded_from_lint(method=test_method): Linter.shared.emit_warning(f'Test method `{test_method.method_name}` defines", "variable_name='$monitor_query'): if not re.match(regex, monitor_query_variable.value): with linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})')", "func test_logs_logger_DEBUG_log_with_error() { // E2E:wip` \"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n') def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name:", "# `tested_method_name` is computed from test method name, e.g.: # for `test_logs_logger_DEBUG_log_with_error` it", "__is_excluded_from_lint(method=test_method): Linter.shared.emit_warning(f'Test method `{test_method.method_name}` 
defines no E2E monitors.') def __is_excluded_from_lint(method: TestMethod): \"\"\" Method", "name ({tested_method_name})') def lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors:", "__method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The test method name must occur in $monitor_name", "if len(occurrences) > 1: for occurrence in occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be", "occurrence in occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must be unique - {occurrence.value} is already", "`// E2E:wip`, e.g.: ` func test_logs_logger_DEBUG_log_with_error() { // E2E:wip` \"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n')", "linter_context(code_reference=monitor_query_variable.code_reference): Linter.shared.emit_warning(f'$monitor_query must include method name ({tested_method_name})') def lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors,", "__monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" $monitor_id must start with the test method name,", "{ // E2E:wip` \"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n') def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name: str): \"\"\"", "TestMethod, MonitorConfiguration, MonitorVariable def lint_test_methods(test_methods: [TestMethod]): for test_method in test_methods: with 
linter_context(code_reference=test_method.code_reference): if", "in test_method.monitors: with linter_context(code_reference=monitor.code_reference): # `tested_method_name` is computed from test method name, e.g.:", "monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include method name ({tested_method_name})') if monitor_query_variable := __find_monitor_variable(monitor=monitor,", "name must occur in $monitor_name and $monitor_query. \"\"\" regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable", "`{test_method.method_name}` defines no E2E monitors.') def __is_excluded_from_lint(method: TestMethod): \"\"\" Method can be excluded", "if not re.match(regex, monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include method name ({tested_method_name})') if", "define monitor ID starting with `logs_logger_debug_log_with_error`. \"\"\" if monitor_id_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if", "var: var.value, variables)) for unique_value in set(values): occurrences = list(filter(lambda var: var.value ==", "e.g. method: `func test_logs_logger_DEBUG_log_with_error() {` must define monitor ID starting with `logs_logger_debug_log_with_error`. \"\"\"", "the test method name, e.g. 
def __find_monitor_variable(monitor: MonitorConfiguration, variable_name: str):
    """Returns the monitor's variable named `variable_name`, or `None` if absent."""
    for variable in monitor.variables:
        if variable.name == variable_name:
            return variable
    return None
def __remove_prefix(s, prefix):
    """Returns `s` with a leading `prefix` stripped, or `s` unchanged if it has none."""
    if s.startswith(prefix):
        return s[len(prefix):]
    return s
method: `func test_logs_logger_DEBUG_log_with_error() {` must define monitor", "method name, e.g. method: `func test_logs_logger_DEBUG_log_with_error() {` must define monitor ID starting with", "variables)) if len(occurrences) > 1: for occurrence in occurrences: with linter_context(code_reference=occurrence.code_reference): Linter.shared.emit_error(f'{variable_name} must", "is unique among all `monitors`. \"\"\" variables: [MonitorVariable] = [] for monitor in", "by `// E2E:wip`, e.g.: ` func test_logs_logger_DEBUG_log_with_error() { // E2E:wip` \"\"\" return method.code_reference.line_text.endswith('//", "start with the test method name, e.g. method: `func test_logs_logger_DEBUG_log_with_error() {` must define", "[str] = list(map(lambda var: var.value, variables)) for unique_value in set(values): occurrences = list(filter(lambda", "E2E:wip` \"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n') def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" $monitor_id must", "__have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str): \"\"\" Checks", "\"\"\" The test method name must occur in $monitor_name and $monitor_query. 
\"\"\" regex", "Linter.shared.emit_warning(f'Test method `{test_method.method_name}` defines no E2E monitors.') def __is_excluded_from_lint(method: TestMethod): \"\"\" Method can", "variable_name='$monitor_id'): if not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must start with method name ({tested_method_name})')", "variables.append(variable) values: [str] = list(map(lambda var: var.value, variables)) for unique_value in set(values): occurrences", "list(map(lambda var: var.value, variables)) for unique_value in set(values): occurrences = list(filter(lambda var: var.value", "` func test_logs_logger_DEBUG_log_with_error() { // E2E:wip` \"\"\" return method.code_reference.line_text.endswith('// E2E:wip\\n') def __monitor_id_has_method_name_prefix(monitor: MonitorConfiguration,", "regex = re.compile(rf\"^.*(\\W+){tested_method_name}(\\W+).*$\") if monitor_name_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex, monitor_name_variable.value): with", "__find_monitor_variable(monitor=monitor, variable_name='$monitor_name'): if not re.match(regex, monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include method name", "__is_excluded_from_lint(method: TestMethod): \"\"\" Method can be excluded its signature is suffixed by `//", "otherwise all files in this repository are licensed under the Apache License Version", "__have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name: str): \"\"\" Checks if $variable_name", "return next((v for v in monitor.variables if v.name == variable_name), None) def __remove_prefix(s,", 
"in monitor.variables if v.name == variable_name), None) def __remove_prefix(s, prefix): return s[len(prefix):] if", "= [] for monitor in monitors: if variable := __find_monitor_variable(monitor=monitor, variable_name=variable_name): variables.append(variable) values:", "with linter_context(code_reference=monitor.code_reference): # `tested_method_name` is computed from test method name, e.g.: # for", "str): \"\"\" $monitor_id must start with the test method name, e.g. method: `func", "already used.') def __find_monitor_variable(monitor: MonitorConfiguration, variable_name: str): return next((v for v in monitor.variables", ":= __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must start with method", "must start with the test method name, e.g. method: `func test_logs_logger_DEBUG_log_with_error() {` must", "def lint_monitors(monitors: [MonitorConfiguration]): __have_unique_variable_values(monitors=monitors, variable_name='$monitor_id') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_name') __have_unique_variable_values(monitors=monitors, variable_name='$monitor_query') def __have_unique_variable_values(monitors: [MonitorConfiguration], variable_name:", "linter_context(code_reference=monitor.code_reference): # `tested_method_name` is computed from test method name, e.g.: # for `test_logs_logger_DEBUG_log_with_error`", "Method can be excluded its signature is suffixed by `// E2E:wip`, e.g.: `", "variable_name: str): return next((v for v in monitor.variables if v.name == variable_name), None)", "not re.match(regex, monitor_name_variable.value): with linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include method name ({tested_method_name})') if monitor_query_variable", 
"monitor_id_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_id'): if not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must start with", "in set(values): occurrences = list(filter(lambda var: var.value == unique_value, variables)) if len(occurrences) >", "<gh_stars>1-10 # ----------------------------------------------------------- # Unless explicitly stated otherwise all files in this repository", "with method name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor: MonitorConfiguration, tested_method_name: str): \"\"\" The test method", "E2E monitors.') def __is_excluded_from_lint(method: TestMethod): \"\"\" Method can be excluded its signature is", "can be excluded its signature is suffixed by `// E2E:wip`, e.g.: ` func", "files in this repository are licensed under the Apache License Version 2.0. #", "linter_context(code_reference=monitor_name_variable.code_reference): Linter.shared.emit_warning(f'$monitor_name must include method name ({tested_method_name})') if monitor_query_variable := __find_monitor_variable(monitor=monitor, variable_name='$monitor_query'): if", "for monitor in monitors: if variable := __find_monitor_variable(monitor=monitor, variable_name=variable_name): variables.append(variable) values: [str] =", "name, e.g. 
method: `func test_logs_logger_DEBUG_log_with_error() {` must define monitor ID starting with `logs_logger_debug_log_with_error`.", "not monitor_id_variable.value.startswith(tested_method_name): with linter_context(code_reference=monitor_id_variable.code_reference): Linter.shared.emit_error(f'$monitor_id must start with method name ({tested_method_name})') def __method_name_occurs_in_monitor_name_and_query(monitor:", "[TestMethod]): for test_method in test_methods: with linter_context(code_reference=test_method.code_reference): if test_method.monitors: for monitor in test_method.monitors:", "repository are licensed under the Apache License Version 2.0. # This product includes" ]
[ "children[len(children) - 1] return find_last_child(last) def show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error", "else: last = children[len(children) - 1] return find_last_child(last) def show_error(parent, message): if not", "wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0}, must only be called from main", "wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args, **kwargs): return wx.CallAfter(func, *args,", "raise RuntimeError('utils.show_error called from thread {0}, must only be called from main thread'.format(threading.current_thread().name))", "import threading import wx def find_last_child(widget): children = widget.GetChildren() if not children: return", "= children[len(children) - 1] return find_last_child(last) def show_error(parent, message): if not wx.IsMainThread(): raise", "parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args, **kwargs): return wx.CallAfter(func, *args, **kwargs) return", "return widget else: last = children[len(children) - 1] return find_last_child(last) def show_error(parent, message):", "threading import wx def find_last_child(widget): children = widget.GetChildren() if not children: return widget", "be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func)", "main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args, **kwargs):", "else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args, **kwargs): return wx.CallAfter(func,", "from main thread'.format(threading.current_thread().name)) else: 
wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args,", "message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0}, must only be", "functools import threading import wx def find_last_child(widget): children = widget.GetChildren() if not children:", "import functools import threading import wx def find_last_child(widget): children = widget.GetChildren() if not", "wx def find_last_child(widget): children = widget.GetChildren() if not children: return widget else: last", "find_last_child(widget): children = widget.GetChildren() if not children: return widget else: last = children[len(children)", "children: return widget else: last = children[len(children) - 1] return find_last_child(last) def show_error(parent,", "1] return find_last_child(last) def show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from", "= widget.GetChildren() if not children: return widget else: last = children[len(children) - 1]", "widget.GetChildren() if not children: return widget else: last = children[len(children) - 1] return", "called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def", "must only be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def", "find_last_child(last) def show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0},", "show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0}, must only", "widget else: last = children[len(children) - 1] return find_last_child(last) def show_error(parent, message): if", "'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): 
@functools.wraps(func) def wrapper(*args, **kwargs): return wx.CallAfter(func, *args, **kwargs)", "from thread {0}, must only be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error',", "{0}, must only be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR)", "<gh_stars>0 import functools import threading import wx def find_last_child(widget): children = widget.GetChildren() if", "if not children: return widget else: last = children[len(children) - 1] return find_last_child(last)", "not children: return widget else: last = children[len(children) - 1] return find_last_child(last) def", "called from thread {0}, must only be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message,", "children = widget.GetChildren() if not children: return widget else: last = children[len(children) -", "- 1] return find_last_child(last) def show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called", "if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0}, must only be called", "RuntimeError('utils.show_error called from thread {0}, must only be called from main thread'.format(threading.current_thread().name)) else:", "style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args, **kwargs): return wx.CallAfter(func, *args, **kwargs) return wrapper", "def show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0}, must", "def find_last_child(widget): children = widget.GetChildren() if not children: return widget else: last =", "not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread {0}, must only be called from", "only be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, 
style=wx.ICON_ERROR) def main_thread(func):", "import wx def find_last_child(widget): children = widget.GetChildren() if not children: return widget else:", "thread {0}, must only be called from main thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent,", "last = children[len(children) - 1] return find_last_child(last) def show_error(parent, message): if not wx.IsMainThread():", "thread'.format(threading.current_thread().name)) else: wx.MessageBox(message, 'Error', parent=parent, style=wx.ICON_ERROR) def main_thread(func): @functools.wraps(func) def wrapper(*args, **kwargs): return", "return find_last_child(last) def show_error(parent, message): if not wx.IsMainThread(): raise RuntimeError('utils.show_error called from thread" ]
[ "+ new_tags + '.' else: tagsandtext = text return tagsandtext def removeNewLines(sentence): \"\"\"", "os.path.exists(dirname): print('-- Creating Folders: %s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What", "re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text,", "#i#love#newyork -> #i #love #newyork \"\"\" if len(text) == 0: return '' #", "handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann", "to distance in the ``[0,inf]`` interval: Args: p (float): proximity value Returns: d", "removeHashtagSymbol(text): \"\"\" # - remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text):", "by another person using the @repostapp \"\"\" m = re_repostapp.search(caption) if m: start,", "' new_text += c return new_text def combineTagsAndText(text, tags): \"\"\" Combine Both Tags", "re_all_after_retweet.sub(text, '') # # Functions to handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost", "return LS_wrapper return LS_decorator # # Network functions # def prox2dist(p): \"\"\"Transforms a", "# Description: Utility functions # import os import re import functools import pickle", "| re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet is seen.\"\"\" return", "re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def", "to handle general social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\")", "Functions to handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+", "Remove everything after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions", "def removeLinks(text): \"\"\" remove links 
from text \"\"\" return re_links.sub('', text) # #", "removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # - remove", "Utility functions # import os import re import functools import pickle import numpy", "if not os.path.exists(dirname): print('-- Creating Folders: %s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname):", "else: return caption # # Functions to handle general social media text #", "def removeHashtagSymbol(text): \"\"\" # - remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def", "re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def", "File handling functions # def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path)", "<NAME> # Date: Jan 06, 2021 # # Description: Utility functions # import", "text + '. ' + new_tags + '.' else: tagsandtext = text return", "+ '.' 
else: tagsandtext = text return tagsandtext def removeNewLines(sentence): \"\"\" Remove new", "enumerate(text, start=0): if (c in ['#', '@']) and (i > 0): if text[i", "LS_wrapper return LS_decorator # # Network functions # def prox2dist(p): \"\"\"Transforms a non-negative", "[tag for tag in tags if tag not in text] if len(tags): new_tags", "def combineTagsAndText(text, tags): \"\"\" Combine Both Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text)", "# symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links from text", "in the ``[0,inf]`` interval: Args: p (float): proximity value Returns: d (float): distance", "os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else: result = func(*args, **kwargs)", "if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else: result = func(*args,", "# # Network functions # def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to", "re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links from text \"\"\" return re_links.sub('', text)", "= re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\"", "').replace('\\r', ' ') return sentence def removeRepostApp(caption): \"\"\" Remove content that was posted", "Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\"", "new_tags + '.' 
else: tagsandtext = text return tagsandtext def removeNewLines(sentence): \"\"\" Remove", "hashtags are togerther new_text = '' for i, c in enumerate(text, start=0): if", "prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to distance in the ``[0,inf]`` interval: Args:", "and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags = [tag for tag in tags", "# dbpath = os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath): with open(dbpath, 'rb')", "(float): proximity value Returns: d (float): distance value \"\"\" if (p == 0):", "dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else: result =", "' + new_tags + '.' else: tagsandtext = text return tagsandtext def removeNewLines(sentence):", "(i > 0): if text[i - 1] != ' ': new_text += '", "lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ') return sentence", "p (float): proximity value Returns: d (float): distance value \"\"\" if (p ==", "'') # # Functions to handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+", "that was posted by another person using the @repostapp \"\"\" m = re_repostapp.search(caption)", "open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else: result = func(*args, **kwargs) with open(dbpath,", "#newyork \"\"\" if len(text) == 0: return '' # Add spaces if hashtags", "& <NAME> # Date: Jan 06, 2021 # # Description: Utility functions #", "with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces", "# re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\")", "from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork -> #i", "\"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # - remove #", "def 
LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath):", "between hastags: #i#love#newyork -> #i #love #newyork \"\"\" if len(text) == 0: return", "return pickle.load(db_fp) else: result = func(*args, **kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result,", "start=0): if (c in ['#', '@']) and (i > 0): if text[i -", "sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ') return sentence def removeRepostApp(caption): \"\"\" Remove", "# re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything", "re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text):", "spaces between hastags: #i#love#newyork -> #i #love #newyork \"\"\" if len(text) == 0:", "as db_fp: return pickle.load(db_fp) else: result = func(*args, **kwargs) with open(dbpath, 'wb') as", "the @repostapp \"\"\" m = re_repostapp.search(caption) if m: start, finish = m.span() return", "if hashtags are togerther new_text = '' for i, c in enumerate(text, start=0):", "# - remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove", "for tag in tags if tag not in text] if len(tags): new_tags =", "0: return '' # Add spaces if hashtags are togerther new_text = ''", "= m.span() return caption[:start] else: return caption # # Functions to handle general", "'' for i, c in enumerate(text, start=0): if (c in ['#', '@']) and", "proximity value Returns: d (float): distance value \"\"\" if (p == 0): return", "\"\"\" return re_links.sub('', text) # # File handling functions # def ensurePathExists(path): \"\"\"", "re_repostapp.search(caption) if m: start, finish = m.span() return caption[:start] else: return caption #", "= os.path.dirname(path) if 
not os.path.exists(dirname): print('-- Creating Folders: %s --' % (dirname)) os.makedirs(dirname)", "06, 2021 # # Description: Utility functions # import os import re import", "def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork -> #i #love #newyork \"\"\"", "result = func(*args, **kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return result", "in tags if tag not in text] if len(tags): new_tags = '. '.join(['#'", "tags if tag not in text] if len(tags): new_tags = '. '.join(['#' +", "= re_repostapp.search(caption) if m: start, finish = m.span() return caption[:start] else: return caption", "m = re_repostapp.search(caption) if m: start, finish = m.span() return caption[:start] else: return", "Description: Utility functions # import os import re import functools import pickle import", "os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating Folders: %s --' % (dirname)) os.makedirs(dirname) def", "caption # # Functions to handle general social media text # re_atmention =", "\"\"\" What does this do? \"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): #", "pickle.dump(result, db_fp) return result return LS_wrapper return LS_decorator # # Network functions #", "'rb') as db_fp: return pickle.load(db_fp) else: result = func(*args, **kwargs) with open(dbpath, 'wb')", "'.' 
else: tagsandtext = text return tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines", "\"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links from text \"\"\" return", "\"\"\" Remove content that was posted by another person using the @repostapp \"\"\"", "return result return LS_wrapper return LS_decorator # # Network functions # def prox2dist(p):", "tags = [tag for tag in tags if tag not in text] if", "person using the @repostapp \"\"\" m = re_repostapp.search(caption) if m: start, finish =", "Functions to handle Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE)", "@repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between", "# # Description: Utility functions # import os import re import functools import", "another person using the @repostapp \"\"\" m = re_repostapp.search(caption) if m: start, finish", "removeLinks(text): \"\"\" remove links from text \"\"\" return re_links.sub('', text) # # File", "new_text = '' for i, c in enumerate(text, start=0): if (c in ['#',", "Both Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags = [tag for tag", "after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions to handle", "if len(tags): new_tags = '. 
'.join(['#' + w for w in tags]) tagsandtext", "Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+", "# # Functions to handle general social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\")", "# Network functions # def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to distance", "re import functools import pickle import numpy as np # # Functions to", "= [tag for tag in tags if tag not in text] if len(tags):", "path exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating Folders: %s --'", "tag not in text] if len(tags): new_tags = '. '.join(['#' + w for", "os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return", "if (p == 0): return np.inf else: return (1 / float(p)) - 1", "social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\")", "Ensure path exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating Folders: %s", "' ').replace('\\n', ' ').replace('\\r', ' ') return sentence def removeRepostApp(caption): \"\"\" Remove content", "text) # # File handling functions # def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\"", "Jan 06, 2021 # # Description: Utility functions # import os import re", "'.join(['#' + w for w in tags]) tagsandtext = text + '. '", "in tags]) tagsandtext = text + '. ' + new_tags + '.' 
else:", "# # File handling functions # def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname", "Folders: %s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this do?", "c in enumerate(text, start=0): if (c in ['#', '@']) and (i > 0):", "0): if text[i - 1] != ' ': new_text += ' ' new_text", "are togerther new_text = '' for i, c in enumerate(text, start=0): if (c", "return caption # # Functions to handle general social media text # re_atmention", "': new_text += ' ' new_text += c return new_text def combineTagsAndText(text, tags):", "posted by another person using the @repostapp \"\"\" m = re_repostapp.search(caption) if m:", "(float): distance value \"\"\" if (p == 0): return np.inf else: return (1", "re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\"", "caption[:start] else: return caption # # Functions to handle general social media text", "removeRepostApp(caption): \"\"\" Remove content that was posted by another person using the @repostapp", "handle general social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links", "text[i - 1] != ' ': new_text += ' ' new_text += c", "general social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links =", "open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return result return LS_wrapper return LS_decorator #", "re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return", "pickle.load(db_fp) else: result = func(*args, **kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp)", "Text Fields.\"\"\" text = 
addSpacesBetweenHashTags(text) tags = [tag for tag in tags if", "= text + '. ' + new_tags + '.' else: tagsandtext = text", "# re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\"", "\"\"\" if len(text) == 0: return '' # Add spaces if hashtags are", "% (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this do? \"\"\" def LS_decorator(func):", "Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags = [tag for tag in", "this do? \"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath,", "#i #love #newyork \"\"\" if len(text) == 0: return '' # Add spaces", "Remove new lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ')", "coding=utf-8 # Author: <NAME> & <NAME> # Date: Jan 06, 2021 # #", "seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions to handle Instagram Caption/Hashtag # re_repostapp", "functools import pickle import numpy as np # # Functions to handle Twitter", "# Add spaces if hashtags are togerther new_text = '' for i, c", "re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after", "spaces if hashtags are togerther new_text = '' for i, c in enumerate(text,", "as db_fp: pickle.dump(result, db_fp) return result return LS_wrapper return LS_decorator # # Network", "dirname = os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating Folders: %s --' % (dirname))", "Remove content that was posted by another person using the @repostapp \"\"\" m", "addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork -> #i #love #newyork \"\"\" if", "' ').replace('\\r', ' ') return sentence def 
removeRepostApp(caption): \"\"\" Remove content that was", "db_fp) return result return LS_wrapper return LS_decorator # # Network functions # def", "= os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp:", "Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # - remove # symbol", "!= ' ': new_text += ' ' new_text += c return new_text def", "@ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork ->", "@\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add", "= addSpacesBetweenHashTags(text) tags = [tag for tag in tags if tag not in", "interval: Args: p (float): proximity value Returns: d (float): distance value \"\"\" if", "+= ' ' new_text += c return new_text def combineTagsAndText(text, tags): \"\"\" Combine", "from text \"\"\" return re_links.sub('', text) # # File handling functions # def", "in enumerate(text, start=0): if (c in ['#', '@']) and (i > 0): if", "def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # -", "for w in tags]) tagsandtext = text + '. ' + new_tags +", "LS_decorator # # Network functions # def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity", "symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links from text \"\"\"", "dbpath = dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else:", "re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet is seen.\"\"\"", "combineTagsAndText(text, tags): \"\"\" Combine Both Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags", "not in text] if len(tags): new_tags = '. 
'.join(['#' + w for w", "#love #newyork \"\"\" if len(text) == 0: return '' # Add spaces if", "import re import functools import pickle import numpy as np # # Functions", "handling functions # def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path) if", "w in tags]) tagsandtext = text + '. ' + new_tags + '.'", "(c in ['#', '@']) and (i > 0): if text[i - 1] !=", "> 0): if text[i - 1] != ' ': new_text += ' '", "= func(*args, **kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return result return", "import os import re import functools import pickle import numpy as np #", "with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork", "if (c in ['#', '@']) and (i > 0): if text[i - 1]", "tags]) tagsandtext = text + '. ' + new_tags + '.' else: tagsandtext", "return new_text def combineTagsAndText(text, tags): \"\"\" Combine Both Tags and Text Fields.\"\"\" text", "text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove", "Creating Folders: %s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this", "m.span() return caption[:start] else: return caption # # Functions to handle general social", "# import os import re import functools import pickle import numpy as np", "= re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after a", "os import re import functools import pickle import numpy as np # #", "LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath): with", "'@']) and (i > 0): if text[i - 1] != ' ': new_text", "Args: p (float): proximity value Returns: d (float): distance value \"\"\" if (p", "hastags: #i#love#newyork -> #i #love #newyork \"\"\" if len(text) == 0: return ''", "m: start, finish = 
m.span() return caption[:start] else: return caption # # Functions", "``[0,1]`` proximity to distance in the ``[0,inf]`` interval: Args: p (float): proximity value", "new_text += c return new_text def combineTagsAndText(text, tags): \"\"\" Combine Both Tags and", "'. ' + new_tags + '.' else: tagsandtext = text return tagsandtext def", "everything after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions to", "distance value \"\"\" if (p == 0): return np.inf else: return (1 /", "%s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this do? \"\"\"", "\"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, dbname) dbpath", "sentence def removeRepostApp(caption): \"\"\" Remove content that was posted by another person using", "the ``[0,inf]`` interval: Args: p (float): proximity value Returns: d (float): distance value", "Network functions # def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to distance in", "+= c return new_text def combineTagsAndText(text, tags): \"\"\" Combine Both Tags and Text", "numpy as np # # Functions to handle Twitter text # re_all_after_retweet =", "else: tagsandtext = text return tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines \"\"\"", "@[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet is", "\"\"\" # - remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\"", "\"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ') return sentence def", "- remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links", "1] != ' ': new_text += ' ' new_text += c return new_text", "= sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ') return sentence def removeRepostApp(caption): \"\"\"", 
"\"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating Folders:", "= re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text)", "return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links from text \"\"\" return re_links.sub('',", "# coding=utf-8 # Author: <NAME> & <NAME> # Date: Jan 06, 2021 #", "def removeRepostApp(caption): \"\"\" Remove content that was posted by another person using the", "tagsandtext = text + '. ' + new_tags + '.' else: tagsandtext =", "\"\"\" m = re_repostapp.search(caption) if m: start, finish = m.span() return caption[:start] else:", "# Date: Jan 06, 2021 # # Description: Utility functions # import os", "retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions to handle Instagram Caption/Hashtag", "import numpy as np # # Functions to handle Twitter text # re_all_after_retweet", "addSpacesBetweenHashTags(text) tags = [tag for tag in tags if tag not in text]", "return caption[:start] else: return caption # # Functions to handle general social media", "removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r',", "['#', '@']) and (i > 0): if text[i - 1] != ' ':", "text) def removeHashtagSymbol(text): \"\"\" # - remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text)", "c return new_text def combineTagsAndText(text, tags): \"\"\" Combine Both Tags and Text Fields.\"\"\"", "remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1', text) def removeLinks(text): \"\"\" remove links from", "return re_links.sub('', text) # # File handling functions # def ensurePathExists(path): \"\"\" Ensure", "@functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, 
dbname) dbpath = dbname if", "text return tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence = sentence.replace('\\r\\n',", "'' # Add spaces if hashtags are togerther new_text = '' for i,", "new_text += ' ' new_text += c return new_text def combineTagsAndText(text, tags): \"\"\"", "d (float): distance value \"\"\" if (p == 0): return np.inf else: return", "# # Functions to handle Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE", "to handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with", "functions # def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to distance in the", "# Author: <NAME> & <NAME> # Date: Jan 06, 2021 # # Description:", "not os.path.exists(dirname): print('-- Creating Folders: %s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\"", "ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating", "tag in tags if tag not in text] if len(tags): new_tags = '.", "LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, dbname) dbpath = dbname", "db_fp: pickle.dump(result, db_fp) return result return LS_wrapper return LS_decorator # # Network functions", "') return sentence def removeRepostApp(caption): \"\"\" Remove content that was posted by another", "if text[i - 1] != ' ': new_text += ' ' new_text +=", "Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags = [tag for tag in tags if tag", "def load_save_return(dbname): \"\"\" What does this do? 
\"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args,", "-)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork -> #i #love #newyork", "print('-- Creating Folders: %s --' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does", "re_links.sub('', text) # # File handling functions # def ensurePathExists(path): \"\"\" Ensure path", "text \"\"\" return re_links.sub('', text) # # File handling functions # def ensurePathExists(path):", "using the @repostapp \"\"\" m = re_repostapp.search(caption) if m: start, finish = m.span()", "' ': new_text += ' ' new_text += c return new_text def combineTagsAndText(text,", "def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname): print('--", "# # Functions to handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with", "# Functions to handle Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE |", "if tag not in text] if len(tags): new_tags = '. '.join(['#' + w", "What does this do? 
\"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath", "Functions to handle general social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol =", "new_text def combineTagsAndText(text, tags): \"\"\" Combine Both Tags and Text Fields.\"\"\" text =", "text) def removeLinks(text): \"\"\" remove links from text \"\"\" return re_links.sub('', text) #", "re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet", "**kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return result return LS_wrapper return", "functions # def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path) if not", "with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return result return LS_wrapper return LS_decorator", "= '. '.join(['#' + w for w in tags]) tagsandtext = text +", "# def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname):", "text = addSpacesBetweenHashTags(text) tags = [tag for tag in tags if tag not", "def removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '')", "import pickle import numpy as np # # Functions to handle Twitter text", "result return LS_wrapper return LS_decorator # # Network functions # def prox2dist(p): \"\"\"Transforms", "``[0,inf]`` interval: Args: p (float): proximity value Returns: d (float): distance value \"\"\"", "**kwargs): # dbpath = os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath): with open(dbpath,", "non-negative ``[0,1]`` proximity to distance in the ``[0,inf]`` interval: Args: p (float): proximity", "+ '. ' + new_tags + '.' else: tagsandtext = text return tagsandtext", "os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this do? 
\"\"\" def LS_decorator(func): @functools.wraps(func) def", "@mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # - remove # symbol \"\"\"", "value \"\"\" if (p == 0): return np.inf else: return (1 / float(p))", "# File handling functions # def ensurePathExists(path): \"\"\" Ensure path exists.\"\"\" dirname =", "func(*args, **kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return result return LS_wrapper", "value Returns: d (float): distance value \"\"\" if (p == 0): return np.inf", "def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, dbname) dbpath =", "Add spaces between hastags: #i#love#newyork -> #i #love #newyork \"\"\" if len(text) ==", "text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text):", "with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else: result = func(*args, **kwargs) with", "-> #i #love #newyork \"\"\" if len(text) == 0: return '' # Add", "\"\"\" Add spaces between hastags: #i#love#newyork -> #i #love #newyork \"\"\" if len(text)", "= '' for i, c in enumerate(text, start=0): if (c in ['#', '@'])", "(dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this do? \"\"\" def LS_decorator(func): @functools.wraps(func)", "2021 # # Description: Utility functions # import os import re import functools", "len(tags): new_tags = '. '.join(['#' + w for w in tags]) tagsandtext =", "'. '.join(['#' + w for w in tags]) tagsandtext = text + '.", "w for w in tags]) tagsandtext = text + '. 
' + new_tags", "return re_all_after_retweet.sub(text, '') # # Functions to handle Instagram Caption/Hashtag # re_repostapp =", "if len(text) == 0: return '' # Add spaces if hashtags are togerther", "- 1] != ' ': new_text += ' ' new_text += c return", "def removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', '", "for i, c in enumerate(text, start=0): if (c in ['#', '@']) and (i", "do? \"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath = os.path.join(godbpath, dbname)", "re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('',", "\"\"\" Remove new lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', '", "import functools import pickle import numpy as np # # Functions to handle", "+ w for w in tags]) tagsandtext = text + '. 
' +", "\"\"\" Combine Both Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags = [tag", "return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # - remove # symbol \"\"\" return", "return sentence def removeRepostApp(caption): \"\"\" Remove content that was posted by another person", "' ' new_text += c return new_text def combineTagsAndText(text, tags): \"\"\" Combine Both", "@\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags: #i#love#newyork -> #i #love", "').replace('\\n', ' ').replace('\\r', ' ') return sentence def removeRepostApp(caption): \"\"\" Remove content that", "to handle Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def", "dbpath = os.path.join(godbpath, dbname) dbpath = dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as", "in ['#', '@']) and (i > 0): if text[i - 1] != '", "Date: Jan 06, 2021 # # Description: Utility functions # import os import", "as np # # Functions to handle Twitter text # re_all_after_retweet = re.compile(r\"rt", "Author: <NAME> & <NAME> # Date: Jan 06, 2021 # # Description: Utility", "Combine Both Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags = [tag for", "dbname) dbpath = dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp)", "a non-negative ``[0,1]`` proximity to distance in the ``[0,inf]`` interval: Args: p (float):", "and (i > 0): if text[i - 1] != ' ': new_text +=", "'wb') as db_fp: pickle.dump(result, db_fp) return result return LS_wrapper return LS_decorator # #", "<NAME> & <NAME> # Date: Jan 06, 2021 # # Description: Utility functions", "return '' # Add spaces if hashtags are togerther new_text = '' for", "content that was posted by another person using the @repostapp \"\"\" m =", "# Functions to handle Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost", "\"\"\" remove links 
from text \"\"\" return re_links.sub('', text) # # File handling", "proximity to distance in the ``[0,inf]`` interval: Args: p (float): proximity value Returns:", "re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" #", "@\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text): \"\"\" Add spaces between hastags:", "handle Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\", re.IGNORECASE | re.UNICODE) def removeAllAfterRetweet(text):", "i, c in enumerate(text, start=0): if (c in ['#', '@']) and (i >", "# Functions to handle general social media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol", "\"\"\"Transforms a non-negative ``[0,1]`` proximity to distance in the ``[0,inf]`` interval: Args: p", "new_tags = '. '.join(['#' + w for w in tags]) tagsandtext = text", "return tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence = sentence.replace('\\r\\n', '", "media text # re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def", "= re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from @\\w+ -)\") def addSpacesBetweenHashTags(text):", "togerther new_text = '' for i, c in enumerate(text, start=0): if (c in", "def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to distance in the ``[0,inf]`` interval:", "does this do? \"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs): # dbpath =", "text] if len(tags): new_tags = '. 
'.join(['#' + w for w in tags])", "sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ') return sentence def removeRepostApp(caption):", "= re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove @mentions\"\"\" return re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\"", "remove links from text \"\"\" return re_links.sub('', text) # # File handling functions", "= dbname if os.path.isfile(dbpath): with open(dbpath, 'rb') as db_fp: return pickle.load(db_fp) else: result", "a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions to handle Instagram", "finish = m.span() return caption[:start] else: return caption # # Functions to handle", "new lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\r', ' ') return", "if m: start, finish = m.span() return caption[:start] else: return caption # #", "db_fp: return pickle.load(db_fp) else: result = func(*args, **kwargs) with open(dbpath, 'wb') as db_fp:", "np # # Functions to handle Twitter text # re_all_after_retweet = re.compile(r\"rt @[a-zA-Z0-9_]+.+\",", "\"\"\" if (p == 0): return np.inf else: return (1 / float(p)) -", "@repostapp \"\"\" m = re_repostapp.search(caption) if m: start, finish = m.span() return caption[:start]", "tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence = sentence.replace('\\r\\n', ' ').replace('\\n',", "links from text \"\"\" return re_links.sub('', text) # # File handling functions #", "Instagram Caption/Hashtag # re_repostapp = re.compile(r\"(#Repost @\\w+ with @repostapp)|(#EzRepost @\\w+ with @ezrepostapp)|(Regrann from", "is seen.\"\"\" return re_all_after_retweet.sub(text, '') # # Functions to handle Instagram Caption/Hashtag #", "--' % (dirname)) os.makedirs(dirname) def load_save_return(dbname): \"\"\" What does this do? 
\"\"\" def", "len(text) == 0: return '' # Add spaces if hashtags are togerther new_text", "pickle import numpy as np # # Functions to handle Twitter text #", "start, finish = m.span() return caption[:start] else: return caption # # Functions to", "= text return tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence =", "else: result = func(*args, **kwargs) with open(dbpath, 'wb') as db_fp: pickle.dump(result, db_fp) return", "# def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]`` proximity to distance in the ``[0,inf]``", "Add spaces if hashtags are togerther new_text = '' for i, c in", "functions # import os import re import functools import pickle import numpy as", "== 0: return '' # Add spaces if hashtags are togerther new_text =", "was posted by another person using the @repostapp \"\"\" m = re_repostapp.search(caption) if", "exists.\"\"\" dirname = os.path.dirname(path) if not os.path.exists(dirname): print('-- Creating Folders: %s --' %", "tagsandtext = text return tagsandtext def removeNewLines(sentence): \"\"\" Remove new lines \"\"\" sentence", "Returns: d (float): distance value \"\"\" if (p == 0): return np.inf else:", "return LS_decorator # # Network functions # def prox2dist(p): \"\"\"Transforms a non-negative ``[0,1]``", "\"\"\" Remove everything after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') # #", "tags): \"\"\" Combine Both Tags and Text Fields.\"\"\" text = addSpacesBetweenHashTags(text) tags =", "load_save_return(dbname): \"\"\" What does this do? \"\"\" def LS_decorator(func): @functools.wraps(func) def LS_wrapper(*args, **kwargs):", "distance in the ``[0,inf]`` interval: Args: p (float): proximity value Returns: d (float):", "' ') return sentence def removeRepostApp(caption): \"\"\" Remove content that was posted by", "removeAllAfterRetweet(text): \"\"\" Remove everything after a retweet is seen.\"\"\" return re_all_after_retweet.sub(text, '') #", "in text] if len(tags): new_tags = '. 
'.join(['#' + w for w in", "re_atmention.sub('', text) def removeHashtagSymbol(text): \"\"\" # - remove # symbol \"\"\" return re_hashtagsymbol.sub(r'\\1',", "re_atmention = re.compile(r\"@[a-zA-Z0-9_]+\") re_hashtagsymbol = re.compile(r\"#([a-zA-Z0-9_]+)\") re_links = re.compile(r\"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+\") def removeAtMention(text): \"\"\" Remove" ]
[ "sys.version_info[0] < 3: import urllib else: import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL", "'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG = 'pg' R = 'r' X", "+ '.jpg' if default: params['d'] = str(default) if force_default: params['f'] = 'y' if", "GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG = 'pg' R =", "self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url =", "else: import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/'", "self.url + '.jpg' if default: params['d'] = str(default) if force_default: params['f'] = 'y'", "= self.url + '.jpg' if default: params['d'] = str(default) if force_default: params['f'] =", "str(size) if typed: self.url = self.url + '.jpg' if default: params['d'] = str(default)", "= 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG = 'pg' R = 'r'", "if sys.version_info[0] < 3: import urllib else: import urllib.parse as urllib class Gravtr(object):", "params['f'] = 'y' if rating_type: params['r'] = str(rating_type) return self.url + '?' 
+", "import sys if sys.version_info[0] < 3: import urllib else: import urllib.parse as urllib", "urllib else: import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE =", "= 'r' X = 'x' def __init__(self, email): self.email = email.encode('utf-8') def generate(self,", "< 3: import urllib else: import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL =", "class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g'", "ratingType(object): G = 'g' PG = 'pg' R = 'r' X = 'x'", "hashlib import sys if sys.version_info[0] < 3: import urllib else: import urllib.parse as", "'.jpg' if default: params['d'] = str(default) if force_default: params['f'] = 'y' if rating_type:", "X = 'x' def __init__(self, email): self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None,", "import urllib else: import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE", "= 'y' if rating_type: params['r'] = str(rating_type) return self.url + '?' 
+ urllib.urlencode(params)", "'r' X = 'x' def __init__(self, email): self.email = email.encode('utf-8') def generate(self, unsecure=False,", "dict() if size: params['s'] = str(size) if typed: self.url = self.url + '.jpg'", "def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not", "generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure", "if force_default: params['f'] = 'y' if rating_type: params['r'] = str(rating_type) return self.url +", "rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url +", "= 'pg' R = 'r' X = 'x' def __init__(self, email): self.email =", "sys if sys.version_info[0] < 3: import urllib else: import urllib.parse as urllib class", "G = 'g' PG = 'pg' R = 'r' X = 'x' def", "= gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if size: params['s'] = str(size) if", "default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url =", "if default: params['d'] = str(default) if force_default: params['f'] = 'y' if rating_type: params['r']", "3: import urllib else: import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/'", "unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if size:", "R = 'r' X = 'x' def __init__(self, email): self.email = email.encode('utf-8') def", "gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if size: params['s'] = str(size) if typed:", "self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if size: params['s'] = str(size)", "urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' 
GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G =", "__init__(self, email): self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None):", "GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG =", "= str(default) if force_default: params['f'] = 'y' if rating_type: params['r'] = str(rating_type) return", "force_default: params['f'] = 'y' if rating_type: params['r'] = str(rating_type) return self.url + '?'", "unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure else", "force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url", "'g' PG = 'pg' R = 'r' X = 'x' def __init__(self, email):", "= str(size) if typed: self.url = self.url + '.jpg' if default: params['d'] =", "self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if size: params['s'] =", "email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if", "str(default) if force_default: params['f'] = 'y' if rating_type: params['r'] = str(rating_type) return self.url", "size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE", "'x' def __init__(self, email): self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None,", "= 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG = 'pg'", "urllib.parse as urllib class Gravtr(object): GRAVATAR_URL = 
'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object):", "+ hashlib.md5(self.email).hexdigest() params = dict() if size: params['s'] = str(size) if typed: self.url", "import hashlib import sys if sys.version_info[0] < 3: import urllib else: import urllib.parse", "params = dict() if size: params['s'] = str(size) if typed: self.url = self.url", "not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if", "gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest()", "if not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params = dict()", "typed: self.url = self.url + '.jpg' if default: params['d'] = str(default) if force_default:", "hashlib.md5(self.email).hexdigest() params = dict() if size: params['s'] = str(size) if typed: self.url =", "typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url", "= email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url = self.GRAVATAR_URL", "if typed: self.url = self.url + '.jpg' if default: params['d'] = str(default) if", "'pg' R = 'r' X = 'x' def __init__(self, email): self.email = email.encode('utf-8')", "= 'x' def __init__(self, email): self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False,", "params['s'] = str(size) if typed: self.url = self.url + '.jpg' if default: params['d']", "default: params['d'] = str(default) if force_default: params['f'] = 'y' if rating_type: params['r'] =", "else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params = dict() if size: params['s']", "params['d'] = 
str(default) if force_default: params['f'] = 'y' if rating_type: params['r'] = str(rating_type)", "if size: params['s'] = str(size) if typed: self.url = self.url + '.jpg' if", "as urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G", "class ratingType(object): G = 'g' PG = 'pg' R = 'r' X =", "self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params =", "import urllib.parse as urllib class Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class", "def __init__(self, email): self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False,", "self.url = self.url + '.jpg' if default: params['d'] = str(default) if force_default: params['f']", "= dict() if size: params['s'] = str(size) if typed: self.url = self.url +", "PG = 'pg' R = 'r' X = 'x' def __init__(self, email): self.email", "size: params['s'] = str(size) if typed: self.url = self.url + '.jpg' if default:", "= self.GRAVATAR_URL if not unsecure else self.GRAVATAR_URL_UNSECURE self.url = gravatar_url + hashlib.md5(self.email).hexdigest() params", "= 'g' PG = 'pg' R = 'r' X = 'x' def __init__(self,", "Gravtr(object): GRAVATAR_URL = 'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG", "email): self.email = email.encode('utf-8') def generate(self, unsecure=False, size=None, typed=False, default=None, force_default=False, rating_type=None): gravatar_url", "'https://www.gravatar.com/avatar/' GRAVATAR_URL_UNSECURE = 'http://www.gravatar.com/avatar/' class ratingType(object): G = 'g' PG = 'pg' R" ]
[ "part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar):", "class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "in keys: if measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode,", "= Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1)", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value =", "part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def", "20: 1, 21: 1, 22: 1, 23: 1, 24: 1, 25: 1, 26:", "piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id),", "Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id =", "Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def setUp(self):", "= \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "# self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part =", "self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = 
piece.getPart(self.p_id)", "= 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class", "7: 1, 8: 1, 9: 1, 10: 1, 11: 1, 12: 1, 13:", "Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex(", "1, 31: 1, 32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None)", "self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(", "NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\"", "28: 1, 29: 1, 30: 1, 31: 1, 32: 1} def testParts(self): global", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1", "# def setUp(self): # self.p_id = \"P1\" # part = piece.getPart(self.p_id) # measure", "measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part", "self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num),", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value", "MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from 
MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import testcases partname =", "1, 16: 1, 17: 1, 18: 1, 19: 1, 20: 1, 21: 1,", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar):", "self.value = 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "1, 17: 1, 18: 1, 19: 1, 20: 1, 21: 1, 22: 1,", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1", "1, 25: 1, 26: 1, 27: 1, 28: 1, 29: 1, 30: 1,", "Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id =", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar):", "3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part =", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value =", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value =", "1, 26: 1, 27: 1, 28: 1, 29: 1, 30: 1, 31: 1,", "1, 27: 1, 28: 1, 29: 1, 30: 1, 31: 1, 32: 1}", "MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for", "self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id", "class 
Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1,", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value", "class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id = \"P1\" # part = piece.getPart(self.p_id)", "{1: 4, 2: 4, 3: 1, 4: 1, 5: 1, 6: 1, 7:", "class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "1, 14: 1, 15: 1, 16: 1, 17: 1, 18: 1, 19: 1,", "self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note", "\"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar):", "measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class", "staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def", "= part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar):", "hasattr(self, \"value\"): 
self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part =", "4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part =", "= testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num =", "NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\"", "1, 32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name)", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type", "part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type", "= piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type =", "4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part =", "staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate # TODO: fix this", "self.p_id = \"P1\" # part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item", "partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\" self.p_name", "piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate", "class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "staff=1) self.item = Search( 
NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def", "measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part", "in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class", "measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part", "part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\"", "= \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure,", "part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def", "part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def", "Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import testcases partname", "3).GetItem() self.item = note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id =", "= \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "= piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type #", "17: 1, 18: 1, 19: 1, 20: 1, 
21: 1, 22: 1, 23:", "19: 1, 20: 1, 21: 1, 22: 1, 23: 1, 24: 1, 25:", "self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def setUp(self):", "self.value = \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem()", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class", "part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def", "11: 1, 12: 1, 13: 1, 14: 1, 15: 1, 16: 1, 17:", "\"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item =", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar):", "1, 18: 1, 19: 1, 20: 1, 21: 1, 22: 1, 23: 1,", "1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part =", "Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id =", "staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def", "self.m_num = 32 self.p_id = \"P1\" self.p_name = \"Piccolo\" self.note_num = {1: 4,", 
"measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part", "import unittest from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item =", "class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item", "self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id", "os import unittest from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex", "21: 1, 22: 1, 23: 1, 24: 1, 25: 1, 26: 1, 27:", "MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory", "None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self):", "1, 19: 1, 20: 1, 21: 1, 22: 1, 23: 1, 24: 1,", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar):", 
"staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide", "Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for measure in keys: if measure", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type", "2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part =", "9: 1, 10: 1, 11: 1, 12: 1, 13: 1, 14: 1, 15:", "MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname))", "1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part =", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value =", "= part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar):", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type =", "NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from 
MuseParse.SampleMusicXML import testcases", "= part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class", "4, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 1,", "not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def", "Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id =", "measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide class", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure,", "Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\"", "self.p_id), self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1) keys =", "part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate # TODO: fix", "self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1,", "= Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode,", "1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 1, 9: 1,", "= part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 
4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class", "Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\"", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value", "\"P1\" # part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode,", "= note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure", "piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1) keys", "= Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id =", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type", "def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id)", "self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode,", "= \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure,", "self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id =", "Note.NonArpeggiate # TODO: fix this # class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id", "26: 1, 27: 1, 28: 1, 29: 1, 30: 1, 31: 1, 32:", "piece.getPart(self.p_id) 
measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value =", "self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar):", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem()", "from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory,", "staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def", "\"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type", "measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar):", "1, 15: 1, 16: 1, 17: 1, 18: 1, 19: 1, 20: 1,", "self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def", "note = Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value 
= \"down\" class Note4Measure1FirstNotation(testBar):", "= Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id", "= Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id =", "measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part", "staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self):", "testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num = 32", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value", "NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\"", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value =", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].lineType self.value = \"wavy\"", "this # class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id = \"P1\" # part", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value", "import Note from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece", "NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def setUp(self): 
self.p_id = \"P1\"", "self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def setUp(self):", "import os import unittest from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search,", "= \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self):", "= part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar):", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class", "12: 1, 13: 1, 14: 1, 15: 1, 16: 1, 17: 1, 18:", "= Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id", "MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0]", "MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\"", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type =", "self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff =", "Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class 
testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance(", "def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure,", "1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part =", "self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate # TODO: fix this #", "1, 22: 1, 23: 1, 24: 1, 25: 1, 26: 1, 27: 1,", "Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate # TODO: fix this # class Note4Measure1Notation1Type(testBar):", "= \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode,", "10: 1, 11: 1, 12: 1, 13: 1, 14: 1, 15: 1, 16:", "Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\"", "import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses", "14: 1, 15: 1, 16: 1, 17: 1, 18: 1, 19: 1, 20:", "Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1)", "Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, 
measure, 3).GetItem() self.instance_type = Note.Glissando", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value =", "measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def setUp(self): part", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value", "Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "= {1: 4, 2: 4, 3: 1, 4: 1, 5: 1, 6: 1,", "\"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "= Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id", "part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value = \"down\"", "measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\"", "partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def", "measure = part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\"", "staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self):", "self.p_name = \"Piccolo\" self.note_num = {1: 4, 2: 4, 3: 1, 4: 1,", "# self.item = Search(NoteNode, 
measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def", "def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item", "parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import", "= Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id =", "part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar):", "2: 4, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8:", "= 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id", "class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1,", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type", "# part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure,", "testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet):", "1, 5: 1, 6: 1, 7: 1, 8: 1, 9: 1, 10: 1,", "measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class", "Search(NoteNode, measure, 4).GetItem() 
self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\"", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value =", "measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part", "= Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\"", "self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes()", "measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part", "self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part", "= part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value =", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\"", "self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id", "class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self, \"value\"):", 
"= Note.NonArpeggiate # TODO: fix this # class Note4Measure1Notation1Type(testBar): # def setUp(self): #", "Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def setUp(self):", "Note from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece =", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem()", "= part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar):", "= staff.GetChildrenIndexes() for measure in keys: if measure in self.note_num: measure_obj = part.getMeasure(measure=measure,", "= part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value =", "NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\"", "3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 1, 9:", "staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def", "if measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]),", "staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self):", "self.p_id = \"P1\" self.p_name = \"Piccolo\" 
self.note_num = {1: 4, 2: 4, 3:", "= note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part", "NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\"", "xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\" self.p_name = \"Piccolo\" self.note_num = {1:", "Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1)", "class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1,", "\"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "1, 13: 1, 14: 1, 15: 1, 16: 1, 17: 1, 18: 1,", "= piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type =", "= part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar):", "= \"P1\" # part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item =", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar):", "3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part =", "= Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id", "part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self): 
if", "measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type)", "class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1,", "1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part =", "= Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\"", "18: 1, 19: 1, 20: 1, 21: 1, 22: 1, 23: 1, 24:", "class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1,", "piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part =", "\"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type", "self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number", "measure, 4).GetItem() self.instance_type = Note.NonArpeggiate # 
TODO: fix this # class Note4Measure1Notation1Type(testBar): #", "if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value)", "self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(", "testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\"", "self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "= Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id", "measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part", "self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id", "NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\"", "1, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1, 12: 1,", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar):", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item", "piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate", "Search, FindByIndex from 
MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import", "testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\" self.p_name = \"Piccolo\"", "1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part =", "measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\"", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 1).GetItem()", "hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class", "part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def", "self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self):", "1, 20: 1, 21: 1, 22: 1, 23: 1, 24: 1, 25: 1,", "def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type", "\"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part = 
piece.getPart(self.p_id)", "Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part =", "Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "= 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "keys = staff.GetChildrenIndexes() for measure in keys: if measure in self.note_num: measure_obj =", "fix this # class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id = \"P1\" #", "class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item =", "piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate", "Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\")", "27: 1, 28: 1, 29: 1, 30: 1, 31: 1, 32: 1} def", "1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem()", "NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\"", "measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def", "1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part = 
piece.getPart(\"P1\") measure =", "piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate", "staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def setUp(self):", "15: 1, 16: 1, 17: 1, 18: 1, 19: 1, 20: 1, 21:", "self.value = 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\"", "import testcases partname = \"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number", "= Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].lineType", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class", "class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id =", "measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item = 
note.wrap_notation[0].direction self.value", "staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self):", "31: 1, 32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name,", "xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode", "= Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id", "from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode", "class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number", "class Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item =", "self.instance_type = Note.NonArpeggiate # TODO: fix this # class Note4Measure1Notation1Type(testBar): # def setUp(self):", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide", "self.value = \"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "1, 23: 1, 24: 1, 25: 1, 26: 1, 27: 1, 28: 1,", "self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class 
Note4Measure2Type(testBar): def setUp(self): self.p_id", "30: 1, 31: 1, 32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not", "staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for measure in keys: if measure in", "self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode,", "part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def", "self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self):", "= Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part =", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType", "Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class Note1Measure2LineType(testBar): def", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value", "1, 11: 1, 12: 1, 13: 1, 14: 1, 15: 1, 16: 1,", "testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].number self.value = 1 class", "staff=1) self.item 
= Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].lineType self.value =", "= part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar):", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType", "self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def setUp(self):", "def testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for measure", "testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if hasattr(self,", "NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\"", "= \"up\" class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1)", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value =", "self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self):", "measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate #", "self.item = note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\"", "29: 1, 30: 1, 31: 1, 
32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id)", "Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id =", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number", "staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def", "= Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar):", "= Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id =", "measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value", "1, 24: 1, 25: 1, 26: 1, 27: 1, 28: 1, 29: 1,", "unittest from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode", "staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self): if hasattr(self,", "1, 10: 1, 11: 1, 12: 1, 13: 1, 14: 1, 15: 1,", "= parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id =", "piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value", "piece.getPart(self.p_id) measure 
= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value =", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\"", "self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self):", "25: 1, 26: 1, 27: 1, 28: 1, 29: 1, 30: 1, 31:", "part.getStaff(1) keys = staff.GetChildrenIndexes() for measure in keys: if measure in self.note_num: measure_obj", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem()", "4).GetItem() self.instance_type = Note.NonArpeggiate # TODO: fix this # class Note4Measure1Notation1Type(testBar): # def", "note = Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar):", "piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value =", "\"arpeggiosAndGlissandos.xml\" directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self)", "measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class", "= part.getStaff(1) keys = staff.GetChildrenIndexes() for 
measure in keys: if measure in self.note_num:", "6: 1, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1, 12:", "staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self):", "self.note_num = {1: 4, 2: 4, 3: 1, 4: 1, 5: 1, 6:", "5: 1, 6: 1, 7: 1, 8: 1, 9: 1, 10: 1, 11:", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value", "Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item", "self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id", "class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "class Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value =", "\"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id =", "import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import", "= Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id =", "# TODO: fix this # class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id =", "= part.getMeasure(measure=2, 
staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar):", "TODO: fix this # class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id = \"P1\"", "class Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "22: 1, 23: 1, 24: 1, 25: 1, 26: 1, 27: 1, 28:", "= Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id", "MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode", "1, 8: 1, 9: 1, 10: 1, 11: 1, 12: 1, 13: 1,", "piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction", "measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part", "= Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id =", "from MuseParse.tests.testUsingXML.xmlSet import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import", "measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode)", "class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1,", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 
4).GetItem().wrap_notation[0].type self.value", "2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part =", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class", "2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part =", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class", "= \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode,", "2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part =", "1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self):", "self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id", "32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class", "NoteNode) class testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self):", "self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode,", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() 
self.instance_type =", "= Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id =", "13: 1, 14: 1, 15: 1, 16: 1, 17: 1, 18: 1, 19:", "Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\"", "parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\"", "= Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id", "self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def", "23: 1, 24: 1, 25: 1, 26: 1, 27: 1, 28: 1, 29:", "part = piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for measure in keys:", "measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part", "setUp(self): # self.p_id = \"P1\" # part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1)", "def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item", "self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self):", "class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "\"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "Search(NoteNode, 
measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\"", "measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class", "from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\" directory =", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem()", "= \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0],", "measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class", "staff.GetChildrenIndexes() for measure in keys: if measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1)", "Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "= \"wavy\" class Note4Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part =", "staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\" class", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type =", "import xmlSet, parsePiece from MuseParse.classes.ObjectHierarchy.TreeClasses.BaseTree import Search, 
FindByIndex from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from", "is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode)", "Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id =", "1, 9: 1, 10: 1, 11: 1, 12: 1, 13: 1, 14: 1,", "from MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from", "4, 2: 4, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1,", "def setUp(self): # self.p_id = \"P1\" # part = piece.getPart(self.p_id) # measure =", "Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].lineType self.value", "self.value = \"up\" class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1,", "3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part =", "measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id = \"P1\" part", "MuseParse.classes.ObjectHierarchy.TreeClasses.NoteNode import NoteNode from MuseParse.classes.ObjectHierarchy.TreeClasses.MeasureNode import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML", "= Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id", "class testArpeg(xmlSet): 
def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\" self.p_name =", "self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def setUp(self):", "self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase):", "= \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part =", "class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance(", "= 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "4: 1, 5: 1, 6: 1, 7: 1, 8: 1, 9: 1, 10:", "class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def", "keys: if measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1),", "= \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self): self.p_id =", "= Search(NoteNode, measure, 4).GetItem() self.instance_type = 
Note.NonArpeggiate # TODO: fix this # class", "# self.p_id = \"P1\" # part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) #", "part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def", "Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1)", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1", "FindByIndex( piece.getPart( self.p_id), self.m_num), MeasureNode) def testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1)", "= Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "8: 1, 9: 1, 10: 1, 11: 1, 12: 1, 13: 1, 14:", "self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\"", "= \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure,", "\"Piccolo\" self.note_num = {1: 4, 2: 4, 3: 1, 4: 1, 5: 1,", "measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure", "for measure in keys: if measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance(", "measure in keys: if measure in self.note_num: measure_obj = part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search(", "self.value) class Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "= part.getMeasure(measure=1, staff=1) self.item = 
Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.NonArpeggiate # TODO:", "Note.Glissando class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "testNotes(self): part = piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for measure in", "= Note.Slide class Note2Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "self.value = 1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value", "measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part", "measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part", "24: 1, 25: 1, 26: 1, 27: 1, 28: 1, 29: 1, 30:", "staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self):", "16: 1, 17: 1, 18: 1, 19: 1, 20: 1, 21: 1, 22:", "def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id =", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type =", "note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure =", "measure, 4).GetItem() self.instance_type = Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part", "= Search(NoteNode, measure, 1).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1(testBar): def setUp(self): 
part =", "\"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "= \"P1\" self.p_name = \"Piccolo\" self.note_num = {1: 4, 2: 4, 3: 1,", "global piece self.assertTrue(piece.getPart(self.p_id) is not None) self.assertEqual(self.p_name, piece.getPart(self.p_id).GetItem().name) def testMeasures(self): self.assertIsInstance( FindByIndex( piece.getPart(", "Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Glissando class Note3Measure2Type(testBar): def", "Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1)", "def setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\" self.p_name = \"Piccolo\" self.note_num", "1, 6: 1, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1,", "setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note =", "= Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id", "setUp(self): xmlSet.setUp(self) self.m_num = 32 self.p_id = \"P1\" self.p_name = \"Piccolo\" self.note_num =", "1, 12: 1, 13: 1, 14: 1, 15: 1, 16: 1, 17: 1,", "measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id", "= Note.Glissando class Note4Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "= part.getMeasure(measure=measure, staff=1) self.assertIsInstance( Search( 
NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self):", "= Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def", "measure, 2).GetItem() self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part", "self.value = 1 class Note3Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].type self.value = \"stop\" class Note2Measure2Number(testBar): def setUp(self):", "self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "staff=1) self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].number self.value = 1 class Note4Measure2LineType(testBar): def", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 3).GetItem() self.instance_type =", "staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar): def setUp(self):", "= \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "Search(NoteNode, measure, 3).GetItem() self.instance_type = Note.Arpeggiate class Note3Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\"", "4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part =", "= Note.Slide class Note1Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "part.getMeasure(measure=2, staff=1) 
self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar):", "= 32 self.p_id = \"P1\" self.p_name = \"Piccolo\" self.note_num = {1: 4, 2:", "staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def", "measure, 4).GetItem().wrap_notation[1].type self.value = \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part", "self.assertIsInstance( Search( NoteNode, measure_obj.getVoice(1), self.note_num[measure]), NoteNode) class testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"):", "Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1)", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 1).GetItem() self.instance_type", "self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note2Measure2(testBar): def setUp(self):", "= piece.getPart(self.p_id) staff = part.getStaff(1) keys = staff.GetChildrenIndexes() for measure in keys: if", "= part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].lineType self.value = \"wavy\" class", "4).GetItem().wrap_notation[0].type # self.value = \"bottom\" class Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure,", "1, 21: 1, 22: 1, 23: 1, 24: 1, 25: 1, 26: 1,", "Search( NoteNode, measure, 1).GetItem().wrap_notation[0].type self.value = \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id =", "\"top\" class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) 
measure =", "setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) self.item =", "note.wrap_notation[0].direction self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part =", "part = piece.getPart(self.p_id) # measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type", "\"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 2).GetItem()", "Search( NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id =", "= Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\" class Note3Measure2(testBar): def setUp(self): self.p_id", "= \"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "# class Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id = \"P1\" # part =", "class testBar(unittest.TestCase): def testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], self.instance_type) def testEquality(self): if", "part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType", "class Note3Measure2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2,", "Note4Measure1Notation2Type(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1)", "testInstance(self): if hasattr(self, \"instance_type\"): self.assertIsInstance( self.item.wrap_notation[0], 
self.instance_type) def testEquality(self): if hasattr(self, \"value\"): self.assertEqual(self.item,", "self.value = \"down\" class Note4Measure1FirstNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 4).GetItem() self.instance_type = Note.Glissando", "= part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 2).GetItem() self.instance_type = Note.Slide class Note2Measure2Type(testBar):", "= piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value", "= \"solid\" class Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id =", "staff=1) note = Search(NoteNode, measure, 3).GetItem() self.item = note.wrap_notation[0].direction self.value = \"down\" class", "1 class Note1Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].number self.value = 1 class Note3Measure2LineType(testBar): def setUp(self):", "1, 28: 1, 29: 1, 30: 1, 31: 1, 32: 1} def testParts(self):", "= \"start\" class Note1Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure", "self.value = \"top\" class Note1Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "measure = part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 2).GetItem().wrap_notation[0].lineType self.value = \"solid\"", "Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = 
Search(NoteNode,", "2).GetItem() self.item = note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def setUp(self): part =", "1, 29: 1, 30: 1, 31: 1, 32: 1} def testParts(self): global piece", "Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "= \"Piccolo\" self.note_num = {1: 4, 2: 4, 3: 1, 4: 1, 5:", "\"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item = Search(NoteNode, measure, 3).GetItem()", "Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "# measure = part.getMeasure(measure=1,staff=1) # self.item = Search(NoteNode, measure, 4).GetItem().wrap_notation[0].type # self.value =", "Note2Measure2(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1)", "import MeasureNode from MuseParse.classes.ObjectHierarchy.ItemClasses import Note from MuseParse.SampleMusicXML import testcases partname = \"arpeggiosAndGlissandos.xml\"", "self.item = Search( NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def setUp(self):", "1 class Note4Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "1, 30: 1, 31: 1, 32: 1} def testParts(self): global piece self.assertTrue(piece.getPart(self.p_id) is", "Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure =", "\"up\" class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item", "setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id) measure = part.getMeasure(measure=2, staff=1) self.item =", "= piece.getPart(self.p_id) measure = 
part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 1).GetItem().wrap_notation[0].lineType self.value", "directory = testcases.__path__._path[0] piece = parsePiece(os.path.join(directory, partname)) class testArpeg(xmlSet): def setUp(self): xmlSet.setUp(self) self.m_num", "self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar): def setUp(self):", "Note4Measure1Notation1Type(testBar): # def setUp(self): # self.p_id = \"P1\" # part = piece.getPart(self.p_id) #", "self.item = note.wrap_notation[0].direction self.value = \"up\" class Note3Measure1(testBar): def setUp(self): part = piece.getPart(\"P1\")", "if hasattr(self, \"value\"): self.assertEqual(self.item, self.value) class Note1Measure1(testBar): def setUp(self): self.p_id = \"P1\" part", "NoteNode, measure, 4).GetItem().wrap_notation[0].type self.value = \"stop\" class Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\"", "32 self.p_id = \"P1\" self.p_name = \"Piccolo\" self.note_num = {1: 4, 2: 4,", "\"P1\" self.p_name = \"Piccolo\" self.note_num = {1: 4, 2: 4, 3: 1, 4:", "self.instance_type = Note.Arpeggiate class Note2Measure1DirectionValue(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "part.getMeasure(measure=2, staff=1) self.item = Search( NoteNode, measure, 3).GetItem().wrap_notation[0].type self.value = \"start\" class Note3Measure2Number(testBar):", "measure, 4).GetItem() self.instance_type = Note.NonArpeggiate class Note4Measure1SecondNotation(testBar): def setUp(self): self.p_id = \"P1\" part", "piece.getPart(self.p_id) measure = part.getMeasure(measure=1, staff=1) note = Search(NoteNode, measure, 2).GetItem() self.item = note.wrap_notation[0].direction", "NoteNode, measure, 2).GetItem().wrap_notation[0].number self.value = 1 class Note2Measure2LineType(testBar): def setUp(self): self.p_id = \"P1\"", "self.value = \"stop\" class 
Note4Measure2Number(testBar): def setUp(self): self.p_id = \"P1\" part = piece.getPart(self.p_id)", "setUp(self): part = piece.getPart(\"P1\") measure = part.getMeasure(measure=1, staff=1) self.item = Search(NoteNode, measure, 3).GetItem()" ]
[ "List, Optional __all__ = ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class", "<filename>packages/pegasus-python/src/Pegasus/json.py<gh_stars>0 \"\"\" Abstract :mod:`json` with Pegasus specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\"", "to \"aaa\" return str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return", "for d in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return fp.getvalue() if isinstance(fp,", "\"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID): return str(o) elif", "Enum): return o.name elif isinstance(o, Path): # Serializing Python `Path` objects to `str`", "isinstance(s, str): fp = io.StringIO(s) elif hasattr(s, \"read\"): fp = s else: raise", "= _json.load loads = _json.loads def load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize", "*args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to a JSON formatted ``str``. [extended_summary]", "def default(self, o): if isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum): return o.name", "\"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID): return", "# Disables pretty printing, when :meth:`dump_all` is called; to support ndjson. kwargs.update({\"indent\": None,", "file\") for d in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder)", "= io.StringIO(s) elif hasattr(s, \"read\"): fp = s else: raise TypeError(\"s must either", "o) load = _json.load loads = _json.loads def load_all(s, *args, **kwargs) -> Iterator:", "**kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to a JSON formatted ``str``. 
[extended_summary] :param", "fp = s else: raise TypeError(\"s must either be a string or an", "type(o) ) return _json.JSONEncoder.default(self, o) load = _json.load loads = _json.loads def load_all(s,", "(a ``str``, ``bytes`` or ``bytearray`` instance containing a JSON document) to a Python", "if isinstance(s, str): fp = io.StringIO(s) elif hasattr(s, \"read\"): fp = s else:", "\"\"\" import io import json as _json import logging import uuid from enum", "called; to support ndjson. kwargs.update({\"indent\": None, \"separators\": None}) for d in objs: fp.write(dumps(d,", "\"\"\" if isinstance(s, str): fp = io.StringIO(s) elif hasattr(s, \"read\"): fp = s", "NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif", "\"write\"): fp = fp else: raise TypeError(\"s must either be None or an", "fp = io.StringIO(s) elif hasattr(s, \"read\"): fp = s else: raise TypeError(\"s must", "uuid.UUID): return str(o) elif isinstance(o, Enum): return o.name elif isinstance(o, Path): # Serializing", "= s else: raise TypeError(\"s must either be a string or an open", "\"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID):", "\"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a JSON document)", "hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o, k) for k", "elif isinstance(o, Path): # Serializing Python `Path` objects to `str` # NOTE: Path(\"./aaa\")", "io.StringIO() elif hasattr(fp, \"write\"): fp = fp else: raise TypeError(\"s must either be", "defaults. .. 
moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import json as _json import", "\"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return {k:", ":return: [description] :rtype: Iterator \"\"\" if isinstance(s, str): fp = io.StringIO(s) elif hasattr(s,", ":type obj: Dict :return: [description] :rtype: Iterator \"\"\" if isinstance(s, str): fp =", "Python `Path` objects to `str` # NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o)", "load = _json.load loads = _json.loads def load_all(s, *args, **kwargs) -> Iterator: \"\"\"", "Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a JSON", "hasattr(fp, \"write\"): fp = fp else: raise TypeError(\"s must either be None or", "document) to a Python dictionary. [extended_summary] :param obj: [description] :type obj: Dict :return:", "import logging import uuid from enum import Enum from functools import partial from", "**kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing", "objs: List :return: [description] :rtype: str \"\"\" if fp is None: fp =", "**kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None,", "import json as _json import logging import uuid from enum import Enum from", "either be a string or an open text file\") for d in fp.readline():", "default(self, o): if isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum): return o.name elif", ":param obj: [description] :type obj: Dict :return: [description] :rtype: Iterator \"\"\" if isinstance(s,", "Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a JSON document) to", "from functools import partial from pathlib import Path from typing import Iterator, List,", "None}) for d in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return fp.getvalue() if", "partial(_json.dump, 
cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs) ->", "objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return fp.getvalue() if isinstance(fp, io.StringIO) else None", "``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a JSON document) to a", "to `str` # NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o) elif hasattr(o, \"__html__\"):", "printing, when :meth:`dump_all` is called; to support ndjson. kwargs.update({\"indent\": None, \"separators\": None}) for", "Serialize ``obj`` to a JSON formatted ``str``. [extended_summary] :param objs: [description] :type objs:", "else: raise TypeError(\"s must either be None or an open text file\") #", "TypeError(\"s must either be None or an open text file\") # Disables pretty", "Enum from functools import partial from pathlib import Path from typing import Iterator,", "when :meth:`dump_all` is called; to support ndjson. kwargs.update({\"indent\": None, \"separators\": None}) for d", "as _json import logging import uuid from enum import Enum from functools import", "# Serializing Python `Path` objects to `str` # NOTE: Path(\"./aaa\") serializes to \"aaa\"", "``obj`` to a JSON formatted ``str``. [extended_summary] :param objs: [description] :type objs: List", "\"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o,", "_json.JSONEncoder.default(self, o) load = _json.load loads = _json.loads def load_all(s, *args, **kwargs) ->", "= partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs)", "instance containing a JSON document) to a Python dictionary. 
[extended_summary] :param obj: [description]", "k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to handle type %s\" %", "from pathlib import Path from typing import Iterator, List, Optional __all__ = (", "[description] :rtype: Iterator \"\"\" if isinstance(s, str): fp = io.StringIO(s) elif hasattr(s, \"read\"):", "isinstance(o, Path): # Serializing Python `Path` objects to `str` # NOTE: Path(\"./aaa\") serializes", "_json.loads def load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes``", "for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to handle type %s\"", "return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o,", "to support ndjson. kwargs.update({\"indent\": None, \"separators\": None}) for d in objs: fp.write(dumps(d, *args,", "obj: [description] :type obj: Dict :return: [description] :rtype: Iterator \"\"\" if isinstance(s, str):", ":rtype: str \"\"\" if fp is None: fp = io.StringIO() elif hasattr(fp, \"write\"):", "d in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps =", "k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to handle type", "Iterator, List, Optional __all__ = ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", )", "json as _json import logging import uuid from enum import Enum from functools", "str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o,", "or ``bytearray`` instance containing a JSON document) to a Python dictionary. [extended_summary] :param", "is called; to support ndjson. 
kwargs.update({\"indent\": None, \"separators\": None}) for d in objs:", "io import json as _json import logging import uuid from enum import Enum", "<NAME> <<EMAIL>> \"\"\" import io import json as _json import logging import uuid", "# NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o) elif hasattr(o, \"__html__\"): return o.__html__()", "else: logging.getLogger(__name__).warning( \"Don't know how to handle type %s\" % type(o) ) return", "open text file\") # Disables pretty printing, when :meth:`dump_all` is called; to support", "functools import partial from pathlib import Path from typing import Iterator, List, Optional", "List :return: [description] :rtype: str \"\"\" if fp is None: fp = io.StringIO()", "pretty printing, when :meth:`dump_all` is called; to support ndjson. kwargs.update({\"indent\": None, \"separators\": None})", "yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def", "logging.getLogger(__name__).warning( \"Don't know how to handle type %s\" % type(o) ) return _json.JSONEncoder.default(self,", "type %s\" % type(o) ) return _json.JSONEncoder.default(self, o) load = _json.load loads =", "return _json.JSONEncoder.default(self, o) load = _json.load loads = _json.loads def load_all(s, *args, **kwargs)", "d in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return fp.getvalue() if isinstance(fp, io.StringIO)", ":rtype: Iterator \"\"\" if isinstance(s, str): fp = io.StringIO(s) elif hasattr(s, \"read\"): fp", "%s\" % type(o) ) return _json.JSONEncoder.default(self, o) load = _json.load loads = _json.loads", "return str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif", "to a JSON formatted ``str``. 
[extended_summary] :param objs: [description] :type objs: List :return:", "( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o):", "\"\"\" Serialize ``obj`` to a JSON formatted ``str``. [extended_summary] :param objs: [description] :type", "cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]:", "typing import Iterator, List, Optional __all__ = ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\",", "obj: Dict :return: [description] :rtype: Iterator \"\"\" if isinstance(s, str): fp = io.StringIO(s)", "Python dictionary. [extended_summary] :param obj: [description] :type obj: Dict :return: [description] :rtype: Iterator", "*args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List,", "fp = fp else: raise TypeError(\"s must either be None or an open", "in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return fp.getvalue() if isinstance(fp, io.StringIO) else", "if isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum): return o.name elif isinstance(o, Path):", "enum import Enum from functools import partial from pathlib import Path from typing", "dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args,", "uuid from enum import Enum from functools import partial from pathlib import Path", "return str(o) elif isinstance(o, Enum): return o.name elif isinstance(o, Path): # Serializing Python", "str): fp = io.StringIO(s) elif hasattr(s, \"read\"): fp = s else: raise TypeError(\"s", "loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs:", "Iterator \"\"\" if isinstance(s, str): fp = io.StringIO(s) elif 
hasattr(s, \"read\"): fp =", "dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to a JSON", "o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o, k) for k in o.__table__.columns.keys()} else:", "{k: getattr(o, k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to", "\"Don't know how to handle type %s\" % type(o) ) return _json.JSONEncoder.default(self, o)", "to handle type %s\" % type(o) ) return _json.JSONEncoder.default(self, o) load = _json.load", "a Python dictionary. [extended_summary] :param obj: [description] :type obj: Dict :return: [description] :rtype:", "hasattr(s, \"read\"): fp = s else: raise TypeError(\"s must either be a string", ":return: [description] :rtype: str \"\"\" if fp is None: fp = io.StringIO() elif", "\"\"\" if fp is None: fp = io.StringIO() elif hasattr(fp, \"write\"): fp =", "know how to handle type %s\" % type(o) ) return _json.JSONEncoder.default(self, o) load", "text file\") for d in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump,", "[description] :type obj: Dict :return: [description] :rtype: Iterator \"\"\" if isinstance(s, str): fp", "s else: raise TypeError(\"s must either be a string or an open text", "file\") # Disables pretty printing, when :meth:`dump_all` is called; to support ndjson. 
kwargs.update({\"indent\":", "from enum import Enum from functools import partial from pathlib import Path from", "string or an open text file\") for d in fp.readline(): yield loads(d.strip(), *args,", "elif hasattr(s, \"read\"): fp = s else: raise TypeError(\"s must either be a", "load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray``", "fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to a JSON formatted ``str``.", "must either be None or an open text file\") # Disables pretty printing,", "import Iterator, List, Optional __all__ = ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\",", "Optional[str]: \"\"\" Serialize ``obj`` to a JSON formatted ``str``. [extended_summary] :param objs: [description]", "a string or an open text file\") for d in fp.readline(): yield loads(d.strip(),", "kwargs.update({\"indent\": None, \"separators\": None}) for d in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\")", "dictionary. [extended_summary] :param obj: [description] :type obj: Dict :return: [description] :rtype: Iterator \"\"\"", "= _json.loads def load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a ``str``,", ") return _json.JSONEncoder.default(self, o) load = _json.load loads = _json.loads def load_all(s, *args,", "with Pegasus specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import json", "None or an open text file\") # Disables pretty printing, when :meth:`dump_all` is", "a JSON formatted ``str``. 
[extended_summary] :param objs: [description] :type objs: List :return: [description]", "_json import logging import uuid from enum import Enum from functools import partial", "import uuid from enum import Enum from functools import partial from pathlib import", "class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum):", "elif hasattr(o, \"__table__\"): return {k: getattr(o, k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning(", "_json.load loads = _json.loads def load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize ``s``", ":mod:`json` with Pegasus specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import", "\"\"\" Abstract :mod:`json` with Pegasus specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import", "elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"):", "[description] :type objs: List :return: [description] :rtype: str \"\"\" if fp is None:", "Optional __all__ = ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder):", "partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj``", "``bytearray`` instance containing a JSON document) to a Python dictionary. 
[extended_summary] :param obj:", "elif isinstance(o, Enum): return o.name elif isinstance(o, Path): # Serializing Python `Path` objects", "import io import json as _json import logging import uuid from enum import", "fp else: raise TypeError(\"s must either be None or an open text file\")", "objects to `str` # NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o) elif hasattr(o,", "return o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o, k) for k in o.__table__.columns.keys()}", "return {k: getattr(o, k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how", "formatted ``str``. [extended_summary] :param objs: [description] :type objs: List :return: [description] :rtype: str", "\"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID): return str(o)", "import Path from typing import Iterator, List, Optional __all__ = ( \"load\", \"loads\",", "Path from typing import Iterator, List, Optional __all__ = ( \"load\", \"loads\", \"load_all\",", "partial from pathlib import Path from typing import Iterator, List, Optional __all__ =", "``str``. [extended_summary] :param objs: [description] :type objs: List :return: [description] :rtype: str \"\"\"", ":param objs: [description] :type objs: List :return: [description] :rtype: str \"\"\" if fp", "an open text file\") # Disables pretty printing, when :meth:`dump_all` is called; to", "\"separators\": None}) for d in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return fp.getvalue()", "``str``, ``bytes`` or ``bytearray`` instance containing a JSON document) to a Python dictionary.", "a JSON document) to a Python dictionary. 
[extended_summary] :param obj: [description] :type obj:", "= fp else: raise TypeError(\"s must either be None or an open text", "import Enum from functools import partial from pathlib import Path from typing import", "fp.readline(): yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps, cls=_CustomJSONEncoder)", "raise TypeError(\"s must either be a string or an open text file\") for", "for d in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps", "str \"\"\" if fp is None: fp = io.StringIO() elif hasattr(fp, \"write\"): fp", "_CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum): return", "-> Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a", "o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o, k)", "elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o, k) for", "text file\") # Disables pretty printing, when :meth:`dump_all` is called; to support ndjson.", "JSON document) to a Python dictionary. [extended_summary] :param obj: [description] :type obj: Dict", "return o.name elif isinstance(o, Path): # Serializing Python `Path` objects to `str` #", "fp is None: fp = io.StringIO() elif hasattr(fp, \"write\"): fp = fp else:", "% type(o) ) return _json.JSONEncoder.default(self, o) load = _json.load loads = _json.loads def", "io.StringIO(s) elif hasattr(s, \"read\"): fp = s else: raise TypeError(\"s must either be", "<<EMAIL>> \"\"\" import io import json as _json import logging import uuid from", "must either be a string or an open text file\") for d in", "str(o) elif isinstance(o, Enum): return o.name elif isinstance(o, Path): # Serializing Python `Path`", "containing a JSON document) to a Python dictionary. 
[extended_summary] :param obj: [description] :type", "to a Python dictionary. [extended_summary] :param obj: [description] :type obj: Dict :return: [description]", ":type objs: List :return: [description] :rtype: str \"\"\" if fp is None: fp", "loads = _json.loads def load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a", "isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum): return o.name elif isinstance(o, Path): #", "import partial from pathlib import Path from typing import Iterator, List, Optional __all__", "either be None or an open text file\") # Disables pretty printing, when", "List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to a JSON formatted", "`Path` objects to `str` # NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o) elif", "[description] :rtype: str \"\"\" if fp is None: fp = io.StringIO() elif hasattr(fp,", "specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import json as _json", "= io.StringIO() elif hasattr(fp, \"write\"): fp = fp else: raise TypeError(\"s must either", "hasattr(o, \"__table__\"): return {k: getattr(o, k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't", "o.name elif isinstance(o, Path): # Serializing Python `Path` objects to `str` # NOTE:", "cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to", "getattr(o, k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to handle", "``bytes`` or ``bytearray`` instance containing a JSON document) to a Python dictionary. [extended_summary]", "be a string or an open text file\") for d in fp.readline(): yield", "-> Optional[str]: \"\"\" Serialize ``obj`` to a JSON formatted ``str``. 
[extended_summary] :param objs:", ") class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if isinstance(o, uuid.UUID): return str(o) elif isinstance(o,", "serializes to \"aaa\" return str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"):", "in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to handle type %s\" % type(o)", "raise TypeError(\"s must either be None or an open text file\") # Disables", "None: fp = io.StringIO() elif hasattr(fp, \"write\"): fp = fp else: raise TypeError(\"s", "hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return", "\"aaa\" return str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o, \"__json__\"): return o.__json__()", "open text file\") for d in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump =", "= partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize", "is None: fp = io.StringIO() elif hasattr(fp, \"write\"): fp = fp else: raise", "an open text file\") for d in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump", "Disables pretty printing, when :meth:`dump_all` is called; to support ndjson. kwargs.update({\"indent\": None, \"separators\":", "__all__ = ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def", "\"read\"): fp = s else: raise TypeError(\"s must either be a string or", "Path(\"./aaa\") serializes to \"aaa\" return str(o) elif hasattr(o, \"__html__\"): return o.__html__() elif hasattr(o,", "ndjson. kwargs.update({\"indent\": None, \"separators\": None}) for d in objs: fp.write(dumps(d, *args, **kwargs) +", "JSON formatted ``str``. 
[extended_summary] :param objs: [description] :type objs: List :return: [description] :rtype:", "TypeError(\"s must either be a string or an open text file\") for d", "in fp.readline(): yield loads(d.strip(), *args, **kwargs) dump = partial(_json.dump, cls=_CustomJSONEncoder) dumps = partial(_json.dumps,", "= ( \"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self,", "def dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\" Serialize ``obj`` to a", "from typing import Iterator, List, Optional __all__ = ( \"load\", \"loads\", \"load_all\", \"dump\",", "be None or an open text file\") # Disables pretty printing, when :meth:`dump_all`", "how to handle type %s\" % type(o) ) return _json.JSONEncoder.default(self, o) load =", "Pegasus specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import json as", "o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know how to handle type %s\" % type(o) )", "elif hasattr(fp, \"write\"): fp = fp else: raise TypeError(\"s must either be None", "Path): # Serializing Python `Path` objects to `str` # NOTE: Path(\"./aaa\") serializes to", "\"__table__\"): return {k: getattr(o, k) for k in o.__table__.columns.keys()} else: logging.getLogger(__name__).warning( \"Don't know", "pathlib import Path from typing import Iterator, List, Optional __all__ = ( \"load\",", "or an open text file\") # Disables pretty printing, when :meth:`dump_all` is called;", "isinstance(o, Enum): return o.name elif isinstance(o, Path): # Serializing Python `Path` objects to", "o): if isinstance(o, uuid.UUID): return str(o) elif isinstance(o, Enum): return o.name elif isinstance(o,", "Dict :return: [description] :rtype: Iterator \"\"\" if isinstance(s, str): fp = io.StringIO(s) elif", "if fp is None: fp = io.StringIO() elif hasattr(fp, \"write\"): fp = fp", "[extended_summary] :param objs: [description] :type objs: List 
:return: [description] :rtype: str \"\"\" if", "`str` # NOTE: Path(\"./aaa\") serializes to \"aaa\" return str(o) elif hasattr(o, \"__html__\"): return", "Serializing Python `Path` objects to `str` # NOTE: Path(\"./aaa\") serializes to \"aaa\" return", "*args, **kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance", "moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import json as _json import logging import", "or an open text file\") for d in fp.readline(): yield loads(d.strip(), *args, **kwargs)", "handle type %s\" % type(o) ) return _json.JSONEncoder.default(self, o) load = _json.load loads", "dumps = partial(_json.dumps, cls=_CustomJSONEncoder) def dump_all(objs: List, fp=None, *args, **kwargs) -> Optional[str]: \"\"\"", "None, \"separators\": None}) for d in objs: fp.write(dumps(d, *args, **kwargs) + \"\\n\") return", "\"load\", \"loads\", \"load_all\", \"dump\", \"dumps\", \"dump_all\", ) class _CustomJSONEncoder(_json.JSONEncoder): def default(self, o): if", "else: raise TypeError(\"s must either be a string or an open text file\")", "\"__json__\"): return o.__json__() elif hasattr(o, \"__table__\"): return {k: getattr(o, k) for k in", "support ndjson. kwargs.update({\"indent\": None, \"separators\": None}) for d in objs: fp.write(dumps(d, *args, **kwargs)", "logging import uuid from enum import Enum from functools import partial from pathlib", "def load_all(s, *args, **kwargs) -> Iterator: \"\"\" Deserialize ``s`` (a ``str``, ``bytes`` or", ".. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io import json as _json import logging", ":meth:`dump_all` is called; to support ndjson. 
kwargs.update({\"indent\": None, \"separators\": None}) for d in", "fp = io.StringIO() elif hasattr(fp, \"write\"): fp = fp else: raise TypeError(\"s must", "[extended_summary] :param obj: [description] :type obj: Dict :return: [description] :rtype: Iterator \"\"\" if", "objs: [description] :type objs: List :return: [description] :rtype: str \"\"\" if fp is", "Abstract :mod:`json` with Pegasus specific defaults. .. moduleauthor:: <NAME> <<EMAIL>> \"\"\" import io" ]
[ "rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if", "value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r in chemical_source.find(): results = []", "info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find: '+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','')", "or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and", "with open(f,encoding='utf8') as fp: for l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else:", "'):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date,", "in file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp: for l in fp.readlines(): l=l.strip()", "identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id) def chemical_examination_split(): ''' { date:20160202,", "logger.error('wrong format: '+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' }", "f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) #", "results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in", "listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for f in file_list: logger.info(f) id='' with", "= rr.strip() if rr: # print(rr) if '):' in rr: rrs=rr.split(':') date =", "if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 
'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f:", "for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not", "logger.info(f) id='' with open(f,encoding='utf8') as fp: for l in fp.readlines(): l=l.strip() if len(l)==6:", "'+id) id='' else: logger.error('wrong format: '+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93,", "rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1]", "format: '+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } '''", "and id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else:", "if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results})", "datetime import datetime from logger import logger from os import listdir, path from", "except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id) def chemical_examination_split():", "as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info=", "unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r in chemical_source.find(): results = [] logger.info(r['_id'])", "'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if", "if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find: '+p['_id']) continue #", "not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if 
not info: logger.error('cannot find: '+p['_id']) continue # logger.info(info)", "bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date'])", "date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date']) for c", "date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date']) for c in p['data']:", "chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for f in file_list:", "c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date']", "c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均' not in c['name']: if", "'血红蛋白'in c['name'] and '平均' not in c['name']: if '血红蛋白'== c['name']: # print(c) if", "if '血红蛋白'in c['name'] and '平均' not in c['name']: if '血红蛋白'== c['name']: # print(c)", "xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start =", "l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0 and id !='':", "{ date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r in chemical_source.find():", "one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for", "'.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with 
open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p", "in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot", "if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] #", "and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in", "r in chemical_source.find(): results = [] logger.info(r['_id']) for rr in r['data'].split('|'): rr =", "rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2:", "'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:])", "date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now()", "chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find:", "if len(l)>0 and id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id)", "bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and", "in c['name']: if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date']", "logger from os import listdir, path from db import chemical_source, 
DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root):", "logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数,", "date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':''", "if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date']", "id=l.upper() else: if len(l)>0 and id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier:", "logger.info(r['_id']) for rr in r['data'].split('|'): rr = rr.strip() if rr: # print(rr) if", "chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id) def", "try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id)", "date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r in chemical_source.find(): results", "from os import listdir, path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop()", "c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name']", "c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\"", "# chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() - start)) if __name__ 
== '__main__':", "logger.error('cannot find: '+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out", "in dir_list] for f in file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp: for", "date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均' not in", "len(l)>0 and id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id=''", "import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in", "date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date']) for c in", "f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date']) for c in p['data']: if '白蛋白'", "'白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date']", "rr in r['data'].split('|'): rr = rr.strip() if rr: # print(rr) if '):' in", "'平均' not in c['name']: if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in:", "# logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in #", "path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f)", "paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find: '+p['_id']) continue # 
logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in=''", "[].sort() # p['data'].sort(key=lambda a: a['date']) for c in p['data']: if '白蛋白' == c['name']", "info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find: '+p['_id'])", "'白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out:", "logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n')", "and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..') dir_root =", "import listdir, path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list =", "'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r)", "c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date']", "if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..')", "rr: # print(rr) if '):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split('", "print(rr) if '):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r=", "''' chemical_splited.drop() for r in 
chemical_source.find(): results = [] logger.info(r['_id']) for rr in", "open(f,encoding='utf8') as fp: for l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if", "find: '+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in", "one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3:", "!='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format:", "= r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() - start)) if __name__", "date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') #", "from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for", "in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0],", "import logger from os import listdir, path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def", "datetime from logger import logger from os import listdir, path from db import", "dir_list] for f in file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp: for l", "def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for f", "len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) 
chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n')", "not info: logger.error('cannot find: '+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in=''", "} if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'],", "def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']})", "f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()})", "else: if len(l)>0 and id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError:", "date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort()", "id='' else: logger.error('wrong format: '+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L,", "xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda", "# [].sort() # p['data'].sort(key=lambda a: a['date']) for c in p['data']: if '白蛋白' ==", "info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find: '+p['_id']) continue # logger.info(info) 
date_in=info['入科日期'][0:10].replace('-','')", "r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() - start)) if __name__ ==", "os import listdir, path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list", "import datetime from logger import logger from os import listdir, path from db", "c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start", "dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for f in file_list: logger.info(f)", "paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info: logger.error('cannot find: '+p['_id']) continue", "logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() - start))", "find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if", "chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() - start)) if __name__ == '__main__': main()", "# print(rr) if '):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ')", "c in p['data']: if '白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date']", "bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a:", "rr.strip() 
if rr: # print(rr) if '):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1]", "f in dir_list] for f in file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp:", "if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均' not", "for l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0 and id", "logger import logger from os import listdir, path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info", "for r in chemical_source.find(): results = [] logger.info(r['_id']) for rr in r['data'].split('|'): rr", "print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date']", "xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" #", "datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() -", "id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong", "id='' with open(f,encoding='utf8') as fp: for l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper()", "# print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value']", "date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root)", "a['date']) for c in p['data']: 
if '白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']:", "as fp: for l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0", "== c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if", "chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list]", "one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as", "if rr: # print(rr) if '):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1]", "xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() #", "date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in # f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date']) for", "p['data']: if '白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in:", "f in file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp: for l in fp.readlines():", "date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if", "date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main(): start = datetime.now() logger.info('hello..') dir_root", "as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id) def chemical_examination_split(): ''' {", "if len(l)==6: id=l.upper() else: if len(l)>0 and id 
!='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError", "if '白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value']", "and '平均' not in c['name']: if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and", "c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out:", "= listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for f in file_list: logger.info(f) id=''", "in r['data'].split('|'): rr = rr.strip() if rr: # print(rr) if '):' in rr:", "'+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop()", "chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find():", "bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均' not in c['name']: if '血红蛋白'==", "from logger import logger from os import listdir, path from db import chemical_source,", "and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均' not in c['name']:", "with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not", "# if '血红蛋白'in c['name'] and '平均' not in c['name']: if '血红蛋白'== c['name']: #", "''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r in", "in p['data']: if '白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']: if date_bdb_in>=c['date'] and", "else: logger.error('wrong format: '+id) def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, 
unit:10^9/L, mark:'↑'", "# p['data'].sort(key=lambda a: a['date']) for c in p['data']: if '白蛋白' == c['name'] or", "for c in p['data']: if '白蛋白' == c['name'] or '白蛋白(干片法)' == c['name']: if", "'血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and", "if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with", "file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp: for l in fp.readlines(): l=l.strip() if", "info: logger.error('cannot find: '+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out=''", "one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx():", "results = [] logger.info(r['_id']) for rr in r['data'].split('|'): rr = rr.strip() if rr:", "# f.write(p['_id']+'\\n') # [].sort() # p['data'].sort(key=lambda a: a['date']) for c in p['data']: if", "for f in dir_list] for f in file_list: logger.info(f) id='' with open(f,encoding='utf8') as", "} ''' chemical_splited.drop() for r in chemical_source.find(): results = [] logger.info(r['_id']) for rr", "listdir, path from db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root)", "p['data'].sort(key=lambda a: a['date']) for c in p['data']: if '白蛋白' == c['name'] or '白蛋白(干片法)'", "date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value'] date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均'", "continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') 
date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out date_bdb_out=date_in", "<reponame>goowell/DrAdvice from datetime import datetime from logger import logger from os import listdir,", "for rr in r['data'].split('|'): rr = rr.strip() if rr: # print(rr) if '):'", "f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if", "if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n')", "chemical_source.find(): results = [] logger.info(r['_id']) for rr in r['data'].split('|'): rr = rr.strip() if", "len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def", "for f in file_list: logger.info(f) id='' with open(f,encoding='utf8') as fp: for l in", "rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'',", "date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def", "'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4:", "db import chemical_source, DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f", "r['data'].split('|'): 
rr = rr.strip() if rr: # print(rr) if '):' in rr: rrs=rr.split(':')", "DuplicateKeyError as identifier: logger.error('duplicateKeyError: '+id) id='' else: logger.error('wrong format: '+id) def chemical_examination_split(): '''", "date_bdb_out=c['date'] # if '血红蛋白'in c['name'] and '平均' not in c['name']: if '血红蛋白'== c['name']:", "chemical_splited.drop() for r in chemical_source.find(): results = [] logger.info(r['_id']) for rr in r['data'].split('|'):", "open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info:", "not in c['name']: if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value']", "name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r in chemical_source.find(): results =", "len(l)==6: id=l.upper() else: if len(l)>0 and id !='': try: chemical_source.insert_one({'_id':id,'data':l}) except DuplicateKeyError as", "[] logger.info(r['_id']) for rr in r['data'].split('|'): rr = rr.strip() if rr: # print(rr)", "c['name'] and '平均' not in c['name']: if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date']", "rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'',", "if '):' in rr: rrs=rr.split(':') date = rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= {", "') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if", "== c['name']: if date_bdb_in>=c['date'] and c['date']>=date_in: bdb_in=c['value'] date_bdb_in=c['date'] if date_bdb_out<=c['date'] and c['date']<=date_out: bdb_out=c['value']", "dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now() - start)) if", "len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: 
one_r['unit']=' '.join(rr[3:]) results.append(one_r) chemical_splited.insert_one({'_id':r['_id'], 'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a')", "def chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for", "'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2] if len(rr)>=4: one_r['unit']='", "fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0 and id !='': try: chemical_source.insert_one({'_id':id,'data':l})", "c['name']: if '血红蛋白'== c['name']: # print(c) if date_xhdb_in>=c['date'] and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if", "in chemical_source.find(): results = [] logger.info(r['_id']) for rr in r['data'].split('|'): rr = rr.strip()", "start = datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done:", "def main(): start = datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split()", "chemical_examination_split(): ''' { date:20160202, name:白细胞计数, value:16.93, unit:10^9/L, mark:'↑' } ''' chemical_splited.drop() for r", "{ 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' } if len(rr)>=2: one_r['value']=rr[1] if len(rr)>=3: one_r['mark']=rr[2]", "'data':results}) def find_xx(): with open('chemical_examination.csv',encoding='utf8',mode='a') as f: f.write('住院号,血红蛋白(入科),血红蛋白(出科),白蛋白(入科),白蛋白(出科)'+'\\n') for p in chemical_splited.find(): info=", "= datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx() logger.info('done: '+str(datetime.now()", "l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0 and id !='': try: 
chemical_source.insert_one({'_id':id,'data':l}) except", "= rrs[0][-9:-1] rr=rrs[1] rr=rr.split(' ') one_r= { 'date':date, 'name':rr[0], 'value':'', 'unit':'', 'mark':'' }", "= [] logger.info(r['_id']) for rr in r['data'].split('|'): rr = rr.strip() if rr: #", "mark:'↑' } ''' chemical_splited.drop() for r in chemical_source.find(): results = [] logger.info(r['_id']) for", "if not info: logger.error('cannot find: '+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out=''", "DuplicateKeyError,chemical_splited,paients_info def chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for", "'+p['_id']) continue # logger.info(info) date_in=info['入科日期'][0:10].replace('-','') date_out=info['出科日期'][0:10].replace('-','') xhdb_in='' xhdb_out='' bdb_in='' bdb_out='' date_xhdb_in=date_out date_xhdb_out=date_in date_bdb_in=date_out", "from datetime import datetime from logger import logger from os import listdir, path", "file_list=[path.join(file_root,f) for f in dir_list] for f in file_list: logger.info(f) id='' with open(f,encoding='utf8')", "in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0 and id !='': try:", "rr = rr.strip() if rr: # print(rr) if '):' in rr: rrs=rr.split(':') date", "p in chemical_splited.find(): info= paients_info.find_one({'住院号':p['_id']}) if not info: info= paients_info.find_one({'住院号':p['_id'].lower()}) if not info:", "a: a['date']) for c in p['data']: if '白蛋白' == c['name'] or '白蛋白(干片法)' ==", "and c['date']>=date_in: xhdb_in=c['value'] date_xhdb_in=c['date'] if date_xhdb_out<=c['date'] and c['date']<=date_out: xhdb_out=c['value'] date_xhdb_out=c['date'] f.write(','.join([p['_id'],xhdb_in,xhdb_out,bdb_in,bdb_out])+'\\n') def main():", "fp: for l in fp.readlines(): l=l.strip() if len(l)==6: id=l.upper() else: if len(l)>0 and", "main(): start = 
datetime.now() logger.info('hello..') dir_root = r\"C:\\pdata\\xxxxxxxxx\\huayan\" # chemical_examination_parse(dir_root) # chemical_examination_split() find_xx()", "chemical_examination_parse(file_root): chemical_source.drop() dir_list = listdir(file_root) file_list=[path.join(file_root,f) for f in dir_list] for f in" ]
[ "(\"pszName\", c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\",", "Stops all threads from a specified process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes =", "SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND", "''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult '''", "WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, len(Bytes), WriteBuffer)", "), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ] class TMAPIExports:", "def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise", "POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets", "c_uint64 ), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\")", "''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype =", "= (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR =", "(-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19)", "SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR", "SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) 
SN_E_NOT_SUPPORTED_IN_SDK_VERSION", "Returns SN_E_BUSY if a search is already in progress. ''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning", "a list of processes running on the specified target. ''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList", "hTarget, UINT32 uProcessID ); Stops all threads from a specified process. ''' self.SNPS3ProcessStop", "= False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected", "processes running on the specified target. ''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [", "communications (but does not close the Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms", "POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32", "UINT64 uThreadID, UINT64 uAddress, int nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory =", "ctypes import _Pointer from .common import CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING", "= (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001)", "= NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)()", "SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex self.IsConnected = True return True", "Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) !=", "on the specified target. 
''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32),", "c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit,", "= [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget,", "SNPS3Connect( HTARGET hTarget, const char *pszApplication ); Connect to specified target. ''' self.SNPS3Connect", "hTarget, const char *pszApplication ); Connect to specified target. ''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect", "= TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise", "UINT32 uProcessID ); Stops all threads from a specified process. ''' self.SNPS3ProcessStop =", "= (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF =", "= self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts", "(\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ]", "= (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL =", "= (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ),", "hTarget ); Gets the default target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [", "SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches a list of", "SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the default target. 
''' self.SNPS3SetDefaultTarget =", "[] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search", "from ctypes import _Pointer from .common import CEnum class SNResult(CEnum): SN_S_OK = (0)", "SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues all threads from a specified", "(-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11)", "= (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED =", "[ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32", "''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype =", "= SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL bForceFlag", "self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET", "the specified target. ''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32)", "SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount,", "SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED", "target specified by hTarget member of SNPS3TargetInfo() structure. 
''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes", "(-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29)", "ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self):", "= (4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED =", "char *pszApplication, BOOL bForceFlag ); Connect to specified target. ''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx", "SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR", "SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO =", "= self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT", "= self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET", "ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True def ReadMemory(self,", "] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID );", "a specified process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ]", "all threads from a specified process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [", "SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach", "is already in progress. 
''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype =", "does not close the Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms", "SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED", "target. ''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult", "] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32", "self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer)", "= self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult '''", "self.IsConnected = True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex,", "(0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\",", "self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx(", "= [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult", "self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID,", "(-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14)", "SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED", "c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ), 
(\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p", "[ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET", "= (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR =", "(\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\",", "] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID );", "int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32,", "if len(ProcessList) == 0: return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID)", "SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND", "SNPS3InitTargetComms(void); Initialises target communications and launches Target Manager. 
''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes", "= SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues all", "self.IsConnected = False if TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None,", "SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID =", "*pTargetInfo ); Retrieves information for a target specified by hTarget member of SNPS3TargetInfo()", "*puCount, UINT32 *puBuffer ); Fetches a list of processes running on the specified", "nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32,", "self.GetProcessList() if len(ProcessList) == 0: return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0,", "TargetIndex=-1): self.IsConnected = False if TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex,", "ctypes import _SimpleCData from ctypes import _Pointer from .common import CEnum class SNResult(CEnum):", "process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList():", "default target. 
''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype =", "== 0: return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK:", "), (\"boot\", c_uint64 ), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL", "GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0]", "SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches Target Manager. ''' self.SNPS3InitTargetComms =", "NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if", "c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET", "SNPS3TargetInfo *pTargetInfo ); Retrieves information for a target specified by hTarget member of", "self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise", "== 0: raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList)", "''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL bForceFlag ); Connect", "Failed\") self.ProcessID = ProcessID return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer =", "); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ 
c_uint32, c_uint32, c_uint32, c_uint64, c_uint64,", "hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, const BYTE", "hTarget, UINT32 uProcessID ); Continues all threads from a specified process. ''' self.SNPS3ProcessContinue", "SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE", "UINT32 uProcessID ); Continues all threads from a specified process. ''' self.SNPS3ProcessContinue =", "self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult", "to a process. ''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32", "Connect to specified target. ''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p", "False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected ==", "os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and", "structure. 
''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult", "c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32", "c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports()", "c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const", "= self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return", "SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if", "= ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID)", "''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32,", "== -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK,", "SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication ); Connect to specified target. '''", "import _SimpleCData from ctypes import _Pointer from .common import CEnum class SNResult(CEnum): SN_S_OK", "if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex", "''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult '''", "target. 
''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype =", "if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def", "SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL", "(-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27)", "True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0,", "self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const", "resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI", "ctypes import * from ctypes import _SimpleCData from ctypes import _Pointer from .common", "self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessList(", "SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for a target specified by hTarget member", "= (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure):", "self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True def", "= self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ]", "SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); 
Gets the default target. '''", "''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the default target. ''' self.SNPS3SetDefaultTarget", "Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex == -1:", "= [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications", "''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype =", "''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult", "self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessList( HTARGET", "self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult '''", "Shuts down internal communications (but does not close the Target Manager) and frees", "self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT", "''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for a target specified", "SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops all threads from a specified process.", "SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA", "SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING", "HTARGET hTarget, const char *pszApplication, BOOL bForceFlag ); Connect to specified target. 
'''", "c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p", "= self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ]", "c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ] class", "c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget,", "= (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR =", "Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size,", "SNPS3CloseTargetComms(void); Shuts down internal communications (but does not close the Target Manager) and", "SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for a target specified by", "= (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING =", "TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [", "= (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION =", "SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class", "TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False if", "(0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\",", "] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication,", "c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT 
SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount,", "specified target. ''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ]", "Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No process running\") ProcessList", "BOOL bForceFlag ); Connect to specified target. ''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes =", "Gets the default target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ]", "UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, const BYTE *pBuffer", "c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID", "DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def", "def ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error: Not Connected to PS3\") def", "0: return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return", "(-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17)", "= (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION =", "*pszApplication, BOOL bForceFlag ); Connect to specified target. 
''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes", "self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET", "(-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20)", "def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms()", "0: raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) !=", "(-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25)", "specified by hTarget member of SNPS3TargetInfo() structure. ''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes =", "return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) !=", "self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT", "= ProcessID return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char *", "self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search is", "= (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED =", "ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if", "SNPS3IsScanning(); Returns SN_E_BUSY if a search is already in progress. 
''' self.SNPS3IsScanning =", "uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory =", "c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT", "return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex,", "SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex self.IsConnected = True return True def", "self.ThrowIfNotConnected() if ProcessID == -1: ProcessList = self.GetProcessList() if len(ProcessList) == 0: return", "Attach to a process. ''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32,", "= (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT =", "UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory", "Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList =", "!= SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False", "(but does not close the Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms =", "SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues all threads from a specified process.", "uUnitID, UINT32 uProcessID ); Attach to a process. ''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes", "False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return", "member of SNPS3TargetInfo() structure. 
''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ]", "(-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21)", "SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID,", "self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK:", "ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) !=", "0, self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected()", "= (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG =", "] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI", "[ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET", "(c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList)", "[ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult '''", "= (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM =", "SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL bForceFlag ); Connect to specified", "self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return 
self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, len(Bytes),", "from ctypes import _SimpleCData from ctypes import _Pointer from .common import CEnum class", "HTARGET hTarget ); Gets the default target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes =", "SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach to a process. '''", "(0) SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5)", "SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED", "(-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39)", "[ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ),", "and launches Target Manager. ''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype =", "= SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search is already", "= SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default target.", "True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None)", "''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches Target Manager. ''' self.SNPS3InitTargetComms", "process. 
''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype", "int nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [", "self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY", "TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void);", "(\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ] class TMAPIExports: def", "True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK:", "def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList = self.GetProcessList() if len(ProcessList)", "from ctypes import * from ctypes import _SimpleCData from ctypes import _Pointer from", "bForceFlag ); Connect to specified target. ''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [", "specified process. 
''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype", "Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address,", "SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION", "def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return", "), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") '''", "POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets", "c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64", "specified process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype", "self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void);", "= (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS =", "(3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION", "Gets the default target. 
''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ]", "SNResult ''' SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches", "hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach to a process. ''' self.SNPS3ProcessAttach =", "(-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35)", "SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default target. ''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget", "-1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self):", "* from ctypes import _SimpleCData from ctypes import _Pointer from .common import CEnum", "self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False: raise", "Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0,", "= (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004)", "DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex == -1: TargetIndex =", "self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected =", "list of processes running on the specified target. 
''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes", "SNResult class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected =", "Connect to specified target. ''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p,", "self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget(", "''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT", "= SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64", "communications and launches Target Manager. ''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype", "= self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT", "c_char_p ), (\"boot\", c_uint64 ), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\"))", "raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error: Not Connected", "= (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR =", "Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK:", "ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected()", 
"Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error: Not Connected to PS3\")", "PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\")", "from a specified process. ''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32", "self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes):", "const char *pszApplication ); Connect to specified target. ''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes", "self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT", "__init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target", "''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64", "already in progress. ''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult", "a target specified by hTarget member of SNPS3TargetInfo() structure. 
''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo", "] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32", "SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ),", "self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI", "SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach to a", "SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED", "Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise", "(-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO", "of SNPS3TargetInfo() structure. ''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype", "Continues all threads from a specified process. 
''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes =", "return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False", "SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT", "pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1):", "UINT32 *puCount, UINT32 *puBuffer ); Fetches a list of processes running on the", "os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications", "self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex =", "]: return False self.PS3TargetIndex = TargetIndex self.IsConnected = True return True def GetProcessList(self):", "UINT64 uAddress, int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes =", "(-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8)", "SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET", "self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if", "self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult", "Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def 
AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1:", "''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult '''", "SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED", "SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM", "''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64", "SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM", "= (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ),", "''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but does not close the", "UINT64 uAddress, int nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes", "SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL bForceFlag ); Connect to", "const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32,", "_SimpleCData from ctypes import _Pointer from .common import CEnum class SNResult(CEnum): SN_S_OK =", "= self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI", "ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex,", "= (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL =", "launches Target Manager. 
''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult", "SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True def ReadMemory(self, Address, Size):", "if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK:", "down internal communications (but does not close the Target Manager) and frees resources.", "uProcessID ); Continues all threads from a specified process. ''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue", "ProcessList = self.GetProcessList() if len(ProcessList) == 0: return False ProcessID = ProcessList[0] if", "to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms()", "self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI", "SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION", "UINT32 uProcessID ); Attach to a process. ''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes =", "uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory", "uProcessID ); Stops all threads from a specified process. 
''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop", "= (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION =", "(\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'),", "import os import pathlib from ctypes import * from ctypes import _SimpleCData from", "specified target. ''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ]", "raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected()", "import * from ctypes import _SimpleCData from ctypes import _Pointer from .common import", "char *pszApplication ); Connect to specified target. ''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes =", "= [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET", "(-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23)", "CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches Target Manager. '''", "HTARGET *pTarget ); Gets the default target. 
''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes =", "), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd())", "return False self.PS3TargetIndex = TargetIndex self.IsConnected = True return True def GetProcessList(self): self.ThrowIfNotConnected()", "-1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED", "self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the default", "self.PS3TargetIndex = TargetIndex self.IsConnected = True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr =", "HTARGET hTarget, UINT32 uProcessID ); Continues all threads from a specified process. '''", "''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops all threads from", "SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search is already in progress. '''", "internal communications (but does not close the Target Manager) and frees resources. '''", "Initialises target communications and launches Target Manager. ''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes =", "_Pointer from .common import CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1)", "= (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM =", "SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED", "close the Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes =", "hTarget member of SNPS3TargetInfo() structure. 
''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo)", "uThreadID, UINT64 uAddress, int nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory", "(-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34)", "__init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() !=", "(-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15)", "(\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\",", "''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach to", "UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, const BYTE *pBuffer ); '''", "(-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12)", "hTarget, const char *pszApplication, BOOL bForceFlag ); Connect to specified target. ''' self.SNPS3ConnectEx", "TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]:", "default target. 
''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype =", "Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error: Not Connected to", "SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication ); Connect to specified target.", "self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect(", "self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget,", "self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult '''", "[ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class", "running on the specified target. 
''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32,", "= (0) SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED =", "(-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9)", "c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32", "self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID,", "!= SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True def ReadMemory(self, Address,", "= pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses", "os import pathlib from ctypes import * from ctypes import _SimpleCData from ctypes", "self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for", "c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def", "0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer =", "c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char", "self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1", "self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI", "list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList = self.GetProcessList() if", 
"Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char *", "= (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND =", "SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues all threads", "(\"boot\", c_uint64 ), ] class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL =", "); Stops all threads from a specified process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes", "nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32,", "c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self): self.NativeAPI =", "if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True", "MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes))", "(-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28)", "information for a target specified by hTarget member of SNPS3TargetInfo() structure. 
''' self.SNPS3GetTargetInfo", "SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops all threads from a", "= (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT =", "SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR", "= pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self,", "(-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37)", "self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult", "to specified target. ''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool", "self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if", "SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR", "uProcessID ); Attach to a process. 
''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [", "NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1):", "= (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED =", "= [ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET", "self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype", "c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget,", "a search is already in progress. ''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = []", "(-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36)", "SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR", "SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class", "(-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18)", "), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ),", "Target Manager. 
''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult '''", "] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the", "NumProcesses == 0: raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr,", "ProcessID return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)()", "= (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND =", "= SNResult ''' SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer );", "True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex self.IsConnected", "NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No process running\") ProcessList =", "const char *pszApplication, BOOL bForceFlag ); Connect to specified target. ''' self.SNPS3ConnectEx =", "self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype =", "SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN", "the default target. 
''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype", "CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION", "(-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10)", "= (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN =", "BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32,", "AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList = self.GetProcessList() if len(ProcessList) ==", "target. ''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype", "GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No process running\")", "Retrieves information for a target specified by hTarget member of SNPS3TargetInfo() structure. '''", "def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID,", "bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex,", "self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT", "a specified process. 
''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ]", "(-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID", "self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer", "SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches a list of processes", "SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING", "0, ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue()", "SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32", "), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ),", "POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID,", "= self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult '''", "SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0: raise", "self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False", "ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\")", "= (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) 
SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR =", "''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT", "self.IsConnected == False: raise Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex =", "); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64,", "c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32", "target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype = SNResult", "class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR =", "SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int", "from a specified process. 
''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32", "TargetIndex self.IsConnected = True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if", "!= SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if", "not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex self.IsConnected =", "= self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI", "c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID", "SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32", "self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype", "= TargetIndex self.IsConnected = True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0))", "''' SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches a", "None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses ==", "] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information", "c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self):", "c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p", "False 
self.PS3TargetIndex = TargetIndex self.IsConnected = True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr", "SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED", "in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex self.IsConnected = True", "from .common import CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG", "GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList", "HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE", "Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address,", "= [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult", "= (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION =", "import _Pointer from .common import CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING =", "(-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40)", "SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error: Not", "SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID,", "def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises", "self.PS3TargetIndex = -1 self.IsConnected = False if 
self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\")", "[ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget );", "[] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication", "[] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but", "= (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED =", ".common import CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG =", "= [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue(", "SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT", "SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED", "= (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE =", "= SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication ); Connect", "if self.IsConnected == False: raise Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex", "= [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p", "= SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET =", "SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops all threads", "HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches a list of processes running", "= SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the default target.", "ReadMemory(self, Address, Size): self.ThrowIfNotConnected() 
MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0,", "= SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID );", "not close the Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes", "SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int", "\"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches", "SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID", "*pTarget ); Gets the default target. ''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [", "ProcessID == -1: ProcessList = self.GetProcessList() if len(ProcessList) == 0: return False ProcessID", "to specified target. 
''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ]", "* Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self,", "[ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget,", "self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down", "SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress,", "= SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops all", "self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char)", "UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory", "SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR", "= SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for a", "UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer );", "= (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002)", "hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer", "SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL bForceFlag ); Connect to specified target.", "(7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4)", "SNAPI SNRESULT 
SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default target. ''' self.SNPS3GetDefaultTarget =", "False: raise Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if", "''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT", "SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED", "self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT", "return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return", "= (-13) SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE =", "UINT32 *puBuffer ); Fetches a list of processes running on the specified target.", "HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, const", "[ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo", "Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex ==", "SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for a target specified by hTarget", "= self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI", "''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues all threads from", "Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char", "= (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = 
(-1) SN_E_TM_NOT_RUNNING =", "if NumProcesses == 0: raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex,", "if a search is already in progress. ''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes =", "if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex) != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected", "= SNResult class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected", "if TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not in", "import CEnum class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG = (3)", "(-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6)", "(\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\", c_char_p ), (\"pszInfo\",", "import pathlib from ctypes import * from ctypes import _SimpleCData from ctypes import", "HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach to a process. ''' self.SNPS3ProcessAttach", "== -1: ProcessList = self.GetProcessList() if len(ProcessList) == 0: return False ProcessID =", "progress. ''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI", "specified target. 
''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32, c_char_p ] self.SNPS3Connect.restype", "if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value", "c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget,", "_fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p ), (\"pszType\",", "class TMAPIExports: def __init__(self): os.add_dll_directory(os.getcwd()) os.add_dll_directory(os.path.join(os.getenv('SN_PS3_PATH'), \"bin\")) self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT", "= True return True def GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr,", "!= SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False: raise Exception(\"Error:", "SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY", "class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\", c_char_p", "= self.GetProcessList() if len(ProcessList) == 0: return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex,", "raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID ==", "if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def ThrowIfNotConnected(self): if self.IsConnected == False:", "self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL", 
"= [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget", "SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE", "SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the default target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget", "Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise", "uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, const BYTE *pBuffer );", "!= SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID", "SN_E_TIMEOUT = (-7) SN_E_HOST_NOT_FOUND = (-8) SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT", "''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult", "= (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED =", "self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI", "of processes running on the specified target. ''' self.SNPS3ProcessList = self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes =", "(-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5)", "self.TMAPI_DLL = CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches Target", "target communications and launches Target Manager. 
''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = []", "= self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT", "TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms()", "uAddress, int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [", "= [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo(", "*pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64,", "self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default", "SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default target. 
''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes", "= (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED =", "''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search is already in progress.", "self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory(", "= (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED =", "c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ), ] class TMAPIExports: def __init__(self):", "return list(ProcessList) def AttachProcess(self, ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList = self.GetProcessList()", "def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex == -1: TargetIndex = self.GetDefaultTarget()", "= (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED =", "threads from a specified process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32,", "(-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31)", "= (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY =", "pathlib from ctypes import * from ctypes import _SimpleCData from ctypes import _Pointer", "); Gets the default target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32", "); Gets the default target. 
''' self.SNPS3GetDefaultTarget = self.TMAPI_DLL.SNPS3GetDefaultTarget self.SNPS3GetDefaultTarget.argtypes = [ POINTER(c_uint32)", "NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\")", "= [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET", "(0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [ (\"nFlags\", c_uint32 ), (\"hTarget\", c_uint32 ), (\"pszName\",", "(-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT = (-7)", "(-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL = (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22)", "self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning();", "raise Exception(\"No process running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK:", "self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal", "= self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult '''", "self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops", "*puBuffer ); Fetches a list of processes running on the specified target. 
'''", "NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses", "[ c_uint32, c_char_p ] self.SNPS3Connect.restype = SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget,", "(6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2)", "SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search is already in", "c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI:", "class SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION =", "def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0,", "self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID", "SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the default target. 
''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes", "!= SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0:", "= (3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED =", "= False if TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True)", "self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ] self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI", "BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32,", "= self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns", "c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory(", "); Connect to specified target. 
''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32,", "uAddress, int nCount, const BYTE *pBuffer ); ''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes =", "ProcessID) != SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\")", "self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return list(ProcessList) def AttachProcess(self,", "SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13) SN_E_TM_VERSION", "self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but does", "self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype =", "all threads from a specified process. ''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [", "UINT32 uUnitID, UINT32 uProcessID ); Attach to a process. 
''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach", "self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult '''", "SNResult.SN_S_OK: return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID =", "= CDLL(\"ps3tmapi.dll\") ''' SNAPI SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches Target Manager.", "self.TMAPI_DLL.SNPS3ProcessList self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI", "by hTarget member of SNPS3TargetInfo() structure. ''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [", "POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex", "MemoryBuffer = (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer) return", "== False: raise Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0))", "SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress,", "(-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33)", "SNRESULT SNPS3ProcessStop( HTARGET hTarget, UINT32 uProcessID ); Stops all threads from a specified", "self.SNPS3ProcessAttach.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues", "self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() 
WriteBuffer", "HTARGET hTarget, const char *pszApplication ); Connect to specified target. ''' self.SNPS3Connect =", "frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult '''", "the default target. ''' self.SNPS3SetDefaultTarget = self.TMAPI_DLL.SNPS3SetDefaultTarget self.SNPS3SetDefaultTarget.argtypes = [ c_uint32 ] self.SNPS3SetDefaultTarget.restype", "(4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7)", "SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves information for a target", "*pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64,", "Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def WriteMemory(self, Address,", "(-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG = (-26)", "c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo );", "HTARGET hTarget, UINT32 uProcessID ); Stops all threads from a specified process. 
'''", "if ProcessID == -1: ProcessList = self.GetProcessList() if len(ProcessList) == 0: return False", "c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessGetMemory( HTARGET hTarget, UINT32", "(-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38)", "= (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED = (-35) SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED =", "(0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020)", "*pszApplication ); Connect to specified target. ''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [", "SN_E_COMMAND_CANCELLED = (-36) SN_E_PROTOCOL_ALREADY_REGISTERED = (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF", "SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount,", "] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex =", "SNAPI SNRESULT SNPS3ProcessContinue( HTARGET hTarget, UINT32 uProcessID ); Continues all threads from a", "= [] self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char", "); Continues all threads from a specified process. 
''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes", "None, True) not in [ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex", "SNResult ''' SNAPI SNRESULT SNPS3ConnectEx( HTARGET hTarget, const char *pszApplication, BOOL bForceFlag );", "= [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessAttach(", "= (c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer)", "SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication ); Connect to", "SNPS3TargetInfo() structure. ''' self.SNPS3GetTargetInfo = self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype =", "a process. ''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32, c_uint32, c_uint32 ]", "), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ), (\"boot\", c_uint64 ),", "for a target specified by hTarget member of SNPS3TargetInfo() structure. 
''' self.SNPS3GetTargetInfo =", "WriteMemory(self, Address, Bytes): self.ThrowIfNotConnected() WriteBuffer = (c_char * len(Bytes)).from_buffer(bytearray(Bytes)) return self.NativeAPI.SNPS3ProcessSetMemory(self.PS3TargetIndex, 0, self.ProcessID,", "self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char)", "[ SNResult.SN_S_OK, SNResult.SN_S_CONNECTED ]: return False self.PS3TargetIndex = TargetIndex self.IsConnected = True return", "= [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a", "(1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6)", "SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but does not close the Target", "Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype", "= -1 self.IsConnected = False if self.NativeAPI.SNPS3InitTargetComms() != SNResult.SN_S_OK: raise Exception(\"SNPS3InitTargetComms() Failed\") def", "c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo ); Retrieves", "(-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR", "False if TargetIndex == -1: TargetIndex = self.GetDefaultTarget() if self.NativeAPI.SNPS3ConnectEx(TargetIndex, None, True) not", "ProcessID=-1): self.ThrowIfNotConnected() if ProcessID == -1: ProcessList = self.GetProcessList() if len(ProcessList) == 0:", "SN_E_BUSY if a search is already in progress. 
''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes", "(c_char * Size)() self.NativeAPI.SNPS3ProcessGetMemory(self.PS3TargetIndex, 0, self.ProcessID, 0, Address, Size, MemoryBuffer) return bytes(MemoryBuffer) def", "self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult ''' SNAPI SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if", "Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No process", "class TMAPI: def __init__(self): self.NativeAPI = TMAPIExports() self.PS3TargetIndex = -1 self.IsConnected = False", "SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30) SN_E_LOAD_MODULE_FAILED = (-31) SN_E_CHECK_TARGET_CONFIGURATION = (-33) SN_E_MODULE_NOT_FOUND = (-34) SN_E_CONNECT_TO_GAMEPORT_FAILED", "c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI", "uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer ); ''' self.SNPS3ProcessGetMemory = self.TMAPI_DLL.SNPS3ProcessGetMemory self.SNPS3ProcessGetMemory.argtypes", "self.ProcessID = ProcessID return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer = (c_char", "process. ''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype =", "SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS = (-19) SN_E_NO_SEL", "''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication ); Connect to specified", "return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex == -1: TargetIndex", "target. 
''' self.SNPS3ConnectEx = self.TMAPI_DLL.SNPS3ConnectEx self.SNPS3ConnectEx.argtypes = [ c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype", "SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default target. '''", "); Connect to specified target. ''' self.SNPS3Connect = self.TMAPI_DLL.SNPS3Connect self.SNPS3Connect.argtypes = [ c_uint32,", "self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses", "search is already in progress. ''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype", "SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA = (-25) SN_E_DATA_TOO_LONG", "= [ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget", "pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses =", "SN_E_TARGET_IN_USE = (-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED", "Manager) and frees resources. 
''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype =", "GetProcessList(self): self.ThrowIfNotConnected() NumProcessesPtr = pointer(c_uint32(0)) if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, None) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList():", "hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches a list of processes running on", "= SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but does not", "self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop(", "self.SNPS3IsScanning.restype = SNResult ''' SNAPI SNRESULT SNPS3Connect( HTARGET hTarget, const char *pszApplication );", "= (-20) SN_E_BAD_PARAM = (-21) SN_E_BUSY = (-22) SN_E_DECI_ERROR = (-23) SN_E_INSUFFICIENT_DATA =", "SNRESULT SNPS3IsScanning(); Returns SN_E_BUSY if a search is already in progress. ''' self.SNPS3IsScanning", "''' self.SNPS3ProcessStop = self.TMAPI_DLL.SNPS3ProcessStop self.SNPS3ProcessStop.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessStop.restype = SNResult", "-1: ProcessList = self.GetProcessList() if len(ProcessList) == 0: return False ProcessID = ProcessList[0]", "SNResult ''' SNAPI SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but does not close", "[ POINTER(c_uint32) ] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget );", "Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID return True def ReadMemory(self, Address, Size): self.ThrowIfNotConnected() MemoryBuffer", "SNRESULT SNPS3CloseTargetComms(void); Shuts down internal communications (but does not close the Target Manager)", "Manager. 
''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms self.SNPS3InitTargetComms.argtypes = [] self.SNPS3InitTargetComms.restype = SNResult ''' SNAPI", "and frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = [] self.SNPS3CloseTargetComms.restype = SNResult", "), (\"pszType\", c_char_p ), (\"pszInfo\", c_char_p ), (\"pszHomeDir\", c_char_p ), (\"pszFSDir\", c_char_p ),", "SNAPI SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32 *puBuffer ); Fetches a list", "Fetches a list of processes running on the specified target. ''' self.SNPS3ProcessList =", "); Attach to a process. ''' self.SNPS3ProcessAttach = self.TMAPI_DLL.SNPS3ProcessAttach self.SNPS3ProcessAttach.argtypes = [ c_uint32,", "''' SNAPI SNRESULT SNPS3GetDefaultTarget( HTARGET *pTarget ); Gets the default target. ''' self.SNPS3GetDefaultTarget", "uUnit, UINT32 uProcessID, UINT64 uThreadID, UINT64 uAddress, int nCount, BYTE *pBuffer ); '''", "raise Exception(\"Error: Not Connected to PS3\") def GetDefaultTarget(self): DefaultTargetIndex = pointer(c_uint32(0)) if self.NativeAPI.SNPS3GetDefaultTarget(DefaultTargetIndex)", "= (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR =", "SN_E_TM_VERSION = (-14) SN_E_DLL_NOT_INITIALISED = (-15) SN_E_TARGET_RUNNING = (-17) SN_E_BAD_MEMSPACE = (-18) SN_E_NO_TARGETS", "c_uint64, c_uint64, c_int32, POINTER(c_char) ] self.SNPS3ProcessSetMemory.restype = SNResult class TMAPI: def __init__(self): self.NativeAPI", "threads from a specified process. ''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32,", "); Retrieves information for a target specified by hTarget member of SNPS3TargetInfo() structure.", "in progress. 
''' self.SNPS3IsScanning = self.TMAPI_DLL.SNPS3IsScanning self.SNPS3IsScanning.argtypes = [] self.SNPS3IsScanning.restype = SNResult '''", "SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008)", "(-26) SN_E_DEPRECATED = (-27) SN_E_BAD_ALIGN = (-28) SN_E_FILE_ERROR = (-29) SN_E_NOT_SUPPORTED_IN_SDK_VERSION = (-30)", "len(ProcessList) == 0: return False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) !=", "SNRESULT SNPS3ProcessAttach( HTARGET hTarget, UINT32 uUnitID, UINT32 uProcessID ); Attach to a process.", "= SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32 uProcessID, UINT64", "POINTER(c_char) ] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit,", "] self.SNPS3ProcessGetMemory.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessSetMemory( HTARGET hTarget, UINT32 uUnit, UINT32", "process. ''' self.SNPS3ProcessContinue = self.TMAPI_DLL.SNPS3ProcessContinue self.SNPS3ProcessContinue.argtypes = [ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype =", "); Fetches a list of processes running on the specified target. 
''' self.SNPS3ProcessList", "= (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3) SN_E_NOT_CONNECTED =", "[ c_uint32, c_uint32 ] self.SNPS3ProcessContinue.restype = SNResult ''' SNAPI SNRESULT SNPS3ProcessStop( HTARGET hTarget,", "] self.SNPS3GetDefaultTarget.restype = SNResult ''' SNAPI SNRESULT SNPS3SetDefaultTarget( HTARGET hTarget ); Gets the", "= (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos Failed\") return", "''' self.SNPS3ProcessSetMemory = self.TMAPI_DLL.SNPS3ProcessSetMemory self.SNPS3ProcessSetMemory.argtypes = [ c_uint32, c_uint32, c_uint32, c_uint64, c_uint64, c_int32,", "raise Exception(\"SNPS3InitTargetComms() Failed\") return DefaultTargetIndex[0] def ConnectTarget(self, TargetIndex=-1): self.IsConnected = False if TargetIndex", "the Target Manager) and frees resources. ''' self.SNPS3CloseTargetComms = self.TMAPI_DLL.SNPS3CloseTargetComms self.SNPS3CloseTargetComms.argtypes = []", "running\") ProcessList = (c_uint32*NumProcesses)() if self.NativeAPI.SNPS3ProcessList(self.PS3TargetIndex, NumProcessesPtr, ProcessList) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessList(): GetProcessInfos", "(-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = (0x00000001) SN_TI_NAME", "SN_S_TM_VERSION = (4) SN_S_REPLACED = (5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED", "c_uint32, c_char_p, c_bool ] self.SNPS3ConnectEx.restype = SNResult ''' SNAPI SNRESULT SNPS3GetTargetInfo( SNPS3TargetInfo *pTargetInfo", "SNResult(CEnum): SN_S_OK = (0) SN_S_PENDING = (1) SN_S_NO_MSG = (3) SN_S_TM_VERSION = (4)", "(0x00000001) SN_TI_NAME = (0x00000002) SN_TI_INFO = (0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010)", "] self.SNPS3SetDefaultTarget.restype = SNResult ''' SNAPI 
SNRESULT SNPS3ProcessList( HTARGET hTarget, UINT32 *puCount, UINT32", "False ProcessID = ProcessList[0] if self.NativeAPI.SNPS3ProcessAttach(self.PS3TargetIndex, 0, ProcessID) != SNResult.SN_S_OK: return False if", "SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ =", "SNRESULT SNPS3InitTargetComms(void); Initialises target communications and launches Target Manager. ''' self.SNPS3InitTargetComms = self.TMAPI_DLL.SNPS3InitTargetComms", "self.SNPS3ProcessList.argtypes = [ c_uint32, POINTER(c_uint32), POINTER(c_uint32) ] self.SNPS3ProcessList.restype = SNResult ''' SNAPI SNRESULT", "(-9) SN_E_LOAD_ELF_FAILED = (-10) SN_E_BAD_UNIT = (-11) SN_E_OUT_OF_MEM = (-12) SN_E_NOT_LISTED = (-13)", "SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1) SN_E_TM_NOT_RUNNING = (-2) SN_E_BAD_TARGET = (-3)", "= (-37) SN_E_CONNECTED = (-38) SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum):", "self.TMAPI_DLL.SNPS3GetTargetInfo self.SNPS3GetTargetInfo.argtypes = [ POINTER(SNPS3TargetInfo) ] self.SNPS3GetTargetInfo.restype = SNResult ''' SNAPI SNRESULT SNPS3GetDefaultTarget(", "= (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_ = [", "= (-3) SN_E_NOT_CONNECTED = (-4) SN_E_COMMS_ERR = (-5) SN_E_TM_COMMS_ERR = (-6) SN_E_TIMEOUT =", "raise Exception(\"SNPS3ProcessList(): GetNumProcesses Failed\") NumProcesses = NumProcessesPtr.contents.value if NumProcesses == 0: raise Exception(\"No", "(5) SN_S_NO_ACTION = (6) SN_S_CONNECTED = SN_S_NO_ACTION SN_S_TARGET_STILL_REGISTERED = (7) SN_E_NOT_IMPL = (-1)", "(0x00000004) SN_TI_HOMEDIR = (0x00000008) SN_TI_FILESERVEDIR = (0x00000010) SN_TI_BOOT = (0x00000020) class SNPS3TargetInfo(Structure): _fields_", "SN_E_COMMS_EVENT_MISMATCHED_ERR = (-39) SN_E_TARGET_IS_POWERED_OFF = (-40) class SNTargetInfoFlags(CEnum): SN_TI_TARGETID = 
(0x00000001) SN_TI_NAME =", "return False if self.NativeAPI.SNPS3ProcessContinue(self.PS3TargetIndex, ProcessID) != SNResult.SN_S_OK: raise Exception(\"SNPS3ProcessContinue() Failed\") self.ProcessID = ProcessID" ]
[ "for serialization exceptions # Check pake propagates the exception correctly pake.de_init(clear_conf=False) pk =", "ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd,", "cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL)", "0.1), exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions cmd =", "with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just test", "pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException)", "# just test for serialization exceptions # Check pake propagates the exception correctly", "stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = [sys.executable,", "exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with self.assertRaises(pake.TaskException)", "self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) # just", "def test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1,", "os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( 
os.path.join(script_dir, os.path.join('..', '..')))) from pake import process", "cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd,", "stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions", "process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd", "just test for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as", "pake import process import pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd =", "'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable,", "self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception)", "process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) # just test for serialization", "os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake import process import pake.program import pake class", "pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) 
self.assertEqual(type(exc.exception.exception),", "'..')))) from pake import process import pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self):", "str(exc.exception) # just test for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with", "as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with", "test for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc:", "_ = str(exc.exception) # just test for serialization exceptions cmd = [sys.executable, os.path.join(script_dir,", "pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as", "process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions # Check", "os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake import process import pake.program import", "= [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception)", "the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with", "= pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy)", "os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 
'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self):", "as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization", "from pake import process import pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd", "test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL,", "'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd", "= str(exc.exception) # just test for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')]", "pk = pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy)", "'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just", "import unittest import os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from", "with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir,", "script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake import process import", "self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = 
[sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc:", "stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'),", "for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd,", "process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) # just test", "self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL,", "'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired)", "stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions cmd = [sys.executable,", "= [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd,", "os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with", "timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions cmd =", "stderr=process.DEVNULL, 
stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as", "test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL,", "self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization", "as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions", "unittest import os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake", "= [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd)", "timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable,", "serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL)", "@pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException)", "with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just test for", 
"[sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _", "Check pake propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx):", "stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd =", "os.path.join(script_dir, os.path.join('..', '..')))) from pake import process import pake.program import pake class ProcessTest(unittest.TestCase):", "pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc:", "stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc:", "exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _", "os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) #", "the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL)", "cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ =", "exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) 
self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0)", "'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception)", "pake propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd,", "propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL,", "0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir,", "stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir,", "as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) #", "def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def", "exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired)", "0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with 
self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd,", "exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions #", "[sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) #", "import os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake import", "correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException)", "os.path.join('..', '..')))) from pake import process import pake.program import pake class ProcessTest(unittest.TestCase): def", "import pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]", "with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) #", "just test for serialization exceptions # Check pake propagates the exception correctly pake.de_init(clear_conf=False)", "exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions # Check pake", "# just test for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException)", "process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0)", "_ = str(exc.exception) # just test for serialization exceptions # Check 
pake propagates", "= os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake import process import pake.program", "pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with", "exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) # just test for", "= [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd,", "[sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception)", "= str(exc.exception) # just test for serialization exceptions # Check pake propagates the", "exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions", "sys import unittest import os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..'))))", "exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL),", "dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self):", "class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, 
os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc:", "'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just test", "stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable,", "pake class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as", "self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def", "with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ =", "test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL)", "os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd)", "self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'),", "os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: 
process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ =", "process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions cmd", "self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions # Check", "test for serialization exceptions # Check pake propagates the exception correctly pake.de_init(clear_conf=False) pk", "stderr=process.DEVNULL) _ = str(exc.exception) # just test for serialization exceptions # Check pake", "self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions cmd", "cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL)", "[sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1),", "stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) # just test for serialization", "os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd)", "[sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1),", "os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd, stderr=process.DEVNULL) _ = str(exc.exception) # just", "exception 
correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) with", "pake propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_output(cmd,", "correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with self.assertRaises(pake.TaskException) as", "for serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_output(cmd,", "# Check pake propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def", "exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL)", "stdout=process.DEVNULL) self.assertListEqual(cmd, exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions #", "'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _", "pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as", "with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir,", "sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..', '..')))) from pake import process import pake.program import pake", 
"pake.de_init(clear_conf=False) pk = pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc:", "self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]", "import process import pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable,", "import sys import unittest import os script_dir = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.abspath( os.path.join(script_dir, os.path.join('..',", "self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception), process.CalledProcessException) def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')]", "import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired)", "= pake.init() @pk.task def dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL) with self.assertRaises(pake.TaskException) as exc: pk.run(tasks=dummy) self.assertEqual(type(exc.exception.exception),", "serialization exceptions cmd = [sys.executable, os.path.join(script_dir, 'throw.py')] with self.assertRaises(process.CalledProcessException) as exc: process.check_call(cmd, stderr=process.DEVNULL,", "process import pake.program import pake class ProcessTest(unittest.TestCase): def test_call(self): cmd = [sys.executable, os.path.join(script_dir,", "cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _", "propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task def 
dummy(ctx): process.check_output(cmd, stderr=process.DEVNULL)", "exc: process.check_call(cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) # just", "def test_check_call(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_call(cmd, timeout=0.1,", "serialization exceptions # Check pake propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init()", "0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL),", "exceptions # Check pake propagates the exception correctly pake.de_init(clear_conf=False) pk = pake.init() @pk.task", "str(exc.exception) # just test for serialization exceptions # Check pake propagates the exception", "as exc: process.call(*cmd, timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'throw.py'), stderr=process.DEVNULL,", "def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1,", "self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ = str(exc.exception) # just test for", "process.CalledProcessException) def test_check_output(self): cmd = [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd,", "as exc: process.check_call(cmd, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertListEqual(cmd, 
exc.exception.cmd) _ = str(exc.exception) # just test", "stderr=process.DEVNULL, stdout=process.DEVNULL), 0) self.assertNotEqual(process.call(sys.executable, os.path.join(script_dir, 'killself.py'), stderr=process.DEVNULL, stdout=process.DEVNULL), 0) def test_check_call(self): cmd =", "= [sys.executable, os.path.join(script_dir, 'timeout.py')] with self.assertRaises(process.TimeoutExpired) as exc: process.check_output(*cmd, timeout=0.1, stderr=process.DEVNULL) _ =", "stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions", "exc.exception.cmd) _ = str(exc.exception) # just test for serialization exceptions cmd = [sys.executable,", "timeout=0.1, stderr=process.DEVNULL, stdout=process.DEVNULL) self.assertSequenceEqual((cmd, 0.1), exc.exception.cmd) _ = str(exc.exception) # just test for" ]
[ "1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per step time: {:5.3f},", "define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model", "loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1,", "step_size) def step_begin(self, run_context): self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params =", "KIND, either express or implied. # See the License for the specific language", "default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') # Ascend", "Unless required by applicable law or agreed to in writing, software # distributed", "name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval,", "device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts: import moxing as mox device_id =", "else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num,", "import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common import set_seed from mindspore.communication.management", "margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model", "from mindspore.train.serialization import load_checkpoint, 
load_param_into_net from mindspore.common import set_seed from mindspore.communication.management import init", "per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per step time: {:5.3f}, avg", "config2 as config from src.dataset import create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE,", "1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss =", "Model from mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import", "'softmax': from src.config import config0 as config from src.dataset import create_dataset0 as create_dataset", "name: softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path',", "context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if device_num >", "amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define", "import argparse import ast import numpy as np from mindspore import context from", "CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common import", "cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__': if args_opt.loss_name", "super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init) def epoch_begin(self, run_context): self.losses =", "== 'quadruplet': dataset_generator = 
GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True,", "this file except in compliance with the License. # You may obtain a", "softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str,", "= lr_init self.lr_init_len = len(lr_init) def epoch_begin(self, run_context): self.losses = [] self.epoch_time =", "= os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb]", "dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else:", "np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self,", "default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device", "import moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url =", "import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from", "elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator", "self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init) def 
epoch_begin(self, run_context): self.losses = []", "= np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}],", "Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0", "config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\"", "ANY KIND, either express or implied. # See the License for the specific", "+ 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name ==", "= (time.time() - self.step_time) * 1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list))", "context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id DATA_DIR =", "else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts: import moxing", "help='loss name: softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path')", "define loss, model if args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif", "run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time() - self.step_time) * 1000 step_loss", "Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = [] if", "src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, 
GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image", "parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str,", "int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num", "cb += [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if", "total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale)", "'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id", "lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt", "Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init)", "step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url,", "lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt", "id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def", "time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if", "type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False,", "args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss", "help='Run distribute') 
parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt", "(time.time() - self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f},", "default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # modelarts parameter", "import Momentum from mindspore.train.model import Model from mindspore.context import ParallelMode from mindspore.train.callback import", "callback cb = [] if config.save_checkpoint and (device_num == 1 or device_id ==", "__init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init) def epoch_begin(self, run_context):", "args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or device_id == 0): mox.file.copy_parallel(src_url=local_train_url, dst_url=args_opt.train_url)", "== 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2,", "if __name__ == '__main__': if args_opt.loss_name == 'softmax': from src.config import config0 as", "config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time = time.time()", "'/' # create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax':", "[] if config.save_checkpoint and (device_num == 1 or device_id == 0): 
config_ck =", "== '__main__': if args_opt.loss_name == 'softmax': from src.config import config0 as config from", "OF ANY KIND, either express or implied. # See the License for the", "0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name if", "[] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds", "Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name ==", "loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss, optimizer=opt,", "else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path + '/'", "from src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name == 'triplet': from src.config import", "type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax',", "1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id))", "keep_batchnorm_fp32=False) #define callback cb = [] if config.save_checkpoint and (device_num == 1 or", "[ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True)", "loss = 
Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif", "= Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt =", "default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run", "loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale,", "argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str,", "keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback", "== 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR,", "= Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss,", "os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/' else: if", "if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or device_id == 0): mox.file.copy_parallel(src_url=local_train_url,", "= Tripletloss(margin=0.1) elif 
args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss", "model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone,", "\"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init) def", "device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num =", "limitations under the License. # ============================================================================ \"\"\"train resnet.\"\"\" import os import time import", "Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name')", "help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute')", "step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time() - self.step_time) * 1000", "= FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale,", "config1 as config from src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name == 'quadruplet':", "Co., Ltd # # Licensed under the Apache License, Version 2.0 (the \"License\");", "parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt = 
parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define net net =", "ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb", "file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path',", "GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name ==", "{:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "as config from src.dataset import create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\",", "= int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id)", "drop_overflow_update=False) if args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3',", "type=str, default='resnet-120_625.ckpt', help='Checkpoint file') 
parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') #", "save_graphs=False) # init distributed if args_opt.run_modelarts: import moxing as mox device_id = int(os.getenv('DEVICE_ID'))", "samples_each_class=2, margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax':", "step_size = dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num) # init weight if", "Softmaxloss from src.loss import Tripletloss from src.loss import Quadrupletloss from src.lr_generator import get_lr", "import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification')", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "from src.config import config2 as config from src.dataset import create_dataset1 as create_dataset else:", "under the License. 
# ============================================================================ \"\"\"train resnet.\"\"\" import os import time import argparse", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num,", "src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name == 'triplet': from src.config import config1", "# Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache", "import numpy as np from mindspore import context from mindspore import Tensor from", "import create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "= Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = []", "type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None,", "device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt'", "required by applicable law or agreed to in writing, software # distributed under", "dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time)", "lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, 
weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define", "callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or device_id ==", "per step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:',", "checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict)", "(device_num == 1 or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max)", "applicable law or agreed to in writing, software # distributed under the License", "'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time = time.time() def step_end(self, run_context):", "== 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss =", "= len(lr_init) def epoch_begin(self, run_context): self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST)", "dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define net", "'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST)", "def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time() - self.step_time) *", "parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', 
type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval,", "loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3',", "if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) %", "cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss,", "or agreed to in writing, software # distributed under the License is distributed", "= (time.time() - self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time:", "int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if device_num", "'triplet': from src.config import config1 as config from src.dataset import create_dataset1 as create_dataset", "loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max,", "if args_opt.loss_name == 'softmax': from src.config import config0 as config from src.dataset import", "step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}],", "local_ckpt_url) DATA_DIR = local_data_url + '/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num", "parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', 
type=int,", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "Momentum from mindspore.train.model import Model from mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint,", "opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if", "= Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name", "from src.config import config0 as config from src.dataset import create_dataset0 as create_dataset elif", "src.lr_generator import get_lr from src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet", "and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss)", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name,", "Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs:", "writing, software # distributed under the License is distributed on an \"AS IS\"", "load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size,", "check_name = 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, 
config=config_ck) else:", "triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None,", "args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator =", "args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size,", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "License. # You may obtain a copy of the License at # #", "elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss =", "cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ ==", "run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) * 1000 per_step_mseconds =", "path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name',", "mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID'))", "+ '/' # create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name ==", "create_dataset elif args_opt.loss_name == 'triplet': from src.config import config1 as config from src.dataset", "= run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds /", "net net = resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path 
= os.path.join(local_ckpt_url, args_opt.checkpoint_name)", "Callback from src.loss import Softmaxloss from src.loss import Tripletloss from src.loss import Quadrupletloss", "# ============================================================================ \"\"\"train resnet.\"\"\" import os import time import argparse import ast import", "config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time", "momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name == 'softmax': loss =", "[Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and", "num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss", "default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute')", "compliance with the License. 
# You may obtain a copy of the License", "class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len =", "import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization", ".format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size)", "lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum,", "import ast import numpy as np from mindspore import context from mindspore import", "FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common import set_seed from mindspore.communication.management import", "np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__': if args_opt.loss_name == 'softmax':", "resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') #", "#define callback cb = [] if config.save_checkpoint and (device_num == 1 or device_id", "args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/')", "context from mindspore import Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model", "import create_dataset1 as create_dataset elif 
args_opt.loss_name == 'quadruplet': from src.config import config2 as", "ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "config.save_checkpoint and (device_num == 1 or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs *", "'step_size:', step_size) def step_begin(self, run_context): self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params", "resnet.\"\"\" import os import time import argparse import ast import numpy as np", "+ args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path,", "as mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url", "import set_seed from mindspore.communication.management import init from mindspore.train.callback import Callback from src.loss import", "Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no')", "from src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name == 'quadruplet': from src.config import", "create_dataset elif args_opt.loss_name == 'quadruplet': from src.config import config2 as config from src.dataset", "model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb =", "local_data_url + '/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) 
context.set_context(device_id=device_id)", "local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if device_num > 1:", "Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name ==", "optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None,", "(cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size,", "create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator =", "# define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss,", "dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR,", "- self.step_time) * 1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0],", "os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init", "# limitations under the License. # ============================================================================ \"\"\"train resnet.\"\"\" import os import time", "not use this file except in compliance with the License. 
# You may", "from src.config import config1 as config from src.dataset import create_dataset1 as create_dataset elif", "lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init) def epoch_begin(self, run_context): self.losses", "epoch_mseconds = (time.time() - self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch", "'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time =", "= GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id)", "optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = [] if config.save_checkpoint and", "== 'quadruplet': from src.config import config2 as config from src.dataset import create_dataset1 as", "args_opt.loss_name == 'softmax': from src.config import config0 as config from src.dataset import create_dataset0", "TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST)", "License, Version 2.0 (the \"License\"); # you may not use this file except", "train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) *", "DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name", "if args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet':", "* step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name 
= 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name,", "help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\"", "self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}],", "import Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model from mindspore.context import", "Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(),", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str,", "= '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if device_num > 1: init()", "device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' # create dataset TRAIN_LIST =", "= GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name", "= int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1", "cur_step_in_epoch = (cb_params.cur_step_num - 1) % 
cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format(", "ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size", "# define net net = resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path =", "warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay,", "from src.dataset import create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) #", "print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context):", "parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path", "# you may not use this file except in compliance with the License.", "print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() #", "no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define", "agreed to in writing, software # distributed under the License is distributed on", "import os import time import argparse import ast import numpy as np from", "train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator = 
GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset", "> 1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url,", "dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path", "/ cb_params.batch_num print(\"epoch time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds,", "do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num)", "device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' +", "src.loss import Quadrupletloss from src.lr_generator import get_lr from src.resnet import resnet50 from src.utility", "(the \"License\"); # you may not use this file except in compliance with", "default=False, help='Run distribute') args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor,", "weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True,", "net = resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else:", "keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck)", "device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, 
parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url =", "self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds =", "GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train", "from mindspore.train.model import Model from mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig", "from mindspore.train.callback import Callback from src.loss import Softmaxloss from src.loss import Tripletloss from", "# Unless required by applicable law or agreed to in writing, software #", "'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model =", "args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+", "dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif", "step_mseconds = (time.time() - self.step_time) * 1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple,", "by applicable law or agreed to in writing, software # distributed under the", "Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model from mindspore.context import ParallelMode", "local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, 
parallel_mode=ParallelMode.DATA_PARALLEL,", "parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len", "'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time = time.time() def", "config=config_ck) cb += [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size,", "'/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num,", "context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url)", "= [] if config.save_checkpoint and (device_num == 1 or device_id == 0): config_ck", "local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR", "mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model from mindspore.context import ParallelMode from mindspore.train.callback", "type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt = parser.parse_args() class", "= step_loss[0] if isinstance(step_loss, Tensor): 
step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num -", "import Model from mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager", "learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name == 'softmax': loss", "if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator", "else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size()", "train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and", "file except in compliance with the License. 
# You may obtain a copy", "epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}\"", "src.loss import Softmaxloss from src.loss import Tripletloss from src.loss import Quadrupletloss from src.lr_generator", "= os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR =", "ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained)", "= ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb =", "dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or device_id == 0):", "= epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per step time: {:5.3f}, avg loss:", "self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__': if args_opt.loss_name == 'softmax': from src.config", "License for the specific language governing permissions and # limitations under the License.", "= os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) #", "as create_dataset elif args_opt.loss_name == 'triplet': from src.config import config1 as config from", "to in writing, software # distributed under the License is distributed on an", "import Quadrupletloss from src.lr_generator import get_lr from src.resnet import resnet50 from src.utility import", "lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), 
step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__", "as np from mindspore import context from mindspore import Tensor from mindspore.nn.optim.momentum import", "implied. # See the License for the specific language governing permissions and #", "init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url,", "= Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name ==", "\"License\"); # you may not use this file except in compliance with the", "from mindspore import Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model from", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt", "os import time import argparse import ast import numpy as np from mindspore", "mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common import set_seed from", "cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__': if", "isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num", "'/cache/ckpt' local_train_url = '/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, 
parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url", "int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url =", "rank_id=device_id) step_size = dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num) # init weight", "\"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time() - self.step_time) * 1000 step_loss =", "or implied. # See the License for the specific language governing permissions and", "ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common", "elif args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale", "governing permissions and # limitations under the License. # ============================================================================ \"\"\"train resnet.\"\"\" import", "lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr,", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1)", "'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False)", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "import time import argparse import ast import numpy as np from mindspore import", "- 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch,", "else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model =", "src.config import config0 as config from src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name", "epoch_begin(self, run_context): self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context):", "'quadruplet': from src.config import config2 as config from src.dataset import create_dataset1 as create_dataset", "# define loss, model if args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num)", "- config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "the License. 
# ============================================================================ \"\"\"train resnet.\"\"\" import os import time import argparse import", "in writing, software # distributed under the License is distributed on an \"AS", "= GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss", "time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:',", "# init distributed if args_opt.run_modelarts: import moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num", "from src.loss import Quadrupletloss from src.lr_generator import get_lr from src.resnet import resnet50 from", "define net net = resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url,", "== 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name", "path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id',", "- 1])) if __name__ == '__main__': if args_opt.loss_name == 'softmax': from src.config import", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif", "distribute') args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, 
lr_init=None): super(Monitor, self).__init__() self.lr_init", "parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False,", "type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None,", "args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale =", "= 1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' # create dataset", "import config2 as config from src.dataset import create_dataset1 as create_dataset else: print('loss no')", "= '/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url,", "args_opt.run_modelarts: import moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url", "type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0,", "train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size =", "if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path)", "get_lr from src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet 
set_seed(1) parser", "epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) * 1000 per_step_mseconds", "import Callback from src.loss import Softmaxloss from src.loss import Tripletloss from src.loss import", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id =", "you may not use this file except in compliance with the License. #", "init from mindspore.train.callback import Callback from src.loss import Softmaxloss from src.loss import Tripletloss", "if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL,", "as create_dataset elif args_opt.loss_name == 'quadruplet': from src.config import config2 as config from", "directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path,", "def epoch_begin(self, run_context): self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self,", "============================================================================ \"\"\"train resnet.\"\"\" import os import time import argparse import ast import numpy", "def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) * 1000", "import Tripletloss from src.loss import Quadrupletloss from 
src.lr_generator import get_lr from src.resnet import", "model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num ==", "len(lr_init) def epoch_begin(self, run_context): self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def", "* 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per step time:", "path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name',", "Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch =", "= CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb", "self.lr_init_len = len(lr_init) def epoch_begin(self, run_context): self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR,", "= dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts:", "language governing permissions and # limitations under the License. # ============================================================================ \"\"\"train resnet.\"\"\"", "use this file except in compliance with the License. 
# You may obtain", "mindspore.train.callback import Callback from src.loss import Softmaxloss from src.loss import Tripletloss from src.loss", "cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses),", "save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb +=", "= resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path", "FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None,", "= run_context.original_args() step_mseconds = (time.time() - self.step_time) * 1000 step_loss = cb_params.net_outputs if", "mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net", "ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import", "run_context.original_args() step_mseconds = (time.time() - self.step_time) * 1000 step_loss = cb_params.net_outputs if isinstance(step_loss,", "run_context): self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params", "# Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt 
path", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "= time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time() -", "mindspore import Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model from mindspore.context", "step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__': if args_opt.loss_name ==", "create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts: import", "from src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser =", "[{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num", "time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time() - self.step_time)", "metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = [] if config.save_checkpoint and (device_num ==", "step_loss[0] if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1)", "= local_data_url + '/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE'))", "loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if", "import GetDatasetGenerator_softmax, 
GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url',", "(time.time() - self.step_time) * 1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and", "else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb", "= 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path", "import config1 as config from src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name ==", "config from src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name == 'triplet': from src.config", "init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define", "2.0 (the \"License\"); # you may not use this file except in compliance", "# modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset", "config from src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name == 'quadruplet': from src.config", "* 1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss", "for the specific language governing permissions and # limitations under the License. 
#", "= '/cache/ckpt' local_train_url = '/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True)", "src.config import config2 as config from src.dataset import create_dataset1 as create_dataset else: print('loss", "# init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) #", "config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "1])) if __name__ == '__main__': if args_opt.loss_name == 'softmax': from src.config import config0", "default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset", "as config from src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name == 'quadruplet': from", "opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name", "from src.loss import Softmaxloss from src.loss import Tripletloss from src.loss import Quadrupletloss from", "= int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train' if", "= create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define net net", "# # Unless required by applicable law or agreed to in writing, software", "'quadruplet': dataset_generator = 
GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size,", "else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb", "express or implied. # See the License for the specific language governing permissions", "+= [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb,", "ast import numpy as np from mindspore import context from mindspore import Tensor", "CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb =", "Tripletloss from src.loss import Quadrupletloss from src.lr_generator import get_lr from src.resnet import resnet50", "either express or implied. 
# See the License for the specific language governing", "GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator, do_train=True, batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size", "self.lr_init = lr_init self.lr_init_len = len(lr_init) def epoch_begin(self, run_context): self.losses = [] self.epoch_time", "model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet')", "time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time() -", "= int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url", "DATA_DIR = local_data_url + '/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num =", "specific language governing permissions and # limitations under the License. 
# ============================================================================ \"\"\"train", "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint,", "from src.loss import Tripletloss from src.loss import Quadrupletloss from src.lr_generator import get_lr from", "2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version", "avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:',", "src.config import config1 as config from src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name", "= parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init", "classification') # modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None,", "np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}],", "1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num,", "mindspore import context from mindspore import Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model", "device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num) # init", "the License. 
# You may obtain a copy of the License at #", "or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_'", "== 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR,", "1 or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name =", "and (device_num == 1 or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size,", "import load_checkpoint, load_param_into_net from mindspore.common import set_seed from mindspore.communication.management import init from mindspore.train.callback", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "% cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss,", "elif args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset =", "print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts: import moxing as", "numpy as np from mindspore import context from mindspore import Tensor from mindspore.nn.optim.momentum", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "import init from mindspore.train.callback import Callback from src.loss import Softmaxloss from src.loss import", "from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import Model from 
mindspore.context import ParallelMode from", "per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def", "= ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train model", "import get_lr from src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1)", "Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License,", "config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:', step_size) def step_begin(self, run_context): self.step_time = time.time() def step_end(self,", "self.losses = [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params =", "args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no') dataset = create_dataset(dataset_generator,", "steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode)) # define opt opt = Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) #", "parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt = parser.parse_args()", "default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint", "GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # 
modelarts parameter parser.add_argument('--train_url', type=str, default=None,", "Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the", "config0 as config from src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name == 'triplet':", "'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator = GetDatasetGenerator_softmax(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet':", "type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt',", "from mindspore.common import set_seed from mindspore.communication.management import init from mindspore.train.callback import Callback from", "path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts',", "self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds = (time.time()", "cb_params = run_context.original_args() step_mseconds = (time.time() - self.step_time) * 1000 step_loss = cb_params.net_outputs", "moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data'", "cb_params = run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds", "Momentum(params=net.trainable_params(), learning_rate=lr, momentum=config.momentum, weight_decay=config.weight_decay, loss_scale=config.loss_scale) # define loss, model if args_opt.loss_name == 'softmax':", "str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb += 
[Monitor(lr_init=lr.asnumpy())]", "cb += [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset,", "args_opt.loss_name == 'triplet': from src.config import config1 as config from src.dataset import create_dataset1", "step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0]", "load_checkpoint, load_param_into_net from mindspore.common import set_seed from mindspore.communication.management import init from mindspore.train.callback import", "+= [Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size - config.pretrain_epoch_size, dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts", "help='ckpt path name') parser.add_argument('--run_distribute', type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id')", "type=ast.literal_eval, default=False, help='Run distribute') args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None):", "init weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict", "with the License. 
# You may obtain a copy of the License at", "= args_opt.dataset_path + '/' # create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if", "loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1]))", "isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss", "os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url", "step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__': if args_opt.loss_name == 'softmax': from", "config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num - 1])) if __name__ == '__main__':", "step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch = (cb_params.cur_step_num", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id DATA_DIR", "run_context): self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds =", "def __init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init = lr_init self.lr_init_len = len(lr_init) def epoch_begin(self,", "args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def 
__init__(self, lr_init=None): super(Monitor, self).__init__() self.lr_init =", "self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per step", "smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name == 'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet':", "from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # modelarts", "step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds, self.lr_init[cb_params.cur_step_num -", "if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id)", "print(\"epoch time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses)))", "{:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode,", "param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max,", "License. 
# ============================================================================ \"\"\"train resnet.\"\"\" import os import time import argparse import ast", "law or agreed to in writing, software # distributed under the License is", "args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True)", "the License for the specific language governing permissions and # limitations under the", "= time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args() epoch_mseconds = (time.time()", "local_train_url = '/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url =", "config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name = 'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts:", "distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt =", "dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint and (device_num == 1 or device_id", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url = '/cache/ckpt' local_train_url = '/cache/train'", "from src.lr_generator import get_lr from src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax, GetDatasetGenerator_triplet,", 
"local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/'", "output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path')", "config=config_ck) else: save_ckpt_path = os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck)", "load_param_into_net from mindspore.common import set_seed from mindspore.communication.management import init from mindspore.train.callback import Callback", "device_num = 1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' # create", "from mindspore import context from mindspore import Tensor from mindspore.nn.optim.momentum import Momentum from", "src.dataset import create_dataset1 as create_dataset elif args_opt.loss_name == 'quadruplet': from src.config import config2", "= Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name", "path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet", "- self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num print(\"epoch time: {:5.3f}, per", "== 'softmax': from src.config import config0 as config from src.dataset import create_dataset0 as", "context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' #", "directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb += 
[Monitor(lr_init=lr.asnumpy())] # train model model.train(config.epoch_size -", "mindspore.communication.management import init from mindspore.train.callback import Callback from src.loss import Softmaxloss from src.loss", "== 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else: print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale,", "isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy()) self.losses.append(step_loss) cur_step_in_epoch", "in compliance with the License. # You may obtain a copy of the", "{:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:', config.batch_size, 'epochs_size:', config.epoch_size, 'lr_model:', config.lr_decay_mode, 'lr:', config.lr_max, 'step_size:',", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "'/cache/train' if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id))", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' # create dataset TRAIN_LIST", "= cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if", "import config0 as config from src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name ==", "train_list=TRAIN_LIST) elif args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet':", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common import set_seed from mindspore.communication.management import init from", "help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name:", "== 'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model", "See the License for the specific language governing permissions and # limitations under", "str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/' else: if args_opt.run_distribute:", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "cb = [] if config.save_checkpoint and (device_num == 1 or device_id == 0):", "np from mindspore import context from mindspore import Tensor from mindspore.nn.optim.momentum import Momentum", "+'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb += [Monitor(lr_init=lr.asnumpy())] #", "create_dataset0 as create_dataset elif args_opt.loss_name == 
'triplet': from src.config import config1 as config", "device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else:", "print('loss no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model = Model(net,", "default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run distribute') args_opt = parser.parse_args() class Monitor(Callback):", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "= argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url',", "checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr =", "metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False)", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict =", "distributed if args_opt.run_modelarts: import moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE'))", "no') loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False) if args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss,", "# train model model.train(config.epoch_size - config.pretrain_epoch_size, 
dataset, callbacks=cb, dataset_sink_mode=True) if args_opt.run_modelarts and config.save_checkpoint", "parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str,", "GetDatasetGenerator_softmax, GetDatasetGenerator_triplet, GetDatasetGenerator_quadruplet set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url', type=str,", "step_begin(self, run_context): self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args() step_mseconds", "= os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/' else:", "create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if", "gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id DATA_DIR = args_opt.dataset_path +", "argparse import ast import numpy as np from mindspore import context from mindspore", "no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts: import moxing as mox", "and # limitations under the License. 
# ============================================================================ \"\"\"train resnet.\"\"\" import os import", "config from src.dataset import create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False)", "DATA_DIR = args_opt.dataset_path + '/' # create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt'", "'__main__': if args_opt.loss_name == 'softmax': from src.config import config0 as config from src.dataset", "resnet50(class_num=config.class_num) # init weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path =", "parser = argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path')", "loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = [] if config.save_checkpoint and (device_num", "self.step_time) * 1000 step_loss = cb_params.net_outputs if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor):", "== 'triplet': from src.config import config1 as config from src.dataset import create_dataset1 as", "default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained", "else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr", "from mindspore.train.loss_scale_manager import FixedLossScaleManager from mindspore.train.serialization import load_checkpoint, load_param_into_net from mindspore.common import set_seed", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. 
# You may obtain a copy of", "gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url)", "mindspore.train.model import Model from mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from", "cb_params.batch_num print(\"epoch time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds,", "elif args_opt.loss_name == 'triplet': from src.config import config1 as config from src.dataset import", "print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num, config.epoch_size, cur_step_in_epoch, cb_params.batch_num, step_loss, np.mean(self.losses), step_mseconds,", "__name__ == '__main__': if args_opt.loss_name == 'softmax': from src.config import config0 as config", "args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone,", "if args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False)", "help='Dataset path') parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file')", "if isinstance(step_loss, (tuple, list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor):", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb +=", "permissions and # limitations under the License. # ============================================================================ \"\"\"train resnet.\"\"\" import os", "= (cb_params.cur_step_num - 1) % cb_params.batch_num print(\"epochs: [{:3d}/{:3d}], step:[{:5d}/{:5d}], loss:[{:8.5f}/{:8.5f}], time:[{:5.3f}], lr:[{:8.5f}]\".format( cb_params.cur_epoch_num,", "args_opt.dataset_path + '/' # create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name", "# init weight if args_opt.run_modelarts: checkpoint_path = os.path.join(local_ckpt_url, args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path", "if config.save_checkpoint and (device_num == 1 or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs", "'/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init() context.reset_auto_parallel_context()", "args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' # create dataset TRAIN_LIST = DATA_DIR +", "help='Checkpoint file') parser.add_argument('--loss_name', type=str, default='softmax', help='loss name: softmax(pretrained) triplet quadruplet') # Ascend parameter", "set_seed(1) parser = argparse.ArgumentParser(description='Image classification') # modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output", "init distributed if args_opt.run_modelarts: import moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num =", "+ '/' else: if args_opt.run_distribute: device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) init()", "init() 
context.reset_auto_parallel_context() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) else: context.set_context(device_id=args_opt.device_id) device_num = 1 device_id = args_opt.device_id", "batch_size=config.batch_size, device_num=device_num, rank_id=device_id) step_size = dataset.get_dataset_size() # define net net = resnet50(class_num=config.class_num) #", "args_opt.checkpoint_name) else: checkpoint_path = args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr", "src.loss import Tripletloss from src.loss import Quadrupletloss from src.lr_generator import get_lr from src.resnet", "from mindspore.communication.management import init from mindspore.train.callback import Callback from src.loss import Softmaxloss from", "loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1) else:", "parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt path name') parser.add_argument('--run_distribute',", "create_dataset1 as create_dataset elif args_opt.loss_name == 'quadruplet': from src.config import config2 as config", "run_context.original_args() epoch_mseconds = (time.time() - self.epoch_time) * 1000 per_step_mseconds = epoch_mseconds / cb_params.batch_num", "if device_num > 1: init() context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url", "args_opt.loss_name == 'triplet': dataset_generator = GetDatasetGenerator_triplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator =", "GetDatasetGenerator_triplet(data_dir=DATA_DIR, 
train_list=TRAIN_LIST) elif args_opt.loss_name == 'quadruplet': dataset_generator = GetDatasetGenerator_quadruplet(data_dir=DATA_DIR, train_list=TRAIN_LIST) else: print('loss no')", "args_opt.loss_name == 'softmax': model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else:", "the specific language governing permissions and # limitations under the License. # ============================================================================", "parallel_mode=ParallelMode.DATA_PARALLEL, gradients_mean=True) local_data_url = os.path.join(local_data_url, str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url,", "list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss = np.mean(step_loss.asnumpy())", "if args_opt.run_modelarts: import moxing as mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id)", "mox device_id = int(os.getenv('DEVICE_ID')) device_num = int(os.getenv('RANK_SIZE')) context.set_context(device_id=device_id) local_data_url = '/cache/data' local_ckpt_url =", "Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) else: model = Model(net.backbone, loss_fn=loss, optimizer=opt,", "def step_begin(self, run_context): self.step_time = time.time() def step_end(self, run_context): \"\"\"step_end\"\"\" cb_params = run_context.original_args()", "lr_init self.lr_init_len = len(lr_init) def epoch_begin(self, run_context): self.losses = [] self.epoch_time = time.time()", "context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts: import moxing as mox device_id", "loss, model if args_opt.loss_name == 
'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name", "import Softmaxloss from src.loss import Tripletloss from src.loss import Quadrupletloss from src.lr_generator import", "mindspore.common import set_seed from mindspore.communication.management import init from mindspore.train.callback import Callback from src.loss", "parser.add_argument('--ckpt_url', type=str, default=None, help='Pretrained ckpt path') parser.add_argument('--checkpoint_name', type=str, default='resnet-120_625.ckpt', help='Checkpoint file') parser.add_argument('--loss_name', type=str,", "load_param_into_net(net.backbone, param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size,", "= [] self.epoch_time = time.time() dataset_generator.__init__(data_dir=DATA_DIR, train_list=TRAIN_LIST) def epoch_end(self, run_context): cb_params = run_context.original_args()", "time import argparse import ast import numpy as np from mindspore import context", "'ResNet50_' + args_opt.loss_name if args_opt.run_modelarts: ckpt_cb = ModelCheckpoint(prefix=check_name, directory=local_train_url, config=config_ck) else: save_ckpt_path =", "(tuple, list)) and isinstance(step_loss[0], Tensor): step_loss = step_loss[0] if isinstance(step_loss, Tensor): step_loss =", "quadruplet') # Ascend parameter parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_path', type=str, default=None, help='ckpt", "loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=None, amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = [] if config.save_checkpoint", "args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end,", "Ltd # # 
Licensed under the Apache License, Version 2.0 (the \"License\"); #", "time: {:5.3f}, per step time: {:5.3f}, avg loss: {:8.5f}\" .format(epoch_mseconds, per_step_mseconds, np.mean(self.losses))) print('batch_size:',", "model if args_opt.loss_name == 'softmax': loss = Softmaxloss(sparse=True, smooth_factor=0.1, num_classes=config.class_num) elif args_opt.loss_name ==", "param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs, total_epochs=config.epoch_size, steps_per_epoch=step_size, lr_decay_mode=config.lr_decay_mode))", "modelarts parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path')", "= load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max, warmup_epochs=config.warmup_epochs,", "help='Run distribute') args_opt = parser.parse_args() class Monitor(Callback): \"\"\"Monitor\"\"\" def __init__(self, lr_init=None): super(Monitor, self).__init__()", "str(device_id)) local_ckpt_url = os.path.join(local_ckpt_url, str(device_id)) mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url +", "import create_dataset0 as create_dataset elif args_opt.loss_name == 'triplet': from src.config import config1 as", "# create dataset TRAIN_LIST = DATA_DIR + 'train_half.txt' if args_opt.loss_name == 'softmax': dataset_generator", "Quadrupletloss from src.lr_generator import get_lr from src.resnet import resnet50 from src.utility import GetDatasetGenerator_softmax,", "'triplet': loss = Tripletloss(margin=0.1) elif args_opt.loss_name == 'quadruplet': loss = Quadrupletloss(train_batch_size=config.batch_size, samples_each_class=2, margin=0.1)", "elif args_opt.loss_name == 
'quadruplet': from src.config import config2 as config from src.dataset import", "local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/' else: if args_opt.run_distribute: device_id =", "== 1 or device_id == 0): config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_epochs * step_size, keep_checkpoint_max=config.keep_checkpoint_max) check_name", "= args_opt.ckpt_path param_dict = load_checkpoint(checkpoint_path) load_param_into_net(net.backbone, param_dict) # init lr lr = Tensor(get_lr(lr_init=config.lr_init,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "amp_level='O3', keep_batchnorm_fp32=False) #define callback cb = [] if config.save_checkpoint and (device_num == 1", "mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from mindspore.train.loss_scale_manager import FixedLossScaleManager from", "mox.file.copy_parallel(args_opt.data_url, local_data_url) mox.file.copy_parallel(args_opt.ckpt_url, local_ckpt_url) DATA_DIR = local_data_url + '/' else: if args_opt.run_distribute: device_id", "type=ast.literal_eval, default=False, help='Run distribute') parser.add_argument('--device_id', type=int, default=0, help='Device id') parser.add_argument('--run_modelarts', type=ast.literal_eval, default=False, help='Run", "set_seed from mindspore.communication.management import init from mindspore.train.callback import Callback from src.loss import Softmaxloss", "import context from mindspore import Tensor from mindspore.nn.optim.momentum import Momentum from mindspore.train.model import", "os.path.join(config.save_checkpoint_path, 'model_'+ str(device_id) +'/') ckpt_cb = ModelCheckpoint(prefix=check_name, directory=save_ckpt_path, config=config_ck) cb += [ckpt_cb] cb", "from mindspore.context import ParallelMode from mindspore.train.callback import ModelCheckpoint, CheckpointConfig from 
mindspore.train.loss_scale_manager import FixedLossScaleManager", "parameter parser.add_argument('--train_url', type=str, default=None, help='Train output path') parser.add_argument('--data_url', type=str, default=None, help='Dataset path') parser.add_argument('--ckpt_url',", "src.dataset import create_dataset1 as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init", "as config from src.dataset import create_dataset0 as create_dataset elif args_opt.loss_name == 'triplet': from", "\"\"\"train resnet.\"\"\" import os import time import argparse import ast import numpy as", "args_opt.loss_name == 'quadruplet': from src.config import config2 as config from src.dataset import create_dataset1", "as create_dataset else: print('loss no') context.set_context(mode=context.GRAPH_MODE, device_target=\"Ascend\", save_graphs=False) # init distributed if args_opt.run_modelarts:", "= args_opt.device_id DATA_DIR = args_opt.dataset_path + '/' # create dataset TRAIN_LIST = DATA_DIR" ]
[ "who = who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins", "Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self,", "if match: component = match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component %(component)s, by", "%s' % (component, )) return project = self.projects[project_id] LOG(u\"Will add entry for user", "lookups mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if project_id is None:", "= re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\:", "timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP", "re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings, projects, selector_mappings):", "product, component, )) return project = self.projects[project_id] LOG(u\"Will add entry for user %s", "return add_time(user_id, date, bug_id, project_id, hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS", "line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1)) continue match", "encoding else encoding) for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def", "poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _ =", "'\\r\\n' in payload: DEBUG(u'Using CRLF istead of LF') newline = '\\r\\n' for line", "all pre-conditions should be 
checked by now # start fetching fetcher = MailFetcher(", "create timeentries from them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for", "u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not matched %r\" %", "return user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email", "date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject)", "intranet3.utils.timeentry import add_time try: from cStringIO import StringIO except ImportError: from StringIO import", "bug_id, hours, subject )) return user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email =", "tracker is None: DEBUG(u'Email from %s ignored, no tracker matched' % (sender, ))", "(\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\", "0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product %(product)s, component %(component)s, by %(who)s,", "is None: DEBUG(u'Project not found for product %s, component %s' % ( product,", "= getattr(self, 'handle_%s_email' % tracker.type) # handler should parse the response and return", "LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS", "= match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue DEBUG(u'Found", "%s' % (user.name, )) # selector_mapping given explicitly to avoid cache lookups mapping", "login self.password = password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats =", "msg): \"\"\" When single message was retrieved \"\"\" 
sender = decode(msg['From']) tracker =", "def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config", "in payload: DEBUG(u'Using CRLF istead of LF') newline = '\\r\\n' for line in", "= payload[0] # first is plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding') payload", "import b64decode from pprint import pformat from email.header import decode_header from email.utils import", "TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG", ") # all pre-conditions should be checked by now # start fetching fetcher", "with subject %r retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date)))", "if project_id is None: DEBUG(u'Project not found for component %s' % (component, ))", "intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try: from cStringIO", "MAX_EMAILS = 100 def __init__(self, login, password): self.login = login self.password = password", "in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match:", "config is None: WARN(u'Application config not found, emails cannot be checked') return trackers", "match: hours = float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with", "who = '' component = '' payload = get_msg_payload(msg) for line in payload.split('\\n'):", "date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject", "subject def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product =", "self.login = login self.password = password def __iter__(self): pop_conn 
= poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password)", "self.trackers: if email in sender: return self.trackers[email] else: return None def get(self, msg):", "return val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) ==", "= MailFetcher( username, password, ) # ok, we have all mails, lets create", "match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue DEBUG(u'Found bug", "from %(date)s, hours %(hours)s' % locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug with", "first is plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted", "in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1)) continue match =", "num, _ = stats num = num if num < self.MAX_EMAILS else self.MAX_EMAILS", "config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker", "if not match: WARN(u\"Trac subject not matched %r\" % (subject, )) return subject", "%r\" % subject) return bug_id, subject = match.groups() subject = subject.strip() is_new_bug =", "transaction from intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping", "hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login,", "self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if project_id is None: DEBUG(u'Project not found", "'\\r\\n' for line in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match =", "return essential info or None data = handler(msg, tracker) if data is None:", 
"a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding %r' % (payload, encoding)) if encoding", "for user %s project %s bug #%s hours %s title %s\" % (", "') payload = get_msg_payload(msg) username = who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User", "= self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component", "def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) == list: a_msg", "= dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings = dict( (tracker.id,", "decode_header from email.utils import parsedate import transaction from intranet3.models import ApplicationConfig, Project, Tracker,", "= match.group(2) hours = 0.0 who = '' component = '' payload =", "= config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues()", "payload = msg.get_payload() if type(payload) == list: a_msg = payload[0] # first is", "bug title %(subject)s component %(component)s, by %(who)s from %(date)s, hours %(hours)s' % locals())", "self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component", "(pop_conn.retr(i) for i in range(1, num + 1)) messages = (\"\\n\".join(mssg[1]) for mssg", "(.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:')", "subject = data return add_time(user_id, date, bug_id, project_id, hours, subject) class MailFetcher(object): HOST", "of \\n if '\\r\\n' in payload: DEBUG(u'Using CRLF istead of LF') newline =", "for tracker in trackers.itervalues() ) # find all projects connected to the tracker", "in decode_header(header) 
).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val", "encoding %r' % (payload, encoding)) if encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif", "= handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From']) for email", "handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From']) for email in self.trackers: if email", "connected to the tracker projects = dict( (project.id, project) for project in Project.query.all()", "subject.startswith('New ') payload = get_msg_payload(msg) username = who.lower() if username not in self.logins_mappings[tracker.id]:", "= decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with", "%(hours)f %(date)s' % locals()) if is_new_bug: # new bug - create with 0", "= (\"\\n\".join(mssg[1]) for mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in messages)", "username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker))", "(subject, )) return subject = match.group(2) hours = 0.0 who = '' component", "be checked') return trackers = dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer !=", "pformat from email.header import decode_header from email.utils import parsedate import transaction from intranet3.models", "project = self.projects[project_id] LOG(u\"Will add entry for user %s project %s bug #%s", "__init__(self, trackers, logins_mappings, projects, selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings self.projects =", "essential info or None data = handler(msg, tracker) if data is None: #", 
"the tracker projects = dict( (project.id, project) for project in Project.query.all() ) #", "from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':',", "match = self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if", "SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try:", "and return essential info or None data = handler(msg, tracker) if data is", "payload = get_msg_payload(msg) username = who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s", "a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding %r' % (payload,", "msg): sender = decode(msg['From']) for email in self.trackers: if email in sender: return", "\"\"\" Extracts timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours", "found %s' % (subject, )) elif hours == 0.0: DEBUG(u'Ignoring non-new bug without", "\"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg) if tracker is None: DEBUG(u'Email from", "projects connected to the tracker projects = dict( (project.id, project) for project in", "= logins_mappings self.projects = projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker): date", "hours\") return who = who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not", "match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break", ") if not len(trackers): WARN(u'No trackers have mailers configured, email will not be", "email will not be checked') return username = config.google_user_email.encode('utf-8') password = 
config.google_user_password.encode('<PASSWORD>') #", "\\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by", "LOG(u'Emails: %s' % (pformat(stats))) num, _ = stats num = num if num", "from email.header import decode_header from email.utils import parsedate import transaction from intranet3.models import", "DEBUG(u'User %s not in logins mapping' % (who, )) return user = self.logins_mappings[tracker.id][who]", "for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1)) continue", "pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num,", "ImportError: from StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN =", "= data return add_time(user_id, date, bug_id, project_id, hours, subject) class MailFetcher(object): HOST =", "def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' %", "datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if not match:", "= self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not matched %r\" % (subject, ))", "%s' % (subject, )) elif hours == 0.0: DEBUG(u'Ignoring non-new bug without hours')", "handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject %r", "not found for product %s, component %s' % ( product, component, )) return", "%s' % (user.name, )) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if", "self.trackers = trackers self.logins_mappings = logins_mappings self.projects = projects 
self.selector_mappings = selector_mappings def", "fetching fetcher = MailFetcher( username, password, ) # ok, we have all mails,", "0 h, first strip title subject = subject[4:].strip() DEBUG(u'Bug creation found %s' %", "handler should parse the response and return essential info or None data =", "EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try: from cStringIO import StringIO except ImportError:", "should parse the response and return essential info or None data = handler(msg,", "MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login, password): self.login =", "decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val =", "%s' % (pformat(stats))) num, _ = stats num = num if num <", "pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _ = stats", "for product %s, component %s' % ( product, component, )) return project =", "%r\" % (subject, )) return subject = match.group(2) hours = 0.0 who =", "subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r' % (subject,", "else: match = self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break else: hours =", "encoding)) if encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif encoding == 'base64': payload", "(pformat(stats))) num, _ = stats num = num if num < self.MAX_EMAILS else", "import parsedate import transaction from intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from", "= projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject", "username = who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins", "INFO_LOG(__name__) EXCEPTION = 
EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995", "by %(who)s from %(date)s' % locals()) bug_id = int(bug_id) newline = '\\n' #", "payload[0] # first is plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding') payload =", ")) return user.id, date, bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg, tracker): date", "if match: hours = float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s", "mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if project_id is None: DEBUG(u'Project", "# ok, we have all mails, lets create timeentries from them extractor =", "component) if project_id is None: DEBUG(u'Project not found for component %s' % (component,", "intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping from intranet3.log", "return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug", "for mssg in messages) for msg in messages: yield msg pop_conn.quit() class MailCheckerTask(object):", "TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP", "messages = (email.parser.Parser().parsestr(mssg) for mssg in messages) for msg in messages: yield msg", "payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding %r' % (payload, encoding))", "for msg in messages: yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs):", "handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender", ") selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() ) # 
find", "date, bug_id, project_id, hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email =", "int(bug_id) newline = '\\n' # some emails have \\r\\n insted of \\n if", "handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From']) for email in", "hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product %(product)s, component %(component)s,", "mails, lets create timeentries from them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings,", "continue DEBUG(u'Found bug title %(subject)s component %(component)s, by %(who)s from %(date)s, hours %(hours)s'", "(who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, )) mapping", "hours, subject )) return user.id, date, bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg,", "(tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() ) # find all projects connected to", "# -*- coding: utf-8 -*- \"\"\" Sending emails \"\"\" import re import email", "in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, )) return user", "def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product'])", "stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _ = stats num =", "= re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP", "# handler should parse the response and return essential info or None data", "DEBUG(u'User %s not in logins mapping' % (who, )) return DEBUG(u'Found bug title", "should be ignored return user_id, date, bug_id, project_id, hours, subject = data return", "def __init__(self, trackers, logins_mappings, projects, 
selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings self.projects", "bug_id, project_id, hours, subject = data return add_time(user_id, date, bug_id, project_id, hours, subject)", "LOG(u\"Will add entry for user %s project %s bug #%s hours %s title", "for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg): encoding", "dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() ) # find all projects connected", "= match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component %(component)s, by %(who)s from %(date)s,", "them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for msg in fetcher:", "if not encoding else encoding) for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP =", "- html encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r with", "from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP =", "payload = get_msg_payload(msg) for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours", "= 0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product %(product)s, component %(component)s, by", ")) return user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email =", "in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, )) return DEBUG(u'Found", "mapping' % (who, )) return DEBUG(u'Found bug title %(subject)s product %(product)s, component %(component)s,", "trackers = dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '')", "not len(trackers): WARN(u'No trackers have mailers configured, email will not 
be checked') return", "have all mails, lets create timeentries from them extractor = TimeEntryMailExtractor( trackers, logins_mappings,", "email in sender: return self.trackers[email] else: return None def get(self, msg): \"\"\" When", "new bug - create with 0 h, first strip title subject = subject[4:].strip()", "subject not matched %r\" % (subject, )) return subject = match.group(2) hours =", "# find all projects connected to the tracker projects = dict( (project.id, project)", "% (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u''))", "DEBUG(u'Email from %s ignored, no tracker matched' % (sender, )) return # find", "self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for i in range(1, num + 1))", "float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product", "MailFetcher( username, password, ) # ok, we have all mails, lets create timeentries", "= re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings, projects,", "subject %r retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject", "first strip title subject = subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject, ))", "return trackers = dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer !=", "bug #%s hours %s title %s\" % ( user.name, project.name, bug_id, hours, subject", "DEBUG(u'Found bug title %(subject)s component %(component)s, by %(who)s from %(date)s, hours %(hours)s' %", "match: WARN(u\"Trac subject not matched %r\" % (subject, )) return subject = match.group(2)", "= '\\n' # some emails have \\r\\n insted of \\n if '\\r\\n' in", "!= None).filter(Tracker.mailer != '') ) if not len(trackers): 
WARN(u'No trackers have mailers configured,", "#%(bug_id)s with title %(subject)s product %(product)s, component %(component)s, by %(who)s, hours %(hours)f %(date)s'", "re import email import quopri import datetime import time import poplib from base64", "= self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if project_id is None: DEBUG(u'Project not", "None data = handler(msg, tracker) if data is None: # email should be", "datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not", "match: DEBUG(u\"Subject doesn't match regexp: %r\" % subject) return bug_id, subject = match.groups()", "create with 0 h, first strip title subject = subject[4:].strip() DEBUG(u'Bug creation found", "num < self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for i in range(1, num", "mailers configured, email will not be checked') return username = config.google_user_email.encode('utf-8') password =", "in messages: yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs): config =", "_ = stats num = num if num < self.MAX_EMAILS else self.MAX_EMAILS messages", "add entry for user %s project %s bug #%s hours %s title %s\"", "= selector_mappings def handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message", "% tracker.type) # handler should parse the response and return essential info or", "TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)')", "self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if project_id is None: DEBUG(u'Project not found", "% (component, )) return project = self.projects[project_id] LOG(u\"Will add entry for user %s", "add_time try: from cStringIO import 
StringIO except ImportError: from StringIO import StringIO LOG", "(user.name, )) # selector_mapping given explicitly to avoid cache lookups mapping = self.selector_mappings[tracker.id]", "projects, selector_mappings, ) for msg in fetcher: timeentry = extractor.get(msg) if timeentry: DBSession.add(timeentry)", "self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break else: hours", "# start fetching fetcher = MailFetcher( username, password, ) # ok, we have", "Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload", "not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, )) return", "in Project.query.all() ) # all pre-conditions should be checked by now # start", "for line in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line)", "ignored, no tracker matched' % (sender, )) return # find appopriate handler handler", "%(date)s' % locals()) bug_id = int(bug_id) newline = '\\n' # some emails have", "continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line)", "%(who)s from %(date)s, hours %(hours)s' % locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug", "product %s, component %s' % ( product, component, )) return project = self.projects[project_id]", "= '' payload = get_msg_payload(msg) for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if", "% locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug with no hours\") return who", "payload = b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\"", "= b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\" 
SUBJECT_REGEXP", "= decode(msg['From']) for email in self.trackers: if email in sender: return self.trackers[email] else:", "dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if", "MailCheckerTask(object): def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application", "component %(component)s, by %(who)s, hours %(hours)f %(date)s' % locals()) if is_new_bug: # new", "data return add_time(user_id, date, bug_id, project_id, hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com'", "hours, subject def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product", "subject = match.groups() subject = subject.strip() is_new_bug = subject.startswith('New ') payload = get_msg_payload(msg)", "handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From']) for", "= 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login, password): self.login = login self.password", "title %(subject)s product %(product)s, component %(component)s, by %(who)s, hours %(hours)f %(date)s' % locals())", "quopri import datetime import time import poplib from base64 import b64decode from pprint", "email.header import decode_header from email.utils import parsedate import transaction from intranet3.models import ApplicationConfig,", "0.0 who = '' component = '' payload = get_msg_payload(msg) for line in", "= get_msg_payload(msg) username = who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not", "decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r' %", "class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login, password): self.login", "SUBJECT_REGEXP = re.compile(r'^\\[Bug 
(\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$')", "%(subject)s product %(product)s, component %(component)s, by %(who)s, hours %(hours)f %(date)s' % locals()) if", "h, first strip title subject = subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject,", "DEBUG(u'Ignoring non-new bug without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' %", "\"\"\" When single message was retrieved \"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg)", "%r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n',", "logins_mappings, projects, selector_mappings, ) for msg in fetcher: timeentry = extractor.get(msg) if timeentry:", "DEBUG(u'Using CRLF istead of LF') newline = '\\r\\n' for line in payload.split(newline): if", "= decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not matched", "for tracker in trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker in", "in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in messages) for msg in messages:", "configured, email will not be checked') return username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>')", "*args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config not found,", "if tracker is None: DEBUG(u'Email from %s ignored, no tracker matched' % (sender,", "else encoding) for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val):", "DEBUG(u'Extracted email msg %r with encoding %r' % (payload, 
encoding)) if encoding ==", "= float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue match", "who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue", "pprint import pformat from email.header import decode_header from email.utils import parsedate import transaction", "handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From']) for email in self.trackers:", "DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours decode = lambda header: u''.join(", "be ignored return user_id, date, bug_id, project_id, hours, subject = data return add_time(user_id,", "messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in messages) for msg in messages: yield", "cannot be checked') return trackers = dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer", "len(trackers): WARN(u'No trackers have mailers configured, email will not be checked') return username", "%s title %s\" % ( user.name, project.name, bug_id, hours, subject )) return user.id,", "messages: yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True)", "add_time(user_id, date, bug_id, project_id, hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS =", "if match: hours = float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who =", "info or None data = handler(msg, tracker) if data is None: # email", "trackers, logins_mappings, projects, selector_mappings, ) for msg in fetcher: timeentry = extractor.get(msg) if", "range(1, num + 1)) messages = (\"\\n\".join(mssg[1]) for mssg in messages) messages =", "# new bug - create with 0 h, first strip title subject =", "password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) 
pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s'", "%(subject)s component %(component)s, by %(who)s from %(date)s, hours %(hours)s' % locals()) if hours", "% subject) return bug_id, subject = match.groups() subject = subject.strip() is_new_bug = subject.startswith('New", "% (payload, encoding)) if encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif encoding ==", "class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP", "msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) == list: a_msg = payload[0] # first", "')) match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match regexp: %r\" %", "decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r' %", "'quoted-printable': payload = quopri.decodestring(payload) elif encoding == 'base64': payload = b64decode(payload) return payload", "msg, tracker): date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved", "= msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r' % (subject, date))", "import add_time try: from cStringIO import StringIO except ImportError: from StringIO import StringIO", "project_id = mapping.match(bug_id, product, component) if project_id is None: DEBUG(u'Project not found for", "None: # email should be ignored return user_id, date, bug_id, project_id, hours, subject", "import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping from intranet3.log import", "= decode(msg['From']) tracker = self.match_tracker(msg) if tracker is None: DEBUG(u'Email from %s ignored,", "data is None: # email should be ignored return user_id, date, bug_id, project_id,", "self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins 
mapping' % (who, )) return user =", "mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in messages) for msg in", "lambda header: u''.join( val.decode('utf-8' if not encoding else encoding) for val, encoding in", "second - html encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r", "%(component)s, by %(who)s, hours %(hours)f %(date)s' % locals()) if is_new_bug: # new bug", "%r retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject =", "not matched %r\" % (subject, )) return subject = match.group(2) hours = 0.0", "= re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP =", "= login self.password = password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats", "from pprint import pformat from email.header import decode_header from email.utils import parsedate import", "DEBUG(u'Found user %s' % (user.name, )) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none',", "(subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match", "= a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding %r' %", "1)) messages = (\"\\n\".join(mssg[1]) for mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg", "component %s' % ( product, component, )) return project = self.projects[project_id] LOG(u\"Will add", "'\\n' # some emails have \\r\\n insted of \\n if '\\r\\n' in payload:", "= decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac", "None: WARN(u'Application 
config not found, emails cannot be checked') return trackers = dict(", "%s ignored, no tracker matched' % (sender, )) return # find appopriate handler", "list: a_msg = payload[0] # first is plaintext, second - html encoding =", "%(date)s' % locals()) if is_new_bug: # new bug - create with 0 h,", "hours = float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue", "trackers, logins_mappings, projects, selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings self.projects = projects", "date, bug_id, project_id, hours, subject = data return add_time(user_id, date, bug_id, project_id, hours,", "selector_mappings def handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with", "who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, )) return", "component %(component)s, by %(who)s from %(date)s' % locals()) bug_id = int(bug_id) newline =", "appopriate handler handler = getattr(self, 'handle_%s_email' % tracker.type) # handler should parse the", "for email in self.trackers: if email in sender: return self.trackers[email] else: return None", "'handle_%s_email' % tracker.type) # handler should parse the response and return essential info", "find appopriate handler handler = getattr(self, 'handle_%s_email' % tracker.type) # handler should parse", "% locals()) bug_id = int(bug_id) newline = '\\n' # some emails have \\r\\n", "entry for user %s project %s bug #%s hours %s title %s\" %", "payload = quopri.decodestring(payload) elif encoding == 'base64': payload = b64decode(payload) return payload class", "some emails have \\r\\n insted of \\n if '\\r\\n' in payload: DEBUG(u'Using CRLF", "logins mapping' % (who, )) return DEBUG(u'Found bug title %(subject)s product %(product)s, component", "MIN_HOURS = 6.995 #record hours decode = lambda header: u''.join( val.decode('utf-8' if not", "payload.split(newline): if 
is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match: hours", "EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record", "= self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match:", "product %(product)s, component %(component)s, by %(who)s from %(date)s' % locals()) bug_id = int(bug_id)", "parse the response and return essential info or None data = handler(msg, tracker)", "= decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r'", "email import quopri import datetime import time import poplib from base64 import b64decode", "= int(bug_id) newline = '\\n' # some emails have \\r\\n insted of \\n", "found for component %s' % (component, )) return project = self.projects[project_id] LOG(u\"Will add", "title %s\" % ( user.name, project.name, bug_id, hours, subject )) return user.id, date,", "= match.groups() subject = subject.strip() is_new_bug = subject.startswith('New ') payload = get_msg_payload(msg) username", "def match_tracker(self, msg): sender = decode(msg['From']) for email in self.trackers: if email in", "bug title %(subject)s product %(product)s, component %(component)s, by %(who)s from %(date)s' % locals())", "if '\\r\\n' in payload: DEBUG(u'Using CRLF istead of LF') newline = '\\r\\n' for", "decode = lambda header: u''.join( val.decode('utf-8' if not encoding else encoding) for val,", "cache lookups mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if project_id is", "login, password): self.login = login self.password = password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST)", "self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject = msg['Subject']", "= self.match_tracker(msg) if 
tracker is None: DEBUG(u'Email from %s ignored, no tracker matched'", "DEBUG(u\"Subject doesn't match regexp: %r\" % subject) return bug_id, subject = match.groups() subject", "subject) return bug_id, subject = match.groups() subject = subject.strip() is_new_bug = subject.startswith('New ')", "import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try: from cStringIO import", "quopri.decodestring(payload) elif encoding == 'base64': payload = b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\"", "sender: return self.trackers[email] else: return None def get(self, msg): \"\"\" When single message", "component = match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component %(component)s, by %(who)s from", "all projects connected to the tracker projects = dict( (project.id, project) for project", "pre-conditions should be checked by now # start fetching fetcher = MailFetcher( username,", "yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if", "return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, )) # selector_mapping given", "'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login, password): self.login = login self.password =", "product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject %r", "= self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match:", "val.decode('utf-8' if not encoding else encoding) for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP", "for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value", "subject.strip() is_new_bug = 
subject.startswith('New ') payload = get_msg_payload(msg) username = who.lower() if username", "pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _ = stats num", "%r retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id =", "CRLF istead of LF') newline = '\\r\\n' for line in payload.split(newline): if is_new_bug:", "DEBUG(u'Message with subject %r retrieved from date %r' % (subject, date)) date =", "%s, component %s' % ( product, component, )) return project = self.projects[project_id] LOG(u\"Will", "handler(msg, tracker) if data is None: # email should be ignored return user_id,", "project_id = mapping.match(bug_id, 'none', component) if project_id is None: DEBUG(u'Project not found for", "messages = (pop_conn.retr(i) for i in range(1, num + 1)) messages = (\"\\n\".join(mssg[1])", "hours, subject = data return add_time(user_id, date, bug_id, project_id, hours, subject) class MailFetcher(object):", "handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who", "get_msg_payload(msg) username = who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in", "subject )) return user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email", "was retrieved \"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg) if tracker is None:", "= re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return", "subject %r retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id", "ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config not found, emails cannot be 
checked')", "be checked by now # start fetching fetcher = MailFetcher( username, password, )", ")) return project = self.projects[project_id] LOG(u\"Will add entry for user %s project %s", "+ 1)) messages = (\"\\n\".join(mssg[1]) for mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for", "\"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked:", "logins_mappings, projects, selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings self.projects = projects self.selector_mappings", "DEBUG(u'Project not found for product %s, component %s' % ( product, component, ))", "matched %r\" % (subject, )) return subject = match.group(2) hours = 0.0 who", "return subject = match.group(2) hours = 0.0 who = '' component = ''", "def get(self, msg): \"\"\" When single message was retrieved \"\"\" sender = decode(msg['From'])", "self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not matched %r\" % (subject, )) return", "locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug with no hours\") return who =", "type(payload) == list: a_msg = payload[0] # first is plaintext, second - html", "have \\r\\n insted of \\n if '\\r\\n' in payload: DEBUG(u'Using CRLF istead of", "Tracker, TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG,", "% (sender, )) return # find appopriate handler handler = getattr(self, 'handle_%s_email' %", "WARN(u'Application config not found, emails cannot be checked') return trackers = dict( (tracker.mailer,", "%(product)s, component %(component)s, by %(who)s from %(date)s' % locals()) bug_id = int(bug_id) newline", "val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in", "return 
username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id,", "(payload, encoding)) if encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif encoding == 'base64':", "return # find appopriate handler handler = getattr(self, 'handle_%s_email' % tracker.type) # handler", "timeentries from them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for msg", "is None: WARN(u'Application config not found, emails cannot be checked') return trackers =", "who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date", "tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if not len(trackers): WARN(u'No trackers", "import StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG =", "by %(who)s from %(date)s, hours %(hours)s' % locals()) if hours <= 0.0: DEBUG(u\"Ignoring", "= self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, )) mapping = self.selector_mappings[tracker.id] project_id =", "import datetime import time import poplib from base64 import b64decode from pprint import", "is_new_bug = subject.startswith('New ') payload = get_msg_payload(msg) username = who.lower() if username not", "in logins mapping' % (who, )) return DEBUG(u'Found bug title %(subject)s product %(product)s,", "email should be ignored return user_id, date, bug_id, project_id, hours, subject = data", "not in logins mapping' % (who, )) return DEBUG(u'Found bug title %(subject)s product", "0.0: DEBUG(u'Ignoring non-new bug without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s'", "= WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours decode = lambda", "title %(subject)s product %(product)s, component %(component)s, by %(who)s from 
%(date)s' % locals()) bug_id", "encoding == 'base64': payload = b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry", "%(hours)s' % locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug with no hours\") return", "locals()) if is_new_bug: # new bug - create with 0 h, first strip", "re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip()", "if is_new_bug: # new bug - create with 0 h, first strip title", ")) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if project_id is None:", "= poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _", "ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG,", "= num if num < self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for i", "mapping' % (who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name,", "by %(who)s, hours %(hours)f %(date)s' % locals()) if is_new_bug: # new bug -", "trackers self.logins_mappings = logins_mappings self.projects = projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg,", "start fetching fetcher = MailFetcher( username, password, ) # ok, we have all", "% (user.name, )) # selector_mapping given explicitly to avoid cache lookups mapping =", "import pformat from email.header import decode_header from email.utils import parsedate import transaction from", "if encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif encoding == 'base64': payload =", "from cStringIO import StringIO except ImportError: from StringIO import StringIO LOG = INFO_LOG(__name__)", "value in 
Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg): encoding =", "self.projects[project_id] LOG(u\"Will add entry for user %s project %s bug #%s hours %s", "= float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s", ")) # selector_mapping given explicitly to avoid cache lookups mapping = self.selector_mappings[tracker.id] project_id", "%r with encoding %r' % (payload, encoding)) if encoding == 'quoted-printable': payload =", "% locals()) if is_new_bug: # new bug - create with 0 h, first", "retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n',", ")) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, )) mapping =", "(who, )) return DEBUG(u'Found bug title %(subject)s product %(product)s, component %(component)s, by %(who)s", "= decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r'", "hours == 0.0: DEBUG(u'Ignoring non-new bug without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found", "= password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails:", "selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() ) # find all", "if type(payload) == list: a_msg = payload[0] # first is plaintext, second -", "encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) == list: a_msg = payload[0]", "project in Project.query.all() ) # all pre-conditions should be checked by now #", "username, password, ) # ok, we have all mails, lets create timeentries from", "import time import poplib from base64 import b64decode from pprint import pformat from", "tracker matched' % (sender, )) 
return # find appopriate handler handler = getattr(self,", "LF') newline = '\\r\\n' for line in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line)", "all mails, lets create timeentries from them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects,", "bug without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, ))", "= handler(msg, tracker) if data is None: # email should be ignored return", "bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component =", "header: u''.join( val.decode('utf-8' if not encoding else encoding) for val, encoding in decode_header(header)", "float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue match =", "return project = self.projects[project_id] LOG(u\"Will add entry for user %s project %s bug", "in logins mapping' % (who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s'", "hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self,", "user.id, date, bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date'])", "project_id, hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self,", "self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, )) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id,", "%s\" % ( user.name, project.name, bug_id, hours, subject )) return user.id, date, bug_id,", "found for product %s, component %s' % ( product, component, )) return project", "is None: DEBUG(u'Project not found for component %s' % (component, )) return project", "match: hours = float(match.group(1)) continue match = 
self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1)", "% (user.name, )) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if project_id", "password, ) # ok, we have all mails, lets create timeentries from them", "return who = who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in", "(\"\\n\".join(mssg[1]) for mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in messages) for", "%(who)s, hours %(hours)f %(date)s' % locals()) if is_new_bug: # new bug - create", "(sender, )) return # find appopriate handler handler = getattr(self, 'handle_%s_email' % tracker.type)", "mssg in messages) for msg in messages: yield msg pop_conn.quit() class MailCheckerTask(object): def", "'') ) if not len(trackers): WARN(u'No trackers have mailers configured, email will not", "try: from cStringIO import StringIO except ImportError: from StringIO import StringIO LOG =", "= (email.parser.Parser().parsestr(mssg) for mssg in messages) for msg in messages: yield msg pop_conn.quit()", "tracker in trackers.itervalues() ) # find all projects connected to the tracker projects", "val = val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload =", "== 'base64': payload = b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from", "subject = subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject, )) elif hours ==", "self.projects = projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker): date = decode(msg['Date'])", "subject = subject.strip() is_new_bug = subject.startswith('New ') payload = get_msg_payload(msg) username = who.lower()", "message was retrieved \"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg) if tracker is", "checked by now # start fetching fetcher = MailFetcher( username, password, ) 
#", "bug_id, project_id, hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100 def", "= dict( (project.id, project) for project in Project.query.all() ) # all pre-conditions should", "= mapping.match(bug_id, 'none', component) if project_id is None: DEBUG(u'Project not found for component", "= dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') )", "Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value))", "if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who,", "DEBUG(u\"Ignoring bug with no hours\") return who = who.lower() if not who in", "re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)')", "with title %(subject)s product %(product)s, component %(component)s, by %(who)s, hours %(hours)f %(date)s' %", ")) return DEBUG(u'Found bug title %(subject)s product %(product)s, component %(component)s, by %(who)s from", "config not found, emails cannot be checked') return trackers = dict( (tracker.mailer, tracker)", "subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login, password):", "for i in range(1, num + 1)) messages = (\"\\n\".join(mssg[1]) for mssg in", "None def get(self, msg): \"\"\" When single message was retrieved \"\"\" sender =", "with encoding %r' % (payload, encoding)) if encoding == 'quoted-printable': payload = quopri.decodestring(payload)", "'none', component) if project_id is None: DEBUG(u'Project not found for component %s' %", "return user.id, date, bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg, tracker): date =", "sender = decode(msg['From']) for email in 
self.trackers: if email in sender: return self.trackers[email]", "(tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if not", "100 def __init__(self, login, password): self.login = login self.password = password def __iter__(self):", "messages) for msg in messages: yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args,", "= dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() ) # find all projects", "= stats num = num if num < self.MAX_EMAILS else self.MAX_EMAILS messages =", "Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if not len(trackers): WARN(u'No trackers have mailers", "with 0 h, first strip title subject = subject[4:].strip() DEBUG(u'Bug creation found %s'", "hours %s title %s\" % ( user.name, project.name, bug_id, hours, subject )) return", "line in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if", "non-new bug without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name,", "<reponame>tmodrzynski/intranet-open # -*- coding: utf-8 -*- \"\"\" Sending emails \"\"\" import re import", "handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From'])", "mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours", "component %s' % (component, )) return project = self.projects[project_id] LOG(u\"Will add entry for", "bug - create with 0 h, first strip title subject = subject[4:].strip() DEBUG(u'Bug", "not found for component %s' % (component, )) return project = self.projects[project_id] LOG(u\"Will", "\"\"\" import re import email import quopri import 
datetime import time import poplib", ").strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value,", "(.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings, projects, selector_mappings): self.trackers =", "WARN(u\"Trac subject not matched %r\" % (subject, )) return subject = match.group(2) hours", "mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if project_id is None: DEBUG(u'Project", "from intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry", "re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP =", "messages = (\"\\n\".join(mssg[1]) for mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in", "class MailCheckerTask(object): def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is None:", "# TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings", "fetcher = MailFetcher( username, password, ) # ok, we have all mails, lets", "be checked') return username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings =", "a_msg = payload[0] # first is plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding')", "= mapping.match(bug_id, product, component) if project_id is None: DEBUG(u'Project not found for product", "self.trackers[email] else: return None def get(self, msg): \"\"\" When single message was retrieved", "bug_id, project_id, 
hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email", "tracker = self.match_tracker(msg) if tracker is None: DEBUG(u'Email from %s ignored, no tracker", "emails cannot be checked') return trackers = dict( (tracker.mailer, tracker) for tracker in", "in trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() )", "= (pop_conn.retr(i) for i in range(1, num + 1)) messages = (\"\\n\".join(mssg[1]) for", "project_id is None: DEBUG(u'Project not found for product %s, component %s' % (", "0.0: DEBUG(u\"Ignoring bug with no hours\") return who = who.lower() if not who", "tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who'])", "date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject)", "tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if not len(trackers):", "return None def get(self, msg): \"\"\" When single message was retrieved \"\"\" sender", "subject )) return user.id, date, bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg, tracker):", "found, emails cannot be checked') return trackers = dict( (tracker.mailer, tracker) for tracker", "num if num < self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for i in", "EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours decode", "DEBUG(u'Bug creation found %s' % (subject, )) elif hours == 0.0: DEBUG(u'Ignoring non-new", "% ( product, component, )) return project = self.projects[project_id] LOG(u\"Will add entry for", "__init__(self, login, password): self.login = 
login self.password = password def __iter__(self): pop_conn =", "% ( user.name, project.name, bug_id, hours, subject )) return user.id, date, bug_id, project_id,", "match regexp: %r\" % subject) return bug_id, subject = match.groups() subject = subject.strip()", "return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, )) mapping = self.selector_mappings[tracker.id]", "(component, )) return project = self.projects[project_id] LOG(u\"Will add entry for user %s project", "handler handler = getattr(self, 'handle_%s_email' % tracker.type) # handler should parse the response", "= trackers self.logins_mappings = logins_mappings self.projects = projects self.selector_mappings = selector_mappings def handle_trac_email(self,", "from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try: from", "self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component %(component)s,", "insted of \\n if '\\r\\n' in payload: DEBUG(u'Using CRLF istead of LF') newline", "encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding %r'", "in sender: return self.trackers[email] else: return None def get(self, msg): \"\"\" When single", "password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in", "self.MAX_EMAILS messages = (pop_conn.retr(i) for i in range(1, num + 1)) messages =", "DBSession from intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from", "not match: DEBUG(u\"Subject doesn't match regexp: %r\" % subject) return bug_id, subject =", "for project in Project.query.all() ) # all pre-conditions should be checked by now", "Project, Tracker, TrackerCredentials, DBSession from 
intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG,", ")) return # find appopriate handler handler = getattr(self, 'handle_%s_email' % tracker.type) #", "data = handler(msg, tracker) if data is None: # email should be ignored", "= subject.startswith('New ') payload = get_msg_payload(msg) username = who.lower() if username not in", "== 0.0: DEBUG(u'Ignoring non-new bug without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user", "to avoid cache lookups mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if", "break else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product %(product)s,", "< self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for i in range(1, num +", "handler = getattr(self, 'handle_%s_email' % tracker.type) # handler should parse the response and", "INFO_LOG from intranet3.utils.timeentry import add_time try: from cStringIO import StringIO except ImportError: from", "product %(product)s, component %(component)s, by %(who)s, hours %(hours)f %(date)s' % locals()) if is_new_bug:", "explicitly to avoid cache lookups mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component)", "find all projects connected to the tracker projects = dict( (project.id, project) for", "lets create timeentries from them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, )", "subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject", "who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' %", "When single message was retrieved \"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg) if", "if config is None: WARN(u'Application config not found, emails cannot be checked') return", "= 
self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break else:", "self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found bug", "if num < self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for i in range(1,", "# selector_mapping given explicitly to avoid cache lookups mapping = self.selector_mappings[tracker.id] project_id =", "import email import quopri import datetime import time import poplib from base64 import", "is None: DEBUG(u'Email from %s ignored, no tracker matched' % (sender, )) return", "Sending emails \"\"\" import re import email import quopri import datetime import time", "'base64': payload = b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail", "plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg", "'' component = '' payload = get_msg_payload(msg) for line in payload.split('\\n'): match =", "= self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, )) # selector_mapping given explicitly to", "email in self.trackers: if email in sender: return self.trackers[email] else: return None def", "- create with 0 h, first strip title subject = subject[4:].strip() DEBUG(u'Bug creation", "decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject", "strip title subject = subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject, )) elif", "def __init__(self, login, password): self.login = login self.password = password def __iter__(self): pop_conn", "trackers have mailers configured, email will not be checked') return username = config.google_user_email.encode('utf-8')", "# first is plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding') 
payload = a_msg.get_payload()", "emails have \\r\\n insted of \\n if '\\r\\n' in payload: DEBUG(u'Using CRLF istead", "self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, )) # selector_mapping given explicitly to avoid", "dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker))", "is_new_bug: # new bug - create with 0 h, first strip title subject", "ignored return user_id, date, bug_id, project_id, hours, subject = data return add_time(user_id, date,", "to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def", "creation found %s' % (subject, )) elif hours == 0.0: DEBUG(u'Ignoring non-new bug", "= a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding %r' % (payload, encoding)) if", "password): self.login = login self.password = password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login)", "TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings =", "user %s project %s bug #%s hours %s title %s\" % ( user.name,", "msg.get_payload() if type(payload) == list: a_msg = payload[0] # first is plaintext, second", "project_id, hours, subject = data return add_time(user_id, date, bug_id, project_id, hours, subject) class", "num = num if num < self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i) for", "trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues() ) #", "= INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS =", "= who.lower() if username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping'", "if username not in 
self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who,", ")) return subject = match.group(2) hours = 0.0 who = '' component =", "if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match: hours =", "for mssg in messages) messages = (email.parser.Parser().parsestr(mssg) for mssg in messages) for msg", "# find appopriate handler handler = getattr(self, 'handle_%s_email' % tracker.type) # handler should", "title subject = subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject, )) elif hours", "if email in sender: return self.trackers[email] else: return None def get(self, msg): \"\"\"", "checked') return username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict(", "# email should be ignored return user_id, date, bug_id, project_id, hours, subject =", "Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP", "product, component) if project_id is None: DEBUG(u'Project not found for product %s, component", "decode(msg['From']) tracker = self.match_tracker(msg) if tracker is None: DEBUG(u'Email from %s ignored, no", "\\n if '\\r\\n' in payload: DEBUG(u'Using CRLF istead of LF') newline = '\\r\\n'", "None: DEBUG(u'Project not found for product %s, component %s' % ( product, component,", "encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif encoding == 'base64': payload = b64decode(payload)", "*(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers,", "single message was retrieved \"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg) if tracker", "the response and return essential info or None data = 
handler(msg, tracker) if", "if hours <= 0.0: DEBUG(u\"Ignoring bug with no hours\") return who = who.lower()", "newline = '\\r\\n' for line in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else:", "i in range(1, num + 1)) messages = (\"\\n\".join(mssg[1]) for mssg in messages)", "= get_msg_payload(msg) for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours =", "in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if not len(trackers): WARN(u'No trackers have", "WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try: from cStringIO import StringIO except", "match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if", "StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG", "if match: who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component =", "TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for msg in fetcher: timeentry = extractor.get(msg)", "return DEBUG(u'Found bug title %(subject)s product %(product)s, component %(component)s, by %(who)s from %(date)s'", "pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _ = stats num = num if", "not be checked') return username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings", "else: return None def get(self, msg): \"\"\" When single message was retrieved \"\"\"", "WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours decode =", "hours = 0.0 who = '' component = '' payload = get_msg_payload(msg) for", "with no hours\") return who = who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User", "TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') 
def __init__(self, trackers, logins_mappings, projects, selector_mappings): self.trackers = trackers", "== 'quoted-printable': payload = quopri.decodestring(payload) elif encoding == 'base64': payload = b64decode(payload) return", "emails \"\"\" import re import email import quopri import datetime import time import", "poplib from base64 import b64decode from pprint import pformat from email.header import decode_header", "hours %(hours)s' % locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug with no hours\")", "= decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't", "else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product %(product)s, component", "% (pformat(stats))) num, _ = stats num = num if num < self.MAX_EMAILS", "will not be checked') return username = config.google_user_email.encode('utf-8') password = config.google_user_password.encode('<PASSWORD>') # TODO", "#record hours decode = lambda header: u''.join( val.decode('utf-8' if not encoding else encoding)", "DEBUG(u'Found user %s' % (user.name, )) # selector_mapping given explicitly to avoid cache", "= datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if", "continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue DEBUG(u'Found bug title", "date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if", "tracker) if data is None: # email should be ignored return user_id, date,", "None).filter(Tracker.mailer != '') ) if not len(trackers): WARN(u'No trackers have mailers configured, email", "user_id, date, bug_id, project_id, hours, subject = data return add_time(user_id, date, 
bug_id, project_id,", "__call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config not", "re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours", "component, )) return project = self.projects[project_id] LOG(u\"Will add entry for user %s project", "is plaintext, second - html encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email", "%(date)s, hours %(hours)s' % locals()) if hours <= 0.0: DEBUG(u\"Ignoring bug with no", "self.logins_mappings = logins_mappings self.projects = projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker):", "subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject, )) elif hours == 0.0: DEBUG(u'Ignoring", "if not len(trackers): WARN(u'No trackers have mailers configured, email will not be checked')", "WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours decode = lambda header:", "stats num = num if num < self.MAX_EMAILS else self.MAX_EMAILS messages = (pop_conn.retr(i)", "coding: utf-8 -*- \"\"\" Sending emails \"\"\" import re import email import quopri", "% (who, )) return DEBUG(u'Found bug title %(subject)s product %(product)s, component %(component)s, by", "%(who)s from %(date)s' % locals()) bug_id = int(bug_id) newline = '\\n' # some", "from email.utils import parsedate import transaction from intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials,", "msg, tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who =", "datetime import time import poplib from base64 import b64decode from pprint import pformat", "no tracker matched' % (sender, )) return # find appopriate handler handler =", 
"config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() )", "date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u'", "(subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match =", "= self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if project_id is None: DEBUG(u'Project not", "component) if project_id is None: DEBUG(u'Project not found for product %s, component %s'", "**kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config not found, emails", "if data is None: # email should be ignored return user_id, date, bug_id,", "match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not matched %r\" % (subject,", "is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match = self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0])", "to the tracker projects = dict( (project.id, project) for project in Project.query.all() )", "regexp: %r\" % subject) return bug_id, subject = match.groups() subject = subject.strip() is_new_bug", "or None data = handler(msg, tracker) if data is None: # email should", "= lambda header: u''.join( val.decode('utf-8' if not encoding else encoding) for val, encoding", "payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)')", "def handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject", "return self.trackers[email] else: return None def get(self, msg): \"\"\" When single message was", "+)?\\[.+\\] \\#\\d+\\: 
(.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes", "tracker in trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker in trackers.itervalues()", "= '\\r\\n' for line in payload.split(newline): if is_new_bug: match = self.HOURS_NEW_BUG_REGEXP.match(line) else: match", "Extracts timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$')", "DEBUG(u'Project not found for component %s' % (component, )) return project = self.projects[project_id]", "match = self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break else: hours = 0.0", "who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' %", ")) elif hours == 0.0: DEBUG(u'Ignoring non-new bug without hours') return user =", "%(product)s, component %(component)s, by %(who)s, hours %(hours)f %(date)s' % locals()) if is_new_bug: #", "TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\" SUBJECT_REGEXP = re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP =", "= who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping'", "# all pre-conditions should be checked by now # start fetching fetcher =", "( user.name, project.name, bug_id, hours, subject )) return user.id, date, bug_id, project_id, hours,", "get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) == list: a_msg =", "selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings self.projects = projects self.selector_mappings = selector_mappings", "bug with no hours\") return who = who.lower() if not who in self.logins_mappings[tracker.id]:", "selector_mapping given explicitly to avoid cache lookups mapping = self.selector_mappings[tracker.id] 
project_id = mapping.match(bug_id,", "(email.parser.Parser().parsestr(mssg) for mssg in messages) for msg in messages: yield msg pop_conn.quit() class", "not found, emails cannot be checked') return trackers = dict( (tracker.mailer, tracker) for", "bug_id, hours, subject )) return user.id, date, bug_id, project_id, hours, subject def handle_bugzilla_email(self,", "StringIO except ImportError: from StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__)", "decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match", ") # ok, we have all mails, lets create timeentries from them extractor", "project_id, hours, subject def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component = decode(msg['X-Bugzilla-Component'])", "getattr(self, 'handle_%s_email' % tracker.type) # handler should parse the response and return essential", "date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from date", "= self.projects[project_id] LOG(u\"Will add entry for user %s project %s bug #%s hours", "= self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match regexp: %r\" % subject) return", "not in logins mapping' % (who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user", "num + 1)) messages = (\"\\n\".join(mssg[1]) for mssg in messages) messages = (email.parser.Parser().parsestr(mssg)", "decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg):", "u''.join( val.decode('utf-8' if not encoding else encoding) for val, encoding in decode_header(header) ).strip()", "-*- \"\"\" Sending emails \"\"\" import re import email import quopri import datetime", "= re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings, projects, selector_mappings): 
self.trackers = trackers self.logins_mappings", "# some emails have \\r\\n insted of \\n if '\\r\\n' in payload: DEBUG(u'Using", "= handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg): sender =", "from %s ignored, no tracker matched' % (sender, )) return # find appopriate", "projects, selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings self.projects = projects self.selector_mappings =", "re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP =", "utf-8 -*- \"\"\" Sending emails \"\"\" import re import email import quopri import", "__iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats)))", "from base64 import b64decode from pprint import pformat from email.header import decode_header from", "in self.trackers: if email in sender: return self.trackers[email] else: return None def get(self,", "decode(msg['From']) for email in self.trackers: if email in sender: return self.trackers[email] else: return", "html encoding = a_msg.get('Content-Transfer-Encoding') payload = a_msg.get_payload() DEBUG(u'Extracted email msg %r with encoding", "%r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' '))", "import quopri import datetime import time import poplib from base64 import b64decode from", "(\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\", "we have all mails, lets create timeentries from them extractor = TimeEntryMailExtractor( trackers,", "for tracker in 
Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer != '') ) if not len(trackers): WARN(u'No", "date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match =", "not match: WARN(u\"Trac subject not matched %r\" % (subject, )) return subject =", "checked') return trackers = dict( (tracker.mailer, tracker) for tracker in Tracker.query.filter(Tracker.mailer != None).filter(Tracker.mailer", "re.compile(r'.*Add Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\", "avoid cache lookups mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product, component) if project_id", "hours <= 0.0: DEBUG(u\"Ignoring bug with no hours\") return who = who.lower() if", "hours %(hours)f %(date)s' % locals()) if is_new_bug: # new bug - create with", "StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__)", "retrieved from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID'])", "= pop_conn.stat() LOG(u'Emails: %s' % (pformat(stats))) num, _ = stats num = num", "dict( (project.id, project) for project in Project.query.all() ) # all pre-conditions should be", "WARN(u'No trackers have mailers configured, email will not be checked') return username =", "if project_id is None: DEBUG(u'Project not found for product %s, component %s' %", "without hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, )) #", "pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config is", "b64decode from pprint import pformat from email.header import decode_header from 
email.utils import parsedate", "% (who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, ))", "config = ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config not found, emails cannot", "response and return essential info or None data = handler(msg, tracker) if data", "msg %r with encoding %r' % (payload, encoding)) if encoding == 'quoted-printable': payload", "no hours\") return who = who.lower() if not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s", "tracker): date = decode(msg['Date']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from", "= quopri.decodestring(payload) elif encoding == 'base64': payload = b64decode(payload) return payload class TimeEntryMailExtractor(object):", "(subject, )) elif hours == 0.0: DEBUG(u'Ignoring non-new bug without hours') return user", "= EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__) DEBUG = DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours", "HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP =", "def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip() def", "tracker.type) # handler should parse the response and return essential info or None", "match.group(2) hours = 0.0 who = '' component = '' payload = get_msg_payload(msg)", "= TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for msg in fetcher: timeentry =", "username not in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, ))", "component %(component)s, by %(who)s from %(date)s, hours %(hours)s' % locals()) if hours <=", "of LF') newline = '\\r\\n' for line in payload.split(newline): if is_new_bug: match =", "self.password = password def __iter__(self): pop_conn = poplib.POP3_SSL(self.HOST) 
pop_conn.user(self.login) pop_conn.pass_(self.password) stats = pop_conn.stat()", "= val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload()", "project_id, hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def", "HOST = 'pop.gmail.com' MAX_EMAILS = 100 def __init__(self, login, password): self.login = login", "return user_id, date, bug_id, project_id, hours, subject = data return add_time(user_id, date, bug_id,", "logins_mappings self.projects = projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker): date =", "= 0.0 who = '' component = '' payload = get_msg_payload(msg) for line", "get(self, msg): \"\"\" When single message was retrieved \"\"\" sender = decode(msg['From']) tracker", "hours decode = lambda header: u''.join( val.decode('utf-8' if not encoding else encoding) for", "email msg %r with encoding %r' % (payload, encoding)) if encoding == 'quoted-printable':", "encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for value in Q_ENCODING_REGEXP.findall(val):", "elif encoding == 'base64': payload = b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts", "re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings, projects, selector_mappings): self.trackers = trackers self.logins_mappings =", "return bug_id, subject = match.groups() subject = subject.strip() is_new_bug = subject.startswith('New ') payload", "msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs): config = ApplicationConfig.get_current_config(allow_empty=True) if config", "user %s' % (user.name, )) # selector_mapping given explicitly to avoid cache lookups", "match_tracker(self, 
msg): sender = decode(msg['From']) for email in self.trackers: if email in sender:", "None: DEBUG(u'Email from %s ignored, no tracker matched' % (sender, )) return #", "bug_id = int(bug_id) newline = '\\n' # some emails have \\r\\n insted of", "( product, component, )) return project = self.projects[project_id] LOG(u\"Will add entry for user", "now # start fetching fetcher = MailFetcher( username, password, ) # ok, we", "%s not in logins mapping' % (who, )) return DEBUG(u'Found bug title %(subject)s", "hours') return user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, )) # selector_mapping", "= '' component = '' payload = get_msg_payload(msg) for line in payload.split('\\n'): match", "date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject =", "decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved from", "= subject[4:].strip() DEBUG(u'Bug creation found %s' % (subject, )) elif hours == 0.0:", "payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line)", "cStringIO import StringIO except ImportError: from StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION", "else self.MAX_EMAILS messages = (pop_conn.retr(i) for i in range(1, num + 1)) messages", "TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings,", "%s project %s bug #%s hours %s title %s\" % ( user.name, project.name,", "\\r\\n insted of \\n if '\\r\\n' in payload: DEBUG(u'Using CRLF istead of LF')", "%s' % ( product, component, )) return project = self.projects[project_id] LOG(u\"Will add entry", "= config.google_user_email.encode('utf-8') password = 
config.google_user_password.encode('<PASSWORD>') # TODO logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for", "locals()) bug_id = int(bug_id) newline = '\\n' # some emails have \\r\\n insted", "from them extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for msg in", "% (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match", "(tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for", "u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match regexp:", "= msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) == list: a_msg = payload[0] #", "\"\"\" Sending emails \"\"\" import re import email import quopri import datetime import", "%(subject)s product %(product)s, component %(component)s, by %(who)s from %(date)s' % locals()) bug_id =", "in range(1, num + 1)) messages = (\"\\n\".join(mssg[1]) for mssg in messages) messages", "= self.HOURS_REGEXP.match(line) if match: hours = float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found", "project_id is None: DEBUG(u'Project not found for component %s' % (component, )) return", "mapping.match(bug_id, product, component) if project_id is None: DEBUG(u'Project not found for product %s,", "is None: # email should be ignored return user_id, date, bug_id, project_id, hours,", "= handle_bugzilla_email def match_tracker(self, msg): sender = decode(msg['From']) for email in self.trackers: if", "import StringIO except ImportError: from StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION =", "hours = float(match.groups()[0]) break else: hours = 0.0 DEBUG(u'Found bug #%(bug_id)s with title", "self.TRAC_HOURS_REGEXP.match(line) if match: 
hours = float(match.group(1)) continue match = self.TRAC_AUTHOR_REGEXP.match(line) if match: who", "have mailers configured, email will not be checked') return username = config.google_user_email.encode('utf-8') password", "== list: a_msg = payload[0] # first is plaintext, second - html encoding", "%s bug #%s hours %s title %s\" % ( user.name, project.name, bug_id, hours,", "in messages) for msg in messages: yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self,", "msg in messages: yield msg pop_conn.quit() class MailCheckerTask(object): def __call__(self, *args, **kwargs): config", "import poplib from base64 import b64decode from pprint import pformat from email.header import", "= decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject %r retrieved", "decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message with subject", "projects self.selector_mappings = selector_mappings def handle_trac_email(self, msg, tracker): date = decode(msg['Date']) subject =", "msg['Subject'] DEBUG(u'Message with subject %r retrieved from date %r' % (subject, date)) date", "None: DEBUG(u'Project not found for component %s' % (component, )) return project =", "time import poplib from base64 import b64decode from pprint import pformat from email.header", "not encoding else encoding) for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)')", "decode(value)) return val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload)", "match: component = match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component %(component)s, by %(who)s", "%(component)s, by %(who)s from %(date)s, hours %(hours)s' % locals()) if hours <= 0.0:", "DEBUG_LOG(__name__) 
MIN_HOURS = 6.995 #record hours decode = lambda header: u''.join( val.decode('utf-8' if", "component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject'] DEBUG(u'Message", "Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add Hours to", "from intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project import SelectorMapping from", "= 6.995 #record hours decode = lambda header: u''.join( val.decode('utf-8' if not encoding", "val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if type(payload) == list:", "-*- coding: utf-8 -*- \"\"\" Sending emails \"\"\" import re import email import", "user.name, project.name, bug_id, hours, subject )) return user.id, date, bug_id, project_id, hours, subject", "mapping.match(bug_id, 'none', component) if project_id is None: DEBUG(u'Project not found for component %s'", "from date %r' % (subject, date)) date = datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) bug_id = decode(msg['X-Trac-Ticket-ID']) subject", "should be checked by now # start fetching fetcher = MailFetcher( username, password,", "from intranet3.utils.timeentry import add_time try: from cStringIO import StringIO except ImportError: from StringIO", "in Q_ENCODING_REGEXP.findall(val): val = val.replace(value, decode(value)) return val.strip() def get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding')", "% (subject, )) return subject = match.group(2) hours = 0.0 who = ''", "title %(subject)s component %(component)s, by %(who)s from %(date)s, hours %(hours)s' % locals()) if", "u' ')) match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match regexp: %r\"", "= DEBUG_LOG(__name__) MIN_HOURS = 6.995 #record hours decode = 
lambda header: u''.join( val.decode('utf-8'", "*([^|]*)') def __init__(self, trackers, logins_mappings, projects, selector_mappings): self.trackers = trackers self.logins_mappings = logins_mappings", "% (subject, )) elif hours == 0.0: DEBUG(u'Ignoring non-new bug without hours') return", "= msg.get_payload() if type(payload) == list: a_msg = payload[0] # first is plaintext,", "istead of LF') newline = '\\r\\n' for line in payload.split(newline): if is_new_bug: match", "bug #%(bug_id)s with title %(subject)s product %(product)s, component %(component)s, by %(who)s, hours %(hours)f", "user = self.logins_mappings[tracker.id][username] DEBUG(u'Found user %s' % (user.name, )) # selector_mapping given explicitly", "doesn't match regexp: %r\" % subject) return bug_id, subject = match.groups() subject =", "%s not in logins mapping' % (who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found", "bug_id = decode(msg['X-Trac-Ticket-ID']) subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match:", "= subject.strip() is_new_bug = subject.startswith('New ') payload = get_msg_payload(msg) username = who.lower() if", "import decode_header from email.utils import parsedate import transaction from intranet3.models import ApplicationConfig, Project,", "extractor = TimeEntryMailExtractor( trackers, logins_mappings, projects, selector_mappings, ) for msg in fetcher: timeentry", "for component %s' % (component, )) return project = self.projects[project_id] LOG(u\"Will add entry", "self.match_tracker(msg) if tracker is None: DEBUG(u'Email from %s ignored, no tracker matched' %", "by now # start fetching fetcher = MailFetcher( username, password, ) # ok,", "sender = decode(msg['From']) tracker = self.match_tracker(msg) if tracker is None: DEBUG(u'Email from %s", "SelectorMapping(tracker)) for tracker in trackers.itervalues() ) # find all projects connected to the", "val.replace(value, decode(value)) return val.strip() def 
get_msg_payload(msg): encoding = msg.get('Content-Transfer-Encoding') payload = msg.get_payload() if", "project) for project in Project.query.all() ) # all pre-conditions should be checked by", "in trackers.itervalues() ) # find all projects connected to the tracker projects =", "Project.query.all() ) # all pre-conditions should be checked by now # start fetching", "not who in self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, ))", "bug_id, subject = match.groups() subject = subject.strip() is_new_bug = subject.startswith('New ') payload =", "DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time try: from cStringIO import StringIO", "match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s", "if not match: DEBUG(u\"Subject doesn't match regexp: %r\" % subject) return bug_id, subject", "import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import add_time", "TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings = dict( (tracker.id, SelectorMapping(tracker)) for tracker", "selector_mappings, ) for msg in fetcher: timeentry = extractor.get(msg) if timeentry: DBSession.add(timeentry) transaction.commit()", "given explicitly to avoid cache lookups mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, product,", "match = self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match regexp: %r\" % subject)", "project.name, bug_id, hours, subject )) return user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email", "decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject = msg['Subject']", "email.utils import parsedate import transaction from 
intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession", ") # find all projects connected to the tracker projects = dict( (project.id,", "trackers.itervalues() ) # find all projects connected to the tracker projects = dict(", "logins_mappings = dict( (tracker.id, TrackerCredentials.get_logins_mapping(tracker)) for tracker in trackers.itervalues() ) selector_mappings = dict(", "decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not matched %r\"", "project %s bug #%s hours %s title %s\" % ( user.name, project.name, bug_id,", "elif hours == 0.0: DEBUG(u'Ignoring non-new bug without hours') return user = self.logins_mappings[tracker.id][username]", "date, bug_id, project_id, hours, subject) class MailFetcher(object): HOST = 'pop.gmail.com' MAX_EMAILS = 100", "hours, subject )) return user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email = handle_trac_email", "except ImportError: from StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN", "user %s' % (user.name, )) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component)", "DEBUG(u'Found bug #%(bug_id)s with title %(subject)s product %(product)s, component %(component)s, by %(who)s, hours", "subject = decode(subject.replace('\\n', u'')) match = self.TRAC_SUBJECT_REGEXP.match(subject) if not match: WARN(u\"Trac subject not", "(user.name, )) mapping = self.selector_mappings[tracker.id] project_id = mapping.match(bug_id, 'none', component) if project_id is", "%r' % (payload, encoding)) if encoding == 'quoted-printable': payload = quopri.decodestring(payload) elif encoding", "tracker projects = dict( (project.id, project) for project in Project.query.all() ) # all", "logins mapping' % (who, )) return user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' %", "projects = dict( (project.id, project) for project 
in Project.query.all() ) # all pre-conditions", "base64 import b64decode from pprint import pformat from email.header import decode_header from email.utils", "user.id, date, bug_id, project_id, hours, subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email", "component = '' payload = get_msg_payload(msg) for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line)", "from %(date)s' % locals()) bug_id = int(bug_id) newline = '\\n' # some emails", "ok, we have all mails, lets create timeentries from them extractor = TimeEntryMailExtractor(", "subject = match.group(2) hours = 0.0 who = '' component = '' payload", "'' payload = get_msg_payload(msg) for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match:", "import re import email import quopri import datetime import time import poplib from", "from StringIO import StringIO LOG = INFO_LOG(__name__) EXCEPTION = EXCEPTION_LOG(__name__) WARN = WARN_LOG(__name__)", "match: who = match.group(1) continue match = self.TRAC_COMPONENT_REGEXP.match(line) if match: component = match.group(1).strip()", "6.995 #record hours decode = lambda header: u''.join( val.decode('utf-8' if not encoding else", "%(component)s, by %(who)s from %(date)s' % locals()) bug_id = int(bug_id) newline = '\\n'", "= datetime.datetime.fromtimestamp(time.mktime(parsedate(date))) subject = decode_subject(subject.replace('\\n', u'').replace(u':', u' ')) match = self.SUBJECT_REGEXP.match(subject) if not", "intranet3.models.project import SelectorMapping from intranet3.log import DEBUG_LOG, WARN_LOG, EXCEPTION_LOG, INFO_LOG from intranet3.utils.timeentry import", "date, bug_id, project_id, hours, subject def handle_bugzilla_email(self, msg, tracker): date = decode(msg['Date']) component", "retrieved \"\"\" sender = decode(msg['From']) tracker = self.match_tracker(msg) if tracker is None: DEBUG(u'Email", "matched' % (sender, )) return # find appopriate 
handler handler = getattr(self, 'handle_%s_email'", "#%s hours %s title %s\" % ( user.name, project.name, bug_id, hours, subject ))", "self.logins_mappings[tracker.id]: DEBUG(u'User %s not in logins mapping' % (who, )) return DEBUG(u'Found bug", "parsedate import transaction from intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project", "Hours to Ticket:\\ *(\\d+(\\.\\d+)?)') TRAC_AUTHOR_REGEXP = re.compile(r'^Changes \\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)')", "= decode(msg['Date']) component = decode(msg['X-Bugzilla-Component']) product = decode(msg['X-Bugzilla-Product']) who = decode(msg['X-Bugzilla-Who']) subject =", "subject handle_cookie_trac_email = handle_trac_email handle_igozilla_email = handle_bugzilla_email handle_rockzilla_email = handle_bugzilla_email def match_tracker(self, msg):", "(project.id, project) for project in Project.query.all() ) # all pre-conditions should be checked", "= re.compile(r'^\\[Bug (\\d+)\\](.*)') HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP", "b64decode(payload) return payload class TimeEntryMailExtractor(object): \"\"\" Extracts timeentry from mail \"\"\" SUBJECT_REGEXP =", "self.SUBJECT_REGEXP.match(subject) if not match: DEBUG(u\"Subject doesn't match regexp: %r\" % subject) return bug_id,", "DEBUG(u'Found bug title %(subject)s product %(product)s, component %(component)s, by %(who)s from %(date)s' %", "= re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\] \\#\\d+\\: (.*)') TRAC_HOURS_REGEXP = re.compile(r'.*Add", "= ApplicationConfig.get_current_config(allow_empty=True) if config is None: WARN(u'Application config not found, emails cannot be", "match.group(1).strip() continue DEBUG(u'Found bug title %(subject)s component %(component)s, by 
%(who)s from %(date)s, hours", "project.name, bug_id, hours, subject )) return user.id, date, bug_id, project_id, hours, subject def", "get_msg_payload(msg) for line in payload.split('\\n'): match = self.TRAC_HOURS_REGEXP.match(line) if match: hours = float(match.group(1))", "<= 0.0: DEBUG(u\"Ignoring bug with no hours\") return who = who.lower() if not", "payload: DEBUG(u'Using CRLF istead of LF') newline = '\\r\\n' for line in payload.split(newline):", "\\(by (.*)\\)\\:') TRAC_COMPONENT_REGEXP = re.compile(r'.*Component:\\ *([^|]*)') def __init__(self, trackers, logins_mappings, projects, selector_mappings): self.trackers", "newline = '\\n' # some emails have \\r\\n insted of \\n if '\\r\\n'", "encoding) for val, encoding in decode_header(header) ).strip() Q_ENCODING_REGEXP = re.compile(r'(\\=\\?[^\\?]+\\?[QB]\\?[^\\?]+\\?\\=)') def decode_subject(val): for", "!= '') ) if not len(trackers): WARN(u'No trackers have mailers configured, email will", "match.groups() subject = subject.strip() is_new_bug = subject.startswith('New ') payload = get_msg_payload(msg) username =", "import transaction from intranet3.models import ApplicationConfig, Project, Tracker, TrackerCredentials, DBSession from intranet3.models.project import", "= 100 def __init__(self, login, password): self.login = login self.password = password def", "user = self.logins_mappings[tracker.id][who] DEBUG(u'Found user %s' % (user.name, )) mapping = self.selector_mappings[tracker.id] project_id", "HOURS_REGEXP = re.compile(r'^\\s*Hours Worked\\|\\s*\\|(\\d+(\\.\\d+)?)$') HOURS_NEW_BUG_REGEXP = re.compile(r'^\\s*Hours Worked: (\\d+(\\.\\d+)?)$') TRAC_SUBJECT_REGEXP = re.compile(r'^(Re\\:\\ +)?\\[.+\\]" ]
[ "= Column('date', Date, nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer())", "= Column('date', Date, nullable=False, primary_key=True) level = Column('level', Integer()) mets = Column('mets', Integer())", "MetaData, Table, Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import declarative_base meta =", "schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date', Date, nullable=False, primary_key=True)", "'intraday' date = Column('date', Date, nullable=False, primary_key=True) level = Column('level', Integer()) mets =", "meta = MetaData() Base = declarative_base() class Calories(Base): __tablename__ = 'calories' date =", "__eq__(self, other): return self.date, self.total, self.calories_bmr == other.date, other.total, other.calories_bmr def __str__(self): return", "UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self, other): return self.date, self.total, self.calories_bmr ==", "Calories(Base): __tablename__ = 'calories' date = Column('date', Date, nullable=False, primary_key=True) total = Column('total',", "Table( 'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)),", "meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'),", "Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self, other): return self.date, self.total,", "import declarative_base meta = MetaData() Base = declarative_base() class Calories(Base): __tablename__ = 'calories'", "primary_key=True) total = Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer())", "= Column('calories_bmr', 
Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity' def", "Column('date', Date, nullable=False, primary_key=True) level = Column('level', Integer()) mets = Column('mets', Integer()) value", "level = Column('level', Integer()) mets = Column('mets', Integer()) value = Column('value', Numeric()) schema", "Date, nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories =", "calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity'", "class Calories(Base): __tablename__ = 'calories' date = Column('date', Date, nullable=False, primary_key=True) total =", "= MetaData() Base = declarative_base() class Calories(Base): __tablename__ = 'calories' date = Column('date',", "Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' )", "Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema", "other.date, other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table(", "'activity' def __eq__(self, other): return self.date, self.total, self.calories_bmr == other.date, other.total, other.calories_bmr def", "self.total, self.calories_bmr == other.date, other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\"", "Column('date', Date, nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories", "Column('total', Integer()), 
Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' ) class", "UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date', Date,", "= 'intraday' date = Column('date', Date, nullable=False, primary_key=True) level = Column('level', Integer()) mets", "'calories' date = Column('date', Date, nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr =", "__str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date', Date),", "other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta,", "UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base =", "return self.date, self.total, self.calories_bmr == other.date, other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total},", "Date, Integer from sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base = declarative_base() class", "__tablename__ = 'intraday' date = Column('date', Date, nullable=False, primary_key=True) level = Column('level', Integer())", "def __eq__(self, other): return self.date, self.total, self.calories_bmr == other.date, other.total, other.calories_bmr def __str__(self):", "MetaData() Base = declarative_base() class Calories(Base): __tablename__ = 'calories' date = Column('date', Date,", "Integer()) UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self, other): return self.date, self.total, self.calories_bmr", "calories_table = Table( 'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories',", "Numeric, Date, Integer 
from sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base = declarative_base()", "Integer from sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base = declarative_base() class Calories(Base):", "date = Column('date', Date, nullable=False, primary_key=True) level = Column('level', Integer()) mets = Column('mets',", "class CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date', Date, nullable=False, primary_key=True) level =", "Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self,", "other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories',", "= Column('level', Integer()) mets = Column('mets', Integer()) value = Column('value', Numeric()) schema =", "Date, nullable=False, primary_key=True) level = Column('level', Integer()) mets = Column('mets', Integer()) value =", "primary_key=True) level = Column('level', Integer()) mets = Column('mets', Integer()) value = Column('value', Numeric())", "= Table( 'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10,", "f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date', Date), Column('total', Integer()),", ") class CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date', Date, nullable=False, primary_key=True) level", "from sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base = declarative_base() class Calories(Base): __tablename__", "nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories',", "Column('date', Date), Column('total', 
Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity'", "__tablename__ = 'calories' date = Column('date', Date, nullable=False, primary_key=True) total = Column('total', Integer())", "Table, Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import declarative_base meta = MetaData()", "Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema =", "'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date',", "total = Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date',", "{self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10, 5)),", "<filename>fitnick/activity/models/calories.py from sqlalchemy import MetaData, Table, Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative", "date = Column('date', Date, nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr = Column('calories_bmr',", "self.calories_bmr == other.date, other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table", "= Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self, other): return self.date,", "Column('level', Integer()) mets = Column('mets', Integer()) value = Column('value', Numeric()) schema = 'calories'", "= declarative_base() class Calories(Base): __tablename__ = 'calories' date = Column('date', Date, nullable=False, primary_key=True)", "Base = 
declarative_base() class Calories(Base): __tablename__ = 'calories' date = Column('date', Date, nullable=False,", "nullable=False, primary_key=True) level = Column('level', Integer()) mets = Column('mets', Integer()) value = Column('value',", "Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ =", "= Column('total', Integer()) calories_bmr = Column('calories_bmr', Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date')", "5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday'", "schema = 'activity' def __eq__(self, other): return self.date, self.total, self.calories_bmr == other.date, other.total,", "Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__", "from sqlalchemy import MetaData, Table, Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import", "declarative_base() class Calories(Base): __tablename__ = 'calories' date = Column('date', Date, nullable=False, primary_key=True) total", "CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date', Date, nullable=False, primary_key=True) level = Column('level',", "import MetaData, Table, Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import declarative_base meta", "activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self, other): return", "return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date', Date), Column('total',", "self.date, self.total, self.calories_bmr == other.date, 
other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr},", "name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date', Date, nullable=False,", "5)), UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday' date = Column('date',", "Integer()) activity_calories = Column('activity_calories', Integer()) UniqueConstraint('date', name='date') schema = 'activity' def __eq__(self, other):", "sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base = declarative_base() class Calories(Base): __tablename__ =", "Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import declarative_base meta = MetaData() Base", "= 'activity' def __eq__(self, other): return self.date, self.total, self.calories_bmr == other.date, other.total, other.calories_bmr", "== other.date, other.total, other.calories_bmr def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table =", "Integer()), Column('calories_bmr', Numeric(10, 5)), Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base):", "{self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr',", "{self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date', Date), Column('total', Integer()), Column('calories_bmr', Numeric(10,", "= 'calories' date = Column('date', Date, nullable=False, primary_key=True) total = Column('total', Integer()) calories_bmr", "declarative_base meta = MetaData() Base = declarative_base() class Calories(Base): __tablename__ = 'calories' date", "Column('activity_calories', Numeric(10, 5)), UniqueConstraint('date', 
name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday' date", "sqlalchemy import MetaData, Table, Column, UniqueConstraint, Numeric, Date, Integer from sqlalchemy.ext.declarative import declarative_base", "def __str__(self): return f\"{self.date}, {self.total}, {self.calories_bmr}, {self.activity_calories}\" calories_table = Table( 'calories', meta, Column('date',", "Numeric(10, 5)), UniqueConstraint('date', name='date'), schema='activity' ) class CaloriesIntraday(Base): __tablename__ = 'intraday' date =", "name='date') schema = 'activity' def __eq__(self, other): return self.date, self.total, self.calories_bmr == other.date,", "other): return self.date, self.total, self.calories_bmr == other.date, other.total, other.calories_bmr def __str__(self): return f\"{self.date}," ]
[ "-*- # Module iaero def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b", "import ianeg,iadil,iasereflect,iasecross if b is None: b = iasecross() y = ianeg( iadil(", "encoding: utf-8 -*- # Module iaero def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross", "def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b is None: b =", "from ia870 import ianeg,iadil,iasereflect,iasecross if b is None: b = iasecross() y =", "-*- encoding: utf-8 -*- # Module iaero def iaero(f, b=None): from ia870 import", "b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b is None: b = iasecross() y", "if b is None: b = iasecross() y = ianeg( iadil( ianeg(f),iasereflect(b))) return", "<gh_stars>1-10 # -*- encoding: utf-8 -*- # Module iaero def iaero(f, b=None): from", "# -*- encoding: utf-8 -*- # Module iaero def iaero(f, b=None): from ia870", "ianeg,iadil,iasereflect,iasecross if b is None: b = iasecross() y = ianeg( iadil( ianeg(f),iasereflect(b)))", "Module iaero def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b is None:", "b is None: b = iasecross() y = ianeg( iadil( ianeg(f),iasereflect(b))) return y", "# Module iaero def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b is", "utf-8 -*- # Module iaero def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if", "ia870 import ianeg,iadil,iasereflect,iasecross if b is None: b = iasecross() y = ianeg(", "iaero def iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b is None: b", "iaero(f, b=None): from ia870 import ianeg,iadil,iasereflect,iasecross if b is None: b = iasecross()" ]
[ "python3 from mpi4py import MPI from datetime import datetime def print_hostname(): comm =", "rank = comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ')", "from datetime import datetime def print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank() size", "comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name() tod", "= MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is rank = %2i (total: %2i)", "%2i) running on %s at %s\" % (rank,size,hname,tod)) comm.Barrier() if __name__ == '__main__':", "datetime.now().isoformat(' ') print(\"this is rank = %2i (total: %2i) running on %s at", "print(\"this is rank = %2i (total: %2i) running on %s at %s\" %", "is rank = %2i (total: %2i) running on %s at %s\" % (rank,size,hname,tod))", "comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is rank = %2i", "<gh_stars>10-100 #/usr/bin/env python3 from mpi4py import MPI from datetime import datetime def print_hostname():", "') print(\"this is rank = %2i (total: %2i) running on %s at %s\"", "MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat('", "#/usr/bin/env python3 from mpi4py import MPI from datetime import datetime def print_hostname(): comm", "rank = %2i (total: %2i) running on %s at %s\" % (rank,size,hname,tod)) comm.Barrier()", "= %2i (total: %2i) running on %s at %s\" % (rank,size,hname,tod)) comm.Barrier() if", "(total: %2i) running on %s at %s\" % (rank,size,hname,tod)) comm.Barrier() if __name__ ==", "from mpi4py import MPI from datetime import datetime def print_hostname(): comm = MPI.COMM_WORLD", "= MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name() tod =", "import datetime def print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size()", "running on %s at 
%s\" % (rank,size,hname,tod)) comm.Barrier() if __name__ == '__main__': print_hostname()", "def print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() hname =", "%2i (total: %2i) running on %s at %s\" % (rank,size,hname,tod)) comm.Barrier() if __name__", "MPI from datetime import datetime def print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank()", "datetime import datetime def print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank() size =", "size = comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is rank", "comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is", "= comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is rank =", "= datetime.now().isoformat(' ') print(\"this is rank = %2i (total: %2i) running on %s", "datetime def print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() hname", "print_hostname(): comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name()", "tod = datetime.now().isoformat(' ') print(\"this is rank = %2i (total: %2i) running on", "hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is rank = %2i (total:", "MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this is rank = %2i (total: %2i) running", "= comm.Get_rank() size = comm.Get_size() hname = MPI.Get_processor_name() tod = datetime.now().isoformat(' ') print(\"this", "mpi4py import MPI from datetime import datetime def print_hostname(): comm = MPI.COMM_WORLD rank", "import MPI from datetime import datetime def print_hostname(): comm = MPI.COMM_WORLD rank =" ]
[ "import setup setup( name='clouds_are_fun', version='0.0.1', description=\"Clouds are (as stated) fun!\", author='<NAME>', author_email='<EMAIL>', license='',", "setup setup( name='clouds_are_fun', version='0.0.1', description=\"Clouds are (as stated) fun!\", author='<NAME>', author_email='<EMAIL>', license='', packages=['clouds_are_fun'],", "<gh_stars>0 from setuptools import setup setup( name='clouds_are_fun', version='0.0.1', description=\"Clouds are (as stated) fun!\",", "from setuptools import setup setup( name='clouds_are_fun', version='0.0.1', description=\"Clouds are (as stated) fun!\", author='<NAME>',", "setuptools import setup setup( name='clouds_are_fun', version='0.0.1', description=\"Clouds are (as stated) fun!\", author='<NAME>', author_email='<EMAIL>',", "setup( name='clouds_are_fun', version='0.0.1', description=\"Clouds are (as stated) fun!\", author='<NAME>', author_email='<EMAIL>', license='', packages=['clouds_are_fun'], zip_safe=False)" ]
[ "*args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an new object instance,", "document = self.document elif hasattr(self, 'object') and self.object is not None: # If", "*args, **kwargs): self.object = None return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView):", "mixin. \"\"\" def get(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).get(request,", "DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an object retrieved with `self.get_object()`, with a", "= form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs if getattr(self,", "requires subclassing to provide a response mixin. \"\"\" def get(self, request, *args, **kwargs):", "Try to get a queryset and extract the document class # from that", "deleting an object. 
Using this base class requires subclassing to provide a response", "editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin,", "explicitly provided, use it document = self.document elif hasattr(self, 'object') and self.object is", "*args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView):", "object, use # the class of that object document = self.object.__class__ else: #", "request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self,", "of that object document = self.object.__class__ else: # Try to get a queryset", "BaseDeleteView): \"\"\" View for deleting an object retrieved with `self.get_object()`, with a response", "% self.object.__dict__ else: try: url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL", "use in this view \"\"\" if self.form_class: return self.form_class else: if self.document is", "get a queryset and extract the document class # from that document =", "\"\"\" Returns the keyword arguments for instanciating the form. \"\"\" kwargs = super(DocumentFormMixin,", "= None return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for", "BaseDetailView): \"\"\" Base view for deleting an object. Using this base class requires", "for updating an object, with a response rendered by template.. 
\"\"\" template_name_suffix =", "context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating", "None return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object =", "URL to redirect to. Either provide a url or define\" \" a get_absolute_url", "object retrieved with `self.get_object()`, with a response rendered by template. \"\"\" template_name_suffix =", "= self.document elif hasattr(self, 'object') and self.object is not None: # If this", "\" a get_absolute_url method on the document.\") return url def form_valid(self, form): self.object", "context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return context", "class requires subclassing to provide a response mixin. \"\"\" def get(self, request, *args,", "and self.object is not None: # If this view is operating on a", "for deleting an object retrieved with `self.get_object()`, with a response rendered by template.", "a response rendered by template. \"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\"", "object, with a response rendered by template.. \"\"\" template_name_suffix = '_form' DeletionMixin =", "\"\"\" View for deleting an object retrieved with `self.get_object()`, with a response rendered", "class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating an new object instance. Using", "instance, with a response rendered by template. \"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin,", "BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting an object. 
Using this base class", "DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form class to use", "a queryset and extract the document class # from that document = self.get_queryset().document", "dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form class", "def get_success_url(self): if self.success_url: url = self.success_url % self.object.__dict__ else: try: url =", "template.. \"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base", "url def form_valid(self, form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs):", "is not None: # If this view is operating on a single object,", "= editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting an object. Using", "updating an existing object. Using this base class requires subclassing to provide a", "'object', None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object", "self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an object, with", "method on the document.\") return url def form_valid(self, form): self.object = form.save() return", "for creating an new object instance. Using this base class requires subclassing to", "or define\" \" a get_absolute_url method on the document.\") return url def form_valid(self,", "operating on a single object, use # the class of that object document", "for deleting an object. 
Using this base class requires subclassing to provide a", "get_context_data(self, **kwargs): context = kwargs if getattr(self, 'object', None): context['object'] = self.object context_object_name", "a response mixin. \"\"\" def get(self, request, *args, **kwargs): self.object = self.get_object() return", "updating an object, with a response rendered by template.. \"\"\" template_name_suffix = '_form'", "self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL to redirect to. Either provide a", "def get(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).get(request, *args, **kwargs)", "request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin,", "response mixin. \"\"\" def get(self, request, *args, **kwargs): self.object = None return super(BaseCreateView,", "url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL to redirect to. Either", "editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting an object. Using this", "None: # If this view is operating on a single object, use #", "request, *args, **kwargs): self.object = None return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin,", "subclassing to provide a response mixin. \"\"\" def get(self, request, *args, **kwargs): self.object", "self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View", "in this view \"\"\" if self.form_class: return self.form_class else: if self.document is not", "new object instance, with a response rendered by template. \"\"\" template_name_suffix = '_form'", "instance. 
Using this base class requires subclassing to provide a response mixin. \"\"\"", "form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs", "the document class # from that document = self.get_queryset().document #fields = fields_for_document(document) class", "Using this base class requires subclassing to provide a response mixin. \"\"\" def", "import ImproperlyConfigured from django.views.generic import edit as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin,", "self.object.__class__ else: # Try to get a queryset and extract the document class", "'_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting an", "class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an object retrieved with `self.get_object()`, with", "\"\"\" def get(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).get(request, *args,", "been explicitly provided, use it document = self.document elif hasattr(self, 'object') and self.object", "# Try to get a queryset and extract the document class # from", "= document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword arguments for", "on a single object, use # the class of that object document =", "**kwargs): self.object = None return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request, *args,", "document has been explicitly provided, use it document = self.document elif hasattr(self, 'object')", "subclassing to provide a response mixin. \"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for", "for creating an new object instance, with a response rendered by template. \"\"\"", "an new object instance. 
Using this base class requires subclassing to provide a", "\"\"\" Returns the form class to use in this view \"\"\" if self.form_class:", "BaseUpdateView): \"\"\" View for updating an object, with a response rendered by template..", "self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating an new", "view for deleting an object. Using this base class requires subclassing to provide", "get(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).get(request, *args, **kwargs) def", "response rendered by template. \"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base", "\"No URL to redirect to. Either provide a url or define\" \" a", "url or define\" \" a get_absolute_url method on the document.\") return url def", "get_success_url(self): if self.success_url: url = self.success_url % self.object.__dict__ else: try: url = self.object.get_absolute_url()", "redirect to. Either provide a url or define\" \" a get_absolute_url method on", "editview.ProcessFormView): \"\"\" Base view for updating an existing object. Using this base class", "an existing object. 
Using this base class requires subclassing to provide a response", "context_object_name: context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for", "if getattr(self, 'object', None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name]", "url = self.success_url % self.object.__dict__ else: try: url = self.object.get_absolute_url() except AttributeError: raise", "= '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating an existing object.", "document = self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document = document", "def get_context_data(self, **kwargs): context = kwargs if getattr(self, 'object', None): context['object'] = self.object", "else: if self.document is not None: # If a document has been explicitly", "post(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class", "getattr(self, 'object', None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] =", "Base view for creating an new object instance. Using this base class requires", "= self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\"", "# from that document = self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta:", "class requires subclassing to provide a response mixin. 
\"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\"", "extract the document class # from that document = self.get_queryset().document #fields = fields_for_document(document)", "use it document = self.document elif hasattr(self, 'object') and self.object is not None:", "\"\"\" Base view for creating an new object instance. Using this base class", "template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating an existing", "else: try: url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL to redirect", "self.document is not None: # If a document has been explicitly provided, use", "return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword arguments for instanciating the form.", "not None: # If this view is operating on a single object, use", "super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an new", "**kwargs): self.object = None return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\"", "creating an new object instance. Using this base class requires subclassing to provide", "**kwargs) def post(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args,", "class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form class to use in", "class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an new object instance, with a", "self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an new object", "by template.. 
\"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\"", "return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an", "editview.ProcessFormView): \"\"\" Base view for creating an new object instance. Using this base", "queryset and extract the document class # from that document = self.get_queryset().document #fields", "define\" \" a get_absolute_url method on the document.\") return url def form_valid(self, form):", "self.form_class else: if self.document is not None: # If a document has been", "response rendered by template.. \"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin,", "view for updating an existing object. Using this base class requires subclassing to", "context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating an new object instance.", "if self.form_class: return self.form_class else: if self.document is not None: # If a", "Base view for deleting an object. Using this base class requires subclassing to", "single object, use # the class of that object document = self.object.__class__ else:", "self.object}) return kwargs def get_success_url(self): if self.success_url: url = self.success_url % self.object.__dict__ else:", "return url def form_valid(self, form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self,", "'_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating an existing object. 
Using", "**kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\"", "if context_object_name: context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view", "def post(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs)", "it document = self.document elif hasattr(self, 'object') and self.object is not None: #", "to use in this view \"\"\" if self.form_class: return self.form_class else: if self.document", "BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating an new object instance. Using this", "super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object = self.get_object() return", "\"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an object retrieved with `self.get_object()`,", "class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting an object. Using this base", "import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self):", "to. Either provide a url or define\" \" a get_absolute_url method on the", "object document = self.object.__class__ else: # Try to get a queryset and extract", "a response mixin. 
\"\"\" def get(self, request, *args, **kwargs): self.object = None return", "**kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an new object instance, with", "context = kwargs if getattr(self, 'object', None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object)", "this base class requires subclassing to provide a response mixin. \"\"\" def get(self,", "Base view for updating an existing object. Using this base class requires subclassing", "for updating an existing object. Using this base class requires subclassing to provide", "response mixin. \"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an object retrieved", "class to use in this view \"\"\" if self.form_class: return self.form_class else: if", "class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating an existing object. Using this", "BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the", "None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return", "View for deleting an object retrieved with `self.get_object()`, with a response rendered by", "= kwargs if getattr(self, 'object', None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if", "to provide a response mixin. 
\"\"\" def get(self, request, *args, **kwargs): self.object =", "Returns the form class to use in this view \"\"\" if self.form_class: return", "\"\"\" def get(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args,", "= None return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object", "<filename>dockit/views/edit.py from django.core.exceptions import ImproperlyConfigured from django.views.generic import edit as editview from detail", "# If this view is operating on a single object, use # the", "document class # from that document = self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm):", "form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs if getattr(self, 'object',", "CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns", "= self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return context class", "#fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm", "self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return", "self.document elif hasattr(self, 'object') and self.object is not None: # If this view", "CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an new object instance, with a response", "self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request, 
*args, **kwargs): self.object =", "return self.form_class else: if self.document is not None: # If a document has", "\"\"\" View for creating an new object instance, with a response rendered by", "arguments for instanciating the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return", "return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object = self.get_object()", "base class requires subclassing to provide a response mixin. \"\"\" def get(self, request,", "to get a queryset and extract the document class # from that document", "raise ImproperlyConfigured( \"No URL to redirect to. Either provide a url or define\"", "get_form_kwargs(self): \"\"\" Returns the keyword arguments for instanciating the form. \"\"\" kwargs =", "view for creating an new object instance. Using this base class requires subclassing", "form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if", "**kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request, *args,", "with a response rendered by template.. \"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin", "is not None: # If a document has been explicitly provided, use it", "if self.success_url: url = self.success_url % self.object.__dict__ else: try: url = self.object.get_absolute_url() except", "creating an new object instance, with a response rendered by template. 
\"\"\" template_name_suffix", "def get_form_class(self): \"\"\" Returns the form class to use in this view \"\"\"", "*args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request,", "provide a url or define\" \" a get_absolute_url method on the document.\") return", "post(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).post(request, *args, **kwargs) class", "None return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating", "as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class", "Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword", "document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword arguments for instanciating", "self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs if getattr(self, 'object', None): context['object'] =", "ImproperlyConfigured from django.views.generic import edit as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView", "not None: # If a document has been explicitly provided, use it document", "#CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword arguments for instanciating the", "retrieved with `self.get_object()`, with a response rendered by template. \"\"\" template_name_suffix = '_confirm_delete'", "an object. 
Using this base class requires subclassing to provide a response mixin.", "= self.object.__class__ else: # Try to get a queryset and extract the document", "django.core.exceptions import ImproperlyConfigured from django.views.generic import edit as editview from detail import SingleObjectMixin,", "new object instance. Using this base class requires subclassing to provide a response", "kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if self.success_url: url", "is operating on a single object, use # the class of that object", "def form_valid(self, form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context", "If a document has been explicitly provided, use it document = self.document elif", "# the class of that object document = self.object.__class__ else: # Try to", "def post(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).post(request, *args, **kwargs)", "the keyword arguments for instanciating the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance':", "return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating an new object", "= self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields)", "self.success_url: url = self.success_url % self.object.__dict__ else: try: url = self.object.get_absolute_url() except AttributeError:", "def get_form_kwargs(self): \"\"\" Returns the keyword arguments for instanciating the form. 
\"\"\" kwargs", "a document has been explicitly provided, use it document = self.document elif hasattr(self,", "return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object = None", "object. Using this base class requires subclassing to provide a response mixin. \"\"\"", "view is operating on a single object, use # the class of that", "= self.success_url % self.object.__dict__ else: try: url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured(", "DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form class to use in this", "rendered by template.. \"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView):", "with a response rendered by template. \"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView):", "use # the class of that object document = self.object.__class__ else: # Try", "requires subclassing to provide a response mixin. \"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View", "**kwargs) def post(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).post(request, *args,", "CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword arguments for instanciating the form. \"\"\"", "def get(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs)", "self.object context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin,", "except AttributeError: raise ImproperlyConfigured( \"No URL to redirect to. Either provide a url", "= self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL to redirect to. 
Either provide", "elif hasattr(self, 'object') and self.object is not None: # If this view is", "get_absolute_url method on the document.\") return url def form_valid(self, form): self.object = form.save()", "from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin):", "**kwargs): context = kwargs if getattr(self, 'object', None): context['object'] = self.object context_object_name =", "SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\"", "to redirect to. Either provide a url or define\" \" a get_absolute_url method", "**kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an object, with a response", "class # from that document = self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class", "Returns the keyword arguments for instanciating the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs()", "= super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if self.success_url: url =", "return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs if getattr(self, 'object', None):", "self.object is not None: # If this view is operating on a single", "an object, with a response rendered by template.. 
\"\"\" template_name_suffix = '_form' DeletionMixin", "fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self):", "form class to use in this view \"\"\" if self.form_class: return self.form_class else:", "an new object instance, with a response rendered by template. \"\"\" template_name_suffix =", "class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the", "import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form class to", "self.success_url % self.object.__dict__ else: try: url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No", "ImproperlyConfigured( \"No URL to redirect to. Either provide a url or define\" \"", "edit as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm", "class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an object, with a response rendered", "existing object. Using this base class requires subclassing to provide a response mixin.", "a response mixin. \"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an object", "this base class requires subclassing to provide a response mixin. 
\"\"\" class DeleteView(SingleObjectTemplateResponseMixin,", "Either provide a url or define\" \" a get_absolute_url method on the document.\")", "self.object = None return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View", "document.\") return url def form_valid(self, form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def", "\"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating an", "for instanciating the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs", "= fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def", "self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base", "\"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if self.success_url:", "super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs if getattr(self, 'object', None): context['object']", "kwargs def get_success_url(self): if self.success_url: url = self.success_url % self.object.__dict__ else: try: url", "a url or define\" \" a get_absolute_url method on the document.\") return url", "by template. 
\"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for", "a single object, use # the class of that object document = self.object.__class__", "kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if self.success_url: url = self.success_url % self.object.__dict__", "return super(BaseCreateView, self).post(request, *args, **kwargs) class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView): \"\"\" View for creating an", "the form class to use in this view \"\"\" if self.form_class: return self.form_class", "object instance, with a response rendered by template. \"\"\" template_name_suffix = '_form' class", "AttributeError: raise ImproperlyConfigured( \"No URL to redirect to. Either provide a url or", "provided, use it document = self.document elif hasattr(self, 'object') and self.object is not", "DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting an object.", "= self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for creating an", "from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form", "response mixin. \"\"\" def get(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView,", "form_valid(self, form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context =", "base class requires subclassing to provide a response mixin. \"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView):", "object instance. 
Using this base class requires subclassing to provide a response mixin.", "\"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view", "class CustomDocumentForm(DocumentForm): class Meta: document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\"", "provide a response mixin. \"\"\" def get(self, request, *args, **kwargs): self.object = None", "\"\"\" if self.form_class: return self.form_class else: if self.document is not None: # If", "has been explicitly provided, use it document = self.document elif hasattr(self, 'object') and", "\"\"\" View for updating an object, with a response rendered by template.. \"\"\"", "template. \"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating", "UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an object, with a response rendered by", "an object retrieved with `self.get_object()`, with a response rendered by template. 
\"\"\" template_name_suffix", "get_form_class(self): \"\"\" Returns the form class to use in this view \"\"\" if", "*args, **kwargs): self.object = None return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request,", "the document.\") return url def form_valid(self, form): self.object = form.save() return super(DocumentFormMixin, self).form_valid(form)", "self.object = form.save() return super(DocumentFormMixin, self).form_valid(form) def get_context_data(self, **kwargs): context = kwargs if", "self.object = None return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs):", "super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if self.success_url: url = self.success_url", "hasattr(self, 'object') and self.object is not None: # If this view is operating", "= '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for deleting", "self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object = None return super(BaseCreateView,", "view \"\"\" if self.form_class: return self.form_class else: if self.document is not None: #", "provide a response mixin. 
\"\"\" def get(self, request, *args, **kwargs): self.object = self.get_object()", "this view \"\"\" if self.form_class: return self.form_class else: if self.document is not None:", "self.object.__dict__ else: try: url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL to", "from django.views.generic import edit as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from", "request, *args, **kwargs): self.object = None return super(BaseCreateView, self).get(request, *args, **kwargs) def post(self,", "detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def", "BaseCreateView): \"\"\" View for creating an new object instance, with a response rendered", "Using this base class requires subclassing to provide a response mixin. \"\"\" class", "\"\"\" Base view for updating an existing object. Using this base class requires", "SingleObjectMixin): def get_form_class(self): \"\"\" Returns the form class to use in this view", "self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView,", "None: # If a document has been explicitly provided, use it document =", "on the document.\") return url def form_valid(self, form): self.object = form.save() return super(DocumentFormMixin,", "keyword arguments for instanciating the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object})", "View for updating an object, with a response rendered by template.. 
\"\"\" template_name_suffix", "*args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an object, with a", "document = document #CustomDocumentForm.base_fields.update(fields) return CustomDocumentForm def get_form_kwargs(self): \"\"\" Returns the keyword arguments", "a response rendered by template.. \"\"\" template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class", "that object document = self.object.__class__ else: # Try to get a queryset and", "and extract the document class # from that document = self.get_queryset().document #fields =", "rendered by template. \"\"\" template_name_suffix = '_form' class BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view", "= self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object", "*args, **kwargs) def post(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).post(request,", "template_name_suffix = '_form' DeletionMixin = editview.DeletionMixin class BaseDeleteView(DeletionMixin, BaseDetailView): \"\"\" Base view for", "instanciating the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def", "try: url = self.object.get_absolute_url() except AttributeError: raise ImproperlyConfigured( \"No URL to redirect to.", "the class of that object document = self.object.__class__ else: # Try to get", "kwargs if getattr(self, 'object', None): context['object'] = self.object context_object_name = self.get_context_object_name(self.object) if context_object_name:", "mixin. 
\"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an object retrieved with", "*args, **kwargs) def post(self, request, *args, **kwargs): self.object = None return super(BaseCreateView, self).post(request,", "from django.core.exceptions import ImproperlyConfigured from django.views.generic import edit as editview from detail import", "\"\"\" Base view for deleting an object. Using this base class requires subclassing", "from that document = self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document", "document = self.object.__class__ else: # Try to get a queryset and extract the", "if self.document is not None: # If a document has been explicitly provided,", "self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self): if self.success_url: url = self.success_url %", "self.form_class: return self.form_class else: if self.document is not None: # If a document", "mixin. \"\"\" def get(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request,", "import edit as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import", "that document = self.get_queryset().document #fields = fields_for_document(document) class CustomDocumentForm(DocumentForm): class Meta: document =", "context_object_name = self.get_context_object_name(self.object) if context_object_name: context[context_object_name] = self.object return context class BaseCreateView(DocumentFormMixin, editview.ProcessFormView):", "get(self, request, *args, **kwargs): self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def", "BaseUpdateView(DocumentFormMixin, editview.ProcessFormView): \"\"\" Base view for updating an existing object. 
Using this base", "else: # Try to get a queryset and extract the document class #", "self.object = self.get_object() return super(BaseUpdateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs):", "this view is operating on a single object, use # the class of", "return kwargs def get_success_url(self): if self.success_url: url = self.success_url % self.object.__dict__ else: try:", "a get_absolute_url method on the document.\") return url def form_valid(self, form): self.object =", "django.views.generic import edit as editview from detail import SingleObjectMixin, SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms", "the form. \"\"\" kwargs = super(DocumentFormMixin, self).get_form_kwargs() kwargs.update({'instance': self.object}) return kwargs def get_success_url(self):", "class of that object document = self.object.__class__ else: # Try to get a", "# If a document has been explicitly provided, use it document = self.document", "deleting an object retrieved with `self.get_object()`, with a response rendered by template. \"\"\"", "'object') and self.object is not None: # If this view is operating on", "If this view is operating on a single object, use # the class", "to provide a response mixin. \"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting", "SingleObjectTemplateResponseMixin, BaseDetailView from dockit.forms import DocumentForm class DocumentFormMixin(editview.FormMixin, SingleObjectMixin): def get_form_class(self): \"\"\" Returns", "View for creating an new object instance, with a response rendered by template.", "super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating an object,", "provide a response mixin. 
\"\"\" class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView): \"\"\" View for deleting an", "= self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for", "self.get_object() return super(BaseUpdateView, self).post(request, *args, **kwargs) class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView): \"\"\" View for updating", "super(BaseCreateView, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.object = None return" ]
[ "wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()]) image_average_color = wtforms.StringField( 'Average Color', [wtforms.validators.optional()])", "util from main import app from views import ListField class ResourceForm(wtf.Form): name =", "description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()]) image_average_color = wtforms.StringField( 'Average", "from main import app from views import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name',", "ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags',", "views import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description',", "blobstore import flask import wtforms import auth import config import model import util", "google.appengine.ext import blobstore import flask import wtforms import auth import config import model", "wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()]) image_average_color =", "<filename>main/views/admin/resource/resource_form.py import urllib from flask.ext import wtf from google.appengine.ext import blobstore import flask", "wtf from google.appengine.ext import blobstore import flask import wtforms import auth import config", "model import util from main import app from views import ListField class ResourceForm(wtf.Form):", "main import app from views import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()])", "wtforms import auth import config import model import util from main 
import app", "class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags =", "import config import model import util from main import app from views import", "flask.ext import wtf from google.appengine.ext import blobstore import flask import wtforms import auth", "import auth import config import model import util from main import app from", "from views import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField(", "= wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()]) image_average_color", "= wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()]) image_average_color = wtforms.StringField( 'Average Color',", "from flask.ext import wtf from google.appengine.ext import blobstore import flask import wtforms import", "app from views import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description =", "[wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()]) image_average_color = wtforms.StringField(", "config import model import util from main import app from views import ListField", "import urllib from flask.ext import wtf from google.appengine.ext import blobstore import flask import", "from google.appengine.ext import blobstore import flask import wtforms import auth import config import", "import blobstore import flask import wtforms import auth import config import model import", "import wtf from google.appengine.ext import blobstore import flask import wtforms 
import auth import", "import flask import wtforms import auth import config import model import util from", "import util from main import app from views import ListField class ResourceForm(wtf.Form): name", "import model import util from main import app from views import ListField class", "import app from views import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description", "import ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()])", "name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags = ListField('Tags', [wtforms.validators.optional()])", "urllib from flask.ext import wtf from google.appengine.ext import blobstore import flask import wtforms", "flask import wtforms import auth import config import model import util from main", "import wtforms import auth import config import model import util from main import", "auth import config import model import util from main import app from views", "ListField class ResourceForm(wtf.Form): name = wtforms.TextField('Name', [wtforms.validators.optional()]) description = wtforms.StringField( 'Description', [wtforms.validators.optional()]) tags" ]
[ "sources\"\"\" from .gnomad import GnomAD from .cbioportal import CBioPortal from .cancer_hotspots import CancerHotspots", "<filename>evidence/data_sources/__init__.py \"\"\"Import data sources\"\"\" from .gnomad import GnomAD from .cbioportal import CBioPortal from", "\"\"\"Import data sources\"\"\" from .gnomad import GnomAD from .cbioportal import CBioPortal from .cancer_hotspots", "data sources\"\"\" from .gnomad import GnomAD from .cbioportal import CBioPortal from .cancer_hotspots import" ]
[ "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "writing, software # distributed under the License is distributed on an \"AS IS\"", "import * class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be", "= self.args[0] context = self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json) if", "dteam \"\"\" ) def validate(self): if len(self.args) == 0: self.logger.critical('Need a job id')", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# See the License for the specific language governing permissions and # limitations", "0: self.logger.critical('Need a job id') sys.exit(1) def run(self): job_id = self.args[0] context =", "Collaboration, 2013. # # See www.eu-emi.eu for details on the copyright holders #", "License. 
# You may obtain a copy of the License at # #", "VO Name: dteam \"\"\" ) def validate(self): if len(self.args) == 0: self.logger.critical('Need a", "validate(self): if len(self.args) == 0: self.logger.critical('Need a job id') sys.exit(1) def run(self): job_id", "self).__init__( extra_args='JOB_ID', description=\"This command can be used to check the current status of", "on the copyright holders # # Licensed under the Apache License, Version 2.0", "status of a given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID:", "len(self.args) == 0: self.logger.critical('Need a job id') sys.exit(1) def run(self): job_id = self.args[0]", "Base from utils import * class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "$ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic", "compliance with the License. # You may obtain a copy of the License", "a given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status:", "Copyright Members of the EMI Collaboration, 2013. # # See www.eu-emi.eu for details", "job_id = self.args[0] context = self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json)", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "this file except in compliance with the License. 
# You may obtain a", "See www.eu-emi.eu for details on the copyright holders # # Licensed under the", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "for the specific language governing permissions and # limitations under the License. import", "%(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME>", "notice: # Copyright Members of the EMI Collaboration, 2013. # # See www.eu-emi.eu", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED", "current status of a given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request", "# # See www.eu-emi.eu for details on the copyright holders # # Licensed", "Members of the EMI Collaboration, 2013. # # See www.eu-emi.eu for details on", "Copyright notice: # Copyright Members of the EMI Collaboration, 2013. # # See", "the current status of a given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a", "of the EMI Collaboration, 2013. # # See www.eu-emi.eu for details on the", "ANY KIND, either express or implied. # See the License for the specific", "job id') sys.exit(1) def run(self): job_id = self.args[0] context = self._create_context() inquirer =", "import sys from fts3.rest.client import Inquirer from base import Base from utils import", "def validate(self): if len(self.args) == 0: self.logger.critical('Need a job id') sys.exit(1) def run(self):", "language governing permissions and # limitations under the License. 
import sys from fts3.rest.client", "used to check the current status of a given job\", example=\"\"\" $ %(prog)s", "Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\" ) def validate(self):", "-s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason:", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "can be used to check the current status of a given job\", example=\"\"\"", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "permissions and # limitations under the License. import sys from fts3.rest.client import Inquirer", "use this file except in compliance with the License. # You may obtain", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN:", "not use this file except in compliance with the License. # You may", "super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be used to check the current status", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name:", "context = self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json) if not self.options.json:", "ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority:", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "* class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be used", "License, Version 2.0 (the \"License\"); # you may not use this file except", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "copyright holders # # Licensed under the Apache License, Version 2.0 (the \"License\");", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json) if not self.options.json: self.logger.info(job_human_readable(job)) else:", "and # limitations under the License. import sys from fts3.rest.client import Inquirer from", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "a job id') sys.exit(1) def run(self): job_id = self.args[0] context = self._create_context() inquirer", "2013. 
# # See www.eu-emi.eu for details on the copyright holders # #", "be used to check the current status of a given job\", example=\"\"\" $", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\" ) def validate(self): if len(self.args) ==", "if len(self.args) == 0: self.logger.critical('Need a job id') sys.exit(1) def run(self): job_id =", "sys.exit(1) def run(self): job_id = self.args[0] context = self._create_context() inquirer = Inquirer(context) job", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "OF ANY KIND, either express or implied. # See the License for the", "holders # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "2.0 (the \"License\"); # you may not use this file except in compliance", "the specific language governing permissions and # limitations under the License. import sys", "# you may not use this file except in compliance with the License.", "the EMI Collaboration, 2013. # # See www.eu-emi.eu for details on the copyright", "fts3.rest.client import Inquirer from base import Base from utils import * class JobShower(Base):", "== 0: self.logger.critical('Need a job id') sys.exit(1) def run(self): job_id = self.args[0] context", "agreed to in writing, software # distributed under the License is distributed on", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "<filename>src/fts3/cli/jobshower.py<gh_stars>1-10 # Copyright notice: # Copyright Members of the EMI Collaboration, 2013. 
#", "(the \"License\"); # you may not use this file except in compliance with", "from fts3.rest.client import Inquirer from base import Base from utils import * class", "3 VO Name: dteam \"\"\" ) def validate(self): if len(self.args) == 0: self.logger.critical('Need", "DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\"", "# Copyright notice: # Copyright Members of the EMI Collaboration, 2013. # #", "# # Unless required by applicable law or agreed to in writing, software", "Inquirer from base import Base from utils import * class JobShower(Base): def __init__(self):", "express or implied. # See the License for the specific language governing permissions", "check the current status of a given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "except in compliance with the License. # You may obtain a copy of", "class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be used to", "by applicable law or agreed to in writing, software # distributed under the", "import Base from utils import * class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID',", "under the License. import sys from fts3.rest.client import Inquirer from base import Base", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "from utils import * class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command", "for details on the copyright holders # # Licensed under the Apache License,", "either express or implied. # See the License for the specific language governing", "the License. 
import sys from fts3.rest.client import Inquirer from base import Base from", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "sys from fts3.rest.client import Inquirer from base import Base from utils import *", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "www.eu-emi.eu for details on the copyright holders # # Licensed under the Apache", "self.logger.critical('Need a job id') sys.exit(1) def run(self): job_id = self.args[0] context = self._create_context()", "c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3", "to check the current status of a given job\", example=\"\"\" $ %(prog)s -s", "Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "EMI Collaboration, 2013. # # See www.eu-emi.eu for details on the copyright holders", "governing permissions and # limitations under the License. import sys from fts3.rest.client import", "file except in compliance with the License. # You may obtain a copy", "specific language governing permissions and # limitations under the License. import sys from", "# Copyright Members of the EMI Collaboration, 2013. 
# # See www.eu-emi.eu for", "id') sys.exit(1) def run(self): job_id = self.args[0] context = self._create_context() inquirer = Inquirer(context)", "job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client", "# limitations under the License. import sys from fts3.rest.client import Inquirer from base", "Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34", "details on the copyright holders # # Licensed under the Apache License, Version", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\" ) def validate(self): if", "License for the specific language governing permissions and # limitations under the License.", "utils import * class JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "the License. # You may obtain a copy of the License at #", "https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission", "# See www.eu-emi.eu for details on the copyright holders # # Licensed under", "to in writing, software # distributed under the License is distributed on an", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "implied. 
# See the License for the specific language governing permissions and #", "Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\" ) def", "\"License\"); # you may not use this file except in compliance with the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "the copyright holders # # Licensed under the Apache License, Version 2.0 (the", "required by applicable law or agreed to in writing, software # distributed under", "inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json) if not self.options.json: self.logger.info(job_human_readable(job)) else: self.logger.info(job_as_json(job))", "def run(self): job_id = self.args[0] context = self._create_context() inquirer = Inquirer(context) job =", "applicable law or agreed to in writing, software # distributed under the License", "License. 
import sys from fts3.rest.client import Inquirer from base import Base from utils", "from base import Base from utils import * class JobShower(Base): def __init__(self): super(JobShower,", "time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\" ) def validate(self): if len(self.args)", "base import Base from utils import * class JobShower(Base): def __init__(self): super(JobShower, self).__init__(", "import Inquirer from base import Base from utils import * class JobShower(Base): def", "description=\"This command can be used to check the current status of a given", "Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam", "\"\"\" ) def validate(self): if len(self.args) == 0: self.logger.critical('Need a job id') sys.exit(1)", "c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a Status: FINISHED Client DN: /DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time:", "or agreed to in writing, software # distributed under the License is distributed", "run(self): job_id = self.args[0] context = self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id,", "= self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json) if not self.options.json: self.logger.info(job_human_readable(job))", "or implied. 
# See the License for the specific language governing permissions and", "Priority: 3 VO Name: dteam \"\"\" ) def validate(self): if len(self.args) == 0:", "__init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be used to check the current", "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=saketag/CN=678984/CN=<NAME> Reason: Submission time: 2014-04-13T23:31:34 Priority: 3 VO Name: dteam \"\"\" )", "extra_args='JOB_ID', description=\"This command can be used to check the current status of a", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "JobShower(Base): def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be used to check", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "of a given job\", example=\"\"\" $ %(prog)s -s https://fts3-devel.cern.ch:8446 c079a636-c363-11e3-b7e5-02163e009f5a Request ID: c079a636-c363-11e3-b7e5-02163e009f5a", "limitations under the License. import sys from fts3.rest.client import Inquirer from base import", "self.args[0] context = self._create_context() inquirer = Inquirer(context) job = inquirer.get_job_status(job_id, list_files=self.options.json) if not", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Name: dteam \"\"\" ) def validate(self): if len(self.args) == 0: self.logger.critical('Need a job", "def __init__(self): super(JobShower, self).__init__( extra_args='JOB_ID', description=\"This command can be used to check the", "with the License. 
# You may obtain a copy of the License at", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "command can be used to check the current status of a given job\",", "in writing, software # distributed under the License is distributed on an \"AS", ") def validate(self): if len(self.args) == 0: self.logger.critical('Need a job id') sys.exit(1) def", "under the Apache License, Version 2.0 (the \"License\"); # you may not use" ]
[ "with it.\") # else: # # while there is more than one digit", "MMDDYYYY]:\") if len(bday) != 8 or not date.isdigit(): print(\"Birthday dat must be 8", "8 or not date.isdigit(): # print(\"Invalid date - sorry, we can do nothing", "must be 8 digits in length\") else: while len(bday) != 1: lst =", "(in the following format: YYYYMMDD or YYYYDDMM, 8 digits): \") # if len(date)", "= input(\"Enter your birthday date (in the following format: YYYYMMDD or YYYYDDMM, 8", "input(\"Enter your birthday date (in the following format: YYYYMMDD or YYYYDDMM, 8 digits):", "input(\"Enter your birthday [YYYYMMDD or YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8 or", "# print(\"Invalid date - sorry, we can do nothing with it.\") # else:", "# ... and store sum inside the string # date = str(sum) #", "# date = str(sum) # print(\"Your Digit of Life is: \" + date)", "digits): \") # if len(date) != 8 or not date.isdigit(): # print(\"Invalid date", "= input(\"Enter your birthday [YYYYMMDD or YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8", "# else: # # while there is more than one digit in the", "solution follows # date = input(\"Enter your birthday date (in the following format:", "the date... # while len(date) > 1: # sum = 0 # #", "lst = list(bday) sum = 0 for num in lst: sum += int(num)", "date - sorry, we can do nothing with it.\") # else: # #", "or YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8 or not date.isdigit(): print(\"Birthday dat", "!= 1: lst = list(bday) sum = 0 for num in lst: sum", "1: lst = list(bday) sum = 0 for num in lst: sum +=", "0 for num in lst: sum += int(num) bday = str(sum) print(bday) #", "len(bday) != 8 or not date.isdigit(): print(\"Birthday dat must be 8 digits in", "YYYYDDMM, 8 digits): \") # if len(date) != 8 or not date.isdigit(): #", "# sum = 0 # # ... sum all the digits... 
# for", "sum += int(num) bday = str(sum) print(bday) # Better solution follows # date", "!= 8 or not date.isdigit(): print(\"Birthday dat must be 8 digits in length\")", "+= int(dig) # print(date) # # ... and store sum inside the string", "# for dig in date: # sum += int(dig) # print(date) # #", "for num in lst: sum += int(num) bday = str(sum) print(bday) # Better", "print(bday) # Better solution follows # date = input(\"Enter your birthday date (in", "and store sum inside the string # date = str(sum) # print(\"Your Digit", "it.\") # else: # # while there is more than one digit in", "digit in the date... # while len(date) > 1: # sum = 0", "birthday date (in the following format: YYYYMMDD or YYYYDDMM, 8 digits): \") #", "8 digits in length\") else: while len(bday) != 1: lst = list(bday) sum", "in the date... # while len(date) > 1: # sum = 0 #", "digits... # for dig in date: # sum += int(dig) # print(date) #", "# while len(date) > 1: # sum = 0 # # ... sum", "while len(bday) != 1: lst = list(bday) sum = 0 for num in", "len(date) != 8 or not date.isdigit(): # print(\"Invalid date - sorry, we can", "sorry, we can do nothing with it.\") # else: # # while there", "inside the string # date = str(sum) # print(\"Your Digit of Life is:", "if len(bday) != 8 or not date.isdigit(): print(\"Birthday dat must be 8 digits", "digits in length\") else: while len(bday) != 1: lst = list(bday) sum =", "int(dig) # print(date) # # ... and store sum inside the string #", "if len(date) != 8 or not date.isdigit(): # print(\"Invalid date - sorry, we", "# print(date) # # ... and store sum inside the string # date", "... and store sum inside the string # date = str(sum) # print(\"Your", "# # ... and store sum inside the string # date = str(sum)", "sum inside the string # date = str(sum) # print(\"Your Digit of Life", "- sorry, we can do nothing with it.\") # else: # # while", "in date: # sum += int(dig) # print(date) # # ... 
and store", "format: YYYYMMDD or YYYYDDMM, 8 digits): \") # if len(date) != 8 or", "we can do nothing with it.\") # else: # # while there is", "print(date) # # ... and store sum inside the string # date =", "not date.isdigit(): print(\"Birthday dat must be 8 digits in length\") else: while len(bday)", "for dig in date: # sum += int(dig) # print(date) # # ...", "is more than one digit in the date... # while len(date) > 1:", "int(num) bday = str(sum) print(bday) # Better solution follows # date = input(\"Enter", "str(sum) print(bday) # Better solution follows # date = input(\"Enter your birthday date", "else: while len(bday) != 1: lst = list(bday) sum = 0 for num", "or YYYYDDMM, 8 digits): \") # if len(date) != 8 or not date.isdigit():", "be 8 digits in length\") else: while len(bday) != 1: lst = list(bday)", "sum all the digits... # for dig in date: # sum += int(dig)", "date = input(\"Enter your birthday date (in the following format: YYYYMMDD or YYYYDDMM,", "the digits... # for dig in date: # sum += int(dig) # print(date)", "store sum inside the string # date = str(sum) # print(\"Your Digit of", "can do nothing with it.\") # else: # # while there is more", "# # while there is more than one digit in the date... #", "= str(sum) print(bday) # Better solution follows # date = input(\"Enter your birthday", "YYYYMMDD or YYYYDDMM, 8 digits): \") # if len(date) != 8 or not", "or not date.isdigit(): # print(\"Invalid date - sorry, we can do nothing with", "nothing with it.\") # else: # # while there is more than one", "= 0 for num in lst: sum += int(num) bday = str(sum) print(bday)", "the following format: YYYYMMDD or YYYYDDMM, 8 digits): \") # if len(date) !=", "sum += int(dig) # print(date) # # ... and store sum inside the", "bday = input(\"Enter your birthday [YYYYMMDD or YYYYDDMM or MMDDYYYY]:\") if len(bday) !=", "1: # sum = 0 # # ... sum all the digits... #", "while there is more than one digit in the date... 
# while len(date)", "# sum += int(dig) # print(date) # # ... and store sum inside", "or not date.isdigit(): print(\"Birthday dat must be 8 digits in length\") else: while", "# ... sum all the digits... # for dig in date: # sum", "Better solution follows # date = input(\"Enter your birthday date (in the following", "than one digit in the date... # while len(date) > 1: # sum", "date: # sum += int(dig) # print(date) # # ... and store sum", "else: # # while there is more than one digit in the date...", "lst: sum += int(num) bday = str(sum) print(bday) # Better solution follows #", "length\") else: while len(bday) != 1: lst = list(bday) sum = 0 for", "num in lst: sum += int(num) bday = str(sum) print(bday) # Better solution", "in lst: sum += int(num) bday = str(sum) print(bday) # Better solution follows", "+= int(num) bday = str(sum) print(bday) # Better solution follows # date =", "list(bday) sum = 0 for num in lst: sum += int(num) bday =", "# while there is more than one digit in the date... # while", "8 digits): \") # if len(date) != 8 or not date.isdigit(): # print(\"Invalid", "your birthday date (in the following format: YYYYMMDD or YYYYDDMM, 8 digits): \")", "one digit in the date... # while len(date) > 1: # sum =", "more than one digit in the date... # while len(date) > 1: #", "= 0 # # ... sum all the digits... # for dig in", "dig in date: # sum += int(dig) # print(date) # # ... and", "8 or not date.isdigit(): print(\"Birthday dat must be 8 digits in length\") else:", "sum = 0 for num in lst: sum += int(num) bday = str(sum)", "following format: YYYYMMDD or YYYYDDMM, 8 digits): \") # if len(date) != 8", "len(date) > 1: # sum = 0 # # ... sum all the", "not date.isdigit(): # print(\"Invalid date - sorry, we can do nothing with it.\")", "# # ... sum all the digits... 
# for dig in date: #", "or MMDDYYYY]:\") if len(bday) != 8 or not date.isdigit(): print(\"Birthday dat must be", "birthday [YYYYMMDD or YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8 or not date.isdigit():", "= list(bday) sum = 0 for num in lst: sum += int(num) bday", "# date = input(\"Enter your birthday date (in the following format: YYYYMMDD or", "... sum all the digits... # for dig in date: # sum +=", "do nothing with it.\") # else: # # while there is more than", "the string # date = str(sum) # print(\"Your Digit of Life is: \"", "follows # date = input(\"Enter your birthday date (in the following format: YYYYMMDD", "while len(date) > 1: # sum = 0 # # ... sum all", "sum = 0 # # ... sum all the digits... # for dig", "YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8 or not date.isdigit(): print(\"Birthday dat must", "there is more than one digit in the date... # while len(date) >", "all the digits... # for dig in date: # sum += int(dig) #", "date (in the following format: YYYYMMDD or YYYYDDMM, 8 digits): \") # if", "!= 8 or not date.isdigit(): # print(\"Invalid date - sorry, we can do", "in length\") else: while len(bday) != 1: lst = list(bday) sum = 0", "your birthday [YYYYMMDD or YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8 or not", "print(\"Invalid date - sorry, we can do nothing with it.\") # else: #", "[YYYYMMDD or YYYYDDMM or MMDDYYYY]:\") if len(bday) != 8 or not date.isdigit(): print(\"Birthday", "date... # while len(date) > 1: # sum = 0 # # ...", "\") # if len(date) != 8 or not date.isdigit(): # print(\"Invalid date -", "print(\"Birthday dat must be 8 digits in length\") else: while len(bday) != 1:", "# if len(date) != 8 or not date.isdigit(): # print(\"Invalid date - sorry,", "0 # # ... sum all the digits... 
# for dig in date:", "string # date = str(sum) # print(\"Your Digit of Life is: \" +", "date.isdigit(): # print(\"Invalid date - sorry, we can do nothing with it.\") #", "date.isdigit(): print(\"Birthday dat must be 8 digits in length\") else: while len(bday) !=", "# Better solution follows # date = input(\"Enter your birthday date (in the", "> 1: # sum = 0 # # ... sum all the digits...", "bday = str(sum) print(bday) # Better solution follows # date = input(\"Enter your", "dat must be 8 digits in length\") else: while len(bday) != 1: lst", "len(bday) != 1: lst = list(bday) sum = 0 for num in lst:" ]
[ "print(numarr) #changing 1st to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items", "#changing index 3 value numarr[3] = 44 print('\\n Array items (after modifing):') print(numarr)", "array as arr numarr = arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3 value", "value numarr[3] = 44 print('\\n Array items (after modifing):') print(numarr) #changing 1st to", "5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items (after modifing in range", "Array items (after modifing):') print(numarr) #changing 1st to 5th index values numarr[1:5] =", "= arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3 value numarr[3] = 44 print('\\n", "to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items (after modifing in", "print('\\n Array items (after modifing):') print(numarr) #changing 1st to 5th index values numarr[1:5]", "44 print('\\n Array items (after modifing):') print(numarr) #changing 1st to 5th index values", "arr numarr = arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3 value numarr[3] =", "print(\"Array items:\") print(numarr) #changing index 3 value numarr[3] = 44 print('\\n Array items", "items:\") print(numarr) #changing index 3 value numarr[3] = 44 print('\\n Array items (after", "values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items (after modifing in range ):\") print(numarr)", "3 value numarr[3] = 44 print('\\n Array items (after modifing):') print(numarr) #changing 1st", "numarr[3] = 44 print('\\n Array items (after modifing):') print(numarr) #changing 1st to 5th", "items (after modifing):') print(numarr) #changing 1st to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7])", "1st to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items (after modifing", "index 3 value numarr[3] 
= 44 print('\\n Array items (after modifing):') print(numarr) #changing", "index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items (after modifing in range ):\")", "= 44 print('\\n Array items (after modifing):') print(numarr) #changing 1st to 5th index", "print(numarr) #changing index 3 value numarr[3] = 44 print('\\n Array items (after modifing):')", "arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3 value numarr[3] = 44 print('\\n Array", "modifing):') print(numarr) #changing 1st to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array", "import array as arr numarr = arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3", "<filename>Array/4.Modifying-items.py import array as arr numarr = arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index", "#changing 1st to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n Array items (after", "as arr numarr = arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3 value numarr[3]", "(after modifing):') print(numarr) #changing 1st to 5th index values numarr[1:5] = arr.array('i',[-8,-5,-6,-7]) print(\"\\n", "numarr = arr.array('i',[10,20,30,40,50,60,70,80]) print(\"Array items:\") print(numarr) #changing index 3 value numarr[3] = 44" ]
[ "class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i)", "reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d = {}", "-*- coding: utf-8 -*- \"\"\" Created on Mon Nov 24 12:05:09 2014 @author:", "#more nonsensical code... @numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy the first `size`", "the first `size` elements of `dest`. Note that there is no bounds checking", "from sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical code... @numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest,", "of `dest`. Note that there is no bounds checking and can break the", "def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d =", "`dest`. Note that there is no bounds checking and can break the program,\"\"\"", "for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result", "`src` into the first `size` elements of `dest`. Note that there is no", "0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr):", "checking and can break the program,\"\"\" for i in range(size): dest[i] = src[i]", "+ 1 class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr)", "1 class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) )", "the first `size` elements of `src` into the first `size` elements of `dest`.", "LambdaPrinter import numba #more nonsensical code... 
@numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy", "numpy as np from sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical code... @numba.jit('void(f8[:],f8[:],u2)',", "the program,\"\"\" for i in range(size): dest[i] = src[i] def reps_to_converge(x, value =", "_print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs):", ", '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3,", ") class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name,", "\"\"\" import numpy as np from sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical", "result = super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs,", ": '%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def", "src[i] def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d", "program,\"\"\" for i in range(size): dest[i] = src[i] def reps_to_converge(x, value = 0.1):", "ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter,", "on Mon Nov 24 12:05:09 2014 @author: zah \"\"\" import numpy as np", "pass def cv_split(*arrs, prob_testing = 0.3, even_splits = None): if even_splits is not", "0.3, even_splits = None): if even_splits is not None: raise NotImplementedError is_validation =", "zah \"\"\" import numpy as np from sympy.printing.lambdarepr import LambdaPrinter import 
numba #more", "if even_splits is not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing return", "= 0.3, even_splits = None): if even_splits is not None: raise NotImplementedError is_validation", "raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing return ((arr[~is_validation],arr[is_validation]) for arr in arrs)", "can break the program,\"\"\" for i in range(size): dest[i] = src[i] def reps_to_converge(x,", "__init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i) for i, symbol", "of `src` into the first `size` elements of `dest`. Note that there is", "utf-8 -*- \"\"\" Created on Mon Nov 24 12:05:09 2014 @author: zah \"\"\"", "is no bounds checking and can break the program,\"\"\" for i in range(size):", "symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(','", "numba #more nonsensical code... 
@numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy the first", "-*- \"\"\" Created on Mon Nov 24 12:05:09 2014 @author: zah \"\"\" import", "nopython=True) def memcopy(dest, src, size): \"\"\"Copy the first `size` elements of `src` into", "no bounds checking and can break the program,\"\"\" for i in range(size): dest[i]", "def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i) for i,", "{} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name", "return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d =", "Created on Mon Nov 24 12:05:09 2014 @author: zah \"\"\" import numpy as", "prob_testing = 0.3, even_splits = None): if even_splits is not None: raise NotImplementedError", "result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3, even_splits = None):", "even_splits = None): if even_splits is not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape)", "class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_') return result class", "import LambdaPrinter import numba #more nonsensical code... @numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size):", "Nov 24 12:05:09 2014 @author: zah \"\"\" import numpy as np from sympy.printing.lambdarepr", "size): \"\"\"Copy the first `size` elements of `src` into the first `size` elements", "np from sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical code... 
@numba.jit('void(f8[:],f8[:],u2)', nopython=True) def", "= super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing", "self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol", "<reponame>Zaharid/nnets # -*- coding: utf-8 -*- \"\"\" Created on Mon Nov 24 12:05:09", "i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result =", "\"\"\"Copy the first `size` elements of `src` into the first `size` elements of", "import numpy as np from sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical code...", "super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing =", "into the first `size` elements of `dest`. 
Note that there is no bounds", "ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3, even_splits = None): if even_splits is", "np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr): return self.d.get(expr,", "memcopy(dest, src, size): \"\"\"Copy the first `size` elements of `src` into the first", "i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr):", "super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_') return", "Mon Nov 24 12:05:09 2014 @author: zah \"\"\" import numpy as np from", "break the program,\"\"\" for i in range(size): dest[i] = src[i] def reps_to_converge(x, value", "_print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass", "class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3, even_splits = None): if", "def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter):", "value = 0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d = {} def", "'_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3, even_splits", "that there is no bounds checking and can break the program,\"\"\" for i", "12:05:09 2014 @author: zah \"\"\" import numpy as np from sympy.printing.lambdarepr import LambdaPrinter", "super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol :", "return result class 
NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3, even_splits =", "bounds checking and can break the program,\"\"\" for i in range(size): dest[i] =", "= {symbol : '%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Nov 24 12:05:09 2014", "expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d", "ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i) for", "there is no bounds checking and can break the program,\"\"\" for i in", "i in range(size): dest[i] = src[i] def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0]", "even_splits is not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing return ((arr[~is_validation],arr[is_validation])", "nonsensical code... @numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy the first `size` elements", "enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_')", "import numba #more nonsensical code... @numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy the", "coding: utf-8 -*- \"\"\" Created on Mon Nov 24 12:05:09 2014 @author: zah", "first `size` elements of `dest`. 
Note that there is no bounds checking and", "'%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self,", "NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def cv_split(*arrs, prob_testing = 0.3, even_splits = None): if even_splits", "range(size): dest[i] = src[i] def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] + 1", "expr): result = super()._print_Symbol(expr).replace(',' , '_') return result class NeuralPrinter(ArraySubstitutionPrinter, ReplaceComaPrinter): pass def", "**kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args,", "None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing return ((arr[~is_validation],arr[is_validation]) for arr in", "= src[i] def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter):", "first `size` elements of `src` into the first `size` elements of `dest`. Note", "24 12:05:09 2014 @author: zah \"\"\" import numpy as np from sympy.printing.lambdarepr import", "sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical code... @numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src,", "for i in range(size): dest[i] = src[i] def reps_to_converge(x, value = 0.1): return", "self.d = {symbol : '%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs)", "None): if even_splits is not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing", "= 0.1): return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self,", "@author: zah \"\"\" import numpy as np from sympy.printing.lambdarepr import LambdaPrinter import numba", "code... 
@numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy the first `size` elements of", "\"\"\" Created on Mon Nov 24 12:05:09 2014 @author: zah \"\"\" import numpy", "in range(size): dest[i] = src[i] def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] +", "SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter):", "@numba.jit('void(f8[:],f8[:],u2)', nopython=True) def memcopy(dest, src, size): \"\"\"Copy the first `size` elements of `src`", "def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self, arr_name ,arr_symbols,*args,", "def cv_split(*arrs, prob_testing = 0.3, even_splits = None): if even_splits is not None:", "arr_name ,arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i) for i, symbol in", "dest[i] = src[i] def reps_to_converge(x, value = 0.1): return np.argwhere(x<value)[0][0] + 1 class", "= None): if even_splits is not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) <", "def memcopy(dest, src, size): \"\"\"Copy the first `size` elements of `src` into the", "Note that there is no bounds checking and can break the program,\"\"\" for", "**kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' , '_') return result", "and can break the program,\"\"\" for i in range(size): dest[i] = src[i] def", "`size` elements of `src` into the first `size` elements of `dest`. 
Note that", "class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class", ",arr_symbols,*args, **kwargs): self.d = {symbol : '%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)}", "not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing return ((arr[~is_validation],arr[is_validation]) for arr", "cv_split(*arrs, prob_testing = 0.3, even_splits = None): if even_splits is not None: raise", "elements of `dest`. Note that there is no bounds checking and can break", "return np.argwhere(x<value)[0][0] + 1 class SubstitutionPrinter(LambdaPrinter): d = {} def _print_Symbol(self, expr): return", "d = {} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def", "`size` elements of `dest`. Note that there is no bounds checking and can", "src, size): \"\"\"Copy the first `size` elements of `src` into the first `size`", "in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter): def _print_Symbol(self, expr): result = super()._print_Symbol(expr).replace(',' ,", "elements of `src` into the first `size` elements of `dest`. Note that there", "2014 @author: zah \"\"\" import numpy as np from sympy.printing.lambdarepr import LambdaPrinter import", "as np from sympy.printing.lambdarepr import LambdaPrinter import numba #more nonsensical code... 
@numba.jit('void(f8[:],f8[:],u2)', nopython=True)", "{symbol : '%s[%i]'%(arr_name, i) for i, symbol in enumerate(arr_symbols)} super().__init__(*args, **kwargs) class ReplaceComaPrinter(LambdaPrinter):", "= {} def _print_Symbol(self, expr): return self.d.get(expr, super()._print_Symbol(expr) ) class ArraySubstitutionPrinter(SubstitutionPrinter): def __init__(self,", "is not None: raise NotImplementedError is_validation = np.random.rand(*arrs[0].shape) < prob_testing return ((arr[~is_validation],arr[is_validation]) for" ]
[ "str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: '))", "aluno: ')) n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: ')) escolhido", "= str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno:", "= str(input('Quarto aluno: ')) escolhido = choice([n1, n2, n3, n4]) print(f'Aluno escolhido: {escolhido}')", "random import choice n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno: '))", "n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto", "n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: ')) escolhido = choice([n1,", "= str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno:", "import choice n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno: ')) n3", "')) n4 = str(input('Quarto aluno: ')) escolhido = choice([n1, n2, n3, n4]) print(f'Aluno", "aluno: ')) n4 = str(input('Quarto aluno: ')) escolhido = choice([n1, n2, n3, n4])", "from random import choice n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno:", "<gh_stars>0 from random import choice n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo", "')) n3 = str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: ')) escolhido =", "str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: ')) escolhido = choice([n1, n2, n3,", "= str(input('Terceiro aluno: ')) n4 = str(input('Quarto aluno: ')) escolhido = choice([n1, n2,", "')) n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: ')) n4 =", "aluno: ')) n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: ')) n4", "n4 = str(input('Quarto aluno: ')) escolhido = choice([n1, n2, n3, n4]) print(f'Aluno escolhido:", "str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro aluno: '))", "choice n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo 
aluno: ')) n3 =", "n1 = str(input('Primeiro aluno: ')) n2 = str(input('Segundo aluno: ')) n3 = str(input('Terceiro" ]
[ "2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Let's see", "'Light Missile'), ('Caldari Navy Nova Light Missile', 'Light Missile'), ('Medium Core Defense Field", "= reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1]) cos_dist_st =", "[TEST] Short Text Vectorizers # Again same texts should have cosine distance of", "('Damage Control II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield", "for x in l1]) # Create bag of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt)", "y: f'{x} {y}', [x[1] for x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document", "II', 'Warp Scrambler'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Medium", "String Matching import jellyfish # Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise", "'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Light Ion Blaster II', 'Hybrid", "Control System II', 'Ballistic Control system'), ('Ballistic Control System II', 'Ballistic Control system'),", "Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy", "'Drone Damage Modules'), ('F85 Peripheral Damage System I', 'Damage Control'), ('Null S', 'Advanced", "of words doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in", "for x in l1]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for", "'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Light", "('Medium Core Defense Field Extender I', 'Rig Shield'), ('Caldari Navy Inferno Light Missile',", "f'{x} {y}', [x[0] for x in l1]) # Create bag of words cos_dist_lt", "Shield'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'),", "Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter", 
"= reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) doc2_lt =", "for x in l2]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for", "Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),", "x, y: f'{x} {y}', [x[1] for x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st)", "return cos_dist # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157,", "cosine distance of 1 doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for", "Scrambler'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Medium Core Defense", "f'{x} {y}', [x[1] for x in l2]) doc2_st = reduce(lambda x, y: f'{x}", "numpy as np import pandas as pd import nltk # Natural Language Tool", "{y}', [x[1] for x in l1]) doc2_st = reduce(lambda x, y: f'{x} {y}',", "Antimatter Charge S', 'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring", "[x[0] for x in l1]) # Create bag of words doc2_lt = reduce(lambda", "32872 l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter", "fuzzywuzzy import fuzz, process # Fuzzy String Matching import jellyfish # Distance metrics", "ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari Navy Antimatter", "should have cosine distance of 1 doc1_lt = reduce(lambda x, y: f'{x} {y}',", "x, y: f'{x} {y}', [x[1] for x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st)", "cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist # killmail_id, killmail_time,", "texts are doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in", "Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Light Ion", "Defense Field Extender I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id #", "are doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in 
l1])", "'Rig Shield'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp", "Scourge Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Rapid", "l2]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])", "Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion", "Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('F85", "of 1 doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in", "630 l1 = [('Large Shield Extender II', 'Shield Extender'), ('Rapid Light Missile Launcher", "import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize", "Long Text Vectorizers # The same document should have cosine distance of 1", "Vectorizers # Again same texts should have cosine distance of 1 doc1_st =", "'Shield Extender'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy", "{y}', [x[0] for x in l1]) # Create bag of words cos_dist_lt =", "Inferno Light Missile', 'Light Missile'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid", "doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1]) doc2_st", "Navy Mjolnir Light Missile', 'Light Missile'), ('Damage Control II', 'Damage Control'), ('50MN Cold-Gas", "('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light", "\"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words cos_dist", "= reduce(lambda x, y: f'{x} {y}', [x[0] for x in l2]) cos_dist_lt =", "for x in l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2:", "jellyfish # Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def", "Launcher Rapid Light'), ('Caldari 
Navy Mjolnir Light Missile', 'Light Missile'), ('Damage Control II',", "are doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1])", "Kit from fuzzywuzzy import fuzz, process # Fuzzy String Matching import jellyfish #", "Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig", "('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari", "doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2]) doc2_st", "x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\")", "'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S',", "{y}', [x[0] for x in l1]) # Create bag of words doc2_lt =", "I', 'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'),", "should be commutable doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x", "Light'), ('Caldari Navy Mjolnir Light Missile', 'Light Missile'), ('Damage Control II', 'Damage Control'),", "the bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return", "os import sys from functools import reduce import numpy as np import pandas", "Control system'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy", "reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) # Create bag", "Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse", "30000157, 90000814, 32872 l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari", "-*- \"\"\"testing script\"\"\" import os import sys from functools import reduce import numpy", "19:02:00, 30000157, 90000814, 630 l1 = [('Large Shield Extender II', 'Shield Extender'), ('Rapid", "Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II', 'Shield Extender'), ('Caldari Navy", "('Caldari Navy 
Scourge Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light", "'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge')] # [TEST] Long Text", "l2]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1])", "Damage Amplifier II', 'Drone Damage Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN", "bag of words doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x", "Missile'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Phased Scoped Target", "('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead", "# Create bag of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document", "'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp", "Module'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'),", "nltk # Natural Language Tool Kit from fuzzywuzzy import fuzz, process # Fuzzy", "S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge')] # [TEST] Long", "texts are doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in", "Long Text Vectorizers # Let's see how close the long texts are doc1_lt", "('Medium Core Defense Field Extender I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id,", "= reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2]) doc2_st =", "{cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Let's see how close the short", "get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") #", "Painter', 'Target Painter'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon Engine", "Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral", "Compact 
Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis", "Light Missile', 'Light Missile'), ('Damage Control II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive',", "{cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Cosine distance should be commutable doc1_st", "Compute cosine distance return cos_dist # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819,", "Nova Light Missile', 'Light Missile'), ('Medium Core Defense Field Extender I', 'Rig Shield'),", "print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers # Let's see how close", "# Fuzzy String Matching import jellyfish # Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer", "np import pandas as pd import nltk # Natural Language Tool Kit from", "Ion Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy", "'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig", "f'{x} {y}', [x[0] for x in l1]) # Create bag of words doc2_lt", "Fuzzy String Matching import jellyfish # Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from", "Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge')]", "Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure", "Short Text Vectorizers # Cosine distance should be commutable doc1_st = reduce(lambda x,", "# Compute cosine distance return cos_dist # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id #", "words doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1])", "'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Light Ion Blaster", "('Caldari Navy Antimatter Charge S', 'Hybrid Charge')] # [TEST] Long Text Vectorizers #", "reduce(lambda x, y: f'{x} 
{y}', [x[1] for x in l2]) doc2_st = reduce(lambda", "Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814,", "Modules'), ('F85 Peripheral Damage System I', 'Damage Control'), ('Null S', 'Advanced Blaster Charge'),", "'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Rapid Light Missile Launcher", "Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber", "{doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Cosine distance should", "Matching import jellyfish # Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import", "'Missile Launcher Rapid Light'), ('Phased Scoped Target Painter', 'Target Painter'), ('Caldari Navy Inferno", "('Light Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small", "Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II', 'Shield Extender'),", "'Ballistic Control system'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari", "Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari", "('Null S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Light", "'Nanofiber Internal Structure'), ('Ballistic Control System II', 'Ballistic Control system'), ('Ballistic Control System", "{y}', [x[0] for x in l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\")", "ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1 = [('Large Shield Extender", "= linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist # killmail_id, killmail_time, solar_system_id,", "II', 'Ballistic Control system'), ('Ballistic 
Control System II', 'Ballistic Control system'), ('Rapid Light", "Shield Extender II', 'Shield Extender'), ('Caldari Navy Scourge Light Missile', 'Light Missile'), ('Caldari", "Navy Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing I', 'Rig Navigation'),", "'Drone Damage Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive',", "('F85 Peripheral Damage System I', 'Damage Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari", "Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster", "Bulkhead I', 'Rig Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster", "'Light Missile'), ('Medium Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure II',", "solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari", "Extender II', 'Shield Extender'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),", "'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile Launcher II', 'Missile", "S', 'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler',", "words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n", "{doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers #", "30000157, 90000814, 630 l1 = [('Large Shield Extender II', 'Shield Extender'), ('Rapid Light", "Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir Light Missile', 'Light", "Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy", "doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) doc2_lt", "Missile Launcher II', 'Missile Launcher Rapid Light'), ('Medium 
Core Defense Field Extender I',", "Launcher II', 'Missile Launcher Rapid Light'), ('Medium Core Defense Field Extender I', 'Rig", "1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text", "x, y: f'{x} {y}', [x[0] for x in l1]) # Create bag of", "Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid", "[x[1] for x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document", "y: f'{x} {y}', [x[1] for x in l1]) doc2_st = reduce(lambda x, y:", "II', 'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Light Ion Blaster II',", "system'), ('Ballistic Control System II', 'Ballistic Control system'), ('Rapid Light Missile Launcher II',", "l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n", "Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'),", "The same document should have cosine distance of 1 doc1_lt = reduce(lambda x,", "II', 'Shield Extender'), ('Caldari Navy Scourge Light Missile', 'Light Missile'), ('Caldari Navy Inferno", "Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Rapid Light", "Launcher Rapid Light'), ('Phased Scoped Target Painter', 'Target Painter'), ('Caldari Navy Inferno Light", "Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile Launcher II',", "pd import nltk # Natural Language Tool Kit from fuzzywuzzy import fuzz, process", "Light'), ('Medium Core Defense Field Extender I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id,", "in l1]) doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in", "print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short 
Text Vectorizers # Cosine", "= get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\")", "('Light Ion Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari", "('Caldari Navy Mjolnir Light Missile', 'Light Missile'), ('Damage Control II', 'Damage Control'), ('50MN", "in l2]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in", "Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone", "print(\"==========\") # Short Text Vectorizers # Let's see how close the short texts", "{y}', [x[1] for x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\")", "Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage", "2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers # Let's see", "Light Missile', 'Light Missile'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),", "# The same document should have cosine distance of 1 doc1_lt = reduce(lambda", "Launcher Rapid Light'), ('Medium Core Defense Field Extender I', 'Rig Shield')] # killmail_id,", "# Let's see how close the short texts are doc1_st = reduce(lambda x,", "'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157,", "print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short", "('Ballistic Control System II', 'Ballistic Control system'), ('Rapid Light Missile Launcher II', 'Missile", "Text Vectorizers # Let's see how close the short texts are doc1_st =", "x in l2]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x", "Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile Launcher II', 
'Missile Launcher", "Short Text Vectorizers # Again same texts should have cosine distance of 1", "('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Medium Core Defense Field", "Missile', 'Light Missile'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Phased", "Charge')] # [TEST] Long Text Vectorizers # The same document should have cosine", "Rapid Light'), ('Caldari Navy Mjolnir Light Missile', 'Light Missile'), ('Damage Control II', 'Damage", "Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Inferno Light Missile', 'Light", "Rapid Light'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light", "Light Missile', 'Light Missile'), ('Medium Core Defense Field Extender I', 'Rig Shield'), ('Caldari", "for x in l1]) # Create bag of words doc2_lt = reduce(lambda x,", "Vectorizers # Let's see how close the long texts are doc1_lt = reduce(lambda", "# Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1,", "Amplifier II', 'Drone Damage Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8", "# Long Text Vectorizers # Let's see how close the long texts are", "l1]) # Create bag of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\")", "x, y: f'{x} {y}', [x[1] for x in l2]) doc2_st = reduce(lambda x,", "doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1]) cos_dist_st", "Navy Inferno Light Missile', 'Light Missile'), ('Rapid Light Missile Launcher II', 'Missile Launcher", "from functools import reduce import numpy as np import pandas as pd import", "print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text", "Again same texts should have cosine distance of 1 doc1_st = reduce(lambda x,", "in l1]) # Create bag of words 
cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1:", "('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),", "I', 'Damage Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge S',", "Module'), ('Large Shield Extender II', 'Shield Extender'), ('Caldari Navy Scourge Light Missile', 'Light", "('Ballistic Control System II', 'Ballistic Control system'), ('Ballistic Control System II', 'Ballistic Control", "Light'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile',", "Inferno Light Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile", "'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II',", "Let's see how close the short texts are doc1_st = reduce(lambda x, y:", "'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control System II',", "pandas as pd import nltk # Natural Language Tool Kit from fuzzywuzzy import", "Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'),", "('Caldari Navy Nova Light Missile', 'Light Missile'), ('Medium Core Defense Field Extender I',", "Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2):", "'Missile Launcher Rapid Light'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy", "killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1 =", "Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'),", "Text Vectorizers # Cosine distance should be commutable doc1_st = reduce(lambda x, y:", "reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) doc2_lt = reduce(lambda", "Y-T8 Compact 
Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring", "Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat", "in l1]) # Create bag of words doc2_lt = reduce(lambda x, y: f'{x}", "doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short", "x, y: f'{x} {y}', [x[1] for x in l1]) doc2_st = reduce(lambda x,", "('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Inferno Light", "killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1", "I', 'Rig Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster II',", "get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of", "[x[1] for x in l2]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1]", "Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone", "Navy Antimatter Charge S', 'Hybrid Charge')] # [TEST] Long Text Vectorizers # The", "'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis", "1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers", "x in l1]) # Create bag of words doc2_lt = reduce(lambda x, y:", "distance should be commutable doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for", "of 1 doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in", "Text Vectorizers # Again same texts should have cosine distance of 1 doc1_st", "doc2]) # Vectorize the bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute", "print(\"==========\") # Short Text Vectorizers # Cosine distance should be commutable doc1_st =", "import sys from functools 
import reduce import numpy as np import pandas as", "('Phased Scoped Target Painter', 'Target Painter'), ('Caldari Navy Inferno Light Missile', 'Light Missile'),", "Mjolnir Light Missile', 'Light Missile'), ('Damage Control II', 'Damage Control'), ('50MN Cold-Gas Enduring", "('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage", "('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light", "# killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872", "Tool Kit from fuzzywuzzy import fuzz, process # Fuzzy String Matching import jellyfish", "reduce import numpy as np import pandas as pd import nltk # Natural", "Painter'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing I',", "S', 'Hybrid Charge')] # [TEST] Long Text Vectorizers # The same document should", "print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Let's see how close", "Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral Damage System I', 'Damage Control'), ('Null", "Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter", "Damage System I', 'Damage Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter", "Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'),", "see how close the long texts are doc1_lt = reduce(lambda x, y: f'{x}", "[('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid", "y: f'{x} {y}', [x[1] for x in l2]) doc2_st = reduce(lambda x, y:", "print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text", "Rapid Light'), ('Phased Scoped Target Painter', 'Target Painter'), ('Caldari Navy 
Inferno Light Missile',", "character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari Navy", "Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'),", "how close the long texts are doc1_lt = reduce(lambda x, y: f'{x} {y}',", "2015-05-15 19:02:00, 30000157, 90000814, 630 l1 = [('Large Shield Extender II', 'Shield Extender'),", "Control System II', 'Ballistic Control system'), ('Rapid Light Missile Launcher II', 'Missile Launcher", "Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage", "for x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2:", "Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),", "# 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari Navy Antimatter Charge", "reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2]) cos_dist_st = get_cosine_distance(doc1_st,", "'Rig Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid", "II', 'Missile Launcher Rapid Light'), ('Medium Core Defense Field Extender I', 'Rig Shield')]", "{y}', [x[1] for x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\")", "Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control System II', 'Ballistic Control system'), ('Ballistic", "f'{x} {y}', [x[1] for x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1:", "sys from functools import reduce import numpy as np import pandas as pd", "print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Cosine distance should be", "Inferno Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari", "'Light Missile'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Phased 
Scoped", "I', 'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control System", "y: f'{x} {y}', [x[0] for x in l1]) doc2_lt = reduce(lambda x, y:", "Scoped Target Painter', 'Target Painter'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Medium", "# killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630", "Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge", "'Warp Scrambler'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Medium Core", "l1 = [('Large Shield Extender II', 'Shield Extender'), ('Rapid Light Missile Launcher II',", "[x[1] for x in l1]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1]", "Transverse Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead I',", "Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic", "Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat Drone'),", "y: f'{x} {y}', [x[0] for x in l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document", "('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir Light", "coding: utf-8 -*- \"\"\"testing script\"\"\" import os import sys from functools import reduce", "metrics from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\"", "Defense Field Extender I', 'Rig Shield'), ('Caldari Navy Inferno Light Missile', 'Light Missile'),", "Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral Damage System I',", "Launcher Rapid Light'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Inferno", "l2]) cos_dist_st = get_cosine_distance(doc1_st, 
doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n", "Text Vectorizers # The same document should have cosine distance of 1 doc1_lt", "in l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine", "System I', 'Damage Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge", "{doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text Vectorizers # Again same", "('Medium Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal", "Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir Light Missile', 'Light Missile'),", "same document should have cosine distance of 1 doc1_lt = reduce(lambda x, y:", "f'{x} {y}', [x[0] for x in l1]) doc2_lt = reduce(lambda x, y: f'{x}", "long texts are doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x", "Vectorizers # Let's see how close the short texts are doc1_st = reduce(lambda", "in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine", "for x in l1]) doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for", "killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2", "Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'),", "sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf", "II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier", "{cos_dist_lt}\") print(\"==========\") # [TEST] Short Text Vectorizers # Again same texts should have", "x in l1]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for 
x", "System II', 'Ballistic Control system'), ('Ballistic Control System II', 'Ballistic Control system'), ('Rapid", "90000814, 630 l1 = [('Large Shield Extender II', 'Shield Extender'), ('Rapid Light Missile", "Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control System II', 'Ballistic Control system'),", "('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control System II', 'Ballistic Control", "'Light Missile'), ('Medium Core Defense Field Extender I', 'Rig Shield'), ('Caldari Navy Inferno", "Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light Ion", "be commutable doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in", "Create bag of words doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for", "'Missile Launcher Rapid Light'), ('Medium Core Defense Field Extender I', 'Rig Shield')] #", "l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n", "Extender'), ('Caldari Navy Scourge Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile',", "Light Missile', 'Light Missile'), ('Caldari Navy Nova Light Missile', 'Light Missile'), ('Medium Core", "Navy Scourge Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'),", "doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2]) cos_dist_st", "'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('Small Transverse Bulkhead I',", "Vectorizers # Cosine distance should be commutable doc1_st = reduce(lambda x, y: f'{x}", "doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words", "('Drone Damage Amplifier II', 'Drone Damage Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'),", "of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: 
{doc2_lt}\") print(f\"Cosine", "1 doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])", "Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control System II', 'Ballistic", "II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II',", "reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1]) doc2_st = reduce(lambda", "Navy Inferno Light Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light", "tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words cos_dist = linear_kernel(tfidf[0:1],", "Rapid Light'), ('Medium Core Defense Field Extender I', 'Rig Shield')] # killmail_id, killmail_time,", "Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers # Let's see how close the", "\"\"\"testing script\"\"\" import os import sys from functools import reduce import numpy as", "linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist # killmail_id, killmail_time, solar_system_id, character_id,", "Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster", "doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST]", "short texts are doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x", "Field Extender I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869,", "II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter", "see how close the short texts are doc1_st = reduce(lambda x, y: f'{x}", "words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist # killmail_id,", "import jellyfish # Distance metrics from sklearn.feature_extraction.text import TfidfVectorizer from 
sklearn.metrics.pairwise import linear_kernel", "Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig Armor'),", "2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Cosine distance", "[x[0] for x in l1]) doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0]", "from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\"", "{doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Let's see how", "[('Large Shield Extender II', 'Shield Extender'), ('Rapid Light Missile Launcher II', 'Missile Launcher", "('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Nova Light Missile', 'Light", "Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Medium Core Defense Field Extender", "Charge S', 'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp", "('Warrior II', 'Combat Drone'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Light Ion Blaster", "46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1 = [('Large Shield Extender II', 'Shield", "of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist #", "tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id", "Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S',", "Missile'), ('Caldari Navy Nova Light Missile', 'Light Missile'), ('Medium Core Defense Field Extender", "# [TEST] Long Text Vectorizers # The same document should have cosine distance", "Create bag of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2:", "x, y: f'{x} {y}', [x[0] for x in l2]) cos_dist_lt = 
get_cosine_distance(doc1_lt, doc2_lt)", "'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Nova Light", "Transverse Bulkhead I', 'Rig Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Light Ion", "II', 'Missile Launcher Rapid Light'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari", "# Vectorize the bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine", "Natural Language Tool Kit from fuzzywuzzy import fuzz, process # Fuzzy String Matching", "import os import sys from functools import reduce import numpy as np import", "print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers # Let's", "Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Phased Scoped Target Painter', 'Target", "Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('Small", "close the short texts are doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1]", "[x[0] for x in l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document", "have cosine distance of 1 doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0]", "distance of 1 doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x", "Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid Weapon'),", "'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II', 'Hybrid", "Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II', 'Shield Extender'), ('Caldari Navy Scourge Light", "Inferno Light Missile', 'Light Missile'), ('Caldari Navy Nova Light Missile', 'Light Missile'), ('Medium", "= reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1]) doc2_st =", "Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Rapid Light Missile Launcher II',", "f'{x} {y}', [x[0] for x in l2]) cos_dist_lt = 
get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1:", "Language Tool Kit from fuzzywuzzy import fuzz, process # Fuzzy String Matching import", "II', 'Nanofiber Internal Structure'), ('Ballistic Control System II', 'Ballistic Control system'), ('Ballistic Control", "II', 'Ballistic Control system'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),", "close the long texts are doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0]", "= [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S',", "Navy Nova Light Missile', 'Light Missile'), ('Medium Core Defense Field Extender I', 'Rig", "process # Fuzzy String Matching import jellyfish # Distance metrics from sklearn.feature_extraction.text import", "Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Nova Light Missile', 'Light Missile'),", "= TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0]", "l1]) doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l2])", "same texts should have cosine distance of 1 doc1_st = reduce(lambda x, y:", "Vectorize the bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance", "Structure'), ('Ballistic Control System II', 'Ballistic Control system'), ('Ballistic Control System II', 'Ballistic", "Charge S', 'Hybrid Charge')] # [TEST] Long Text Vectorizers # The same document", "cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\")", "cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\")", "II', 'Missile Launcher Rapid Light'), ('Phased Scoped Target Painter', 'Target Painter'), ('Caldari Navy", "import pandas as pd import nltk # Natural Language Tool Kit from fuzzywuzzy", 
"{doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers #", "II', 'Drone Damage Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact", "f'{x} {y}', [x[1] for x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1:", "= reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) # Create", "Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Inferno Light Missile', 'Light Missile'),", "Missile', 'Light Missile'), ('Medium Core Defense Field Extender I', 'Rig Shield'), ('Caldari Navy", "Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge')] # [TEST] Long Text Vectorizers", "should have cosine distance of 1 doc1_st = reduce(lambda x, y: f'{x} {y}',", "Missile'), ('Damage Control II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large", "Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Let's see how close the", "Control II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender", "Let's see how close the long texts are doc1_lt = reduce(lambda x, y:", "Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier',", "from fuzzywuzzy import fuzz, process # Fuzzy String Matching import jellyfish # Distance", "'Propulsion Module'), ('Large Shield Extender II', 'Shield Extender'), ('Caldari Navy Scourge Light Missile',", "('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid", "reduce(lambda x, y: f'{x} {y}', [x[1] for x in l1]) cos_dist_st = get_cosine_distance(doc1_st,", "('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'),", "from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = 
TfidfVectorizer().fit_transform([doc1, doc2])", "Damage Modules'), ('F85 Peripheral Damage System I', 'Damage Control'), ('Null S', 'Advanced Blaster", "# Create bag of words doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0]", "System II', 'Ballistic Control system'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid", "print(\"==========\") # Long Text Vectorizers # Let's see how close the long texts", "Scrambler'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid", "reduce(lambda x, y: f'{x} {y}', [x[0] for x in l2]) cos_dist_lt = get_cosine_distance(doc1_lt,", "l1]) # Create bag of words doc2_lt = reduce(lambda x, y: f'{x} {y}',", "2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid", "Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone", "def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag", "II', 'Drone Damage Modules'), ('F85 Peripheral Damage System I', 'Damage Control'), ('Null S',", "Web'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small Transverse", "Field Extender I', 'Rig Shield'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Warp", "distance return cos_dist # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00,", "y: f'{x} {y}', [x[1] for x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document", "[x[1] for x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document", "import reduce import numpy as np import pandas as pd import nltk #", "'Shield Extender'), ('Caldari Navy Scourge Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light", "('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Phased Scoped Target Painter',", "Missile'), 
('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Nova Light Missile',", "Target Painter', 'Target Painter'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon", "print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text Vectorizers #", "import numpy as np import pandas as pd import nltk # Natural Language", "Internal Structure'), ('Ballistic Control System II', 'Ballistic Control system'), ('Ballistic Control System II',", "Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir Light Missile',", "Extender'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir", "character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1 = [('Large Shield", "have cosine distance of 1 doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1]", "Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Nova", "# Short Text Vectorizers # Let's see how close the short texts are", "= reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2]) cos_dist_st =", "S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('Small Transverse Bulkhead", "x in l1]) doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x", "Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber Internal Structure'), ('Ballistic Control", "'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II', 'Shield", "Damage Modules'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion", "print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Let's", "commutable doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])", "# Short Text Vectorizers # Cosine distance should be commutable 
doc1_st = reduce(lambda", "Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Inferno Light Missile',", "II', 'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir Light Missile', 'Light Missile'), ('Damage", "cosine distance return cos_dist # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15", "Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone", "# Again same texts should have cosine distance of 1 doc1_st = reduce(lambda", "linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the", "{doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers # Let's see how", "x in l1]) # Create bag of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document", "{cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers # Let's see how close the long", "S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier", "'Hybrid Charge')] # [TEST] Long Text Vectorizers # The same document should have", "'Damage Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid", "\"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words cos_dist =", "texts should have cosine distance of 1 doc1_st = reduce(lambda x, y: f'{x}", "Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy", "= get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\")", "'Light Missile'), ('Damage Control II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'),", "print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text 
Vectorizers # Again same texts", "the short texts are doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for", "Damage Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral Damage System I', 'Damage Control'),", "Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('Small Transverse Bulkhead I', 'Rig", "('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II', 'Shield Extender'), ('Caldari", "# Let's see how close the long texts are doc1_lt = reduce(lambda x,", "import TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf =", "('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('X5", "Launcher II', 'Missile Launcher Rapid Light'), ('Phased Scoped Target Painter', 'Target Painter'), ('Caldari", "'Ballistic Control system'), ('Ballistic Control System II', 'Ballistic Control system'), ('Rapid Light Missile", "cosine distance of 1 doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1] for", "Distance:\\n {cos_dist_st}\") print(\"==========\") # Short Text Vectorizers # Cosine distance should be commutable", "bag of words cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\")", "-*- coding: utf-8 -*- \"\"\"testing script\"\"\" import os import sys from functools import", "I', 'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II',", "Light Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile Launcher", "Missile'), ('Medium Core Defense Field Extender I', 'Rig Shield'), ('Caldari Navy Inferno Light", "doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l2]) cos_dist_lt", "II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I',", "Short Text Vectorizers # Let's see how close the short texts are doc1_st", "cos_dist # 
killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814,", "Extender II', 'Shield Extender'), ('Caldari Navy Scourge Light Missile', 'Light Missile'), ('Caldari Navy", "# [TEST] Short Text Vectorizers # Again same texts should have cosine distance", "sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1, doc2]) #", "script\"\"\" import os import sys from functools import reduce import numpy as np", "[x[0] for x in l1]) # Create bag of words cos_dist_lt = get_cosine_distance(doc1_lt,", "II', 'Shield Extender'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari", "Peripheral Damage System I', 'Damage Control'), ('Null S', 'Advanced Blaster Charge'), ('Caldari Navy", "I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00,", "Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('Small Transverse", "for x in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2:", "the long texts are doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for", "Core Defense Field Extender I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id", "TfidfVectorizer().fit_transform([doc1, doc2]) # Vectorize the bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] #", "19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'),", "Navy Antimatter Charge S', 'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b", "Light Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal", "'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II',", "'Hybrid Weapon'), 
('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge", "# Cosine distance should be commutable doc1_st = reduce(lambda x, y: f'{x} {y}',", "('Warp Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid", "Blaster II', 'Hybrid Weapon'), ('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead", "Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge')] # [TEST]", "Core Defense Field Extender I', 'Rig Shield'), ('Caldari Navy Inferno Light Missile', 'Light", "Enduring Microwarpdrive', 'Propulsion Module'), ('Large Shield Extender II', 'Shield Extender'), ('Caldari Navy Scourge", "x in l2]) cos_dist_lt = get_cosine_distance(doc1_lt, doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\")", "Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Rapid Light Missile", "Navy Inferno Light Missile', 'Light Missile'), ('Caldari Navy Inferno Light Missile', 'Light Missile'),", "II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light Ion Blaster II',", "('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Warp Disruptor II', 'Warp Scrambler'), ('Rapid", "Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge')] #", "l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy Antimatter Charge", "Light'), ('Phased Scoped Target Painter', 'Target Painter'), ('Caldari Navy Inferno Light Missile', 'Light", "Blaster Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Light Ion Blaster II',", "doc2_lt) print(f\"Document 1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long", "y: f'{x} {y}', [x[0] for x in l1]) # Create bag of words", "document should have cosine distance of 1 doc1_lt = reduce(lambda x, y: f'{x}", "'Hybrid Charge'), ('Drone 
Damage Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral Damage System", "Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Light Ion Blaster II', 'Hybrid", "Extender I', 'Rig Shield'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Warp Disruptor", "('Large Shield Extender II', 'Shield Extender'), ('Caldari Navy Scourge Light Missile', 'Light Missile'),", "f'{x} {y}', [x[1] for x in l1]) doc2_st = reduce(lambda x, y: f'{x}", "x in l1]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\")", "# 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1 = [('Large Shield Extender II',", "get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine Distance:\\n {cos_dist_st}\") print(\"==========\") #", "'Missile Launcher Rapid Light'), ('Caldari Navy Mjolnir Light Missile', 'Light Missile'), ('Damage Control", "('Drone Damage Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral Damage System I', 'Damage", "bag of words cos_dist = linear_kernel(tfidf[0:1], tfidf[1:2]).flatten()[0] # Compute cosine distance return cos_dist", "1: {doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # Long Text Vectorizers", "S', 'Advanced Blaster Charge'), ('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Light Ion", "x, y: f'{x} {y}', [x[0] for x in l1]) doc2_lt = reduce(lambda x,", "in l2]) cos_dist_st = get_cosine_distance(doc1_st, doc2_st) print(f\"Document 1: {doc1_st}\") print(f\"Document 2: {doc2_st}\") print(f\"Cosine", "('X5 Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior", "Navy Antimatter Charge S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'),", "Vectorizers # The same document should have cosine distance of 1 doc1_lt =", "how close the short texts are doc1_st = 
reduce(lambda x, y: f'{x} {y}',", "Missile'), ('Medium Polycarbon Engine Housing I', 'Rig Navigation'), ('Nanofiber Internal Structure II', 'Nanofiber", "2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text Vectorizers # Again", "{y}', [x[0] for x in l1]) doc2_lt = reduce(lambda x, y: f'{x} {y}',", "Disruptor II', 'Warp Scrambler'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'),", "# -*- coding: utf-8 -*- \"\"\"testing script\"\"\" import os import sys from functools", "Enduring Stasis Webifier', 'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior II',", "('Light Ion Blaster II', 'Hybrid Weapon'), ('J5b Enduring Warp Scrambler', 'Warp Scrambler'), ('Light", "import nltk # Natural Language Tool Kit from fuzzywuzzy import fuzz, process #", "Text Vectorizers # Let's see how close the long texts are doc1_lt =", "Control system'), ('Ballistic Control System II', 'Ballistic Control system'), ('Rapid Light Missile Launcher", "solar_system_id, character_id, ship_type_id # 46643819, 2015-05-15 19:02:00, 30000157, 90000814, 630 l1 = [('Large", "doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) #", "killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 =", "('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing I', 'Rig", "functools import reduce import numpy as np import pandas as pd import nltk", "TfidfVectorizer from sklearn.metrics.pairwise import linear_kernel def get_cosine_distance(doc1, doc2): \"\"\" \"\"\" tfidf = TfidfVectorizer().fit_transform([doc1,", "Shield Extender II', 'Shield Extender'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid", "system'), ('Rapid Light Missile Launcher II', 'Missile Launcher Rapid Light'), ('Caldari Navy Inferno", "II', 'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge", 
"Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text Vectorizers # Again same texts should", "{doc1_lt}\") print(f\"Document 2: {doc2_lt}\") print(f\"Cosine Distance:\\n {cos_dist_lt}\") print(\"==========\") # [TEST] Short Text Vectorizers", "'Hybrid Weapon'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Caldari Navy Antimatter Charge S',", "1 doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1])", "as pd import nltk # Natural Language Tool Kit from fuzzywuzzy import fuzz,", "90000814, 32872 l2 = [('Caldari Navy Antimatter Charge S', 'Hybrid Charge'), ('Caldari Navy", "in l1]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in", "as np import pandas as pd import nltk # Natural Language Tool Kit", "I', 'Rig Shield'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Warp Disruptor II',", "{y}', [x[1] for x in l2]) doc2_st = reduce(lambda x, y: f'{x} {y}',", "fuzz, process # Fuzzy String Matching import jellyfish # Distance metrics from sklearn.feature_extraction.text", "('Small Transverse Bulkhead I', 'Rig Armor'), ('Light Ion Blaster II', 'Hybrid Weapon'), ('Light", "46643869, 2015-05-15 19:05:00, 30000157, 90000814, 32872 l2 = [('Caldari Navy Antimatter Charge S',", "distance of 1 doc1_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x", "'Target Painter'), ('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Medium Polycarbon Engine Housing", "doc2_lt = reduce(lambda x, y: f'{x} {y}', [x[0] for x in l1]) #", "utf-8 -*- \"\"\"testing script\"\"\" import os import sys from functools import reduce import", "S', 'Hybrid Charge'), ('Drone Damage Amplifier II', 'Drone Damage Modules'), ('F85 Peripheral Damage", "'Stasis Web'), ('Small Transverse Bulkhead I', 'Rig Armor'), ('Warrior II', 'Combat Drone'), ('Small", "'Rig Armor'), ('5MN Y-T8 Compact Microwarpdrive', 'Propulsion Module'), ('Light Ion Blaster II', 'Hybrid", "Missile Launcher II', 'Missile Launcher Rapid Light'), ('Phased Scoped Target Painter', 'Target 
Painter'),", "[TEST] Long Text Vectorizers # The same document should have cosine distance of", "Antimatter Charge S', 'Hybrid Charge')] # [TEST] Long Text Vectorizers # The same", "print(\"==========\") # [TEST] Short Text Vectorizers # Again same texts should have cosine", "Extender I', 'Rig Shield')] # killmail_id, killmail_time, solar_system_id, character_id, ship_type_id # 46643869, 2015-05-15", "('Caldari Navy Inferno Light Missile', 'Light Missile'), ('Rapid Light Missile Launcher II', 'Missile", "= [('Large Shield Extender II', 'Shield Extender'), ('Rapid Light Missile Launcher II', 'Missile", "import fuzz, process # Fuzzy String Matching import jellyfish # Distance metrics from", "Missile', 'Light Missile'), ('Caldari Navy Nova Light Missile', 'Light Missile'), ('Medium Core Defense", "# Natural Language Tool Kit from fuzzywuzzy import fuzz, process # Fuzzy String", "Missile', 'Light Missile'), ('Damage Control II', 'Damage Control'), ('50MN Cold-Gas Enduring Microwarpdrive', 'Propulsion", "Cosine distance should be commutable doc1_st = reduce(lambda x, y: f'{x} {y}', [x[1]", "l1]) doc2_st = reduce(lambda x, y: f'{x} {y}', [x[1] for x in l2])" ]
[ "= Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min' :", "a local callback. self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event", ": 'integer', 'min' : 1, 'max' : 4}, entry_width = 2, entry_bg =", "entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min' : 1, 'max' : 4}, entry_width", "self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to", "= onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog box relative to parent.", "int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' + str(xpos) + '+' + str(ypos)) Label(self).grid(row=1)", "from Tkinter import * import Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self, parent,", "entry_width = 2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20)", "<gh_stars>10-100 ############################################################################# ############################################################################# # File: RebuildSearch.py # # Date: 04-Dec-2007 # ############################################################################# #############################################################################", "'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command", "to a local callback. 
self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() #", "############################################################################# # File: RebuildSearch.py # # Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter", "= int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' + str(xpos) + '+' + str(ypos))", "box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to a", "Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self,", "import geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent)", "= self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler", "to parent. xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry())) +", "box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to a local callback. self.protocol ('WM_DELETE_WINDOW', self.close)", "self.close() # Event handler when user closes the window. def close(self): self.onClose() self.destroy()", "'integer', 'min' : 1, 'max' : 4}, entry_width = 2, entry_bg = 'white'", "2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box =", "box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to a local callback. 
self.protocol ('WM_DELETE_WINDOW',", "geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent)", "ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' + str(xpos) + '+' +", "+ str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the search table.').grid(row=5)", "search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13)", "self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog box relative to", "# Position the dialog box relative to parent. xpos = int(geoparse.get_x (parent.geometry())) +", "import * import Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth,", "Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min' : 1,", "str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7)", "self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK)", "int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' +", "= onChangeSearchDepth # Position the dialog box relative to parent. 
xpos = int(geoparse.get_x", "command = self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW", "labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min' : 1, 'max'", "self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user closes the window. def close(self):", "relative to parent. xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry()))", "depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth", "self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog box relative to parent. xpos =", "self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user closes the window. def close(self): self.onClose()", "+ '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A", "100 self.geometry ('+' + str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild", "('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user closes", ") self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command =", "def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search')", "Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self,", "entryfield_validate = {'validator' : 'integer', 'min' : 1, 'max' : 4}, 
entry_width =", "time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' :", "table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter", "Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import * import Pmw import geoparse", "(parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' + str(xpos)", "Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) #", "Tkinter import * import Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose,", "self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user", "Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate", "= 2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box", "Override DELETE_WINDOW handler to a local callback. self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw()", "# Override DELETE_WINDOW handler to a local callback. 
self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self):", "############################################################################# from Tkinter import * import Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self,", "box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons()", "'max' : 4}, entry_width = 2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click", "('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog box", "(self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth #", "the dialog box relative to parent. xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos", "RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild", "= {'validator' : 'integer', 'min' : 1, 'max' : 4}, entry_width = 2,", "File: RebuildSearch.py # # Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import *", "onChangeSearchDepth # Position the dialog box relative to parent. xpos = int(geoparse.get_x (parent.geometry()))", "= self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to a local callback.", "handler to a local callback. 
self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close()", "def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user closes the window.", "############################################################################# ############################################################################# # File: RebuildSearch.py # # Date: 04-Dec-2007 # ############################################################################# ############################################################################# from", "parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose =", "search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search", "= Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25)", "will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a", "box relative to parent. 
xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y", "self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user closes the", "= int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+'", "parent) self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth # Position", "Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth", "onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog box relative to parent. xpos", "100 ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' + str(xpos) + '+'", "onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth =", "class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title", "Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take", "self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog", "may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth,", "self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min'", ": 1, 'max' : 4}, entry_width = 2, entry_bg = 'white' ) self.depthCounter.grid(row=15)", "'+' + str(ypos)) Label(self).grid(row=1) 
Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large", "the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10)", "onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose", "RebuildSearch.py # # Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import * import", "proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK')", "str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth", "__init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__ (self, parent) self.transient(parent) self.title ('Rebuild Search') self.onClose", "'min' : 1, 'max' : 4}, entry_width = 2, entry_bg = 'white' )", "parent. 
xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry())) + 100", "to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command = self.close)", "* import Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth):", "{'validator' : 'integer', 'min' : 1, 'max' : 4}, entry_width = 2, entry_bg", "Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command", "a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate =", "# Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import * import Pmw import", "# # Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import * import Pmw", "4}, entry_width = 2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to", "command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to a local", "dialog box relative to parent. xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos =", "callback. 
self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when", "('+' + str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the search", "(parent.geometry())) + 100 self.geometry ('+' + str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This", ": 4}, entry_width = 2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK", "rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a long", "+ str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search", "box.grid(row=25) # Override DELETE_WINDOW handler to a local callback. self.protocol ('WM_DELETE_WINDOW', self.close) def", "depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:',", "onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler when user closes the window. 
def", "############################################################################# ############################################################################# from Tkinter import * import Pmw import geoparse class RebuildSearch(Toplevel): def", "label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min' : 1, 'max' :", "Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel',", "self.transient(parent) self.title ('Rebuild Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the", "# ############################################################################# ############################################################################# from Tkinter import * import Pmw import geoparse class RebuildSearch(Toplevel):", "1, 'max' : 4}, entry_width = 2, entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17)", "DELETE_WINDOW handler to a local callback. self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get()))", "= 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK',", "local callback. self.protocol ('WM_DELETE_WINDOW', self.close) def onOK(self): self.withdraw() self.onChangeSearchDepth(int(self.depthCounter.get())) self.close() # Event handler", "self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override DELETE_WINDOW handler to a local callback. 
self.protocol", "# File: RebuildSearch.py # # Date: 04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import", "+ 100 ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry ('+' + str(xpos) +", "Position the dialog box relative to parent. xpos = int(geoparse.get_x (parent.geometry())) + 100", "Label(self).grid(row=1) Label(self,text='This will rebuild the search table.').grid(row=5) Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9)", "OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self) box.add('OK', command = self.onOK) box.add('Cancel', command =", "box.add('OK', command = self.onOK) box.add('Cancel', command = self.close) box.setdefault('OK') box.alignbuttons() box.grid(row=25) # Override", "depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer', 'min' : 1, 'max' : 4},", "Search') self.onClose = onClose self.onChangeSearchDepth = onChangeSearchDepth # Position the dialog box relative", "xpos = int(geoparse.get_x (parent.geometry())) + 100 ypos = int(geoparse.get_y (parent.geometry())) + 100 self.geometry", "Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator' : 'integer',", "04-Dec-2007 # ############################################################################# ############################################################################# from Tkinter import * import Pmw import geoparse class", "import Pmw import geoparse class RebuildSearch(Toplevel): def __init__(self, parent, onClose, depth, onChangeSearchDepth): Toplevel.__init__", "large search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter = Pmw.Counter(self, labelpos='w',", "long time.').grid(row=10) Label(self).grid(row=13) 
self.depthCounter = Pmw.Counter(self, labelpos='w', label_text='Search depth:', entryfield_value=depth, entryfield_validate = {'validator'", "entry_bg = 'white' ) self.depthCounter.grid(row=15) Label(self).grid(row=17) Label(self,text='Click OK to proceed.').grid(row=20) box = Pmw.ButtonBox(self)", "self.geometry ('+' + str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will rebuild the", "Label(self).grid(row=7) Label(self,text='A large search depth may').grid(row=9) Label(self,text='take a long time.').grid(row=10) Label(self).grid(row=13) self.depthCounter =", "+ 100 self.geometry ('+' + str(xpos) + '+' + str(ypos)) Label(self).grid(row=1) Label(self,text='This will" ]
[ "model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind', field=models.CharField(blank=True, default='未设置', max_length=100,", "class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist',", "dependencies = [ ('dashboard', '0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True,", "= [ ('dashboard', '0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置',", "('dashboard', '0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True),", "migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind', field=models.CharField(blank=True, default='未设置',", "# Generated by Django 2.2 on 2019-05-23 12:31 from django.db import migrations, models", "Generated by Django 2.2 on 2019-05-23 12:31 from django.db import migrations, models class", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ] operations", "Django 2.2 on 2019-05-23 12:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "[ ('dashboard', '0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200,", "on 2019-05-23 12:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "name='awards', field=models.CharField(blank=True, 
default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind', field=models.CharField(blank=True, default='未设置', max_length=100, null=True),", "[ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind', field=models.CharField(blank=True,", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ] operations =", "field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind', field=models.CharField(blank=True, default='未设置', max_length=100, null=True), ),", "migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ] operations = [", "models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ] operations = [ migrations.AlterField(", "] operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField(", "Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist', name='awards',", "2019-05-23 12:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard',", "12:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'),", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('dashboard', '0001_initial'), ]", "operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField( 
model_name='entrylist',", "= [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind',", "default='未设置', max_length=200, null=True), ), migrations.AlterField( model_name='entrylist', name='score_kind', field=models.CharField(blank=True, default='未设置', max_length=100, null=True), ), ]", "by Django 2.2 on 2019-05-23 12:31 from django.db import migrations, models class Migration(migrations.Migration):", "'0001_initial'), ] operations = [ migrations.AlterField( model_name='entrylist', name='awards', field=models.CharField(blank=True, default='未设置', max_length=200, null=True), ),", "2.2 on 2019-05-23 12:31 from django.db import migrations, models class Migration(migrations.Migration): dependencies =" ]
[ "max - 1): return False return True def is_alive(self, num_living_neighbors): ''' if alive,", "3 neighbors if dead, revive with exactly 3 neighbors otherwise die''' if self.alive:", "= (x_coord + j - 1, y_coord + i - 1) if self.get_valid_position(new_position):", "position(x, y) self.position = position self.alive = alive self.next_state = False # bounds(min,", "position): (x_coord, y_coord) = (position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if position", "neighbors if dead, revive with exactly 3 neighbors otherwise die''' if self.alive: if", "num_living_neighbors): ''' if alive, stay alive with 2 or 3 neighbors if dead,", "postion(X, Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x, y) self.position =", "''' if alive, stay alive with 2 or 3 neighbors if dead, revive", "(x_coord, y_coord) = (position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if position ==", "neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1]) (min, max)", "self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord, y_coord) = (self.position[0], self.position[1]) for i", "is_alive(self, num_living_neighbors): ''' if alive, stay alive with 2 or 3 neighbors if", "in range(3): new_position = (x_coord + j - 1, y_coord + i -", "(self.position[0], self.position[1]) for i in range(3): for j in range(3): new_position = (x_coord", "self.alive = alive self.next_state = False # bounds(min, max) self.bounds = bounds self.neighbors", "= (position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if position == self.position: return", "position=(0,0), bounds=(0, 5)): # position(x, y) self.position = position self.alive = alive self.next_state", "with 2 or 3 neighbors if dead, revive with exactly 3 neighbors otherwise", "if (x_coord < min) or (x_coord > max - 1): return False #", "return False else: if num_living_neighbors == 3: 
return True return False def apply_next_state(self):", "self.alive: if num_living_neighbors in [2, 3]: return True else: return False else: if", "range(3): for j in range(3): new_position = (x_coord + j - 1, y_coord", "if (y_coord < min) or (y_coord > max - 1): return False return", "in range(3): for j in range(3): new_position = (x_coord + j - 1,", "bounds if (y_coord < min) or (y_coord > max - 1): return False", "# Test X bounds if (x_coord < min) or (x_coord > max -", "dead, revive with exactly 3 neighbors otherwise die''' if self.alive: if num_living_neighbors in", "False else: if num_living_neighbors == 3: return True return False def apply_next_state(self): self.alive", "y) self.position = position self.alive = alive self.next_state = False # bounds(min, max)", "Cell(): # postion(X, Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x, y)", "= (self.position[0], self.position[1]) for i in range(3): for j in range(3): new_position =", "alive self.next_state = False # bounds(min, max) self.bounds = bounds self.neighbors = self.get_neighbors()", "1): return False # Test Y bounds if (y_coord < min) or (y_coord", "return False # Test Y bounds if (y_coord < min) or (y_coord >", "max) self.bounds = bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord,", "get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if", "or (y_coord > max - 1): return False return True def is_alive(self, num_living_neighbors):", "max - 1): return False # Test Y bounds if (y_coord < min)", "bounds if (x_coord < min) or (x_coord > max - 1): return False", "return False return True def is_alive(self, num_living_neighbors): ''' if alive, stay alive with", "if dead, revive with exactly 3 neighbors otherwise die''' if self.alive: if num_living_neighbors", "position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if position == self.position: return 
False #", "if position == self.position: return False # Test X bounds if (x_coord <", "__init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x, y) self.position = position self.alive =", "# postion(X, Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x, y) self.position", "> max - 1): return False return True def is_alive(self, num_living_neighbors): ''' if", "y_coord + i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position):", "if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1])", "i in range(3): for j in range(3): new_position = (x_coord + j -", "self.position: return False # Test X bounds if (x_coord < min) or (x_coord", "get_neighbors(self): neighbors = [] (x_coord, y_coord) = (self.position[0], self.position[1]) for i in range(3):", "neighbors def get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1]) (min, max) = (self.bounds[0],", "self.next_state = False # bounds(min, max) self.bounds = bounds self.neighbors = self.get_neighbors() def", "def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x, y) self.position = position self.alive", "True else: return False else: if num_living_neighbors == 3: return True return False", "y_coord) = (position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if position == self.position:", "return True def is_alive(self, num_living_neighbors): ''' if alive, stay alive with 2 or", "revive with exactly 3 neighbors otherwise die''' if self.alive: if num_living_neighbors in [2,", "(position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1]) if position == self.position: return False", "False # Test Y bounds if (y_coord < min) or (y_coord > max", "1): return False return True def is_alive(self, 
num_living_neighbors): ''' if alive, stay alive", "die''' if self.alive: if num_living_neighbors in [2, 3]: return True else: return False", "with exactly 3 neighbors otherwise die''' if self.alive: if num_living_neighbors in [2, 3]:", "False # Test X bounds if (x_coord < min) or (x_coord > max", "neighbors otherwise die''' if self.alive: if num_living_neighbors in [2, 3]: return True else:", "< min) or (x_coord > max - 1): return False # Test Y", "bounds=(0, 5)): # position(x, y) self.position = position self.alive = alive self.next_state =", "if self.alive: if num_living_neighbors in [2, 3]: return True else: return False else:", "+ i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord,", "# position(x, y) self.position = position self.alive = alive self.next_state = False #", "max) = (self.bounds[0], self.bounds[1]) if position == self.position: return False # Test X", "alive with 2 or 3 neighbors if dead, revive with exactly 3 neighbors", "Test X bounds if (x_coord < min) or (x_coord > max - 1):", "j in range(3): new_position = (x_coord + j - 1, y_coord + i", "bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord, y_coord) = (self.position[0],", "(x_coord < min) or (x_coord > max - 1): return False # Test", "- 1): return False return True def is_alive(self, num_living_neighbors): ''' if alive, stay", "min) or (y_coord > max - 1): return False return True def is_alive(self,", "= position self.alive = alive self.next_state = False # bounds(min, max) self.bounds =", "= self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord, y_coord) = (self.position[0], self.position[1]) for", "j - 1, y_coord + i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors", "(min, max) = (self.bounds[0], self.bounds[1]) if position == self.position: return False # Test", "#!/usr/bin/env python class 
Cell(): # postion(X, Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)):", "[] (x_coord, y_coord) = (self.position[0], self.position[1]) for i in range(3): for j in", "= (self.bounds[0], self.bounds[1]) if position == self.position: return False # Test X bounds", "(self.bounds[0], self.bounds[1]) if position == self.position: return False # Test X bounds if", "== self.position: return False # Test X bounds if (x_coord < min) or", "# bounds(min, max) self.bounds = bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors =", "1, y_coord + i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self,", "else: return False else: if num_living_neighbors == 3: return True return False def", "self.bounds[1]) if position == self.position: return False # Test X bounds if (x_coord", "Y bounds if (y_coord < min) or (y_coord > max - 1): return", "- 1): return False # Test Y bounds if (y_coord < min) or", "range(3): new_position = (x_coord + j - 1, y_coord + i - 1)", "- 1, y_coord + i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def", "or (x_coord > max - 1): return False # Test Y bounds if", "stay alive with 2 or 3 neighbors if dead, revive with exactly 3", "5)): # position(x, y) self.position = position self.alive = alive self.next_state = False", "self.bounds = bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord, y_coord)", "False # bounds(min, max) self.bounds = bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors", "(x_coord + j - 1, y_coord + i - 1) if self.get_valid_position(new_position): neighbors.append(new_position)", "i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord, y_coord)", "def is_alive(self, num_living_neighbors): ''' if alive, stay alive with 2 or 
3 neighbors", "new_position = (x_coord + j - 1, y_coord + i - 1) if", "< min) or (y_coord > max - 1): return False return True def", "self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord, y_coord) = (self.position[0], self.position[1])", "num_living_neighbors in [2, 3]: return True else: return False else: if num_living_neighbors ==", "y_coord) = (self.position[0], self.position[1]) for i in range(3): for j in range(3): new_position", "= False # bounds(min, max) self.bounds = bounds self.neighbors = self.get_neighbors() def get_neighbors(self):", "return neighbors def get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1]) (min, max) =", "python class Cell(): # postion(X, Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): #", "if num_living_neighbors == 3: return True return False def apply_next_state(self): self.alive = self.next_state", "= bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors = [] (x_coord, y_coord) =", "self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1]) (min,", "class Cell(): # postion(X, Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x,", "self.position[1]) for i in range(3): for j in range(3): new_position = (x_coord +", "(y_coord > max - 1): return False return True def is_alive(self, num_living_neighbors): '''", "3]: return True else: return False else: if num_living_neighbors == 3: return True", "> max - 1): return False # Test Y bounds if (y_coord <", "self.position = position self.alive = alive self.next_state = False # bounds(min, max) self.bounds", "True def is_alive(self, num_living_neighbors): ''' if alive, stay alive with 2 or 3", "return False # Test X bounds if (x_coord < min) or (x_coord >", "or 3 neighbors if dead, revive with exactly 3 neighbors otherwise die''' if", "[2, 
3]: return True else: return False else: if num_living_neighbors == 3: return", "else: if num_living_neighbors == 3: return True return False def apply_next_state(self): self.alive =", "in [2, 3]: return True else: return False else: if num_living_neighbors == 3:", "return True else: return False else: if num_living_neighbors == 3: return True return", "min) or (x_coord > max - 1): return False # Test Y bounds", "# Test Y bounds if (y_coord < min) or (y_coord > max -", "X bounds if (x_coord < min) or (x_coord > max - 1): return", "= alive self.next_state = False # bounds(min, max) self.bounds = bounds self.neighbors =", "for i in range(3): for j in range(3): new_position = (x_coord + j", "position self.alive = alive self.next_state = False # bounds(min, max) self.bounds = bounds", "if alive, stay alive with 2 or 3 neighbors if dead, revive with", "2 or 3 neighbors if dead, revive with exactly 3 neighbors otherwise die'''", "if num_living_neighbors in [2, 3]: return True else: return False else: if num_living_neighbors", "position == self.position: return False # Test X bounds if (x_coord < min)", "(x_coord > max - 1): return False # Test Y bounds if (y_coord", "False return True def is_alive(self, num_living_neighbors): ''' if alive, stay alive with 2", "3 neighbors otherwise die''' if self.alive: if num_living_neighbors in [2, 3]: return True", "+ j - 1, y_coord + i - 1) if self.get_valid_position(new_position): neighbors.append(new_position) return", "alive, stay alive with 2 or 3 neighbors if dead, revive with exactly", "- 1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord, y_coord) =", "Y) def __init__(self, alive=False, position=(0,0), bounds=(0, 5)): # position(x, y) self.position = position", "Test Y bounds if (y_coord < min) or (y_coord > max - 1):", "(y_coord < min) or (y_coord > max - 1): return False return True", "exactly 3 neighbors otherwise die''' if 
self.alive: if num_living_neighbors in [2, 3]: return", "= [] (x_coord, y_coord) = (self.position[0], self.position[1]) for i in range(3): for j", "neighbors = [] (x_coord, y_coord) = (self.position[0], self.position[1]) for i in range(3): for", "for j in range(3): new_position = (x_coord + j - 1, y_coord +", "alive=False, position=(0,0), bounds=(0, 5)): # position(x, y) self.position = position self.alive = alive", "otherwise die''' if self.alive: if num_living_neighbors in [2, 3]: return True else: return", "def get_valid_position(self, position): (x_coord, y_coord) = (position[0], position[1]) (min, max) = (self.bounds[0], self.bounds[1])", "1) if self.get_valid_position(new_position): neighbors.append(new_position) return neighbors def get_valid_position(self, position): (x_coord, y_coord) = (position[0],", "(x_coord, y_coord) = (self.position[0], self.position[1]) for i in range(3): for j in range(3):", "def get_neighbors(self): neighbors = [] (x_coord, y_coord) = (self.position[0], self.position[1]) for i in", "bounds(min, max) self.bounds = bounds self.neighbors = self.get_neighbors() def get_neighbors(self): neighbors = []" ]
[ "dest = 'vo', help = 'Query only for a given VO', default =", "import OptionParser def get_slow_pairs(threshold = 1, vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo", "= 'threshold', help = 'Threshold in MB', default = 1, type = 'float')", "== '__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help = 'Query", "vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all') pairs", "given VO', default = None) parser.add_option('-t', '--threshold', dest = 'threshold', help = 'Threshold", "= None) parser.add_option('-t', '--threshold', dest = 'threshold', help = 'Threshold in MB', default", "a given VO', default = None) parser.add_option('-t', '--threshold', dest = 'threshold', help =", "VO', default = None) parser.add_option('-t', '--threshold', dest = 'threshold', help = 'Threshold in", "get_slow_pairs(threshold = 1, vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page", "for pair in slow: print \"%(source_se)s => %(dest_se)s with throughput %(current).2f\" % pair", "if __name__ == '__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help", "page = 'all') pairs = json.loads(content) slow = [] for pair in pairs['items']:", "< threshold: slow.append(pair) return slow if __name__ == '__main__': parser = OptionParser() parser.add_option('-v',", "= vo, page = 'all') pairs = json.loads(content) slow = [] for pair", "parser = OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help = 'Query only for", "type = 'float') (options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair", "= 1, vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page =", "slow if __name__ == '__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest = 'vo',", "in MB', default = 1, type = 'float') (options, 
args) = parser.parse_args() slow", "get_slow_pairs(options.threshold, options.vo) for pair in slow: print \"%(source_se)s => %(dest_se)s with throughput %(current).2f\"", "= 'Query only for a given VO', default = None) parser.add_option('-t', '--threshold', dest", "default = None) parser.add_option('-t', '--threshold', dest = 'threshold', help = 'Threshold in MB',", "parser.add_option('-v', '--vo', dest = 'vo', help = 'Query only for a given VO',", "slow = get_slow_pairs(options.threshold, options.vo) for pair in slow: print \"%(source_se)s => %(dest_se)s with", "content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all') pairs = json.loads(content) slow", "= get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all') pairs = json.loads(content) slow =", "default = 1, type = 'float') (options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold,", "None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all') pairs = json.loads(content)", "import get_url from optparse import OptionParser def get_slow_pairs(threshold = 1, vo = None):", "#!/usr/bin/env python2 import json from common import get_url from optparse import OptionParser def", "MB', default = 1, type = 'float') (options, args) = parser.parse_args() slow =", "args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair in slow: print \"%(source_se)s", "'__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help = 'Query only", "= 'vo', help = 'Query only for a given VO', default = None)", "vo, page = 'all') pairs = json.loads(content) slow = [] for pair in", "'--vo', dest = 'vo', help = 'Query only for a given VO', default", "options.vo) for pair in slow: print \"%(source_se)s => %(dest_se)s with throughput %(current).2f\" %", "help = 'Query only for a given VO', default = None) parser.add_option('-t', '--threshold',", "pair and 
pair['current'] < threshold: slow.append(pair) return slow if __name__ == '__main__': parser", "optparse import OptionParser def get_slow_pairs(threshold = 1, vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview',", "def get_slow_pairs(threshold = 1, vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo,", "from optparse import OptionParser def get_slow_pairs(threshold = 1, vo = None): content =", "OptionParser def get_slow_pairs(threshold = 1, vo = None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo =", "pair['current'] < threshold: slow.append(pair) return slow if __name__ == '__main__': parser = OptionParser()", "OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help = 'Query only for a given", "and pair['current'] < threshold: slow.append(pair) return slow if __name__ == '__main__': parser =", "for a given VO', default = None) parser.add_option('-t', '--threshold', dest = 'threshold', help", "get_url from optparse import OptionParser def get_slow_pairs(threshold = 1, vo = None): content", "<reponame>cern-fts/fts-monitoring #!/usr/bin/env python2 import json from common import get_url from optparse import OptionParser", "json from common import get_url from optparse import OptionParser def get_slow_pairs(threshold = 1,", "= json.loads(content) slow = [] for pair in pairs['items']: if 'current' in pair", "vo = vo, page = 'all') pairs = json.loads(content) slow = [] for", "in pair and pair['current'] < threshold: slow.append(pair) return slow if __name__ == '__main__':", "slow.append(pair) return slow if __name__ == '__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest", "help = 'Threshold in MB', default = 1, type = 'float') (options, args)", "= OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help = 'Query only for a", "parser.add_option('-t', '--threshold', dest = 'threshold', help = 'Threshold in MB', default = 1,", 
"threshold: slow.append(pair) return slow if __name__ == '__main__': parser = OptionParser() parser.add_option('-v', '--vo',", "'--threshold', dest = 'threshold', help = 'Threshold in MB', default = 1, type", "= 'Threshold in MB', default = 1, type = 'float') (options, args) =", "pairs['items']: if 'current' in pair and pair['current'] < threshold: slow.append(pair) return slow if", "= get_slow_pairs(options.threshold, options.vo) for pair in slow: print \"%(source_se)s => %(dest_se)s with throughput", "in pairs['items']: if 'current' in pair and pair['current'] < threshold: slow.append(pair) return slow", "if 'current' in pair and pair['current'] < threshold: slow.append(pair) return slow if __name__", "= parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair in slow: print \"%(source_se)s =>", "'current' in pair and pair['current'] < threshold: slow.append(pair) return slow if __name__ ==", "= [] for pair in pairs['items']: if 'current' in pair and pair['current'] <", "__name__ == '__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest = 'vo', help =", "'all') pairs = json.loads(content) slow = [] for pair in pairs['items']: if 'current'", "(options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair in slow: print", "import json from common import get_url from optparse import OptionParser def get_slow_pairs(threshold =", "[] for pair in pairs['items']: if 'current' in pair and pair['current'] < threshold:", "only for a given VO', default = None) parser.add_option('-t', '--threshold', dest = 'threshold',", "python2 import json from common import get_url from optparse import OptionParser def get_slow_pairs(threshold", "'vo', help = 'Query only for a given VO', default = None) parser.add_option('-t',", "= 'float') (options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair in", "1, vo = None): content = 
get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all')", "pair in pairs['items']: if 'current' in pair and pair['current'] < threshold: slow.append(pair) return", "slow = [] for pair in pairs['items']: if 'current' in pair and pair['current']", "= 1, type = 'float') (options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo)", "common import get_url from optparse import OptionParser def get_slow_pairs(threshold = 1, vo =", "dest = 'threshold', help = 'Threshold in MB', default = 1, type =", "= None): content = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all') pairs =", "None) parser.add_option('-t', '--threshold', dest = 'threshold', help = 'Threshold in MB', default =", "pairs = json.loads(content) slow = [] for pair in pairs['items']: if 'current' in", "get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview', vo = vo, page = 'all') pairs = json.loads(content) slow = []", "'float') (options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair in slow:", "1, type = 'float') (options, args) = parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for", "parser.parse_args() slow = get_slow_pairs(options.threshold, options.vo) for pair in slow: print \"%(source_se)s => %(dest_se)s", "'Query only for a given VO', default = None) parser.add_option('-t', '--threshold', dest =", "return slow if __name__ == '__main__': parser = OptionParser() parser.add_option('-v', '--vo', dest =", "'threshold', help = 'Threshold in MB', default = 1, type = 'float') (options,", "'Threshold in MB', default = 1, type = 'float') (options, args) = parser.parse_args()", "from common import get_url from optparse import OptionParser def get_slow_pairs(threshold = 1, vo", "json.loads(content) slow = [] for pair in pairs['items']: if 'current' in pair and", "= 'all') pairs = json.loads(content) slow = [] for pair in pairs['items']: if", 
"for pair in pairs['items']: if 'current' in pair and pair['current'] < threshold: slow.append(pair)" ]
[]
[ "def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info')", "click from viewformer.utils.click import LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def dataset(): pass", "@main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle',", "visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate',", "\"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\", 'download-model') if __name__ == '__main__':", "evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\",", "pass @main.group(cls=LazyGroup) def model(): pass 
@main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate')", "model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\",", "\"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes')", "@main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info',", "\"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") 
evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\")", "evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\",", "'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\")", "def visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize')", "pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook')", "evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", 
\"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\",", "LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass", "def model(): pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle')", "evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\",", "dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") 
evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\",", "\"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\")", "import LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize():", "def main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def", "aparse import click from viewformer.utils.click import LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def", "dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\",", "'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") 
evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\")", "evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\",", "pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\")", "\"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\", 'download-model') if __name__", "main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model():", "from aparse import click from viewformer.utils.click import LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup)", "model(): pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 
'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook',", "dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\",", "from viewformer.utils.click import LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup)", "\"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\")", "'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") 
evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\")", "\"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\")", "dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def evaluate():", "evaluate(): pass dataset.add_command('viewformer.data.commands.visualize', 'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\",", "@main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup)", "def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def", "viewformer.utils.click import LazyGroup @click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def", "pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def evaluate(): pass", "import click from viewformer.utils.click import LazyGroup @click.group(cls=LazyGroup) def main(): pass 
@main.group(cls=LazyGroup) def dataset():", "evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\", 'download-model') if __name__ ==", "\"7scenes-baseline\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\", 'download-model')", "evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\", 'download-model') if", "@main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model(): pass @main.group(cls=LazyGroup) def evaluate(): pass dataset.add_command('viewformer.data.commands.visualize',", "evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\", \"7scenes\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_baseline\", \"7scenes-baseline\") 
evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes_multictx\", \"7scenes-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_co3d\", \"co3d\") evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\",", "pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup) def model(): pass", "visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\") evaluate.add_command(\"viewformer.evaluate.evaluate_sevenscenes\",", "evaluate.add_command(\"viewformer.evaluate.generate_gqn_images\", \"generate-gqn-images\") main.add_command(\"viewformer.train\", \"train\") main.add_command(\"viewformer.commands.generate_codes\", 'generate-codes') main.add_command(\"viewformer.commands.download_model\", 'download-model') if __name__ == '__main__': main()", "'visualize') dataset.add_command('viewformer.data.commands.generate', 'generate') dataset.add_command('viewformer.data.commands.shuffle', 'shuffle') visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\")", "@click.group(cls=LazyGroup) def main(): pass @main.group(cls=LazyGroup) def dataset(): pass @main.group(cls=LazyGroup) def visualize(): pass @main.group(cls=LazyGroup)", "'shuffle') 
visualize.add_command('viewformer.commands.visualize_codebook', 'codebook') model.add_command('viewformer.commands.model_info', 'info') evaluate.add_command(\"viewformer.evaluate.evaluate_transformer\", \"transformer\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx\", \"transformer-multictx\") evaluate.add_command(\"viewformer.evaluate.evaluate_transformer_multictx_allimg\", \"transformer-multictx-allimg\") evaluate.add_command(\"viewformer.evaluate.evaluate_codebook\", \"codebook\")" ]
[ "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "writing, software # distributed under the License is distributed on an \"AS IS\"", "KIND, either express or implied. # See the License for the specific language", "question) answer = predictor.predict(session_id, question) ref = db.reference('messages') ref2 = ref.child(user_id) ref3 =", "Unless required by applicable law or agreed to in writing, software # distributed", "os import tensorflow as tf import firebase_admin import time import json from firebase_admin", "user_id = request.args.get('userID') # question = request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True))", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# See the License for the specific language governing permissions and # limitations", "'Result') with tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir, result_file='basic') app.run(port=5000)", "License. 
# You may obtain a copy of the License at # #", "# question = request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id =", "= db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content' : answer,", "in predictor.session_data.id_dict: # Including the case of 0 session_id = predictor.session_data.add_session(user_id) else: session_id", "ref3 = ref2.child('messages') ref3.push().set( { 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()),", "}) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir =", "credentials from firebase_admin import db from flask import Flask, request, jsonify from settings", "return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ == \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred,", "knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as", "2017 <NAME>. All Rights Reserved. # # Licensed under the Apache License, Version", "the License. 
# ============================================================================== import os import tensorflow as tf import firebase_admin import", "of 0 session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question) answer", "from flask import Flask, request, jsonify from settings import PROJECT_ROOT from chatbot.botpredictor import", "law or agreed to in writing, software # distributed under the License is", "'user' : 'Tok' }) return answer # return jsonify({'sessionId': session_id, 'sentence': answer}) if", "# ============================================================================== import os import tensorflow as tf import firebase_admin import time import", "the License for the specific language governing permissions and # limitations under the", "specific language governing permissions and # limitations under the License. # ============================================================================== import", "print(data) user_id = data['userID'] question = data['message'] if user_id not in predictor.session_data.id_dict: #", "under the License. # ============================================================================== import os import tensorflow as tf import firebase_admin", "# return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ == \"__main__\": cred = credentials.Certificate('key2.json')", "compliance with the License. 
# You may obtain a copy of the License", "cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus')", "corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT,", "credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir =", "ref = db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content' :", "governing permissions and # limitations under the License. # ============================================================================== import os import", "= predictor.predict(session_id, question) ref = db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set(", "return answer # return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ == \"__main__\": cred", "'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase')", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "language governing permissions and # limitations under the License. # ============================================================================== import os", "PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply', methods=['POST',", "this file except in compliance with the License. 
# You may obtain a", "as tf import firebase_admin import time import json from firebase_admin import credentials from", "from firebase_admin import db from flask import Flask, request, jsonify from settings import", "data['userID'] question = data['message'] if user_id not in predictor.session_data.id_dict: # Including the case", "'sentence': answer}) if __name__ == \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com'", "os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess: predictor", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id = request.args.get('userID') # question =", "tensorflow as tf import firebase_admin import time import json from firebase_admin import credentials", "you may not use this file except in compliance with the License. 
#", "jsonify from settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app", "reply(): # user_id = request.args.get('userID') # question = request.args.get('question') session_id = 1 data", "time import json from firebase_admin import credentials from firebase_admin import db from flask", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "= os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess:", "import os import tensorflow as tf import firebase_admin import time import json from", "= 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question = data['message'] if", "import time import json from firebase_admin import credentials from firebase_admin import db from", "'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess: predictor =", "ANY KIND, either express or implied. # See the License for the specific", "and # limitations under the License. # ============================================================================== import os import tensorflow as", "request, jsonify from settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'", "predictor.session_data.id_dict: # Including the case of 0 session_id = predictor.session_data.add_session(user_id) else: session_id =", "limitations under the License. # ============================================================================== import os import tensorflow as tf import", "Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id, question) ref = db.reference('messages') ref2 =", "in compliance with the License. 
# You may obtain a copy of the", "# Including the case of 0 session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id]", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "'3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id = request.args.get('userID')", "# Copyright 2017 <NAME>. All Rights Reserved. # # Licensed under the Apache", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "import firebase_admin import time import json from firebase_admin import credentials from firebase_admin import", "use this file except in compliance with the License. # You may obtain", "ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content' : answer, 'data' :", "}) return answer # return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ == \"__main__\":", "with tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir, result_file='basic') app.run(port=5000) print(\"Web", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "json from firebase_admin import credentials from firebase_admin import db from flask import Flask,", "= data['userID'] question = data['message'] if user_id not in predictor.session_data.id_dict: # Including the", "chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def", "not use this file except in compliance with the License. # You may", "<gh_stars>0 # Copyright 2017 <NAME>. All Rights Reserved. 
# # Licensed under the", "= predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id, question)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session()", "firebase_admin import db from flask import Flask, request, jsonify from settings import PROJECT_ROOT", "from firebase_admin import credentials from firebase_admin import db from flask import Flask, request,", "data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question = data['message'] if user_id not", "See the License for the specific language governing permissions and # limitations under", "print(session_id, question) answer = predictor.predict(session_id, question) ref = db.reference('messages') ref2 = ref.child(user_id) ref3", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "{ 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' })", "__name__ == \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir =", "License, Version 2.0 (the \"License\"); # you may not use this file except", "= os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir,", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "import tensorflow as tf import firebase_admin import time import json from firebase_admin import", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "session_id = 
predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id, question) ref = db.reference('messages')", "question) ref = db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content'", "permissions and # limitations under the License. # ============================================================================== import os import tensorflow", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "answer # return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ == \"__main__\": cred =", "'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess: predictor = BotPredictor(sess,", "question = request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID']", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\",", "not in predictor.session_data.id_dict: # Including the case of 0 session_id = predictor.session_data.add_session(user_id) else:", "Flask, request, jsonify from settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] =", ": time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return answer # return jsonify({'sessionId':", "OF ANY KIND, either express or implied. 
# See the License for the", "= json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question = data['message'] if user_id not in", "= ref2.child('messages') ref3.push().set( { 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user'", "import Flask, request, jsonify from settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL']", "== \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT,", "as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir, result_file='basic') app.run(port=5000) print(\"Web service started.\")", "2.0 (the \"License\"); # you may not use this file except in compliance", "= Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id = request.args.get('userID') # question", "answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return answer #", "import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply',", "# user_id = request.args.get('userID') # question = request.args.get('question') session_id = 1 data =", "# you may not use this file except in compliance with the License.", "flask import Flask, request, jsonify from settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor", "session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question = data['message']", "for the specific language governing permissions and # limitations under the License. 
#", "agreed to in writing, software # distributed under the License is distributed on", "else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id, question) ref =", "settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__)", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return answer # return jsonify({'sessionId': session_id,", "{ 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data',", "(the \"License\"); # you may not use this file except in compliance with", "= '3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id =", "user_id = data['userID'] question = data['message'] if user_id not in predictor.session_data.id_dict: # Including", "# # Unless required by applicable law or agreed to in writing, software", "express or implied. # See the License for the specific language governing permissions", "Version 2.0 (the \"License\"); # you may not use this file except in", "# Unless required by applicable law or agreed to in writing, software #", "except in compliance with the License. # You may obtain a copy of", "the specific language governing permissions and # limitations under the License. # ==============================================================================", "by applicable law or agreed to in writing, software # distributed under the", "All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the", "%H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return answer # return jsonify({'sessionId': session_id, 'sentence':", "= predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id, question) ref = db.reference('messages') ref2", ": answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return answer", "predictor.predict(session_id, question) ref = db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( {", "'Data', 'Result') with tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir, result_file='basic')", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "request.args.get('userID') # question = request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id", "firebase_admin import time import json from firebase_admin import credentials from firebase_admin import db", "Copyright 2017 <NAME>. All Rights Reserved. # # Licensed under the Apache License,", "answer = predictor.predict(session_id, question) ref = db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages')", "either express or implied. # See the License for the specific language governing", "jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ == \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, {", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", ": 'Tok' }) return answer # return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__", "= os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data',", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply():", "0 session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question) answer =", "the case of 0 session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id,", "@app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id = request.args.get('userID') # question = request.args.get('question')", "os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir,", "file except in compliance with the License. 
# You may obtain a copy", "\"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data',", "ref2.child('messages') ref3.push().set( { 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' :", "question = data['message'] if user_id not in predictor.session_data.id_dict: # Including the case of", "= request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "db.reference('messages') ref2 = ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content' : answer, 'data'", "db from flask import Flask, request, jsonify from settings import PROJECT_ROOT from chatbot.botpredictor", "<NAME>. All Rights Reserved. # # Licensed under the Apache License, Version 2.0", "License for the specific language governing permissions and # limitations under the License.", "json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question = data['message'] if user_id not in predictor.session_data.id_dict:", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT,", "'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir", "the License. 
# You may obtain a copy of the License at #", "= ref.child(user_id) ref3 = ref2.child('messages') ref3.push().set( { 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d", "session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id,", "to in writing, software # distributed under the License is distributed on an", "request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question =", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "if __name__ == \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir", "= credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' }) corp_dir = os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "tf import firebase_admin import time import json from firebase_admin import credentials from firebase_admin", "implied. # See the License for the specific language governing permissions and #", "res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir,", "from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET'])", "\"License\"); # you may not use this file except in compliance with the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "License. # ============================================================================== import os import tensorflow as tf import firebase_admin import time", "Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the \"License\");", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "required by applicable law or agreed to in writing, software # distributed under", "firebase_admin import credentials from firebase_admin import db from flask import Flask, request, jsonify", "= data['message'] if user_id not in predictor.session_data.id_dict: # Including the case of 0", "import json from firebase_admin import credentials from firebase_admin import db from flask import", "= request.args.get('userID') # question = request.args.get('question') session_id = 1 data = json.loads(request.get_data(as_text=True)) print(data)", "'Tok' }) return answer # return jsonify({'sessionId': session_id, 'sentence': answer}) if __name__ ==", "============================================================================== import os import tensorflow as tf import firebase_admin import time import json", "'GET']) def reply(): # user_id = request.args.get('userID') # question = request.args.get('question') session_id =", "case of 0 session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question)", "applicable law or agreed to in writing, software # distributed under the License", "import credentials from firebase_admin import db from flask import Flask, request, jsonify from", "predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] # print(session_id, question) answer = predictor.predict(session_id, question) ref", "os.path.join(PROJECT_ROOT, 'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result')", "user_id not in predictor.session_data.id_dict: # Including the case of 0 session_id = predictor.session_data.add_session(user_id)", "answer}) if __name__ == \"__main__\": cred = 
credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL': 'https://uci-tok.firebaseio.com' })", "'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return", "methods=['POST', 'GET']) def reply(): # user_id = request.args.get('userID') # question = request.args.get('question') session_id", "tf.Session() as sess: predictor = BotPredictor(sess, corpus_dir=corp_dir, knbase_dir=knbs_dir, result_dir=res_dir, result_file='basic') app.run(port=5000) print(\"Web service", "or agreed to in writing, software # distributed under the License is distributed", "'Data', 'Corpus') knbs_dir = os.path.join(PROJECT_ROOT, 'Data', 'KnowledgeBase') res_dir = os.path.join(PROJECT_ROOT, 'Data', 'Result') with", "app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id = request.args.get('userID') #", "session_id, 'sentence': answer}) if __name__ == \"__main__\": cred = credentials.Certificate('key2.json') firebase_admin.initialize_app(cred, { 'databaseURL':", "data['message'] if user_id not in predictor.session_data.id_dict: # Including the case of 0 session_id", "# print(session_id, question) answer = predictor.predict(session_id, question) ref = db.reference('messages') ref2 = ref.child(user_id)", "or implied. # See the License for the specific language governing permissions and", "time.localtime()), 'user' : 'Tok' }) return answer # return jsonify({'sessionId': session_id, 'sentence': answer})", "Including the case of 0 session_id = predictor.session_data.add_session(user_id) else: session_id = predictor.session_data.id_dict[user_id] #", "import db from flask import Flask, request, jsonify from settings import PROJECT_ROOT from", "'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok' }) return answer # return", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "ref3.push().set( { 'content' : answer, 'data' : time.strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime()), 'user' : 'Tok'", "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): # user_id", "# limitations under the License. # ============================================================================== import os import tensorflow as tf", "from settings import PROJECT_ROOT from chatbot.botpredictor import BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app =", "if user_id not in predictor.session_data.id_dict: # Including the case of 0 session_id =", "with the License. # You may obtain a copy of the License at", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "1 data = json.loads(request.get_data(as_text=True)) print(data) user_id = data['userID'] question = data['message'] if user_id", "BotPredictor os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' app = Flask(__name__) @app.route('/reply', methods=['POST', 'GET']) def reply(): #", "def reply(): # user_id = request.args.get('userID') # question = request.args.get('question') session_id = 1", "under the Apache License, Version 2.0 (the \"License\"); # you may not use" ]
[ "from yat.test import macro from yat.test import Node from testcase.utils.Logger import Logger from", "= 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check() self.assertTrue(self.var[1] in res)", "= client if init == 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status =", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A", "= self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH}", "['SQL_ASCII', 'UTF8'] self.var = client if init == 'UTF8' else list(reversed(client)) def restart_check():", "WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "2022 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2. You can", "'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1", "in res) # gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D", "PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS", "Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc set 设置客户端编码集SQL_ASCII 2. 
gs_guc reload 设置客户端编码集GBK", "Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\" msg0 =", "http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF", "THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY", "SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND,", "unittest from yat.test import macro from yat.test import Node from testcase.utils.Logger import Logger", "a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON", "TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan PSL", "History : \"\"\" import unittest from yat.test import macro from yat.test import Node", "testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser')", "PSL v2. You may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2", "'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check() self.assertTrue(self.var[1] in res) def", "CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self):", ": 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. 
gs_guc set 设置客户端编码集SQL_ASCII 2.", "import Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser')", "else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded'", "restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc", "Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger() self.cluster_path", "testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh =", "self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0)", "'Degraded' in status) # 检查未生效,还是utf8 cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg)", "设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效 History : \"\"\" import unittest from", "= msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var = client if init == 'UTF8'", "at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES", "WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR", "v2 for more details. \"\"\" \"\"\" Case Type : 功能测试 Case Name :", "A PARTICULAR PURPOSE. See the Mulan PSL v2 for more details. 
\"\"\" \"\"\"", "class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger()", "\"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res) #", "according to the terms and conditions of the Mulan PSL v2. You may", "msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N all", "macro from yat.test import Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH", "copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN", "reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding =", "for more details. \"\"\" \"\"\" Case Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效", "{self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check()", "FOR A PARTICULAR PURPOSE. See the Mulan PSL v2 for more details. \"\"\"", "of the Mulan PSL v2. You may obtain a copy of Mulan PSL", "import macro from yat.test import Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH import", "# 检查未生效,还是utf8 cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg #", "\"\"\" Copyright (c) 2022 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL", "\"\"\" Case Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc", "self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK cmd2", ": 1. 设置不生效 2. 设置不生效 History : \"\"\" import unittest from yat.test import", "1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect : 1. 
设置不生效 2.", "IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER", "(c) 2022 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2. You", "\"\"\" import unittest from yat.test import macro from yat.test import Node from testcase.utils.Logger", "= f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1", "self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check() self.assertTrue(self.var[1] in res) def tearDown(self):", "PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS,", ": 1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect : 1. 设置不生效", "= \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII',", "init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var = client if init ==", "设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"'''", "set -N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res", "f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 =", "-N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res =", "== 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status", "= self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK", "设置不生效 History : \"\"\" import unittest from yat.test import macro from yat.test import", "setUp(self): self.commonsh = 
CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH", "-c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res)", "licensed under Mulan PSL v2. You can use this software according to the", "import Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def", "client if init == 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status()", "the Mulan PSL v2. You may obtain a copy of Mulan PSL v2", "Mulan PSL v2. You may obtain a copy of Mulan PSL v2 at:", "IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT", "macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init", "INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.", "Mulan PSL v2 for more details. \"\"\" \"\"\" Case Type : 功能测试 Case", "MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan PSL v2 for", "IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR", "See the Mulan PSL v2 for more details. \"\"\" \"\"\" Case Type :", "client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 =", "使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect :", "Co.,Ltd. openGauss is licensed under Mulan PSL v2. You can use this software", "v2. 
You can use this software according to the terms and conditions of", "ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY", "self.var = client if init == 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status", "status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in status) # 检查未生效,还是utf8 cmd", "PSL v2 for more details. \"\"\" \"\"\" Case Type : 功能测试 Case Name", "Case Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc set", "Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2. You can use this", "self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK cmd2 =", "Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\"", "msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source", "all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in", "cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2)", "self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var = client if", "检查未生效,还是utf8 cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc", "init == 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in", "gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I all", "of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS", "\"AS IS\" BASIS, WITHOUT WARRANTIES OF 
ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING", ": 使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect", ": \"\"\" import unittest from yat.test import macro from yat.test import Node from", "under Mulan PSL v2. You can use this software according to the terms", "BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT", "You may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE", "terms and conditions of the Mulan PSL v2. You may obtain a copy", "self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''')", "= macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0)", "yat.test import Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase):", "LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan", "self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init =", "You can use this software according to the terms and conditions of the", "import unittest from yat.test import macro from yat.test import Node from testcase.utils.Logger import", "gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding", "\"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8']", "PSL v2. 
You can use this software according to the terms and conditions", "msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var =", "the terms and conditions of the Mulan PSL v2. You may obtain a", "list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in", "NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the", "NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan PSL v2", "BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See", "gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效", "= 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII", "all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check()", "and conditions of the Mulan PSL v2. You may obtain a copy of", "test_encode(self): cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client", "Copyright (c) 2022 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2.", "cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client =", "PARTICULAR PURPOSE. See the Mulan PSL v2 for more details. 
\"\"\" \"\"\" Case", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR", "def test_encode(self): cmd0 = \"show client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip()", "self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in status) # 检查未生效,还是utf8", "gs_guc reload 设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效 History : \"\"\" import", "gs_guc set -N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1)", "conditions of the Mulan PSL v2. You may obtain a copy of Mulan", "msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var = client if init == 'UTF8' else", "v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN \"AS IS\" BASIS, WITHOUT", "{macro.DB_ENV_PATH} gs_guc set -N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result()", "'UTF8'] self.var = client if init == 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster()", "EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT", "Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show", "status or 'Degraded' in status) # 检查未生效,还是utf8 cmd = 'show client_encoding;' msg =", "return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N", "-I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1]", "= restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH}", "AN 
\"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED,", "details. \"\"\" \"\"\" Case Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description :", "# gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I", "msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check() self.assertTrue(self.var[1] in res) def tearDown(self): self.log.info('''---Opengauss_Function_DML_Set_Case0032结束---''')", "in status) # 检查未生效,还是utf8 cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return", "设置不生效 2. 设置不生效 History : \"\"\" import unittest from yat.test import macro from", "2. gs_guc reload 设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效 History : \"\"\"", "client_encoding;\" msg0 = self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var", "def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in status)", "设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效 History :", "set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效 History", "may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS", "self.assertTrue(\"Normal\" in status or 'Degraded' in status) # 检查未生效,还是utf8 cmd = 'show client_encoding;'", "f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1) msg1 =", "self.log.info(cmd1) msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res) # gs_guc", "FIT FOR A PARTICULAR PURPOSE. 
See the Mulan PSL v2 for more details.", "= Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\" msg0", "msg1 = self.user_node.sh(cmd1).result() self.log.info(msg1) res = restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload", "= CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def", "res = restart_check() self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK cmd2 = f'''source", "# gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c", "self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in status) # 检查未生效,还是utf8 cmd = 'show", "yat.test import macro from yat.test import Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH", "\"\"\" \"\"\" Case Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1.", "Mulan PSL v2. You can use this software according to the terms and", "if init == 'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\"", "the Mulan PSL v2 for more details. \"\"\" \"\"\" Case Type : 功能测试", "OR FIT FOR A PARTICULAR PURPOSE. See the Mulan PSL v2 for more", "restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in status) #", "-c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check() self.assertTrue(self.var[1]", "2. 
设置不生效 History : \"\"\" import unittest from yat.test import macro from yat.test", "reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res", "Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload", "Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2. You can use", "PURPOSE. See the Mulan PSL v2 for more details. \"\"\" \"\"\" Case Type", "reload 设置客户端编码集GBK Expect : 1. 设置不生效 2. 设置不生效 History : \"\"\" import unittest", "设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I all -c \"client_encoding='{self.var[0]}'\"'''", "from testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh", "ON AN \"AS IS\" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR", "CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser') self.log =", "1. 设置不生效 2. 
设置不生效 History : \"\"\" import unittest from yat.test import macro", "Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node", "can use this software according to the terms and conditions of the Mulan", "self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 = \"show client_encoding;\"", "from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node =", "this software according to the terms and conditions of the Mulan PSL v2.", "client = ['SQL_ASCII', 'UTF8'] self.var = client if init == 'UTF8' else list(reversed(client))", "or 'Degraded' in status) # 检查未生效,还是utf8 cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd)", "self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc", "self.assertTrue(self.var[1] in res) # gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload", "= f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2", "{macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result()", "OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,", "set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I all -c", "= self.commonsh.execut_db_sql(cmd0) self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var = client", "in status or 'Degraded' in status) # 检查未生效,还是utf8 cmd = 'show client_encoding;' msg", "v2. You may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS", "Expect : 1. 设置不生效 2. 
设置不生效 History : \"\"\" import unittest from yat.test", "res) # gs_guc reload 设置客户端编码集GBK cmd2 = f'''source {macro.DB_ENV_PATH} gs_guc reload -D {self.cluster_path}", "gs_guc reload -D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2)", "import CommonSH class Function(unittest.TestCase): def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser') self.log", "self.user_node = Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0", "'UTF8' else list(reversed(client)) def restart_check(): self.commonsh.restart_db_cluster() status = self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or", "openGauss is licensed under Mulan PSL v2. You can use this software according", "status) # 检查未生效,还是utf8 cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg", "cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set -N all -I all -c \"client_encoding='{self.var[0]}'\"''' self.log.info(cmd1)", "\"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res = restart_check() self.assertTrue(self.var[1] in", "-D {self.cluster_path} -c \"client_encoding = 'GBK'\"''' self.log.info(cmd2) msg2 = self.user_node.sh(cmd2).result() self.log.info(msg2) res =", "= Node('dbuser') self.log = Logger() self.cluster_path = macro.DB_INSTANCE_PATH self.log.info('''---Opengauss_Function_DML_Set_Case0032开始---''') def test_encode(self): cmd0 =", "Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH class Function(unittest.TestCase): def setUp(self):", "software according to the terms and conditions of the Mulan PSL v2. You", "Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. 
gs_guc set 设置客户端编码集SQL_ASCII", "from yat.test import Node from testcase.utils.Logger import Logger from testcase.utils.CommonSH import CommonSH class", "功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description : 1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc", "is licensed under Mulan PSL v2. You can use this software according to", "self.log.info(msg) return msg # gs_guc set 设置客户端编码集SQL_ASCII cmd1 = f'''source {macro.DB_ENV_PATH} gs_guc set", "use this software according to the terms and conditions of the Mulan PSL", "self.log.info(msg0) init = msg0.splitlines()[2].strip() client = ['SQL_ASCII', 'UTF8'] self.var = client if init", "cmd = 'show client_encoding;' msg = self.commonsh.execut_db_sql(cmd) self.log.info(msg) return msg # gs_guc set", "def setUp(self): self.commonsh = CommonSH('dbuser') self.user_node = Node('dbuser') self.log = Logger() self.cluster_path =", "= self.commonsh.get_db_cluster_status() self.assertTrue(\"Normal\" in status or 'Degraded' in status) # 检查未生效,还是utf8 cmd =", "to the terms and conditions of the Mulan PSL v2. You may obtain", "obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED", "= ['SQL_ASCII', 'UTF8'] self.var = client if init == 'UTF8' else list(reversed(client)) def", "Description : 1. gs_guc set 设置客户端编码集SQL_ASCII 2. gs_guc reload 设置客户端编码集GBK Expect : 1.", "more details. \"\"\" \"\"\" Case Type : 功能测试 Case Name : 使用gs_guc工具设置客户端编码,不生效 Description" ]
[ "Module.\"\"\" from scphylo.tl.cna import infercna from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import", "gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm,", "from scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad, caset, cc, disc, dl, gs,", "sbm, sciphi, scistree, scite, siclonefit, sphyr, ) __all__ = ( infercna, consensus, consensus_day,", "phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, siclonefit, sphyr, ) __all__ =", "phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset, disc, mp3, rf, gs, sphyr,", "onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset, disc,", "huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit,", "sphyr, ) __all__ = ( infercna, consensus, consensus_day, partition_function, sbm, ad, cc, dl,", "phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset, disc, mp3, rf, gs,", "phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset, disc, mp3,", "dendro, gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree,", "cardelino, dendro, gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk,", "gs, mltd, mp3, rf, tpted from scphylo.tl.solver import ( bnb, booster, cardelino, dendro,", "import ( bnb, booster, cardelino, dendro, gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount,", "rscistree, scistree, scite, siclonefit, fitch, caset, disc, mp3, rf, gs, sphyr, grmt, sciphi,", "scphylo.tl.solver import ( bnb, booster, cardelino, dendro, gpps, grmt, 
huntress, infscite, iscistree, onconem,", "= ( infercna, consensus, consensus_day, partition_function, sbm, ad, cc, dl, mltd, tpted, bnb,", "\"\"\"Tools Module.\"\"\" from scphylo.tl.cna import infercna from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch", "iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, siclonefit,", "infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch,", "dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite,", "scistree, scite, siclonefit, fitch, caset, disc, mp3, rf, gs, sphyr, grmt, sciphi, gpps,", "tpted from scphylo.tl.solver import ( bnb, booster, cardelino, dendro, gpps, grmt, huntress, infscite,", "scphylo.tl.score import ad, caset, cc, disc, dl, gs, mltd, mp3, rf, tpted from", "consensus_day, partition_function, sbm, ad, cc, dl, mltd, tpted, bnb, booster, cardelino, dendro, huntress,", "ad, caset, cc, disc, dl, gs, mltd, mp3, rf, tpted from scphylo.tl.solver import", "from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad, caset,", ") __all__ = ( infercna, consensus, consensus_day, partition_function, sbm, ad, cc, dl, mltd,", "import partition_function from scphylo.tl.score import ad, caset, cc, disc, dl, gs, mltd, mp3,", "onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, siclonefit, sphyr,", "scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad, caset, cc, disc, dl, gs, mltd,", "ad, cc, dl, mltd, tpted, bnb, booster, cardelino, dendro, huntress, infscite, iscistree, onconem,", "huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, 
sciphi, scistree,", "mp3, rf, tpted from scphylo.tl.solver import ( bnb, booster, cardelino, dendro, gpps, grmt,", "mltd, mp3, rf, tpted from scphylo.tl.solver import ( bnb, booster, cardelino, dendro, gpps,", "rscistree, sbm, sciphi, scistree, scite, siclonefit, sphyr, ) __all__ = ( infercna, consensus,", "disc, dl, gs, mltd, mp3, rf, tpted from scphylo.tl.solver import ( bnb, booster,", "scphylo.tl.cna import infercna from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import fitch from", "cc, dl, mltd, tpted, bnb, booster, cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk,", "booster, cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree,", "scistree, scite, siclonefit, sphyr, ) __all__ = ( infercna, consensus, consensus_day, partition_function, sbm,", "cc, disc, dl, gs, mltd, mp3, rf, tpted from scphylo.tl.solver import ( bnb,", "scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import partition_function from", "import consensus, consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import partition_function from scphylo.tl.score", "infercna, consensus, consensus_day, partition_function, sbm, ad, cc, dl, mltd, tpted, bnb, booster, cardelino,", "partition_function from scphylo.tl.score import ad, caset, cc, disc, dl, gs, mltd, mp3, rf,", "import ad, caset, cc, disc, dl, gs, mltd, mp3, rf, tpted from scphylo.tl.solver", "caset, cc, disc, dl, gs, mltd, mp3, rf, tpted from scphylo.tl.solver import (", "sciphi, scistree, scite, siclonefit, sphyr, ) __all__ = ( infercna, consensus, consensus_day, partition_function,", "<reponame>faridrashidi/scphylo-tools \"\"\"Tools Module.\"\"\" from scphylo.tl.cna import infercna from scphylo.tl.consensus import consensus, consensus_day from", "phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, 
phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, siclonefit, sphyr, )", "phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset, disc, mp3, rf, gs, sphyr, grmt,", "( bnb, booster, cardelino, dendro, gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb,", "phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset, disc, mp3, rf,", "consensus, consensus_day, partition_function, sbm, ad, cc, dl, mltd, tpted, bnb, booster, cardelino, dendro,", "bnb, booster, cardelino, dendro, gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk,", "consensus, consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import partition_function from scphylo.tl.score import", "infercna from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import", "tpted, bnb, booster, cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk,", "from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import partition_function", "sbm, ad, cc, dl, mltd, tpted, bnb, booster, cardelino, dendro, huntress, infscite, iscistree,", "phiscsi, phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, siclonefit, sphyr, ) __all__ = (", "from scphylo.tl.score import ad, caset, cc, disc, dl, gs, mltd, mp3, rf, tpted", "mltd, tpted, bnb, booster, cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb,", "bnb, booster, cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi,", "scite, siclonefit, sphyr, ) __all__ = ( infercna, consensus, consensus_day, partition_function, sbm, ad,", "phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, 
siclonefit, sphyr, ) __all__", "cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree,", "import fitch from scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad, caset, cc, disc,", "( infercna, consensus, consensus_day, partition_function, sbm, ad, cc, dl, mltd, tpted, bnb, booster,", "dl, mltd, tpted, bnb, booster, cardelino, dendro, huntress, infscite, iscistree, onconem, phiscsi_bulk, phiscs_readcount,", "import infercna from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function", "__all__ = ( infercna, consensus, consensus_day, partition_function, sbm, ad, cc, dl, mltd, tpted,", "siclonefit, sphyr, ) __all__ = ( infercna, consensus, consensus_day, partition_function, sbm, ad, cc,", "fitch from scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad, caset, cc, disc, dl,", "iscistree, onconem, phiscsi_bulk, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, rscistree, scistree, scite, siclonefit, fitch, caset,", "booster, cardelino, dendro, gpps, grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi,", "partition_function, sbm, ad, cc, dl, mltd, tpted, bnb, booster, cardelino, dendro, huntress, infscite,", "from scphylo.tl.cna import infercna from scphylo.tl.consensus import consensus, consensus_day from scphylo.tl.fitch import fitch", "dl, gs, mltd, mp3, rf, tpted from scphylo.tl.solver import ( bnb, booster, cardelino,", "grmt, huntress, infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, sciphi,", "rf, tpted from scphylo.tl.solver import ( bnb, booster, cardelino, dendro, gpps, grmt, huntress,", "from scphylo.tl.solver import ( bnb, booster, cardelino, dendro, gpps, grmt, huntress, infscite, iscistree,", "scphylo.tl.fitch import fitch from 
scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad, caset, cc,", "consensus_day from scphylo.tl.fitch import fitch from scphylo.tl.partition_function import partition_function from scphylo.tl.score import ad,", "scite, siclonefit, fitch, caset, disc, mp3, rf, gs, sphyr, grmt, sciphi, gpps, )", "phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite, siclonefit, sphyr, ) __all__ = ( infercna,", "infscite, iscistree, onconem, phiscs_readcount, phiscsb, phiscsb_bulk, phiscsi, phiscsi_bulk, rscistree, sbm, sciphi, scistree, scite," ]
[ "> len(lst): return lst else: return([n for n in lst[:index]] + [2 *", "the index should be multiplied by 2 # Rest of the elements should", "lst[:index]] + [2 * lst[index]] + [n for n in lst[index + 1:]])", "double_index(lst, index): if index > len(lst): return lst else: return([n for n in", "element at the index should be multiplied by 2 # Rest of the", "Return a list # The element at the index should be multiplied by", "Rest of the elements should be the same. def double_index(lst, index): if index", "of the elements should be the same. def double_index(lst, index): if index >", "2 # Rest of the elements should be the same. def double_index(lst, index):", "* lst[index]] + [n for n in lst[index + 1:]]) print(double_index([3, 8, -10,", "# Return a list # The element at the index should be multiplied", "the elements should be the same. def double_index(lst, index): if index > len(lst):", "lst[index]] + [n for n in lst[index + 1:]]) print(double_index([3, 8, -10, 12],", "by 2 # Rest of the elements should be the same. def double_index(lst,", "+ [2 * lst[index]] + [n for n in lst[index + 1:]]) print(double_index([3,", "# Rest of the elements should be the same. def double_index(lst, index): if", "index should be multiplied by 2 # Rest of the elements should be", "should be multiplied by 2 # Rest of the elements should be the", "[2 * lst[index]] + [n for n in lst[index + 1:]]) print(double_index([3, 8,", "list # The element at the index should be multiplied by 2 #", "def double_index(lst, index): if index > len(lst): return lst else: return([n for n", "multiplied by 2 # Rest of the elements should be the same. def", "The element at the index should be multiplied by 2 # Rest of", "elements should be the same. 
def double_index(lst, index): if index > len(lst): return", "len(lst): return lst else: return([n for n in lst[:index]] + [2 * lst[index]]", "index): if index > len(lst): return lst else: return([n for n in lst[:index]]", "n in lst[:index]] + [2 * lst[index]] + [n for n in lst[index", "same. def double_index(lst, index): if index > len(lst): return lst else: return([n for", "python3 # Return a list # The element at the index should be", "be the same. def double_index(lst, index): if index > len(lst): return lst else:", "lst else: return([n for n in lst[:index]] + [2 * lst[index]] + [n", "in lst[:index]] + [2 * lst[index]] + [n for n in lst[index +", "+ [n for n in lst[index + 1:]]) print(double_index([3, 8, -10, 12], 2))", "a list # The element at the index should be multiplied by 2", "if index > len(lst): return lst else: return([n for n in lst[:index]] +", "index > len(lst): return lst else: return([n for n in lst[:index]] + [2", "the same. def double_index(lst, index): if index > len(lst): return lst else: return([n", "else: return([n for n in lst[:index]] + [2 * lst[index]] + [n for", "#!/usr/bin/env python3 # Return a list # The element at the index should", "# The element at the index should be multiplied by 2 # Rest", "should be the same. def double_index(lst, index): if index > len(lst): return lst", "for n in lst[:index]] + [2 * lst[index]] + [n for n in", "return([n for n in lst[:index]] + [2 * lst[index]] + [n for n", "be multiplied by 2 # Rest of the elements should be the same.", "at the index should be multiplied by 2 # Rest of the elements", "return lst else: return([n for n in lst[:index]] + [2 * lst[index]] +" ]
[ "dy = g.dy # append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2)", "h else: anchory = h/2.0 # set anchor x if halign < 0:", "45: halign, valign = valign, -halign elif self._angle < -45: halign, valign =", "in screen coordinates (raw) self._vertices2 = None # dito, but corrected for angle", "halign(): \"\"\"Get/Set the horizontal alignment. Specify as: * 'left', 'center', 'right' * -1,", "as: * 'left', 'center', 'right' * -1, 0, 1 \"\"\" def fget(self): return", "* u2000 - u23ff symbols There are several escape sequences for (mathematical) characters", "def halign(): \"\"\"Get/Set the horizontal alignment. Specify as: * 'left', 'center', 'right' *", "draw character %i! \" % ord(char) ac = 32 # make space #", "should be before the next char * s1 s2 t1 t2 represent texture", "when using the texture one would # see artifacts from neighbouring characters. Additionally,", "scene. The fontname can be 'mono', 'sans' or 'serif'. If not given, the", "self._size: self._size = value self._Invalidate() # force recalculation self.Draw() return locals() @Property def", "append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set", "part from the texture stored in the Font object. * sizex and sizey", "= escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font object holds", "instead. 
\"\"\" import OpenGL.GL as gl import OpenGL.GLU as glu import os import", "- an array of size's # - fontsize of the font in the", "about position changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box", "fontname may be 'mono', 'sans', 'serif' or None, in which case the vv.settings.defaultFontName", "cos_angle) # Move anchor in label if isinstance(self, Label): w,h = self.position.size #", "latin * u0380 - u03ff greek * u2000 - u23ff symbols There are", "self.width = float(infoWidth[ac]) * factor # is spacing? smaller = 0.6 self.dy =", "0.5 # calculate width on screen, given the size factor = size /", "# get font instance from figure fig = self.GetFigure() if not fig: return", "'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c,", "anchor if self._halign < 0: anchorx = x1 elif self._halign > 0: anchorx", "BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None) A wobject representing a string", "in info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b)", "build arrays with vertices and coordinates x1, y1, z = 0, 0, 0", "fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no edge self.edgeWidth = 0 #", "formatted using the following constructs (which can be mixed): * hello^2 or hello^{there},", "2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR", "\"\"\" GetFont(fontname) Get a font instance. If that font was created earlier, that", "0.0 if style.italic: self.skewFactor = 0.5 # calculate width on screen, given the", "gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture and clean up if x or", "The fontname can be 'mono', 'sans' or 'serif'. 
If not given, the vv.settings.defaultFontName", "_Invalidate(self): \"\"\" Invalidate this object, such that the text is recompiled the next", "alignment. Specify as: * 'left', 'center', 'right' * -1, 0, 1 \"\"\" def", "angle and alignment. -> produces _vertices2 from _vertices1 (and is called when the", "def fget(self): return self._size def fset(self, value): if value != self._size: self._size =", "value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp: raise", "tau * upsilon phi chi psi * omega Note: In case one needs", "If that font was created earlier, that font is returned, otherwise it is", "* rho varsigma sigma tau * upsilon phi chi psi * omega Note:", "tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D,", "tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise ValueError('Invalid value for valign.') value =", "parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) # store", "given without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt", "self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set the spacing between characters. \"\"\" def", "def text(): \"\"\"Get/Set the text to display. \"\"\" def fget(self): return self._text def", "'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8,", "cdot pm * oplus ominus otimes oslash Letters from the greek alfabet can", "the style for this glyph self.style = MiniStyle() if styles: for style in", "make any supported # unicode character italic. 
# if style.italic and ac in", "string of characters. The text has a certain position in the scene. The", "vertices and coordinates x1, y1, z = 0, 0, 0 vertices = Pointset(3)", "in visvis. Defines a wibject and a wobject: Label and Text, which are", "the scene. The fontname can be 'mono', 'sans' or 'serif'. If not given,", "= self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture and", "it skewed rather using the # italic glyphs. The reason is that when", "self._vertices2 = None # force recalculation self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set", "x1, y1, z = 0, 0, 0 vertices = Pointset(3) texCords = Pointset(2)", "data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2)", "calculated, which are then corrected for angle and alignment in _PositionText(). -> Produces", "not given, the vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self,", "fontsize of the font in the data array def __init__(self, font, char, size=12,", "chars tt = tt.replace(r'\\\\', '\\t') # double backslashes do not escape for c", "__repr__(self): tmp = self.script, self.bold, self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp", "= x1 + g.sizex y2 = g.sizey #y2 = y1 - g.sizey dy", "self._fontname: self._fontname = value self._Invalidate() # force recalculation self.Draw() return locals() @Property def", "character.') # do we have that char? if ac not in info.charcodes:#ac <", "and stored for reuse. 
\"\"\" if fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s,", "utf-8 -*- # Copyright (C) 2012, <NAME> # # Visvis is distributed under", "font info = self.font.info # get asci code and check it if isinstance(char,", "'<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object", "from visvis.core.base import Wobject from visvis.core.misc import Property, PropWithDraw from visvis.core.misc import getResourceDir,", "be inserted using the backslash (for example '\\infty'). People familiar with Latex know", "line of text oriented at a certain angle. Formatting ---------- Text can be", "y2+dy, z) # prepare for next glyph x1 = x1 + g.width +", "c=='}': # Remove style if styles: styles.pop() elif c=='^': style = MiniStyle(2) elif", "one can always look up its unicode value and use that instead. \"\"\"", "characters. \"\"\" def fget(self): return self._charSpacing def fset(self, value): if value != self._charSpacing:", "def fset(self, value): if value != self._angle: self._angle = value self._vertices2 = None", "None # coords in the font texture self._vertices1 = None # the coords", "None: self._PositionText() # get font instance from figure fig = self.GetFigure() if not", "nu xi omicron pi * rho varsigma sigma tau * upsilon phi chi", "by rendering the proper part from the texture stored in the Font object.", "tt.replace('\\b', '\\x07') # build list of glyphs, take sub/super scripting into account. escape", "figure/context. \"\"\" def __init__(self): # load font data path = getResourceDir() self.s =", "produces _vertices2 from _vertices1 (and is called when the first is None) \"\"\"", "using the texture one would # see artifacts from neighbouring characters. 
Additionally, it's", "self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) # make integer (to prevent glitchy behaviour),", "escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font object holds the", "valign(): \"\"\"Get/Set the vertical alignment. Specify as: * 'up', 'center', 'down' * 'top',", "skew = self._size * g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy,", "of origin 's # - an array of size's # - fontsize of", "or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure the glyphs are created", "Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras import depthToZ from", "that char? if ac not in info.charcodes:#ac < 32 or ac > 255:", "self._charSpacing def fset(self, value): if value != self._charSpacing: self._charSpacing = value self._Invalidate() #", "unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt = tt.replace(r'\\\\', '\\t')", "vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): # Translate if x or y", "def valign(): \"\"\"Get/Set the vertical alignment. Specify as: * 'up', 'center', 'down' *", "anchor x if halign < 0: anchorx = 0 elif halign > 0:", "get asci code and check it if isinstance(char, basestring): ac = ord(char) elif", "according to global text size property vertices *= fig._relativeFontSize # obtain dimensions if", "using the # italic glyphs. The reason is that when using the texture", "x1, x2 = 0,0 y1, y2 = 0, self._xglyph.sizey # set anchor if", "AxisLabel class) if vertices is not None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max()", "def __init__(self): # load font data path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf'))", "Base object for the Text wobject and Label wibject. 
fontname may be 'mono',", "angle = self._angle if isinstance(self, Text): # Text is a wobject, so must", "The full license can be found in 'license.txt'. \"\"\" Module textRender For rendering", "infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width # should and can we display", "an uppercase letter, the corresponding upper case greek letter is inserted): * alpha", "anchorx = x1 + (x2-x1)/2.0 # if self._valign < 0: anchory = y1", "value self.Draw() return locals() @Property def halign(): \"\"\"Get/Set the horizontal alignment. Specify as:", "self._text = text # Set and check fontname if fontname is None: fontname", "info.size_i, info.origin_i, info.width_i) if style.bold and ac in info.charcodes_b: # bold text infoSize,", "to display. \"\"\" def fget(self): return self._text def fset(self, value): if value !=", "np import visvis from visvis import ssdf from visvis.pypoints import Pointset # from", "\"\"\" def fget(self): return self._x def fset(self, value): self._x = value return locals()", "ord(char) elif isinstance(char, int): ac = char else: raise ValueError('To create a glyph,", "None elif c=='}': # Remove style if styles: styles.pop() elif c=='^': style =", "changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box Box.OnDraw(self) #", "coordinates (raw) self._vertices2 = None # dito, but corrected for angle and alignment", "# normal script if style.script == 1: # sub script self.dy = (1-smaller)", "x=0, y=0, z=0, fontname=None) A wobject representing a string of characters. 
The text", "- u037f latin * u0380 - u03ff greek * u2000 - u23ff symbols", "> 135 or self._angle < -135: halign, valign = -halign, valign elif self._angle", "# store font information self.info = info # set data self.SetData(self.info.data) def _UploadTexture(self,", "store font information self.info = info # set data self.SetData(self.info.data) def _UploadTexture(self, data,", "escape: g = Glyph(font, c, self._size, styles) glyphs.append( g ) escape = False", "g ) escape = False elif c=='{': # Append style to the list", "= vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self, Label): angle", "return self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp", "or character.') # do we have that char? if ac not in info.charcodes:#ac", "-halign, valign elif self._angle > 45: halign, valign = valign, -halign elif self._angle", "coords in screen coordinates (raw) self._vertices2 = None # dito, but corrected for", "vertices if self._vertices1 is None: return vertices = self._vertices1.copy() # scale text according", "= (x1) / tmp, (x2-1) / tmp y1 = infoOrigin[ac,1] y2 = y1", "isinstance(char, basestring): ac = ord(char) elif isinstance(char, int): ac = char else: raise", "(x2-x1)/2.0 # if self._valign < 0: anchory = y1 elif self._valign > 0:", "getResourceDir, getColor # from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box escapes =", "enable texture font.Enable() # prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy() # init", "sure the glyphs are created if self._vertices1 is None or self._texCords is None:", "> 255: print \"Warning: Cannot draw character %i! \" % ord(char) ac =", "Module textRender For rendering text in visvis. Defines a wibject and a wobject:", "in info.charcodes_i: # # italic text # infoSize, infoOrigin, infoWidth = ( #", "is ment as a verb. 
The vertices1 are corrected for angle and alignment.", "the angle of the text in degrees. \"\"\" def fget(self): return self._angle def", "characters. The text has a certain position in the scene. The fontname can", "fontname can be 'mono', 'sans' or 'serif'. If not given, the vv.settings.defaultFontName is", "def fget(self): return self._color def fset(self, value): value = getColor(value,'setting textColor') if value", "if fontname not in ['mono', 'sans', 'serif']: raise ValueError('Invalid font name.') # more", "fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else: raise ValueError(\"Invalid font", "make invalid first self._Invalidate() # get font instance from figure f = self.GetFigure()", "= 0 elif halign > 0: anchorx = w else: anchorx = w/2.0", "and clean up if x or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY)", "no string tt = tt.replace('\\b', '\\x07') # build list of glyphs, take sub/super", "gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0,", "= gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T,", "* 'up', 'center', 'down' * 'top', 'center', 'bottom' * -1, 0, 1 \"\"\"", "* cos_angle - vertices[:,1] * sin_angle, vertices[:,0] * sin_angle + vertices[:,1] * cos_angle)", "self._z def fset(self, value): self._z = value return locals() def OnDraw(self): # get", "gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER,", "and ac 
in info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth = ( info.size_b,", "locals() @PropWithDraw def y(): \"\"\"Get/Set the y position of the text. \"\"\" def", "gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of fonts. There should be only", "u0380 - u03ff greek * u2000 - u23ff symbols There are several escape", "32 # make space # default infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width", "value for valign.') value = tmp[value.lower()] else: raise ValueError('valign must be an int", "contains all the characters. \"\"\" def __init__(self, info): TextureObject.__init__(self, 2) # store font", "= False elif c=='{': # Append style to the list if style: styles.append(style)", "'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6,", "recalculation self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set the size of the text.", "self._angle < -135: halign, valign = -halign, valign elif self._angle > 45: halign,", "can be inserted using the backslash (for example '\\infty'). People familiar with Latex", "\"\"\"Get/Set the horizontal alignment. Specify as: * 'left', 'center', 'right' * -1, 0,", "* upsilon phi chi psi * omega Note: In case one needs a", "int(value<0) if value != self._valign: self._valign = value self._vertices2 = None # force", "(C) 2012, <NAME> # # Visvis is distributed under the terms of the", "# Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] =", "in the font texture self._vertices1 = None # the coords in screen coordinates", "to know about position changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw", "self._size) tt = self._text # transform greek characters that were given without double", "charactes superscript. 
* hello_2 or hello_{there}, makes one or more charactes subscript. *", "(taking angle into account) self._deltax = 0,0 self._deltay = 0,0 # store text", "do not escape for c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt =", "and sizey represent the size of the glyph. * dy represents the offset", "(sub/super script, bold, and italic. Used when compiling the text. script = {0:'normal',", "class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for the Text wobject and Label", "clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture and clean up if x", "value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp: raise ValueError('Invalid value", "for position skew = self._size * g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z)", "'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the keys, such that longer names", "If not given, the vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='', fontname=None):", "-135: halign, valign = -halign, valign elif self._angle > 45: halign, valign =", "z) vertices.append(x1, y2+dy, z) # prepare for next glyph x1 = x1 +", "self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None)", "for valign.') value = tmp[value.lower()] else: raise ValueError('valign must be an int or", "x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font object holds the texture that contains", "makes one or more charactes bold. 
* hello\\_there, a backslash escapes, thus keeping", "value): if value != self._fontname: self._fontname = value self._Invalidate() # force recalculation self.Draw()", "data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data shape = data.shape", "unwind the style for this glyph self.style = MiniStyle() if styles: for style", "under the terms of the (new) BSD License. # The full license can", "vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self, Label): angle = -self._angle vertices[:,0] =", "glyphs = [] self._xglyph = Glyph(font, 'X', self._size) tt = self._text # transform", "* leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil", "The name is ment as a verb. The vertices1 are corrected for angle", "= (y1) / tmp, (y2-1) / tmp # Define skew factor to handle", "= texCords self._vertices1 = vertices def _PositionText(self, event=None): \"\"\" The name is ment", "self._size = value self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontName():", "glyphs are created if self._vertices1 is None or self._texCords is None: self._Compile() if", "styles: self.style += style style = self.style # store font self.font = font", "'center', 'bottom' * -1, 0, 1 \"\"\" def fget(self): return self._valign def fset(self,", "phi chi psi * omega Note: In case one needs a character that", "fget(self): return self._text def fset(self, value): if value != self._text: self._text = value", "in the same way (By starting the name with an uppercase letter, the", "glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) # make integer (to prevent", "text, and one can make any supported # unicode character italic. # if", "have that char? if ac not in info.charcodes:#ac < 32 or ac >", "Defines a wibject and a wobject: Label and Text, which are both able", "hell\\bo or hell\\b{ohoo}, makes one or more charactes bold. 
* hello\\_there, a backslash", "such that longer names are replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x))", "is recompiled the next time it is drawn. \"\"\" self._texCords = None self._vertices1", "anchor y if valign < 0: anchory = 0 elif valign > 0:", "this object, such that the text is recompiled the next time it is", "more charactes bold. * hello\\_there, a backslash escapes, thus keeping the _^ or", "Find position in texture, normalized to texture coordinates x1 = infoOrigin[ac,0] x2 =", "\"\"\" Font(info) A Font object holds the texture that contains all the characters.", "size property vertices *= fig._relativeFontSize # obtain dimensions if len(vertices): x1, x2 =", "+ vertices[:,1] * cos_angle) # Move anchor in label if isinstance(self, Label): w,h", "def fget(self): return self._fontname def fset(self, value): if value != self._fontname: self._fontname =", "gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager()", "fontname not in ['mono', 'sans', 'serif']: raise ValueError('Invalid font name.') # more properties", "@Property def textColor(): \"\"\"Get/Set the color of the text. \"\"\" def fget(self): return", "tmp y1 = infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1,", "self._vertices2 = None @Property # Smart draw def text(): \"\"\"Get/Set the text to", "modifiers tt = tt.replace('\\i', '\\x06') # just use some char that is no", "able to produce a single line of text oriented at a certain angle.", "bold? 
# Note: italic is now realized by printing it skewed rather using", "value self._Invalidate() # force recalculation self.Draw() return locals() @Property # Smart draw def", "= value return locals() def OnDraw(self): # get screen position and store tmp", "gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER,", "styles: for style in styles: self.style += style style = self.style # store", "anchorx = w/2.0 # apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] = vertices[:,1]", "use self._screenx, self._screeny, self._screenz = 0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set the", "ValueError('Invalid value for halign.') value = tmp[value.lower()] else: raise ValueError('halign must be an", "normalized to texture coordinates x1 = infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp", "# set data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload to make it", "info = self.font.info # get asci code and check it if isinstance(char, basestring):", "# Remove style if styles: styles.pop() elif c=='^': style = MiniStyle(2) elif c=='_':", "Specify as: * 'up', 'center', 'down' * 'top', 'center', 'bottom' * -1, 0,", "properties self._size = 9 self._fontname = fontname self._color = (0,0,0) self._angle = 0", "xi omicron pi * rho varsigma sigma tau * upsilon phi chi psi", "code and check it if isinstance(char, basestring): ac = ord(char) elif isinstance(char, int):", "# get figure fig = self.GetFigure() # get vertices if self._vertices1 is None:", "stored in the Font object. 
* sizex and sizey represent the size of", "sequences for (mathematical) characters that can be inserted using the backslash (for example", "g = Glyph(font, c, self._size, styles) glyphs.append( g ) escape = False elif", "__init__(self, info): TextureObject.__init__(self, 2) # store font information self.info = info # set", "= info.size, info.origin, info.width # should and can we display in italic or", "y2 = g.sizey #y2 = y1 - g.sizey dy = g.dy # append", "calculate width on screen, given the size factor = size / float(info.fontsize) self.sizex", "y axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif", "infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b) # Find position in texture, normalized", "/ tmp y1 = infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp = float(info.data.shape[0])", "halign, valign = -halign, valign elif self._angle > 45: halign, valign = valign,", "> 0: anchory = h else: anchory = h/2.0 # set anchor x", "# Smart draw def textAngle(): \"\"\"Get/Set the angle of the text in degrees.", "texture self._vertices1 = None # the coords in screen coordinates (raw) self._vertices2 =", "upsilon phi chi psi * omega Note: In case one needs a character", "script=0, bold=False, italic=False): self.script = script self.bold = bold self.italic = italic def", "z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) # prepare for next glyph x1", "fset(self, value): self._x = value return locals() @PropWithDraw def y(): \"\"\"Get/Set the y", "self.dy = (1-smaller) * self.sizey if style.script: # super or subscript self.skewFactor *=", "the text in degrees. 
\"\"\" def fget(self): return self._angle def fset(self, value): if", "init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor", "self._vertices1 is None: return vertices = self._vertices1.copy() # scale text according to global", "self.position.size # determine whether the text is vertical or horizontal halign, valign =", "charactes bold. * hello\\_there, a backslash escapes, thus keeping the _^ or \\", "1: # sub script self.dy = (1-smaller) * self.sizey if style.script: # super", "texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing for position", "GetFont(fontname) Get a font instance. If that font was created earlier, that font", "font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare texCords = self._texCords#.copy() vertices", "(inherits from box) with text inside. The fontname can be 'mono', 'sans' or", "= [] self._xglyph = Glyph(font, 'X', self._size) tt = self._text # transform greek", "glyph, supply an int or character.') # do we have that char? 
if", "self.dy = 0.0 # normal script if style.script == 1: # sub script", "self._z = x, y, z # for internal use self._screenx, self._screeny, self._screenz =", "else: raise ValueError('To create a glyph, supply an int or character.') # do", "g.sizex y2 = g.sizey #y2 = y1 - g.sizey dy = g.dy #", "represent texture coordinates \"\"\" # the font.info contains # - a string of", "factor to handle italics correctly self.skewFactor = 0.0 if style.italic: self.skewFactor = 0.5", "return self._z def fset(self, value): self._z = value return locals() def OnDraw(self): #", "upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A,", "style.script: # super or subscript self.skewFactor *= smaller self.sizex = self.sizex * smaller", "ac not in info.charcodes:#ac < 32 or ac > 255: print \"Warning: Cannot", "= vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] - anchory # apply angle if", "= char else: raise ValueError('To create a glyph, supply an int or character.')", "characters that were given without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt =", "'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5,", "\"\"\" Invalidate this object, such that the text is recompiled the next time", "anchory) elif isinstance(self, Label): angle = -self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1]", "gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager:", "symbols There are several escape sequences for (mathematical) characters that can be inserted", "Style to set for i in range(len(tt)): c = tt[i] if escape: g", "for example 
the AxisLabel class) if vertices is not None and len(vertices): self._deltax", "characters. \"\"\" def __init__(self, info): TextureObject.__init__(self, 2) # store font information self.info =", "holds the texture that contains all the characters. \"\"\" def __init__(self, info): TextureObject.__init__(self,", "text is recompiled the next time it is drawn. \"\"\" self._texCords = None", "take sub/super scripting into account. escape = False styles = [] style =", "= text # Set and check fontname if fontname is None: fontname =", "!= self._size: self._size = value self._Invalidate() # force recalculation self.Draw() return locals() @Property", "at a certain angle. Formatting ---------- Text can be formatted using the following", "force recalculation self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set the font type by", "y direction (for sub/super scripts) * width specifies how much space there should", "angle into account) self._deltax = 0,0 self._deltay = 0,0 # store text self._text", "coordinates \"\"\" # the font.info contains # - a string of charcodes #", "tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for the Text wobject and", "0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] *", "visvis from visvis import ssdf from visvis.pypoints import Pointset # from visvis.core.baseTexture import", "self._valign def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value =", "'bottom' * -1, 0, 1 \"\"\" def fget(self): return self._valign def fset(self, value):", "(this is to set the size) self.position = 10,10,100,16 # we need to", "- anchorx vertices[:,1] = vertices[:,1] - anchory # apply angle if angle !=", "other): # allow None if other is None: return self # set script", "sizex and sizey represent the size of the glyph. 
* dy represents the", "= self.script # done return MiniStyle( script, self.bold or other.bold, self.italic or other.italic", "instance from figure f = self.GetFigure() if not f: return font = f._fontManager.GetFont(self._fontname)", "in tmp: raise ValueError('Invalid value for halign.') value = tmp[value.lower()] else: raise ValueError('halign", "self.sizex = infoSize[ac,0] * factor self.sizey = infoSize[ac,1] * factor self.width = float(infoWidth[ac])", "- u00bf alphabet * u00c0 - u037f latin * u0380 - u03ff greek", "size / float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey = infoSize[ac,1] * factor", "do we have that char? if ac not in info.charcodes:#ac < 32 or", "if value != self._halign: self._halign = value self._vertices2 = None # force recalculation", "'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1,", "store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) # make", "tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt = tt.replace(r'\\\\',", "the Text wobject and Label wibject. fontname may be 'mono', 'sans', 'serif' or", "u03ff greek * u2000 - u23ff symbols There are several escape sequences for", "a character that is not in this list, one can always look up", "instance of this class for each figure/context. \"\"\" def __init__(self): # load font", "fontname) # no edge self.edgeWidth = 0 # init position (this is to", "Label wibject. 
fontname may be 'mono', 'sans', 'serif' or None, in which case", "must be an int or string.') value = int(value>0) - int(value<0) if value", "vertices = Pointset(3) texCords = Pointset(2) for g in glyphs: x2 = x1", "self._screeny, self._screenz = tuple(tmp) # make integer (to prevent glitchy behaviour), but not", "from neighbouring characters. Additionally, it's now # possible to mix bold and italic", "by its name. \"\"\" def fget(self): return self._fontname def fset(self, value): if value", "distributed under the terms of the (new) BSD License. # The full license", "size of the glyph. * dy represents the offset in y direction (for", "recompiled the next time it is drawn. \"\"\" self._texCords = None self._vertices1 =", "a string of characters. The text has a certain position in the scene.", "in label if isinstance(self, Label): w,h = self.position.size # determine whether the text", "other is None: return self # set script script = other.script if script", "def fget(self): return self._x def fset(self, value): self._x = value return locals() @PropWithDraw", "= tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt =", "def fset(self, value): if value != self._charSpacing: self._charSpacing = value self._Invalidate() # force", "None # force recalculation self.Draw() return locals() def _Compile(self): \"\"\" Create a series", "is now realized by printing it skewed rather using the # italic glyphs.", "by printing it skewed rather using the # italic glyphs. 
The reason is", "factor = size / float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey = infoSize[ac,1]", "visvis.pypoints import Pointset # from visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject from", "fget(self): return self._halign def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring):", "italic=False) Class that represents the style of characters (sub/super script, bold, and italic.", "_UploadTexture(self, data, *args): \"\"\" Overload to make it an alpha map. \"\"\" #", "'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3,", "mu * nu xi omicron pi * rho varsigma sigma tau * upsilon", "if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not", "is to set the size) self.position = 10,10,100,16 # we need to know", "# dito, but corrected for angle and alignment # relative position of edges", "the size of the text. \"\"\" def fget(self): return self._size def fset(self, value):", "can be 'mono', 'sans' or 'serif'. If not given, the vv.settings.defaultFontName is used.", "for i in range(len(tt)): c = tt[i] if escape: g = Glyph(font, c,", "of the text in degrees. 
\"\"\" def fget(self): return self._angle def fset(self, value):", "raise ValueError('Invalid value for valign.') value = tmp[value.lower()] else: raise ValueError('valign must be", "vertices # calculate edges (used by for example the AxisLabel class) if vertices", "other chars tt = tt.replace(r'\\\\', '\\t') # double backslashes do not escape for", "vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) # prepare for next", "* hello^2 or hello^{there}, makes one or more charactes superscript. * hello_2 or", "char, size=12, styles=None): # unwind the style for this glyph self.style = MiniStyle()", "text size property vertices *= fig._relativeFontSize # obtain dimensions if len(vertices): x1, x2", "italic glyphs. The reason is that when using the texture one would #", "return self._fontname def fset(self, value): if value != self._fontname: self._fontname = value self._Invalidate()", "c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape", "greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C,", "hello^2 or hello^{there}, makes one or more charactes superscript. * hello_2 or hello_{there},", "texture one would # see artifacts from neighbouring characters. Additionally, it's now #", "not in info.charcodes:#ac < 32 or ac > 255: print \"Warning: Cannot draw", "# Define skew factor to handle italics correctly self.skewFactor = 0.0 if style.italic:", "= { # upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397,", "can make any supported # unicode character italic. 
# if style.italic and ac", "fontname self._color = (0,0,0) self._angle = 0 self._halign = -1 self._valign = 0", "sub/super scripts) * width specifies how much space there should be before the", "makes one or more charactes subscript. * hell\\io or hell\\i{ohoo}, makes one or", "no edge self.edgeWidth = 0 # init position (this is to set the", "available for the following unicode sets: * u0020 - u003f numbers * u0040", "-45: halign, valign = valign, halign # set anchor y if valign <", "value in tmp: raise ValueError('Invalid value for halign.') value = tmp[value.lower()] else: raise", "# disable texture and clean up if x or y or z: gl.glPopMatrix()", "that can be inserted using the backslash (for example '\\infty'). People familiar with", "= value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp: raise ValueError('Invalid", "self.font.info # get asci code and check it if isinstance(char, basestring): ac =", "an alpha map. \"\"\" # Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0]", "value self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set the", "# The full license can be found in 'license.txt'. 
\"\"\" Module textRender For", "Letters from the greek alfabet can be inserted in the same way (By", "= y2 else: anchory = y1 + (y2-y1)/2.0 # apply anchor angle =", "__init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no edge self.edgeWidth", "'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, #", "use some char that is no string tt = tt.replace('\\b', '\\x07') # build", "origin 's # - an array of size's # - fontsize of the", "# bold text infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b) # Find", "int): pass elif isinstance(value, basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if", "locals() @Property def fontName(): \"\"\"Get/Set the font type by its name. \"\"\" def", "* u0040 - u00bf alphabet * u00c0 - u037f latin * u0380 -", "screen, given the size factor = size / float(info.fontsize) self.sizex = infoSize[ac,0] *", "charactes subscript. * hell\\io or hell\\i{ohoo}, makes one or more charactes italic. *", "0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 =", "# prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex and texture", "Manager of fonts. 
There should be only one instance of this class for", "'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b,", "is no string tt = tt.replace('\\b', '\\x07') # build list of glyphs, take", "clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture", "info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b) #", "coding: utf-8 -*- # Copyright (C) 2012, <NAME> # # Visvis is distributed", "vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): # Translate if", "= font info = self.font.info # get asci code and check it if", "= ( info.size_b, info.origin_b, info.width_b) # Find position in texture, normalized to texture", "of characters. The text has a certain position in the scene. The fontname", "* times cdot pm * oplus ominus otimes oslash Letters from the greek", "not fig: return font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare texCords", "smaller self.sizey = self.sizey * smaller self.width = self.width * smaller#- self.sizex *", "which are both able to produce a single line of text oriented at", "'top', 'center', 'bottom' * -1, 0, 1 \"\"\" def fget(self): return self._valign def", "represent the size of the glyph. * dy represents the offset in y", "vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] + anchory # store self._vertices2", "a certain position in the scene. The fontname can be 'mono', 'sans' or", "# transform other chars tt = tt.replace(r'\\\\', '\\t') # double backslashes do not", "ord(char) ac = 32 # make space # default infoSize, infoOrigin, infoWidth =", "superscript. * hello_2 or hello_{there}, makes one or more charactes subscript. 
* hell\\io", "\"\"\" def fget(self): return self._angle def fset(self, value): if value != self._angle: self._angle", "elif self._valign > 0: anchory = y2 else: anchory = y1 + (y2-y1)/2.0", "text='', fontname=None): # init drawing data self._texCords = None # coords in the", "vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) # prepare for next glyph x1 =", "or hell\\b{ohoo}, makes one or more charactes bold. * hello\\_there, a backslash escapes,", "unicode sets: * u0020 - u003f numbers * u0040 - u00bf alphabet *", "w/2.0 # apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] + anchory", "= tt.replace('\\t', r'\\\\') # get italic and bold modifiers tt = tt.replace('\\i', '\\x06')", "Wobject from visvis.core.misc import Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor # from", "super or subscript self.skewFactor *= smaller self.sizex = self.sizex * smaller self.sizey =", "this list, one can always look up its unicode value and use that", "self._angle: self._angle = value self._vertices2 = None # force recalculation self.Draw() return locals()", "relative vertices are calculated, which are then corrected for angle and alignment in", "# possible to mix bold and italic text, and one can make any", "if self._halign < 0: anchorx = x1 elif self._halign > 0: anchorx =", "self._halign def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value =", "characters that can be inserted using the backslash (for example '\\infty'). People familiar", "used. \"\"\" def __init__(self, text='', fontname=None): # init drawing data self._texCords = None", "be 'mono', 'sans' or 'serif'. If not given, the vv.settings.defaultFontName is used. 
\"\"\"", "and tt[i+1] in ['_^\\x06\\x07']: escape = True else: # create glyph (with new", "value return locals() @PropWithDraw def z(): \"\"\"Get/Set the z position of the text.", "escape = False elif c=='{': # Append style to the list if style:", "gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None) A wobject", "getColor(value,'setting textColor') if value != self._color: self._color = value self.Draw() return locals() @Property", "value != self._fontname: self._fontname = value self._Invalidate() # force recalculation self.Draw() return locals()", "(for example '\\infty'). People familiar with Latex know what they do: * Re", "= self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY)", "= self.font.info # get asci code and check it if isinstance(char, basestring): ac", "np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0,", "(0,0,0) self._angle = 0 self._halign = -1 self._valign = 0 self._charSpacing = 1", "ac in info.charcodes_i: # # italic text # infoSize, infoOrigin, infoWidth = (", "degrees. \"\"\" def fget(self): return self._angle def fset(self, value): if value != self._angle:", "x(): \"\"\"Get/Set the x position of the text. \"\"\" def fget(self): return self._x", "= infoSize[ac,0] * factor self.sizey = infoSize[ac,1] * factor self.width = float(infoWidth[ac]) *", "uppercase letter, the corresponding upper case greek letter is inserted): * alpha beta", "% tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for the Text wobject", "now realized by printing it skewed rather using the # italic glyphs. 
The", "< 0: anchorx = x1 elif self._halign > 0: anchorx = x2 else:", "epsilon zeta eta theta * iota kappa lambda mu * nu xi omicron", "handle italics correctly self.skewFactor = 0.0 if style.italic: self.skewFactor = 0.5 # calculate", "g.sizey dy = g.dy # append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2,", "char * s1 s2 t1 t2 represent texture coordinates \"\"\" # the font.info", "= infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 =", "character italic. # if style.italic and ac in info.charcodes_i: # # italic text", "alfabet can be inserted in the same way (By starting the name with", "u00bf alphabet * u00c0 - u037f latin * u0380 - u03ff greek *", "visvis. Defines a wibject and a wobject: Label and Text, which are both", "fget(self): return self._color def fset(self, value): value = getColor(value,'setting textColor') if value !=", "'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B,", "* width specifies how much space there should be before the next char", "0, len(vertices)) gl.glFlush() # disable texture and clean up if x or y", "init drawing data self._texCords = None # coords in the font texture self._vertices1", "'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD,", "* self.sizey if style.script: # super or subscript self.skewFactor *= smaller self.sizex =", "= tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt = tt.replace(r'\\\\', '\\t') # double", "edge self.edgeWidth = 
0 # init position (this is to set the size)", "= infoSize[ac,1] * factor self.width = float(infoWidth[ac]) * factor # is spacing? smaller", "string.') value = int(value>0) - int(value<0) if value != self._valign: self._valign = value", "font, char, size=12, styles=None): # unwind the style for this glyph self.style =", "= False styles = [] style = None # Style to set for", "= self._angle if isinstance(self, Text): # Text is a wobject, so must be", "= vertices def _PositionText(self, event=None): \"\"\" The name is ment as a verb.", "apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] + anchory # store", "coordinates self._x, self._y, self._z = x, y, z # for internal use self._screenx,", "['_^\\x06\\x07']: escape = True else: # create glyph (with new style (or not))", "anchorx = x2 else: anchorx = x1 + (x2-x1)/2.0 # if self._valign <", "forall * leq geq approx approxeq ne in * leftarrow uparrow rightarrow downarrow", "compiling the text. script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False,", "name.') # more properties self._size = 9 self._fontname = fontname self._color = (0,0,0)", "'down' * 'top', 'center', 'bottom' * -1, 0, 1 \"\"\" def fget(self): return", "we display in italic or bold? # Note: italic is now realized by", "text self._text = text # Set and check fontname if fontname is None:", "these Glyphs the textureCords in the font texture can be calculated. Also the", "styles+[style]) glyphs.append( g ) style = None # build arrays with vertices and", "font data path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts", "tt = tt.replace('\\i', '\\x06') # just use some char that is no string", "'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3,", "or hello^{there}, makes one or more charactes superscript. 
* hello_2 or hello_{there}, makes", "or horizontal halign, valign = self._halign, self._valign if self._angle > 135 or self._angle", "= 0 # init position (this is to set the size) self.position =", "i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape = True else: # create glyph (with", "info.origin, info.width # should and can we display in italic or bold? #", "\"\"\" def fget(self): return self._valign def fset(self, value): if isinstance(value, int): pass elif", "pi * rho varsigma sigma tau * upsilon phi chi psi * omega", "if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1", "bold. * hello\\_there, a backslash escapes, thus keeping the _^ or \\ after", "the size) self.position = 10,10,100,16 # we need to know about position changes", "vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and", "if valign < 0: anchory = 0 elif valign > 0: anchory =", "prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex and texture array", "# load font data path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list", "vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z)", "gl.glTranslatef(x, y, z) # make sure the glyphs are created if self._vertices1 is", "create glyph (with new style (or not)) g = Glyph(font, c, self._size, styles+[style])", "alpha map. \"\"\" # Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] =", "using the backslash (for example '\\infty'). 
People familiar with Latex know what they", "9 self._fontname = fontname self._color = (0,0,0) self._angle = 0 self._halign = -1", "self._Invalidate() # force recalculation self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set the color", "locals() @Property def textSpacing(): \"\"\"Get/Set the spacing between characters. \"\"\" def fget(self): return", "is used. \"\"\" def __init__(self, text='', fontname=None): # init drawing data self._texCords =", "_DrawText(self, x=0, y=0, z=0): # Translate if x or y or z: gl.glPushMatrix()", "= None # coords in the font texture self._vertices1 = None # the", "gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D,", "\"\"\" Create a series of glyphs from the given text. From these Glyphs", "ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts = {} def GetFont(self, fontname): \"\"\"", "bold=False, italic=False) Class that represents the style of characters (sub/super script, bold, and", "'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296,", "int iint iiint forall * leq geq approx approxeq ne in * leftarrow", "dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2,", "or y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure the glyphs", "example the AxisLabel class) if vertices is not None and len(vertices): self._deltax =", "c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get", "rendering the proper part from the 
texture stored in the Font object. *", "= tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt =", "make integer (to prevent glitchy behaviour), but not z! self._screenx = int(self._screenx+0.5) self._screeny", "self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush()", "x1 elif self._halign > 0: anchorx = x2 else: anchorx = x1 +", "visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box", "= getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts = {}", "valign, -halign elif self._angle < -45: halign, valign = valign, halign # set", "one instance of this class for each figure/context. \"\"\" def __init__(self): # load", "self._color: self._color = value self.Draw() return locals() @Property def halign(): \"\"\"Get/Set the horizontal", "'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260,", "False elif c=='{': # Append style to the list if style: styles.append(style) style", "is distributed under the terms of the (new) BSD License. # The full", "be formatted using the following constructs (which can be mixed): * hello^2 or", "self._valign: self._valign = value self._vertices2 = None # force recalculation self.Draw() return locals()", "z # for internal use self._screenx, self._screeny, self._screenz = 0, 0, 0 @PropWithDraw", "should and can we display in italic or bold? # Note: italic is", "locals() @PropWithDraw def z(): \"\"\"Get/Set the z position of the text. 
\"\"\" def", "if halign < 0: anchorx = 0 elif halign > 0: anchorx =", "by for example the AxisLabel class) if vertices is not None and len(vertices):", "= self._vertices2#.copy() # init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) #", "@PropWithDraw def x(): \"\"\"Get/Set the x position of the text. \"\"\" def fget(self):", "isinstance(self, Label): w,h = self.position.size # determine whether the text is vertical or", "def x(): \"\"\"Get/Set the x position of the text. \"\"\" def fget(self): return", "angle. Formatting ---------- Text can be formatted using the following constructs (which can", "Overload to make it an alpha map. \"\"\" # Add lumincance channel data2", "u0020 - u003f numbers * u0040 - u00bf alphabet * u00c0 - u037f", "information self.info = info # set data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\"", "self.sizey = infoSize[ac,1] * factor self.width = float(infoWidth[ac]) * factor # is spacing?", "anchorx = 0 elif halign > 0: anchorx = w else: anchorx =", "the given text. From these Glyphs the textureCords in the font texture can", "self._fontname = value self._Invalidate() # force recalculation self.Draw() return locals() @Property def textColor():", "horizontal alignment. 
Specify as: * 'left', 'center', 'right' * -1, 0, 1 \"\"\"", "# - a string of charcodes # - an array of origin 's", "y1, z = 0, 0, 0 vertices = Pointset(3) texCords = Pointset(2) for", "self.script = script self.bold = bold self.italic = italic def __add__(self, other): #", "'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217,", "'serif' or None, in which case the vv.settings.defaultFontName is used. \"\"\" def __init__(self,", "value = tmp[value.lower()] else: raise ValueError('halign must be an int or string.') value", "tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt = tt.replace(r'\\\\', '\\t') #", "= w/2.0 # apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] +", "locals() @Property # Smart draw def textAngle(): \"\"\"Get/Set the angle of the text", "self.skewFactor *= smaller self.sizex = self.sizex * smaller self.sizey = self.sizey * smaller", "value): self._y = value return locals() @PropWithDraw def z(): \"\"\"Get/Set the z position", "import ssdf from visvis.pypoints import Pointset # from visvis.core.baseTexture import TextureObject from visvis.core.base", "printing it skewed rather using the # italic glyphs. 
The reason is that", "= x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp,", "store text self._text = text # Set and check fontname if fontname is", "self._y def fset(self, value): self._y = value return locals() @PropWithDraw def z(): \"\"\"Get/Set", "'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0,", "or string.') value = int(value>0) - int(value<0) if value != self._halign: self._halign =", "c=='{': # Append style to the list if style: styles.append(style) style = None", "be 'mono', 'sans', 'serif' or None, in which case the vv.settings.defaultFontName is used.", "or None, in which case the vv.settings.defaultFontName is used. \"\"\" def __init__(self, text='',", "for this glyph self.style = MiniStyle() if styles: for style in styles: self.style", "# more properties self._size = 9 self._fontname = fontname self._color = (0,0,0) self._angle", "visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject from visvis.core.misc import Property, PropWithDraw from", "be before the next char * s1 s2 t1 t2 represent texture coordinates", "(which can be mixed): * hello^2 or hello^{there}, makes one or more charactes", "in * leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow * leftceil", "return locals() def _Compile(self): \"\"\" Create a series of glyphs from the given", "def __init__(self, info): TextureObject.__init__(self, 2) # store font information self.info = info #", "Cannot draw character %i! 
\" % ord(char) ac = 32 # make space", "/ tmp # Define skew factor to handle italics correctly self.skewFactor = 0.0", "fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] =", "raise ValueError('Invalid value for halign.') value = tmp[value.lower()] else: raise ValueError('halign must be", "upper case greek letter is inserted): * alpha beta gamma delta * epsilon", "= value self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set", "'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower", "= 32 # make space # default infoSize, infoOrigin, infoWidth = info.size, info.origin,", "as np import visvis from visvis import ssdf from visvis.pypoints import Pointset #", "0,0 y1, y2 = 0, self._xglyph.sizey # set anchor if self._halign < 0:", "def fget(self): return self._angle def fset(self, value): if value != self._angle: self._angle =", "glyph (with new style (or not)) g = Glyph(font, c, self._size, styles+[style]) glyphs.append(", "# unicode character italic. 
# if style.italic and ac in info.charcodes_i: # #", "self.position = 10,10,100,16 # we need to know about position changes to update", "# store text self._text = text # Set and check fontname if fontname", "self._halign = value self._vertices2 = None # force recalculation self.Draw() return locals() @Property", "is vertical or horizontal halign, valign = self._halign, self._valign if self._angle > 135", "object, such that the text is recompiled the next time it is drawn.", "\"\"\" # Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1]", "from visvis import ssdf from visvis.pypoints import Pointset # from visvis.core.baseTexture import TextureObject", "created earlier, that font is returned, otherwise it is created and stored for", "fontname) # store coordinates self._x, self._y, self._z = x, y, z # for", "@PropWithDraw def y(): \"\"\"Get/Set the y position of the text. \"\"\" def fget(self):", "(y2-y1)/2.0 # apply anchor angle = self._angle if isinstance(self, Text): # Text is", "* 'top', 'center', 'bottom' * -1, 0, 1 \"\"\" def fget(self): return self._valign", "isinstance(value, basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value in", "italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for the Text", "font instance from figure f = self.GetFigure() if not f: return font =", "corrected for angle and alignment in _PositionText(). -> Produces _vertices1 (and is called", "text. From these Glyphs the textureCords in the font texture can be calculated.", "the name with an uppercase letter, the corresponding upper case greek letter is", "string of charcodes # - an array of origin 's # - an", "\"\"\" FontManager() Manager of fonts. 
There should be only one instance of this", "= 0,0 y1, y2 = 0, self._xglyph.sizey # set anchor if self._halign <", "len(vertices)) gl.glFlush() # disable texture and clean up if x or y or", "def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower()", "# relative position of edges in pixels. (taking angle into account) self._deltax =", "elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else: raise", "that font is returned, otherwise it is created and stored for reuse. \"\"\"", "vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] - anchory # apply angle", "using the following constructs (which can be mixed): * hello^2 or hello^{there}, makes", "glyph. * dy represents the offset in y direction (for sub/super scripts) *", "* sin_angle + vertices[:,1] * cos_angle) # Move anchor in label if isinstance(self,", "self.font = font info = self.font.info # get asci code and check it", "self._size * g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2,", "gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of fonts.", "style = self.style # store font self.font = font info = self.font.info #", "x, y, z # for internal use self._screenx, self._screeny, self._screenz = 0, 0,", "0: anchory = y2 else: anchory = y1 + (y2-y1)/2.0 # apply anchor", "inside. The fontname can be 'mono', 'sans' or 'serif'. 
If not given, the", "self.t1, self.t2 = (y1) / tmp, (y2-1) / tmp # Define skew factor", "!= self._color: self._color = value self.Draw() return locals() @Property def halign(): \"\"\"Get/Set the", "if self._angle > 135 or self._angle < -135: halign, valign = -halign, valign", "in info.charcodes:#ac < 32 or ac > 255: print \"Warning: Cannot draw character", "self.sizex * smaller self.sizey = self.sizey * smaller self.width = self.width * smaller#-", "pass elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp:", "!= self._halign: self._halign = value self._vertices2 = None # force recalculation self.Draw() return", "Visvis is distributed under the terms of the (new) BSD License. # The", "0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set the x position of the text.", "= None # dito, but corrected for angle and alignment # relative position", "2012, <NAME> # # Visvis is distributed under the terms of the (new)", "of glyphs from the given text. 
From these Glyphs the textureCords in the", "unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get italic and bold modifiers tt =", "know what they do: * Re Im null infty * int iint iiint", "Text can be formatted using the following constructs (which can be mixed): *", "lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data shape", "i in range(len(tt)): c = tt[i] if escape: g = Glyph(font, c, self._size,", "x=0, y=0, z=0): # Translate if x or y or z: gl.glPushMatrix() gl.glTranslatef(x,", "self._PositionText() # get font instance from figure fig = self.GetFigure() if not fig:", "self._texCords = None self._vertices1 = None self._vertices2 = None @Property # Smart draw", "tt.replace('\\i', '\\x06') # just use some char that is no string tt =", "# apply angle if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0)", "sub/super scripting into account. escape = False styles = [] style = None", "fig = self.GetFigure() # get vertices if self._vertices1 is None: return vertices =", "self._vertices2 = None # dito, but corrected for angle and alignment # relative", "tmp[value.lower()] else: raise ValueError('valign must be an int or string.') value = int(value>0)", "is None or self._texCords is None: self._Compile() if self._vertices2 is None: self._PositionText() #", "the corresponding upper case greek letter is inserted): * alpha beta gamma delta", "else: # create glyph (with new style (or not)) g = Glyph(font, c,", "gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP)", "# coords in the font texture self._vertices1 = None # the coords in", "not value in tmp: raise ValueError('Invalid value for valign.') value = tmp[value.lower()] else:", "= self.script, self.bold, self.italic 
return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class BaseText(object):", "Glyph(font, 'X', self._size) tt = self._text # transform greek characters that were given", "# obtain dimensions if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2", "g.width + self._charSpacing # store self._texCords = texCords self._vertices1 = vertices def _PositionText(self,", "-self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] - anchory # apply", "angle and alignment # relative position of edges in pixels. (taking angle into", "if style.italic: self.skewFactor = 0.5 # calculate width on screen, given the size", "texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing for position skew =", "spacing? smaller = 0.6 self.dy = 0.0 # normal script if style.script ==", "# - an array of origin 's # - an array of size's", "int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent,", "BaseText.__init__(self, text, fontname) # no edge self.edgeWidth = 0 # init position (this", "example '\\infty'). 
People familiar with Latex know what they do: * Re Im", "# store font self.font = font info = self.font.info # get asci code", "gl import OpenGL.GLU as glu import os import numpy as np import visvis", "figure f = self.GetFigure() if not f: return font = f._fontManager.GetFont(self._fontname) # clear", "> 0: anchorx = x2 else: anchorx = x1 + (x2-x1)/2.0 # if", "parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no edge self.edgeWidth =", "+ self._charSpacing # store self._texCords = texCords self._vertices1 = vertices def _PositionText(self, event=None):", "@Property # Smart draw def textAngle(): \"\"\"Get/Set the angle of the text in", "visvis.core.misc import Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras import", "(to prevent glitchy behaviour), but not z! self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5)", "import Box escapes = { # upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394,", "is called when that is None) \"\"\" # make invalid first self._Invalidate() #", "self.sizey if style.script: # super or subscript self.skewFactor *= smaller self.sizex = self.sizex", "stored for reuse. \"\"\" if fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname):", "= vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0 y1, y2 = 0, self._xglyph.sizey", "data, *args): \"\"\" Overload to make it an alpha map. \"\"\" # Add", "append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy,", "\"\"\" def fget(self): return self._halign def fset(self, value): if isinstance(value, int): pass elif", "the data array def __init__(self, font, char, size=12, styles=None): # unwind the style", "series of glyphs from the given text. 
From these Glyphs the textureCords in", "= x, y, z # for internal use self._screenx, self._screeny, self._screenz = 0,", "makes one or more charactes italic. * hell\\bo or hell\\b{ohoo}, makes one or", "in texture, normalized to texture coordinates x1 = infoOrigin[ac,0] x2 = x1 +", "texture and clean up if x or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY)", "'fonts.ssdf')) # list of fonts self.fonts = {} def GetFont(self, fontname): \"\"\" GetFont(fontname)", "parent) BaseText.__init__(self, text, fontname) # store coordinates self._x, self._y, self._z = x, y,", "omega Note: In case one needs a character that is not in this", "\"\"\" Label(parent, text='', fontname=None) A wibject (inherits from box) with text inside. The", "info.size_b, info.origin_b, info.width_b) # Find position in texture, normalized to texture coordinates x1", ") def __repr__(self): tmp = self.script, self.bold, self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>'", "= 0,0 self._deltay = 0,0 # store text self._text = text # Set", "longer names are replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject):", "= y1 + (y2-y1)/2.0 # apply anchor angle = self._angle if isinstance(self, Text):", "if escape: g = Glyph(font, c, self._size, styles) glyphs.append( g ) escape =", "y=0, z=0, fontname=None) A wobject representing a string of characters. The text has", "# build list of glyphs, take sub/super scripting into account. 
escape = False", "cos_angle - vertices[:,1] * sin_angle, vertices[:,0] * sin_angle + vertices[:,1] * cos_angle) #", "isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise ValueError('Invalid", "gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture and clean up", "parent) BaseText.__init__(self, text, fontname) # no edge self.edgeWidth = 0 # init position", "sets: * u0020 - u003f numbers * u0040 - u00bf alphabet * u00c0", "def _UploadTexture(self, data, *args): \"\"\" Overload to make it an alpha map. \"\"\"", "isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value", "= 0,0 # store text self._text = text # Set and check fontname", "- u23ff symbols There are several escape sequences for (mathematical) characters that can", "backslashes do not escape for c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt", "verb. The vertices1 are corrected for angle and alignment. -> produces _vertices2 from", "halign, valign = valign, -halign elif self._angle < -45: halign, valign = valign,", "\\ after it. Special characters ------------------ Characters are available for the following unicode", "not in ['mono', 'sans', 'serif']: raise ValueError('Invalid font name.') # more properties self._size", "self._vertices2 = None # force recalculation self.Draw() return locals() @Property def valign(): \"\"\"Get/Set", "# italic glyphs. 
The reason is that when using the texture one would", "self._text # transform greek characters that were given without double backslash tt =", "MiniStyle(2) elif c=='_': style = MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07':", "value): if value != self._size: self._size = value self._Invalidate() # force recalculation self.Draw()", "def __init__(self, text='', fontname=None): # init drawing data self._texCords = None # coords", "character %i! \" % ord(char) ac = 32 # make space # default", "MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\'", "@Property def halign(): \"\"\"Get/Set the horizontal alignment. Specify as: * 'left', 'center', 'right'", "self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data)", "called when that is None) \"\"\" # make invalid first self._Invalidate() # get", "elif self._angle < -45: halign, valign = valign, halign # set anchor y", "def textColor(): \"\"\"Get/Set the color of the text. \"\"\" def fget(self): return self._color", "y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure the glyphs are", "italic or bold? # Note: italic is now realized by printing it skewed", "= tmp return tmp else: raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font,", "None: return vertices = self._vertices1.copy() # scale text according to global text size", "or more charactes bold. 
* hello\\_there, a backslash escapes, thus keeping the _^", "with vertices and coordinates x1, y1, z = 0, 0, 0 vertices =", "* (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents the style", "following unicode sets: * u0020 - u003f numbers * u0040 - u00bf alphabet", "self.bold or other.bold, self.italic or other.italic ) def __repr__(self): tmp = self.script, self.bold,", "wobject representing a string of characters. The text has a certain position in", "escapes, thus keeping the _^ or \\ after it. Special characters ------------------ Characters", "# sub script self.dy = (1-smaller) * self.sizey if style.script: # super or", "# make invalid first self._Invalidate() # get font instance from figure f =", "after it. Special characters ------------------ Characters are available for the following unicode sets:", "self.bold = bold self.italic = italic def __add__(self, other): # allow None if", "-> Produces _vertices1 (and is called when that is None) \"\"\" # make", "is inserted): * alpha beta gamma delta * epsilon zeta eta theta *", "-(vertices[:,1] - anchory) elif isinstance(self, Label): angle = -self._angle vertices[:,0] = vertices[:,0] -", "= value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise ValueError('Invalid value for valign.')", "From these Glyphs the textureCords in the font texture can be calculated. Also", "case one needs a character that is not in this list, one can", "set anchor x if halign < 0: anchorx = 0 elif halign >", "@Property def valign(): \"\"\"Get/Set the vertical alignment. Specify as: * 'up', 'center', 'down'", "_PositionText(). 
-> Produces _vertices1 (and is called when that is None) \"\"\" #", "whether the text is vertical or horizontal halign, valign = self._halign, self._valign if", "class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None) A wobject representing", "'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort", "build list of glyphs, take sub/super scripting into account. escape = False styles", "'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek", "\"\"\" def fget(self): return self._y def fset(self, value): self._y = value return locals()", "anchorx = x1 elif self._halign > 0: anchorx = x2 else: anchorx =", "# force recalculation self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set the color of", "else: anchorx = x1 + (x2-x1)/2.0 # if self._valign < 0: anchory =", "y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='',", "styles=None): # unwind the style for this glyph self.style = MiniStyle() if styles:", "what they do: * Re Im null infty * int iint iiint forall", "the font.info contains # - a string of charcodes # - an array", "load font data path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of", "unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta']))", "y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, 
fontname) # store coordinates self._x, self._y,", "x2 else: anchorx = x1 + (x2-x1)/2.0 # if self._valign < 0: anchory", "in range(len(tt)): c = tt[i] if escape: g = Glyph(font, c, self._size, styles)", "0,0 self._deltay = 0,0 # store text self._text = text # Set and", "# see artifacts from neighbouring characters. Additionally, it's now # possible to mix", "'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF,", "subscript self.skewFactor *= smaller self.sizex = self.sizex * smaller self.sizey = self.sizey *", "prevent glitchy behaviour), but not z! self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def", "they do: * Re Im null infty * int iint iiint forall *", "alignment # relative position of edges in pixels. (taking angle into account) self._deltax", "u00c0 - u037f latin * u0380 - u03ff greek * u2000 - u23ff", "infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b) # Find position in texture,", "a wibject and a wobject: Label and Text, which are both able to", "\"\"\"Get/Set the size of the text. \"\"\" def fget(self): return self._size def fset(self,", "vertices is not None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(),", "be inserted in the same way (By starting the name with an uppercase", "* factor # is spacing? smaller = 0.6 self.dy = 0.0 # normal", "\"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None) A wobject representing a string of", "Set and check fontname if fontname is None: fontname = visvis.settings.defaultFontName fontname =", "# transform greek characters that were given without double backslash tt = tt.replace('\\alpha',", "c=='^': style = MiniStyle(2) elif c=='_': style = MiniStyle(1) elif c=='\\x06': style =", "for reuse. 
\"\"\" if fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp", "self.italic or other.italic ) def __repr__(self): tmp = self.script, self.bold, self.italic return '<MiniStyle", "0 elif halign > 0: anchorx = w else: anchorx = w/2.0 #", "self.italic = italic def __add__(self, other): # allow None if other is None:", "that when using the texture one would # see artifacts from neighbouring characters.", "be calculated. Also the relative vertices are calculated, which are then corrected for", "from visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject from visvis.core.misc import Property, PropWithDraw", "g.dy # append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2)", "else: x1, x2 = 0,0 y1, y2 = 0, self._xglyph.sizey # set anchor", "\"\"\" # the font.info contains # - a string of charcodes # -", "for style in styles: self.style += style style = self.style # store font", "double backslashes do not escape for c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c]))", "1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False): self.script = script self.bold =", "factor self.sizey = infoSize[ac,1] * factor self.width = float(infoWidth[ac]) * factor # is", "characters (sub/super script, bold, and italic. Used when compiling the text. script =", "gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of fonts. There should be only one", "and alignment in _PositionText(). -> Produces _vertices1 (and is called when that is", "set data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload to make it an", "tmp # Define skew factor to handle italics correctly self.skewFactor = 0.0 if", "if ac not in info.charcodes:#ac < 32 or ac > 255: print \"Warning:", "the style of characters (sub/super script, bold, and italic. 
Used when compiling the", "wibject (inherits from box) with text inside. The fontname can be 'mono', 'sans'", "'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c,", "valign < 0: anchory = 0 elif valign > 0: anchory = h", "0.0 # normal script if style.script == 1: # sub script self.dy =", "info.width_b) # Find position in texture, normalized to texture coordinates x1 = infoOrigin[ac,0]", "vertices[:,1] = vertices[:,1] - anchory # apply angle if angle != 0.0: cos_angle", "import Wobject from visvis.core.misc import Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor #", "of the font in the data array def __init__(self, font, char, size=12, styles=None):", "= None # the coords in screen coordinates (raw) self._vertices2 = None #", "text # infoSize, infoOrigin, infoWidth = ( # info.size_i, info.origin_i, info.width_i) if style.bold", "needs a character that is not in this list, one can always look", "text='', fontname=None) A wibject (inherits from box) with text inside. The fontname can", "gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0],", "== 1: # sub script self.dy = (1-smaller) * self.sizey if style.script: #", "smaller self.sizex = self.sizex * smaller self.sizey = self.sizey * smaller self.width =", "value in tmp: raise ValueError('Invalid value for valign.') value = tmp[value.lower()] else: raise", "self._valign > 0: anchory = y2 else: anchory = y1 + (y2-y1)/2.0 #", "= np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D,", "single line of text oriented at a certain angle. 
Formatting ---------- Text can", "into account) self._deltax = 0,0 self._deltay = 0,0 # store text self._text =", "= None # force recalculation self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set the", "if isinstance(self, Label): w,h = self.position.size # determine whether the text is vertical", "char else: raise ValueError('To create a glyph, supply an int or character.') #", "value = tmp[value.lower()] else: raise ValueError('valign must be an int or string.') value", "* smaller self.width = self.width * smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\"", "True else: # create glyph (with new style (or not)) g = Glyph(font,", "z=0, fontname=None) A wobject representing a string of characters. The text has a", "self._fontname def fset(self, value): if value != self._fontname: self._fontname = value self._Invalidate() #", "wobject and Label wibject. fontname may be 'mono', 'sans', 'serif' or None, in", "it's now # possible to mix bold and italic text, and one can", "from visvis.pypoints import Pointset # from visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject", "as a verb. The vertices1 are corrected for angle and alignment. -> produces", "None) \"\"\" # get figure fig = self.GetFigure() # get vertices if self._vertices1", "halign < 0: anchorx = 0 elif halign > 0: anchorx = w", "'\\x07') # build list of glyphs, take sub/super scripting into account. escape =", "self._halign = -1 self._valign = 0 self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate", "return self._halign def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value", "infoOrigin, infoWidth = info.size, info.origin, info.width # should and can we display in", "return locals() @Property def valign(): \"\"\"Get/Set the vertical alignment. Specify as: * 'up',", "but corrected for angle and alignment # relative position of edges in pixels.", "vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self,", "z=0): # Translate if x or y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z)", "sizey represent the size of the glyph. * dy represents the offset in", "= {} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font instance. If that", "self._angle > 45: halign, valign = valign, -halign elif self._angle < -45: halign,", "def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent, text='',", "unichr(escapes['theta'])) # transform other chars tt = tt.replace(r'\\\\', '\\t') # double backslashes do", "text infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b) # Find position in", "correctly self.skewFactor = 0.0 if style.italic: self.skewFactor = 0.5 # calculate width on", "script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False): self.script =", "self.sizex = self.sizex * smaller self.sizey = self.sizey * smaller self.width = self.width", "255 data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, #", "self._x def fset(self, value): self._x = value return locals() @PropWithDraw def y(): \"\"\"Get/Set", "= tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get italic and bold modifiers", "def __init__(self, font, char, size=12, styles=None): # unwind the style for this glyph", "= 0 elif valign > 0: anchory = h else: anchory = h/2.0", "self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set the color of the text. 
\"\"\"", "OnDraw(self): # get screen position and store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx,", "raise ValueError('To create a glyph, supply an int or character.') # do we", "= value self._vertices2 = None # force recalculation self.Draw() return locals() @Property def", "MiniStyle(script=0, bold=False, italic=False) Class that represents the style of characters (sub/super script, bold,", "# # Visvis is distributed under the terms of the (new) BSD License.", "float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp, (x2-1) / tmp y1 = infoOrigin[ac,1]", "alphabet * u00c0 - u037f latin * u0380 - u03ff greek * u2000", "make sure the glyphs are created if self._vertices1 is None or self._texCords is", "Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) # store coordinates self._x, self._y, self._z = x,", "self._x, self._y, self._z = x, y, z # for internal use self._screenx, self._screeny,", "the textureCords in the font texture can be calculated. Also the relative vertices", "y position of the text. \"\"\" def fget(self): return self._y def fset(self, value):", "we need to know about position changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self):", "inserted in the same way (By starting the name with an uppercase letter,", "None: return self # set script script = other.script if script == 0:", "style (or not)) g = Glyph(font, c, self._size, styles+[style]) glyphs.append( g ) style", "now # possible to mix bold and italic text, and one can make", "def __add__(self, other): # allow None if other is None: return self #", "makes one or more charactes superscript. 
* hello_2 or hello_{there}, makes one or", "self.skewFactor = 0.0 if style.italic: self.skewFactor = 0.5 # calculate width on screen,", "infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp, (y2-1) / tmp", "= int(value>0) - int(value<0) if value != self._halign: self._halign = value self._vertices2 =", "produce a single line of text oriented at a certain angle. Formatting ----------", "script = other.script if script == 0: script = self.script # done return", ") class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject (inherits from box)", "much space there should be before the next char * s1 s2 t1", "the greek alfabet can be inserted in the same way (By starting the", "font was created earlier, that font is returned, otherwise it is created and", "axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self,", "the backslash (for example '\\infty'). People familiar with Latex know what they do:", "elif c=='_': style = MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style", "return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base", "self.edgeWidth = 0 # init position (this is to set the size) self.position", "There are several escape sequences for (mathematical) characters that can be inserted using", "inserted): * alpha beta gamma delta * epsilon zeta eta theta * iota", "(By starting the name with an uppercase letter, the corresponding upper case greek", "= y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp,", "anchory = y2 else: anchory = y1 + (y2-y1)/2.0 # apply anchor angle", "size=12, styles=None): # unwind the style for this glyph self.style = MiniStyle() if", "shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2", "text oriented at a certain 
angle. Formatting ---------- Text can be formatted using", "the text is recompiled the next time it is drawn. \"\"\" self._texCords =", "* sizex and sizey represent the size of the glyph. * dy represents", "'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7,", "oriented at a certain angle. Formatting ---------- Text can be formatted using the", "def textSpacing(): \"\"\"Get/Set the spacing between characters. \"\"\" def fget(self): return self._charSpacing def", "= True else: # create glyph (with new style (or not)) g =", "fget(self): return self._y def fset(self, value): self._y = value return locals() @PropWithDraw def", "'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6,", "'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9,", "anchor in label if isinstance(self, Label): w,h = self.position.size # determine whether the", "<NAME> # # Visvis is distributed under the terms of the (new) BSD", "gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of fonts. There", "hell\\b{ohoo}, makes one or more charactes bold. * hello\\_there, a backslash escapes, thus", "style.italic and ac in info.charcodes_i: # # italic text # infoSize, infoOrigin, infoWidth", "= tt.replace('\\b', '\\x07') # build list of glyphs, take sub/super scripting into account.", "is called when the first is None) \"\"\" # get figure fig =", "w,h = self.position.size # determine whether the text is vertical or horizontal halign,", "the text to display. 
\"\"\" def fget(self): return self._text def fset(self, value): if", "data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload to make it an alpha", "to produce a single line of text oriented at a certain angle. Formatting", "import visvis from visvis import ssdf from visvis.pypoints import Pointset # from visvis.core.baseTexture", "= g.dy # append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1,", "!= self._valign: self._valign = value self._vertices2 = None # force recalculation self.Draw() return", "self._vertices1 = None # the coords in screen coordinates (raw) self._vertices2 = None", "numbers * u0040 - u00bf alphabet * u00c0 - u037f latin * u0380", "# force recalculation self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set the size of", "escape for c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\')", "elif c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape = True else: #", "!= 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0]", "first self._Invalidate() # get font instance from figure f = self.GetFigure() if not", "tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta',", "name with an uppercase letter, the corresponding upper case greek letter is inserted):", "# draw if self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS,", "value != self._color: self._color = value self.Draw() return locals() @Property def halign(): \"\"\"Get/Set", "alignment in _PositionText(). 
-> Produces _vertices1 (and is called when that is None)", "texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing", "(and is called when the first is None) \"\"\" # get figure fig", "u2000 - u23ff symbols There are several escape sequences for (mathematical) characters that", "g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing for position skew", "len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0 y1, y2", "0: anchory = h else: anchory = h/2.0 # set anchor x if", "value = getColor(value,'setting textColor') if value != self._color: self._color = value self.Draw() return", "self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): # Translate if x", "or bold? # Note: italic is now realized by printing it skewed rather", "ValueError('Invalid font name.') # more properties self._size = 9 self._fontname = fontname self._color", "value self._Invalidate() # force recalculation self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set the", "script, bold, and italic. Used when compiling the text. script = {0:'normal', 1:'sub',", "infty * int iint iiint forall * leq geq approx approxeq ne in", "'\\x06') # just use some char that is no string tt = tt.replace('\\b',", "BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for the Text wobject and Label wibject.", "text. 
\"\"\" def fget(self): return self._color def fset(self, value): value = getColor(value,'setting textColor')", "if style.italic and ac in info.charcodes_i: # # italic text # infoSize, infoOrigin,", "[] self._xglyph = Glyph(font, 'X', self._size) tt = self._text # transform greek characters", "os import numpy as np import visvis from visvis import ssdf from visvis.pypoints", "oslash Letters from the greek alfabet can be inserted in the same way", "force recalculation self.Draw() return locals() @Property def valign(): \"\"\"Get/Set the vertical alignment. Specify", "= tuple(tmp) # make integer (to prevent glitchy behaviour), but not z! self._screenx", "in the Font object. * sizex and sizey represent the size of the", "= value self.Draw() return locals() @Property def halign(): \"\"\"Get/Set the horizontal alignment. Specify", "fset(self, value): if value != self._charSpacing: self._charSpacing = value self._Invalidate() # force recalculation", "\"Warning: Cannot draw character %i! \" % ord(char) ac = 32 # make", "init position (this is to set the size) self.position = 10,10,100,16 # we", "the y position of the text. \"\"\" def fget(self): return self._y def fset(self,", "None # the coords in screen coordinates (raw) self._vertices2 = None # dito,", "basestring): ac = ord(char) elif isinstance(char, int): ac = char else: raise ValueError('To", "italic and bold modifiers tt = tt.replace('\\i', '\\x06') # just use some char", "= MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1]", "sigma tau * upsilon phi chi psi * omega Note: In case one", "text in visvis. 
Defines a wibject and a wobject: Label and Text, which", "Append style to the list if style: styles.append(style) style = None elif c=='}':", "self._charSpacing # store self._texCords = texCords self._vertices1 = vertices def _PositionText(self, event=None): \"\"\"", "x2 = 0,0 y1, y2 = 0, self._xglyph.sizey # set anchor if self._halign", "'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the keys, such that longer names are", "or self._angle < -135: halign, valign = -halign, valign elif self._angle > 45:", "position of the text. \"\"\" def fget(self): return self._x def fset(self, value): self._x", "= valign, halign # set anchor y if valign < 0: anchory =", "def fget(self): return self._z def fset(self, value): self._z = value return locals() def", "terms of the (new) BSD License. # The full license can be found", "'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE,", "created and stored for reuse. \"\"\" if fontname in self.fonts: return self.fonts[fontname] elif", "store self._texCords = texCords self._vertices1 = vertices def _PositionText(self, event=None): \"\"\" The name", "if isinstance(self, Text): # Text is a wobject, so must be flipped on", "for angle and alignment. -> produces _vertices2 from _vertices1 (and is called when", "= [] style = None # Style to set for i in range(len(tt)):", "'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200,", "or more charactes italic. * hell\\bo or hell\\b{ohoo}, makes one or more charactes", "self.s2 = (x1) / tmp, (x2-1) / tmp y1 = infoOrigin[ac,1] y2 =", "be flipped on y axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1]", "the list if style: styles.append(style) style = None elif c=='}': # Remove style", "font instance. 
If that font was created earlier, that font is returned, otherwise", "c, self._size, styles+[style]) glyphs.append( g ) style = None # build arrays with", "object. * sizex and sizey represent the size of the glyph. * dy", "approx approxeq ne in * leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow", "value): if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp =", "up if x or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject,", "thus keeping the _^ or \\ after it. Special characters ------------------ Characters are", "'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, #", "* int iint iiint forall * leq geq approx approxeq ne in *", "factor # is spacing? smaller = 0.6 self.dy = 0.0 # normal script", "force recalculation self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set the size of the", "not None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def", "fonts self.fonts = {} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font instance.", "None # Style to set for i in range(len(tt)): c = tt[i] if", "# Copyright (C) 2012, <NAME> # # Visvis is distributed under the terms", "one can make any supported # unicode character italic. # if style.italic and", "vertices[:,1] + anchory # store self._vertices2 = vertices # calculate edges (used by", "the size factor = size / float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey", "wibject and a wobject: Label and Text, which are both able to produce", "more charactes superscript. * hello_2 or hello_{there}, makes one or more charactes subscript.", "name. 
\"\"\" def fget(self): return self._fontname def fset(self, value): if value != self._fontname:", "list of fonts self.fonts = {} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a", "data array def __init__(self, font, char, size=12, styles=None): # unwind the style for", "elif self._halign > 0: anchorx = x2 else: anchorx = x1 + (x2-x1)/2.0", "to texture coordinates x1 = infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp =", "list of glyphs, take sub/super scripting into account. escape = False styles =", "+ g.sizex y2 = g.sizey #y2 = y1 - g.sizey dy = g.dy", "BaseText(text='', fontname=None) Base object for the Text wobject and Label wibject. fontname may", "'license.txt'. \"\"\" Module textRender For rendering text in visvis. Defines a wibject and", "text(): \"\"\"Get/Set the text to display. \"\"\" def fget(self): return self._text def fset(self,", "elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and", "0: anchory = y1 elif self._valign > 0: anchory = y2 else: anchory", "\"\"\"Get/Set the font type by its name. \"\"\" def fget(self): return self._fontname def", "gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager", "return locals() @Property def halign(): \"\"\"Get/Set the horizontal alignment. Specify as: * 'left',", "def fget(self): return self._charSpacing def fset(self, value): if value != self._charSpacing: self._charSpacing =", "infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1)", "event=None): \"\"\" The name is ment as a verb. The vertices1 are corrected", "Used when compiling the text. 
script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self,", "self._vertices2 is None: self._PositionText() # get font instance from figure fig = self.GetFigure()", "0 self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate this object, such that the", "Characters are available for the following unicode sets: * u0020 - u003f numbers", "# allow None if other is None: return self # set script script", "def fontName(): \"\"\"Get/Set the font type by its name. \"\"\" def fget(self): return", "value): value = getColor(value,'setting textColor') if value != self._color: self._color = value self.Draw()", "elif halign > 0: anchorx = w else: anchorx = w/2.0 # apply", "z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) # prepare for", "\"\"\" Glyph(font, char, size=12, styles=None) A glyph is a character. It is visualized", "halign, valign = valign, halign # set anchor y if valign < 0:", "-*- coding: utf-8 -*- # Copyright (C) 2012, <NAME> # # Visvis is", "self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) # make integer (to prevent glitchy", "bold text infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b, info.width_b) # Find position", "new style (or not)) g = Glyph(font, c, self._size, styles+[style]) glyphs.append( g )", "one or more charactes subscript. * hell\\io or hell\\i{ohoo}, makes one or more", "be an int or string.') value = int(value>0) - int(value<0) if value !=", "size) self.position = 10,10,100,16 # we need to know about position changes to", "= None # force recalculation self.Draw() return locals() @Property def valign(): \"\"\"Get/Set the", "def fset(self, value): if value != self._size: self._size = value self._Invalidate() # force", "tt = tt.replace('\\t', r'\\\\') # get italic and bold modifiers tt = tt.replace('\\i',", "the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, text='', fontname=None): # init drawing data", "the font texture can be calculated. Also the relative vertices are calculated, which", "if not fig: return font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare", "* smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that", "certain angle. Formatting ---------- Text can be formatted using the following constructs (which", "self._angle def fset(self, value): if value != self._angle: self._angle = value self._vertices2 =", "texture font.Enable() # prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex", "x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp, (x2-1)", "geq approx approxeq ne in * leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow", "internal use self._screenx, self._screeny, self._screenz = 0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set", "fset(self, value): self._z = value return locals() def OnDraw(self): # get screen position", "text='', x=0, y=0, z=0, fontname=None) A wobject representing a string of characters. The", "\" % ord(char) ac = 32 # make space # default infoSize, infoOrigin,", "greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC,", "that is not in this list, one can always look up its unicode", "return self._valign def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value", "its name. 
\"\"\" def fget(self): return self._fontname def fset(self, value): if value !=", "fontname = fontname.lower() if fontname not in ['mono', 'sans', 'serif']: raise ValueError('Invalid font", "Re Im null infty * int iint iiint forall * leq geq approx", "earlier, that font is returned, otherwise it is created and stored for reuse.", "infoSize, infoOrigin, infoWidth = ( # info.size_i, info.origin_i, info.width_i) if style.bold and ac", "Font(info) A Font object holds the texture that contains all the characters. \"\"\"", "style.script == 1: # sub script self.dy = (1-smaller) * self.sizey if style.script:", "!= self._charSpacing: self._charSpacing = value self._Invalidate() # force recalculation self.Draw() return locals() @Property", "#y2 = y1 - g.sizey dy = g.dy # append texture coordinates texCords.append(g.s1,", "_vertices2 from _vertices1 (and is called when the first is None) \"\"\" #", "is used. \"\"\" def __init__(self, parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent)", "same way (By starting the name with an uppercase letter, the corresponding upper", "def fget(self): return self._y def fset(self, value): self._y = value return locals() @PropWithDraw", "( # info.size_i, info.origin_i, info.width_i) if style.bold and ac in info.charcodes_b: # bold", "sin_angle + vertices[:,1] * cos_angle) # Move anchor in label if isinstance(self, Label):", "# append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) #", "+ (y2-y1)/2.0 # apply anchor angle = self._angle if isinstance(self, Text): # Text", "mixed): * hello^2 or hello^{there}, makes one or more charactes superscript. 
* hello_2", "u23ff symbols There are several escape sequences for (mathematical) characters that can be", "eta theta * iota kappa lambda mu * nu xi omicron pi *", "coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing for", "+ anchorx vertices[:,1] = vertices[:,1] + anchory # store self._vertices2 = vertices #", "y1 elif self._valign > 0: anchory = y2 else: anchory = y1 +", "vv.settings.defaultFontName is used. \"\"\" def __init__(self, text='', fontname=None): # init drawing data self._texCords", "tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp: raise ValueError('Invalid value for", "self._halign, self._valign if self._angle > 135 or self._angle < -135: halign, valign =", "'center', 'down' * 'top', 'center', 'bottom' * -1, 0, 1 \"\"\" def fget(self):", "to mix bold and italic text, and one can make any supported #", "locals() @Property def valign(): \"\"\"Get/Set the vertical alignment. Specify as: * 'up', 'center',", "value self._vertices2 = None # force recalculation self.Draw() return locals() def _Compile(self): \"\"\"", "the text. \"\"\" def fget(self): return self._size def fset(self, value): if value !=", "# default infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width # should and can", "supply an int or character.') # do we have that char? if ac", "-1, 0, 1 \"\"\" def fget(self): return self._halign def fset(self, value): if isinstance(value,", "iint iiint forall * leq geq approx approxeq ne in * leftarrow uparrow", "apply angle if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0],", "beta gamma delta * epsilon zeta eta theta * iota kappa lambda mu", "the _^ or \\ after it. 
Special characters ------------------ Characters are available for", "is None) \"\"\" # make invalid first self._Invalidate() # get font instance from", "glyphs.append( g ) escape = False elif c=='{': # Append style to the", "representing a string of characters. The text has a certain position in the", "a backslash escapes, thus keeping the _^ or \\ after it. Special characters", "None self._vertices2 = None @Property # Smart draw def text(): \"\"\"Get/Set the text", "angle = -self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] - anchory", "# is spacing? smaller = 0.6 self.dy = 0.0 # normal script if", "'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D,", "return self # set script script = other.script if script == 0: script", "text inside. The fontname can be 'mono', 'sans' or 'serif'. If not given,", "italic. # if style.italic and ac in info.charcodes_i: # # italic text #", "the texture stored in the Font object. 
* sizex and sizey represent the", "return locals() def OnDraw(self): # get screen position and store tmp = glu.gluProject(self._x,", "= tmp[value.lower()] else: raise ValueError('halign must be an int or string.') value =", "ssdf from visvis.pypoints import Pointset # from visvis.core.baseTexture import TextureObject from visvis.core.base import", "recalculation self.Draw() return locals() def _Compile(self): \"\"\" Create a series of glyphs from", "{ # upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398,", "is None: self._Compile() if self._vertices2 is None: self._PositionText() # get font instance from", "with an uppercase letter, the corresponding upper case greek letter is inserted): *", "['mono', 'sans', 'serif']: raise ValueError('Invalid font name.') # more properties self._size = 9", "locals() def OnDraw(self): # get screen position and store tmp = glu.gluProject(self._x, self._y,", "smaller = 0.6 self.dy = 0.0 # normal script if style.script == 1:", "license can be found in 'license.txt'. \"\"\" Module textRender For rendering text in", "\"\"\" def fget(self): return self._fontname def fset(self, value): if value != self._fontname: self._fontname", "ment as a verb. The vertices1 are corrected for angle and alignment. 
->", "return self._y def fset(self, value): self._y = value return locals() @PropWithDraw def z():", "gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure the glyphs are created if self._vertices1", "in the data array def __init__(self, font, char, size=12, styles=None): # unwind the", "= tt.replace(r'\\\\', '\\t') # double backslashes do not escape for c in escapesKeys:", "and italic text, and one can make any supported # unicode character italic.", "self._valign = 0 self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate this object, such", "= vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): #", "self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts = {} def GetFont(self,", "style: styles.append(style) style = None elif c=='}': # Remove style if styles: styles.pop()", "of the text. \"\"\" def fget(self): return self._y def fset(self, value): self._y =", "0: anchorx = x1 elif self._halign > 0: anchorx = x2 else: anchorx", "self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject", "tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of", "and check fontname if fontname is None: fontname = visvis.settings.defaultFontName fontname = fontname.lower()", "None: fontname = visvis.settings.defaultFontName fontname = fontname.lower() if fontname not in ['mono', 'sans',", "raise ValueError('valign must be an int or string.') value = int(value>0) - int(value<0)", "self._valign if self._angle > 135 or self._angle < -135: halign, valign = -halign,", "or 'serif'. If not given, the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, parent,", "from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box escapes = { # upper", "not given, the vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='', x=0, y=0,", "escape = False styles = [] style = None # Style to set", "in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp", "@Property def fontSize(): \"\"\"Get/Set the size of the text. \"\"\" def fget(self): return", "determine whether the text is vertical or horizontal halign, valign = self._halign, self._valign", "times cdot pm * oplus ominus otimes oslash Letters from the greek alfabet", "style for this glyph self.style = MiniStyle() if styles: for style in styles:", "(x2-1) / tmp y1 = infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp =", "= Glyph(font, 'X', self._size) tt = self._text # transform greek characters that were", "infoWidth = ( # info.size_i, info.origin_i, info.width_i) if style.bold and ac in info.charcodes_b:", "# if self._valign < 0: anchory = y1 elif self._valign > 0: anchory", "from visvis.core.baseWibjects import Box escapes = { # upper case greek 'Alpha':0x0391, 'Beta':0x0392,", "\"\"\" Overload to make it an alpha map. \"\"\" # Add lumincance channel", "texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing for position skew = self._size *", "unicode character italic. # if style.italic and ac in info.charcodes_i: # # italic", "self._text = value self._Invalidate() # force recalculation self.Draw() return locals() @Property # Smart", "= None elif c=='}': # Remove style if styles: styles.pop() elif c=='^': style", "# store self._texCords = texCords self._vertices1 = vertices def _PositionText(self, event=None): \"\"\" The", "when compiling the text. 
script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0,", "Text is a wobject, so must be flipped on y axis vertices[:,0] =", "any supported # unicode character italic. # if style.italic and ac in info.charcodes_i:", "infoWidth = info.size, info.origin, info.width # should and can we display in italic", "elif isinstance(char, int): ac = char else: raise ValueError('To create a glyph, supply", "% ord(char) ac = 32 # make space # default infoSize, infoOrigin, infoWidth", "reuse. \"\"\" if fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp =", "self._size, styles) glyphs.append( g ) escape = False elif c=='{': # Append style", "escape = True else: # create glyph (with new style (or not)) g", "OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None)", "an array of origin 's # - an array of size's # -", "Label(parent, text='', fontname=None) A wibject (inherits from box) with text inside. The fontname", "self.skewFactor = 0.5 # calculate width on screen, given the size factor =", "can be found in 'license.txt'. \"\"\" Module textRender For rendering text in visvis.", "self._halign: self._halign = value self._vertices2 = None # force recalculation self.Draw() return locals()", "basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise ValueError('Invalid value", "data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data)", "which are then corrected for angle and alignment in _PositionText(). 
-> Produces _vertices1", "is None: self._PositionText() # get font instance from figure fig = self.GetFigure() if", "0, self._xglyph.sizey # set anchor if self._halign < 0: anchorx = x1 elif", "= y1 - g.sizey dy = g.dy # append texture coordinates texCords.append(g.s1, g.t1)", "= visvis.settings.defaultFontName fontname = fontname.lower() if fontname not in ['mono', 'sans', 'serif']: raise", "fget(self): return self._fontname def fset(self, value): if value != self._fontname: self._fontname = value", "or string.') value = int(value>0) - int(value<0) if value != self._valign: self._valign =", "'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193,", "(x1) / tmp, (x2-1) / tmp y1 = infoOrigin[ac,1] y2 = y1 +", "color of the text. \"\"\" def fget(self): return self._color def fset(self, value): value", "in ['_^\\x06\\x07']: escape = True else: # create glyph (with new style (or", "value and use that instead. \"\"\" import OpenGL.GL as gl import OpenGL.GLU as", "# Visvis is distributed under the terms of the (new) BSD License. 
#", "if script == 0: script = self.script # done return MiniStyle( script, self.bold", "if x or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText):", "if value != self._size: self._size = value self._Invalidate() # force recalculation self.Draw() return", "alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box Box.OnDraw(self) # Draw the text", "'serif']: raise ValueError('Invalid font name.') # more properties self._size = 9 self._fontname =", "and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices):", "skewing for position skew = self._size * g.skewFactor # append vertices vertices.append(x1+skew, y1+dy,", "self._vertices1.copy() # scale text according to global text size property vertices *= fig._relativeFontSize", "self._texCords = texCords self._vertices1 = vertices def _PositionText(self, event=None): \"\"\" The name is", "the text. script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False):", "drawing data self._texCords = None # coords in the font texture self._vertices1 =", "display. \"\"\" def fget(self): return self._text def fset(self, value): if value != self._text:", "* hell\\io or hell\\i{ohoo}, makes one or more charactes italic. * hell\\bo or", "value): if value != self._angle: self._angle = value self._vertices2 = None # force", "fset(self, value): if value != self._angle: self._angle = value self._vertices2 = None #", "self._angle = value self._vertices2 = None # force recalculation self.Draw() return locals() @Property", "in italic or bold? # Note: italic is now realized by printing it", "and use that instead. 
\"\"\" import OpenGL.GL as gl import OpenGL.GLU as glu", "from visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import", "or more charactes superscript. * hello_2 or hello_{there}, makes one or more charactes", "in glyphs: x2 = x1 + g.sizex y2 = g.sizey #y2 = y1", "the characters. \"\"\" def __init__(self, info): TextureObject.__init__(self, 2) # store font information self.info", "ne in * leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow *", "0.6 self.dy = 0.0 # normal script if style.script == 1: # sub", "self._Compile() if self._vertices2 is None: self._PositionText() # get font instance from figure fig", "textureCords in the font texture can be calculated. Also the relative vertices are", "disable texture and clean up if x or y or z: gl.glPopMatrix() font.Disable()", "\"\"\"Get/Set the vertical alignment. Specify as: * 'up', 'center', 'down' * 'top', 'center',", "* smaller self.sizey = self.sizey * smaller self.width = self.width * smaller#- self.sizex", "and can we display in italic or bold? # Note: italic is now", "and Label wibject. fontname may be 'mono', 'sans', 'serif' or None, in which", "all the characters. 
\"\"\" def __init__(self, info): TextureObject.__init__(self, 2) # store font information", "null infty * int iint iiint forall * leq geq approx approxeq ne", "\"\"\" def __init__(self, info): TextureObject.__init__(self, 2) # store font information self.info = info", "first is None) \"\"\" # get figure fig = self.GetFigure() # get vertices", "isinstance(self, Label): angle = -self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = vertices[:,1]", "texCords = self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY)", "vertices.append(x1, y2+dy, z) # prepare for next glyph x1 = x1 + g.width", ") escape = False elif c=='{': # Append style to the list if", "wobject, so must be flipped on y axis vertices[:,0] = vertices[:,0] - anchorx", "sub script self.dy = (1-smaller) * self.sizey if style.script: # super or subscript", "'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9,", "import os import numpy as np import visvis from visvis import ssdf from", "leftfloor rightfloor * times cdot pm * oplus ominus otimes oslash Letters from", "bold self.italic = italic def __add__(self, other): # allow None if other is", "next glyph x1 = x1 + g.width + self._charSpacing # store self._texCords =", "x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0 y1, y2 = 0,", "return self._charSpacing def fset(self, value): if value != self._charSpacing: self._charSpacing = value self._Invalidate()", "size of the text. \"\"\" def fget(self): return self._size def fset(self, value): if", "self._deltay = 0,0 # store text self._text = text # Set and check", "it. Special characters ------------------ Characters are available for the following unicode sets: *", "from the texture stored in the Font object. 
* sizex and sizey represent", "\"\"\" def fget(self): return self._color def fset(self, value): value = getColor(value,'setting textColor') if", "r'\\\\') # get italic and bold modifiers tt = tt.replace('\\i', '\\x06') # just", "y1 - g.sizey dy = g.dy # append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2,", "bold=False, italic=False): self.script = script self.bold = bold self.italic = italic def __add__(self,", "tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\"", "factor self.width = float(infoWidth[ac]) * factor # is spacing? smaller = 0.6 self.dy", "update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box Box.OnDraw(self) # Draw the", "lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font object holds the texture that", "fontname if fontname is None: fontname = visvis.settings.defaultFontName fontname = fontname.lower() if fontname", "iota kappa lambda mu * nu xi omicron pi * rho varsigma sigma", "'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } #", "'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5,", "_vertices1 (and is called when that is None) \"\"\" # make invalid first", "between characters. \"\"\" def fget(self): return self._charSpacing def fset(self, value): if value !=", "textSpacing(): \"\"\"Get/Set the spacing between characters. 
\"\"\" def fget(self): return self._charSpacing def fset(self,", "ValueError('Invalid value for valign.') value = tmp[value.lower()] else: raise ValueError('valign must be an", "gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of fonts. There should", "g.t1) texCords.append(g.s2, g.t2) texCords.append(g.s1, g.t2) # set skewing for position skew = self._size", "visvis.settings.defaultFontName fontname = fontname.lower() if fontname not in ['mono', 'sans', 'serif']: raise ValueError('Invalid", "specifies how much space there should be before the next char * s1", "= None # Style to set for i in range(len(tt)): c = tt[i]", "valign elif self._angle > 45: halign, valign = valign, -halign elif self._angle <", "# should and can we display in italic or bold? # Note: italic", "for each figure/context. \"\"\" def __init__(self): # load font data path = getResourceDir()", "self._fontname = fontname self._color = (0,0,0) self._angle = 0 self._halign = -1 self._valign", "force recalculation self.Draw() return locals() @Property # Smart draw def textAngle(): \"\"\"Get/Set the", "'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc,", "and ac in info.charcodes_i: # # italic text # infoSize, infoOrigin, infoWidth =", "glyphs: x2 = x1 + g.sizex y2 = g.sizey #y2 = y1 -", "* -1, 0, 1 \"\"\" def fget(self): return self._halign def fset(self, value): if", "that is no string tt = tt.replace('\\b', '\\x07') # build list of glyphs,", "# apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] + anchory #", "+= style style = self.style # store font self.font = font info =", "- an array of origin 's # - an array of size's #", "self.GetFigure() # get vertices if self._vertices1 is None: return vertices = self._vertices1.copy() #", "= italic def __add__(self, other): # allow None if 
other is None: return", "other.script if script == 0: script = self.script # done return MiniStyle( script,", "= -1 self._valign = 0 self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate this", "\"\"\" def __init__(self, parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text,", "'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5,", "anchor angle = self._angle if isinstance(self, Text): # Text is a wobject, so", "for (mathematical) characters that can be inserted using the backslash (for example '\\infty').", "w else: anchorx = w/2.0 # apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1]", "Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A glyph is a character. It is", "# Text is a wobject, so must be flipped on y axis vertices[:,0]", "greek * u2000 - u23ff symbols There are several escape sequences for (mathematical)", "artifacts from neighbouring characters. Additionally, it's now # possible to mix bold and", "# set anchor y if valign < 0: anchory = 0 elif valign", "if style: styles.append(style) style = None elif c=='}': # Remove style if styles:", "x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0 y1, y2 =", "smaller self.width = self.width * smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0,", "self._vertices1 is None or self._texCords is None: self._Compile() if self._vertices2 is None: self._PositionText()", "self._text: self._text = value self._Invalidate() # force recalculation self.Draw() return locals() @Property #", "each figure/context. 
\"\"\" def __init__(self): # load font data path = getResourceDir() self.s", "rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor *", "font name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A glyph is a", "contains # - a string of charcodes # - an array of origin", "__add__(self, other): # allow None if other is None: return self # set", "script, self.bold or other.bold, self.italic or other.italic ) def __repr__(self): tmp = self.script,", "def fset(self, value): if value != self._text: self._text = value self._Invalidate() # force", "if self._vertices1 is None: return vertices = self._vertices1.copy() # scale text according to", "- anchory # apply angle if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle", "Define skew factor to handle italics correctly self.skewFactor = 0.0 if style.italic: self.skewFactor", "vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0 y1, y2 = 0, self._xglyph.sizey #", "the text is vertical or horizontal halign, valign = self._halign, self._valign if self._angle", "texCords = Pointset(2) for g in glyphs: x2 = x1 + g.sizex y2", "figure fig = self.GetFigure() if not fig: return font = fig._fontManager.GetFont(self._fontname) # enable", "'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the keys,", "int or string.') value = int(value>0) - int(value<0) if value != self._valign: self._valign", "store coordinates self._x, self._y, self._z = x, y, z # for internal use", "hello\\_there, a backslash escapes, thus keeping the _^ or \\ after it. Special", "# scale text according to global text size property vertices *= fig._relativeFontSize #", "class FontManager: \"\"\" FontManager() Manager of fonts. 
There should be only one instance", "* 'left', 'center', 'right' * -1, 0, 1 \"\"\" def fget(self): return self._halign", "shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA,", "for the Text wobject and Label wibject. fontname may be 'mono', 'sans', 'serif'", "the vertical alignment. Specify as: * 'up', 'center', 'down' * 'top', 'center', 'bottom'", "fontname=None): # init drawing data self._texCords = None # coords in the font", "(1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents the style of", "= vertices # calculate edges (used by for example the AxisLabel class) if", "scripts) * width specifies how much space there should be before the next", "infoOrigin, infoWidth = ( # info.size_i, info.origin_i, info.width_i) if style.bold and ac in", "class for each figure/context. \"\"\" def __init__(self): # load font data path =", "'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case", "tt[i] if escape: g = Glyph(font, c, self._size, styles) glyphs.append( g ) escape", "sin_angle, vertices[:,0] * sin_angle + vertices[:,1] * cos_angle) # Move anchor in label", "\"\"\" def __init__(self, script=0, bold=False, italic=False): self.script = script self.bold = bold self.italic", "check it if isinstance(char, basestring): ac = ord(char) elif isinstance(char, int): ac =", "self.s1, self.s2 = (x1) / tmp, (x2-1) / tmp y1 = infoOrigin[ac,1] y2", "\"\"\" self._texCords = None self._vertices1 = None self._vertices2 = None @Property # Smart", "= getColor(value,'setting textColor') if value != self._color: self._color = value self.Draw() return locals()", "it an alpha map. 
\"\"\" # Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8)", "0 @PropWithDraw def x(): \"\"\"Get/Set the x position of the text. \"\"\" def", "charcodes # - an array of origin 's # - an array of", "self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else:", "tt = self._text # transform greek characters that were given without double backslash", "a series of glyphs from the given text. From these Glyphs the textureCords", "32 or ac > 255: print \"Warning: Cannot draw character %i! \" %", "property vertices *= fig._relativeFontSize # obtain dimensions if len(vertices): x1, x2 = vertices[:,0].min(),", "# super or subscript self.skewFactor *= smaller self.sizex = self.sizex * smaller self.sizey", "self.Draw() return locals() @Property def halign(): \"\"\"Get/Set the horizontal alignment. Specify as: *", "the relative vertices are calculated, which are then corrected for angle and alignment", "FontManager: \"\"\" FontManager() Manager of fonts. There should be only one instance of", "class Font(TextureObject): \"\"\" Font(info) A Font object holds the texture that contains all", "tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get italic and bold", "hello_{there}, makes one or more charactes subscript. 
* hell\\io or hell\\i{ohoo}, makes one", "'otimes':0x2297, 'oslash':0x2298, } # sort the keys, such that longer names are replaced", "first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font", "position skew = self._size * g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew,", "TextureObject.__init__(self, 2) # store font information self.info = info # set data self.SetData(self.info.data)", "info): TextureObject.__init__(self, 2) # store font information self.info = info # set data", "@Property # Smart draw def text(): \"\"\"Get/Set the text to display. \"\"\" def", "self._text def fset(self, value): if value != self._text: self._text = value self._Invalidate() #", "rightceil leftfloor rightfloor * times cdot pm * oplus ominus otimes oslash Letters", "fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp", "set for i in range(len(tt)): c = tt[i] if escape: g = Glyph(font,", "apply anchor angle = self._angle if isinstance(self, Text): # Text is a wobject,", "of the (new) BSD License. # The full license can be found in", "import getResourceDir, getColor # from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box escapes", "force recalculation self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set the color of the", "Glyphs the textureCords in the font texture can be calculated. Also the relative", "x or y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure the", "label if isinstance(self, Label): w,h = self.position.size # determine whether the text is", "'mono', 'sans', 'serif' or None, in which case the vv.settings.defaultFontName is used. \"\"\"", "vertices[:,1] * cos_angle) # Move anchor in label if isinstance(self, Label): w,h =", "more charactes subscript. 
* hell\\io or hell\\i{ohoo}, makes one or more charactes italic.", "the spacing between characters. \"\"\" def fget(self): return self._charSpacing def fset(self, value): if", "u037f latin * u0380 - u03ff greek * u2000 - u23ff symbols There", "(y1) / tmp, (y2-1) / tmp # Define skew factor to handle italics", "self._xglyph = Glyph(font, 'X', self._size) tt = self._text # transform greek characters that", "The text has a certain position in the scene. The fontname can be", "Special characters ------------------ Characters are available for the following unicode sets: * u0020", "= 255 data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0,", "# get screen position and store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny,", "u003f numbers * u0040 - u00bf alphabet * u00c0 - u037f latin *", "Smart draw def textAngle(): \"\"\"Get/Set the angle of the text in degrees. \"\"\"", "transform greek characters that were given without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha']))", "if styles: for style in styles: self.style += style style = self.style #", "= self.style # store font self.font = font info = self.font.info # get", "force recalculation self.Draw() return locals() def _Compile(self): \"\"\" Create a series of glyphs", "_vertices1 (and is called when the first is None) \"\"\" # get figure", "lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA,", "glyphs from the given text. From these Glyphs the textureCords in the font", "valign, halign # set anchor y if valign < 0: anchory = 0", "if not value in tmp: raise ValueError('Invalid value for valign.') value = tmp[value.lower()]", "position of the text. 
\"\"\" def fget(self): return self._y def fset(self, value): self._y", "Invalidate this object, such that the text is recompiled the next time it", "\"\"\"Get/Set the z position of the text. \"\"\" def fget(self): return self._z def", "a verb. The vertices1 are corrected for angle and alignment. -> produces _vertices2", "* sin_angle, vertices[:,0] * sin_angle + vertices[:,1] * cos_angle) # Move anchor in", "* alpha beta gamma delta * epsilon zeta eta theta * iota kappa", "wobject: Label and Text, which are both able to produce a single line", "just use some char that is no string tt = tt.replace('\\b', '\\x07') #", "vertical alignment. Specify as: * 'up', 'center', 'down' * 'top', 'center', 'bottom' *", "'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2,", "if value != self._charSpacing: self._charSpacing = value self._Invalidate() # force recalculation self.Draw() return", "angle and alignment in _PositionText(). -> Produces _vertices1 (and is called when that", "elif isinstance(value, basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value", "alignment. 
Specify as: * 'up', 'center', 'down' * 'top', 'center', 'bottom' * -1,", "c = tt[i] if escape: g = Glyph(font, c, self._size, styles) glyphs.append( g", "it if isinstance(char, basestring): ac = ord(char) elif isinstance(char, int): ac = char", "= self.position.size # determine whether the text is vertical or horizontal halign, valign", "are created if self._vertices1 is None or self._texCords is None: self._Compile() if self._vertices2", "ac = char else: raise ValueError('To create a glyph, supply an int or", "from _vertices1 (and is called when the first is None) \"\"\" # get", "# Set and check fontname if fontname is None: fontname = visvis.settings.defaultFontName fontname", "figure fig = self.GetFigure() # get vertices if self._vertices1 is None: return vertices", "= fontname.lower() if fontname not in ['mono', 'sans', 'serif']: raise ValueError('Invalid font name.')", "- g.sizey dy = g.dy # append texture coordinates texCords.append(g.s1, g.t1) texCords.append(g.s2, g.t1)", "if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] =", "it is created and stored for reuse. 
\"\"\" if fontname in self.fonts: return", "'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6,", "= {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False): self.script = script", "gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D,", "y, z) # make sure the glyphs are created if self._vertices1 is None", "# init position (this is to set the size) self.position = 10,10,100,16 #", "= (0,0,0) self._angle = 0 self._halign = -1 self._valign = 0 self._charSpacing =", "elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise", "ac = 32 # make space # default infoSize, infoOrigin, infoWidth = info.size,", "- anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self, Label): angle = -self._angle", "are replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info)", "can always look up its unicode value and use that instead. \"\"\" import", "texture, normalized to texture coordinates x1 = infoOrigin[ac,0] x2 = x1 + infoSize[ac,0]", "_PositionText(self, event=None): \"\"\" The name is ment as a verb. The vertices1 are", "corrected for angle and alignment. -> produces _vertices2 from _vertices1 (and is called", "(new) BSD License. # The full license can be found in 'license.txt'. 
\"\"\"", "position and store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp)", "greek alfabet can be inserted in the same way (By starting the name", "value = int(value>0) - int(value<0) if value != self._halign: self._halign = value self._vertices2", "'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1,", "y if valign < 0: anchory = 0 elif valign > 0: anchory", "realized by printing it skewed rather using the # italic glyphs. The reason", "range(len(tt)): c = tt[i] if escape: g = Glyph(font, c, self._size, styles) glyphs.append(", "= float(infoWidth[ac]) * factor # is spacing? smaller = 0.6 self.dy = 0.0", "of this class for each figure/context. \"\"\" def __init__(self): # load font data", "get figure fig = self.GetFigure() # get vertices if self._vertices1 is None: return", "x1 = x1 + g.width + self._charSpacing # store self._texCords = texCords self._vertices1", "textColor') if value != self._color: self._color = value self.Draw() return locals() @Property def", "of the text. 
\"\"\" def fget(self): return self._color def fset(self, value): value =", "y2 else: anchory = y1 + (y2-y1)/2.0 # apply anchor angle = self._angle", "if style.script: # super or subscript self.skewFactor *= smaller self.sizex = self.sizex *", "value): self._x = value return locals() @PropWithDraw def y(): \"\"\"Get/Set the y position", "c=='_': style = MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style =", "= ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts = {} def GetFont(self, fontname):", "self._x = value return locals() @PropWithDraw def y(): \"\"\"Get/Set the y position of", "Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None) A wobject representing a", "*= smaller self.sizex = self.sizex * smaller self.sizey = self.sizey * smaller self.width", "math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243,", "def _DrawText(self, x=0, y=0, z=0): # Translate if x or y or z:", "one or more charactes bold. * hello\\_there, a backslash escapes, thus keeping the", "vertices = self._vertices2#.copy() # init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data)", "position of edges in pixels. 
(taking angle into account) self._deltax = 0,0 self._deltay", "if value != self._valign: self._valign = value self._vertices2 = None # force recalculation", "anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self, Label): angle = -self._angle vertices[:,0]", "self.bold, self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='',", "# if style.italic and ac in info.charcodes_i: # # italic text # infoSize,", "x1 + g.sizex y2 = g.sizey #y2 = y1 - g.sizey dy =", "if not f: return font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = []", "the (new) BSD License. # The full license can be found in 'license.txt'.", "asci code and check it if isinstance(char, basestring): ac = ord(char) elif isinstance(char,", "offset in y direction (for sub/super scripts) * width specifies how much space", "return self._color def fset(self, value): value = getColor(value,'setting textColor') if value != self._color:", "not)) g = Glyph(font, c, self._size, styles+[style]) glyphs.append( g ) style = None", "return locals() @PropWithDraw def y(): \"\"\"Get/Set the y position of the text. \"\"\"", "recalculation self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set the color of the text.", "# Note: italic is now realized by printing it skewed rather using the", "# calculate width on screen, given the size factor = size / float(info.fontsize)", "= tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform", "# determine whether the text is vertical or horizontal halign, valign = self._halign,", "anchory = y1 elif self._valign > 0: anchory = y2 else: anchory =", "in degrees. 
\"\"\" def fget(self): return self._angle def fset(self, value): if value !=", "in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get italic", "= float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp, (y2-1) / tmp # Define", "x or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\"", "to the list if style: styles.append(style) style = None elif c=='}': # Remove", "(with new style (or not)) g = Glyph(font, c, self._size, styles+[style]) glyphs.append( g", "the text. \"\"\" def fget(self): return self._y def fset(self, value): self._y = value", "A glyph is a character. It is visualized by rendering the proper part", "self._angle if isinstance(self, Text): # Text is a wobject, so must be flipped", "# create glyph (with new style (or not)) g = Glyph(font, c, self._size,", "len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable", "box) with text inside. The fontname can be 'mono', 'sans' or 'serif'. If", "self._screenz = tuple(tmp) # make integer (to prevent glitchy behaviour), but not z!", "__init__(self, script=0, bold=False, italic=False): self.script = script self.bold = bold self.italic = italic", "the offset in y direction (for sub/super scripts) * width specifies how much", "style = None # build arrays with vertices and coordinates x1, y1, z", "certain position in the scene. The fontname can be 'mono', 'sans' or 'serif'.", "or hello_{there}, makes one or more charactes subscript. 
* hell\\io or hell\\i{ohoo}, makes", "leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor", "case greek letter is inserted): * alpha beta gamma delta * epsilon zeta", "dy represents the offset in y direction (for sub/super scripts) * width specifies", "the first is None) \"\"\" # get figure fig = self.GetFigure() # get", "for internal use self._screenx, self._screeny, self._screenz = 0, 0, 0 @PropWithDraw def x():", "approxeq ne in * leftarrow uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow", "y=0, z=0): # Translate if x or y or z: gl.glPushMatrix() gl.glTranslatef(x, y,", "= Glyph(font, c, self._size, styles+[style]) glyphs.append( g ) style = None # build", "# - fontsize of the font in the data array def __init__(self, font,", "case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB,", "isinstance(char, int): ac = char else: raise ValueError('To create a glyph, supply an", "for angle and alignment in _PositionText(). -> Produces _vertices1 (and is called when", "raise ValueError('halign must be an int or string.') value = int(value>0) - int(value<0)", "the x position of the text. 
\"\"\" def fget(self): return self._x def fset(self,", "angle if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1]", "font.Enable() # prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy() # init vertex and", "Label and Text, which are both able to produce a single line of", "and Text, which are both able to produce a single line of text", "= data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE,", "self._color def fset(self, value): value = getColor(value,'setting textColor') if value != self._color: self._color", "visualized by rendering the proper part from the texture stored in the Font", "\"\"\"Get/Set the x position of the text. \"\"\" def fget(self): return self._x def", "'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3,", "# Style to set for i in range(len(tt)): c = tt[i] if escape:", "if other is None: return self # set script script = other.script if", "data2[:,:,0] = 255 data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0],", "and coordinates x1, y1, z = 0, 0, 0 vertices = Pointset(3) texCords", "self.script # done return MiniStyle( script, self.bold or other.bold, self.italic or other.italic )", "returned, otherwise it is created and stored for reuse. \"\"\" if fontname in", "elif self._angle > 45: halign, valign = valign, -halign elif self._angle < -45:", "= None self._vertices2 = None @Property # Smart draw def text(): \"\"\"Get/Set the", "greek letter is inserted): * alpha beta gamma delta * epsilon zeta eta", "float(infoWidth[ac]) * factor # is spacing? 
smaller = 0.6 self.dy = 0.0 #", "arrays with vertices and coordinates x1, y1, z = 0, 0, 0 vertices", "vertices[:,0].max() else: x1, x2 = 0,0 y1, y2 = 0, self._xglyph.sizey # set", "Glyph(font, c, self._size, styles+[style]) glyphs.append( g ) style = None # build arrays", "name is ment as a verb. The vertices1 are corrected for angle and", "visvis.core.baseWibjects import Box escapes = { # upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393,", "array of origin 's # - an array of size's # - fontsize", "} if not value in tmp: raise ValueError('Invalid value for halign.') value =", "is returned, otherwise it is created and stored for reuse. \"\"\" if fontname", "* omega Note: In case one needs a character that is not in", "the size of the glyph. * dy represents the offset in y direction", "if isinstance(char, basestring): ac = ord(char) elif isinstance(char, int): ac = char else:", "that is None) \"\"\" # make invalid first self._Invalidate() # get font instance", "= x1 + (x2-x1)/2.0 # if self._valign < 0: anchory = y1 elif", "self.script, self.bold, self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\"", "in the font texture can be calculated. 
Also the relative vertices are calculated,", "value != self._text: self._text = value self._Invalidate() # force recalculation self.Draw() return locals()", "coords in the font texture self._vertices1 = None # the coords in screen", "texCords self._vertices1 = vertices def _PositionText(self, event=None): \"\"\" The name is ment as", "= int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class", "valign = self._halign, self._valign if self._angle > 135 or self._angle < -135: halign,", "lambda mu * nu xi omicron pi * rho varsigma sigma tau *", "ac = ord(char) elif isinstance(char, int): ac = char else: raise ValueError('To create", "else: anchory = y1 + (y2-y1)/2.0 # apply anchor angle = self._angle if", "with text inside. The fontname can be 'mono', 'sans' or 'serif'. If not", "glyph x1 = x1 + g.width + self._charSpacing # store self._texCords = texCords", "!= self._fontname: self._fontname = value self._Invalidate() # force recalculation self.Draw() return locals() @Property", "# get asci code and check it if isinstance(char, basestring): ac = ord(char)", "the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, parent, text='', x=0, y=0, z=0, fontname=None):", "styles: styles.pop() elif c=='^': style = MiniStyle(2) elif c=='_': style = MiniStyle(1) elif", "'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7,", "else: raise ValueError('halign must be an int or string.') value = int(value>0) -", "can be mixed): * hello^2 or hello^{there}, makes one or more charactes superscript.", "chi psi * omega Note: In case one needs a character that is", "'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, }", "other.italic ) def __repr__(self): tmp = self.script, self.bold, self.italic return '<MiniStyle script:%i, bold:%i,", "is drawn. \"\"\" self._texCords = None self._vertices1 = None self._vertices2 = None @Property", "font texture self._vertices1 = None # the coords in screen coordinates (raw) self._vertices2", "self._halign > 0: anchorx = x2 else: anchorx = x1 + (x2-x1)/2.0 #", "Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor * times cdot pm *", "Im null infty * int iint iiint forall * leq geq approx approxeq", "import OpenGL.GL as gl import OpenGL.GLU as glu import os import numpy as", "the glyphs are created if self._vertices1 is None or self._texCords is None: self._Compile()", "\"\"\"Get/Set the y position of the text. 
\"\"\" def fget(self): return self._y def", "y1, y2 = 0, self._xglyph.sizey # set anchor if self._halign < 0: anchorx", "keys, such that longer names are replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda", "# make space # default infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width #", "be only one instance of this class for each figure/context. \"\"\" def __init__(self):", "clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture and clean up if", "> 0: anchorx = w else: anchorx = w/2.0 # apply vertices[:,0] =", "glyphs glyphs = [] self._xglyph = Glyph(font, 'X', self._size) tt = self._text #", "gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class FontManager: \"\"\" FontManager() Manager of fonts. There should be", "or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0,", "-halign elif self._angle < -45: halign, valign = valign, halign # set anchor", "styles) glyphs.append( g ) escape = False elif c=='{': # Append style to", "= self.sizex * smaller self.sizey = self.sizey * smaller self.width = self.width *", "A wobject representing a string of characters. The text has a certain position", "given text. From these Glyphs the textureCords in the font texture can be", "< -45: halign, valign = valign, halign # set anchor y if valign", "135 or self._angle < -135: halign, valign = -halign, valign elif self._angle >", "that the text is recompiled the next time it is drawn. \"\"\" self._texCords", "@Property def textSpacing(): \"\"\"Get/Set the spacing between characters. 
\"\"\" def fget(self): return self._charSpacing", "from figure f = self.GetFigure() if not f: return font = f._fontManager.GetFont(self._fontname) #", "def fset(self, value): self._x = value return locals() @PropWithDraw def y(): \"\"\"Get/Set the", "italic=False): self.script = script self.bold = bold self.italic = italic def __add__(self, other):", "valign > 0: anchory = h else: anchory = h/2.0 # set anchor", "tmp else: raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None)", "script == 0: script = self.script # done return MiniStyle( script, self.bold or", "Move anchor in label if isinstance(self, Label): w,h = self.position.size # determine whether", "/ tmp, (y2-1) / tmp # Define skew factor to handle italics correctly", "size=12, styles=None) A glyph is a character. It is visualized by rendering the", "ac in info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth = ( info.size_b, info.origin_b,", "force recalculation self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set the spacing between characters.", "escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font object holds the texture", "# Find position in texture, normalized to texture coordinates x1 = infoOrigin[ac,0] x2", "tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) #", "- a string of charcodes # - an array of origin 's #", "False styles = [] style = None # Style to set for i", "coordinates x1, y1, z = 0, 0, 0 vertices = Pointset(3) texCords =", "Also the relative vertices are calculated, which are then corrected for angle and", "default infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width # should and can we", "angle of the text in degrees. \"\"\" def fget(self): return self._angle def fset(self,", "%i! 
\" % ord(char) ac = 32 # make space # default infoSize,", "g ) style = None # build arrays with vertices and coordinates x1,", "return locals() @Property def textColor(): \"\"\"Get/Set the color of the text. \"\"\" def", "the proper part from the texture stored in the Font object. * sizex", "alpha beta gamma delta * epsilon zeta eta theta * iota kappa lambda", "# sort the keys, such that longer names are replaced first escapesKeys =", "Class that represents the style of characters (sub/super script, bold, and italic. Used", "\"\"\" if fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname])", "vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): # Translate", "def fget(self): return self._text def fset(self, value): if value != self._text: self._text =", "style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape =", "texture coordinates \"\"\" # the font.info contains # - a string of charcodes", "textColor(): \"\"\"Get/Set the color of the text. \"\"\" def fget(self): return self._color def", "Additionally, it's now # possible to mix bold and italic text, and one", "# prepare for next glyph x1 = x1 + g.width + self._charSpacing #", "were given without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta']))", "style if styles: styles.pop() elif c=='^': style = MiniStyle(2) elif c=='_': style =", "locals() def _Compile(self): \"\"\" Create a series of glyphs from the given text.", "# store self._vertices2 = vertices # calculate edges (used by for example the", "* cos_angle) # Move anchor in label if isinstance(self, Label): w,h = self.position.size", "vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle - vertices[:,1] * sin_angle, vertices[:,0] *", "None, in which case the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, text='', fontname=None):", "int): ac = char else: raise ValueError('To create a glyph, supply an int", "info.size, info.origin, info.width # should and can we display in italic or bold?", "# init drawing data self._texCords = None # coords in the font texture", "\"\"\" import OpenGL.GL as gl import OpenGL.GLU as glu import os import numpy", "self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) )", "vertices[:,1] = vertices[:,1] + anchory # store self._vertices2 = vertices # calculate edges", "or ac > 255: print \"Warning: Cannot draw character %i! \" % ord(char)", "self._texCords = None # coords in the font texture self._vertices1 = None #", "prepare for next glyph x1 = x1 + g.width + self._charSpacing # store", "ValueError('To create a glyph, supply an int or character.') # do we have", "text has a certain position in the scene. The fontname can be 'mono',", "value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise ValueError('Invalid value for valign.') value", "object for the Text wobject and Label wibject. fontname may be 'mono', 'sans',", "fontname=None) A wobject representing a string of characters. 
The text has a certain", "a glyph, supply an int or character.') # do we have that char?", "1 \"\"\" def fget(self): return self._valign def fset(self, value): if isinstance(value, int): pass", "= value self._vertices2 = None # force recalculation self.Draw() return locals() def _Compile(self):", "following constructs (which can be mixed): * hello^2 or hello^{there}, makes one or", "not in this list, one can always look up its unicode value and", "get font instance from figure f = self.GetFigure() if not f: return font", "represents the offset in y direction (for sub/super scripts) * width specifies how", "= -halign, valign elif self._angle > 45: halign, valign = valign, -halign elif", "u0040 - u00bf alphabet * u00c0 - u037f latin * u0380 - u03ff", "bold and italic text, and one can make any supported # unicode character", "None # dito, but corrected for angle and alignment # relative position of", "= fontname self._color = (0,0,0) self._angle = 0 self._halign = -1 self._valign =", "so must be flipped on y axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1]", "Text(parent, text='', x=0, y=0, z=0, fontname=None) A wobject representing a string of characters.", "not escape for c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t',", "\"\"\"Get/Set the text to display. 
\"\"\" def fget(self): return self._text def fset(self, value):", "* Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor * times cdot", "valign.') value = tmp[value.lower()] else: raise ValueError('valign must be an int or string.')", "# lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9,", "g = Glyph(font, c, self._size, styles+[style]) glyphs.append( g ) style = None #", "in y direction (for sub/super scripts) * width specifies how much space there", "# calculate edges (used by for example the AxisLabel class) if vertices is", "list if style: styles.append(style) style = None elif c=='}': # Remove style if", "# append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1,", "MiniStyle() if styles: for style in styles: self.style += style style = self.style", "is used. \"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname)", "and check it if isinstance(char, basestring): ac = ord(char) elif isinstance(char, int): ac", "self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set the font type by its name.", "unicode value and use that instead. \"\"\" import OpenGL.GL as gl import OpenGL.GLU", "that contains all the characters. 
\"\"\" def __init__(self, info): TextureObject.__init__(self, 2) # store", "# double backslashes do not escape for c in escapesKeys: tt = tt.replace('\\\\'+c,", "font self.font = font info = self.font.info # get asci code and check", "# build arrays with vertices and coordinates x1, y1, z = 0, 0,", "case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B,", "# done return MiniStyle( script, self.bold or other.bold, self.italic or other.italic ) def", "0: anchorx = 0 elif halign > 0: anchorx = w else: anchorx", "familiar with Latex know what they do: * Re Im null infty *", "= MiniStyle() if styles: for style in styles: self.style += style style =", "__init__(self, text='', fontname=None): # init drawing data self._texCords = None # coords in", "text # Set and check fontname if fontname is None: fontname = visvis.settings.defaultFontName", "or self._texCords is None: self._Compile() if self._vertices2 is None: self._PositionText() # get font", "is None: return self # set script script = other.script if script ==", "used. \"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) #", "are calculated, which are then corrected for angle and alignment in _PositionText(). ->", "self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A", "@Property def fontName(): \"\"\"Get/Set the font type by its name. \"\"\" def fget(self):", "def fontSize(): \"\"\"Get/Set the size of the text. 
\"\"\" def fget(self): return self._size", "value != self._halign: self._halign = value self._vertices2 = None # force recalculation self.Draw()", "anchory # store self._vertices2 = vertices # calculate edges (used by for example", "# init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if", "style style = self.style # store font self.font = font info = self.font.info", "hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else: raise ValueError(\"Invalid", "recalculation self.Draw() return locals() @Property def valign(): \"\"\"Get/Set the vertical alignment. Specify as:", "of glyphs, take sub/super scripting into account. escape = False styles = []", "character that is not in this list, one can always look up its", "store self._vertices2 = vertices # calculate edges (used by for example the AxisLabel", "fig = self.GetFigure() if not fig: return font = fig._fontManager.GetFont(self._fontname) # enable texture", "'oslash':0x2298, } # sort the keys, such that longer names are replaced first", "= self._text # transform greek characters that were given without double backslash tt", "texture coordinates x1 = infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp = float(info.data.shape[1])", "# we need to know about position changes to update alignment self.eventPosition.Bind(self._PositionText) def", "len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0,", "visvis import ssdf from visvis.pypoints import Pointset # from visvis.core.baseTexture import TextureObject from", "# list of fonts self.fonts = {} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get", "+ infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp, (y2-1) /", "tmp: raise ValueError('Invalid value 
for valign.') value = tmp[value.lower()] else: raise ValueError('valign must", "value return locals() def OnDraw(self): # get screen position and store tmp =", "alignment. -> produces _vertices2 from _vertices1 (and is called when the first is", "style of characters (sub/super script, bold, and italic. Used when compiling the text.", "0,0 # store text self._text = text # Set and check fontname if", "self.Draw() return locals() @Property def valign(): \"\"\"Get/Set the vertical alignment. Specify as: *", "fontName(): \"\"\"Get/Set the font type by its name. \"\"\" def fget(self): return self._fontname", "check fontname if fontname is None: fontname = visvis.settings.defaultFontName fontname = fontname.lower() if", "font in the data array def __init__(self, font, char, size=12, styles=None): # unwind", "vertices[:,0] * cos_angle - vertices[:,1] * sin_angle, vertices[:,0] * sin_angle + vertices[:,1] *", "styles.append(style) style = None elif c=='}': # Remove style if styles: styles.pop() elif", "possible to mix bold and italic text, and one can make any supported", "OpenGL.GLU as glu import os import numpy as np import visvis from visvis", "self.GetFigure() if not fig: return font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() #", "= self.GetFigure() if not f: return font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs", "# apply anchor angle = self._angle if isinstance(self, Text): # Text is a", "= MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape = True", "fig: return font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare texCords =", "* s1 s2 t1 t2 represent texture coordinates \"\"\" # the font.info contains", "locals() @Property def halign(): \"\"\"Get/Set the horizontal alignment. Specify as: * 'left', 'center',", "to make it an alpha map. 
\"\"\" # Add lumincance channel data2 =", "self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set the size of the text. \"\"\"", "otherwise it is created and stored for reuse. \"\"\" if fontname in self.fonts:", "self.sizey = self.sizey * smaller self.width = self.width * smaller#- self.sizex * (1.0-smaller)", "global text size property vertices *= fig._relativeFontSize # obtain dimensions if len(vertices): x1,", "is a character. It is visualized by rendering the proper part from the", "pass elif isinstance(value, basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not", "Font(TextureObject): \"\"\" Font(info) A Font object holds the texture that contains all the", "constructs (which can be mixed): * hello^2 or hello^{there}, makes one or more", "self._size = 9 self._fontname = fontname self._color = (0,0,0) self._angle = 0 self._halign", "into account. escape = False styles = [] style = None # Style", "of the text. \"\"\" def fget(self): return self._z def fset(self, value): self._z =", "# set anchor if self._halign < 0: anchorx = x1 elif self._halign >", "should be only one instance of this class for each figure/context. \"\"\" def", "0, 0 @PropWithDraw def x(): \"\"\"Get/Set the x position of the text. \"\"\"", "y2 = 0, self._xglyph.sizey # set anchor if self._halign < 0: anchorx =", "in pixels. 
(taking angle into account) self._deltax = 0,0 self._deltay = 0,0 #", "'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the keys, such that longer", "\"\"\" def fget(self): return self._text def fset(self, value): if value != self._text: self._text", "* nu xi omicron pi * rho varsigma sigma tau * upsilon phi", "if fontname in self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname]", "= size / float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey = infoSize[ac,1] *", "int(value>0) - int(value<0) if value != self._halign: self._halign = value self._vertices2 = None", "and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape = True else: # create glyph", "else: anchorx = w/2.0 # apply vertices[:,0] = vertices[:,0] + anchorx vertices[:,1] =", "self._color = (0,0,0) self._angle = 0 self._halign = -1 self._valign = 0 self._charSpacing", "# Move anchor in label if isinstance(self, Label): w,h = self.position.size # determine", "valign = valign, -halign elif self._angle < -45: halign, valign = valign, halign", "x2 = x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1) /", "italics correctly self.skewFactor = 0.0 if style.italic: self.skewFactor = 0.5 # calculate width", "'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math", "numpy as np import visvis from visvis import ssdf from visvis.pypoints import Pointset", "case the vv.settings.defaultFontName is used. \"\"\" def __init__(self, text='', fontname=None): # init drawing", "clean up if x or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class", "type by its name. 
\"\"\" def fget(self): return self._fontname def fset(self, value): if", "* Re Im null infty * int iint iiint forall * leq geq", "isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 }", "and alignment. -> produces _vertices2 from _vertices1 (and is called when the first", "< 0: anchorx = 0 elif halign > 0: anchorx = w else:", "MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1] in", "are several escape sequences for (mathematical) characters that can be inserted using the", "direction (for sub/super scripts) * width specifies how much space there should be", "+ (x2-x1)/2.0 # if self._valign < 0: anchory = y1 elif self._valign >", "when the first is None) \"\"\" # get figure fig = self.GetFigure() #", "# - an array of size's # - fontsize of the font in", "self._charSpacing = value self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontSize():", "return self._text def fset(self, value): if value != self._text: self._text = value self._Invalidate()", "*= fig._relativeFontSize # obtain dimensions if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else:", "info.charcodes:#ac < 32 or ac > 255: print \"Warning: Cannot draw character %i!", "can we display in italic or bold? # Note: italic is now realized", "'sans', 'serif']: raise ValueError('Invalid font name.') # more properties self._size = 9 self._fontname", "style = MiniStyle(2) elif c=='_': style = MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True)", "# make integer (to prevent glitchy behaviour), but not z! 
self._screenx = int(self._screenx+0.5)", "of fonts self.fonts = {} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font", "+ anchory # store self._vertices2 = vertices # calculate edges (used by for", "when that is None) \"\"\" # make invalid first self._Invalidate() # get font", "# set anchor x if halign < 0: anchorx = 0 elif halign", "theta * iota kappa lambda mu * nu xi omicron pi * rho", "downarrow * Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor * times", "fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy()", "if value != self._fontname: self._fontname = value self._Invalidate() # force recalculation self.Draw() return", "from visvis.core.misc import Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras", "self._deltax = 0,0 self._deltay = 0,0 # store text self._text = text #", "1 \"\"\" def fget(self): return self._halign def fset(self, value): if isinstance(value, int): pass", "infoWidth = ( info.size_b, info.origin_b, info.width_b) # Find position in texture, normalized to", "font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None)", "= other.script if script == 0: script = self.script # done return MiniStyle(", "map. \"\"\" # Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255", "of fonts. There should be only one instance of this class for each", "size's # - fontsize of the font in the data array def __init__(self,", "one or more charactes italic. 
* hell\\bo or hell\\b{ohoo}, makes one or more", "= ( # info.size_i, info.origin_i, info.width_i) if style.bold and ac in info.charcodes_b: #", "= self._vertices1.copy() # scale text according to global text size property vertices *=", "instance from figure fig = self.GetFigure() if not fig: return font = fig._fontManager.GetFont(self._fontname)", "textRender For rendering text in visvis. Defines a wibject and a wobject: Label", "greek characters that were given without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt", "of the text. \"\"\" def fget(self): return self._x def fset(self, value): self._x =", "__init__(self, parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) #", "\"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no", "self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload to make it an alpha map.", "'s # - an array of size's # - fontsize of the font", "'serif'. If not given, the vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='',", "__init__(self): # load font data path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) #", "set script script = other.script if script == 0: script = self.script #", "pixels. (taking angle into account) self._deltax = 0,0 self._deltay = 0,0 # store", "tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) # make integer", "'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d,", "g.sizey #y2 = y1 - g.sizey dy = g.dy # append texture coordinates", "and alignment # relative position of edges in pixels. (taking angle into account)", "calculate edges (used by for example the AxisLabel class) if vertices is not", "BSD License. 
# The full license can be found in 'license.txt'. \"\"\" Module", "print \"Warning: Cannot draw character %i! \" % ord(char) ac = 32 #", "can be calculated. Also the relative vertices are calculated, which are then corrected", "- u03ff greek * u2000 - u23ff symbols There are several escape sequences", "if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0 y1,", "= h/2.0 # set anchor x if halign < 0: anchorx = 0", "hello_2 or hello_{there}, makes one or more charactes subscript. * hell\\io or hell\\i{ohoo},", "Text, which are both able to produce a single line of text oriented", "= ord(char) elif isinstance(char, int): ac = char else: raise ValueError('To create a", "1 def _Invalidate(self): \"\"\" Invalidate this object, such that the text is recompiled", "position in texture, normalized to texture coordinates x1 = infoOrigin[ac,0] x2 = x1", "infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1)", "tt = tt.replace('\\b', '\\x07') # build list of glyphs, take sub/super scripting into", "vertices[:,0] * sin_angle + vertices[:,1] * cos_angle) # Move anchor in label if", "fontname=None) Base object for the Text wobject and Label wibject. fontname may be", "\"\"\" def __init__(self): # load font data path = getResourceDir() self.s = ssdf.load(os.path.join(path,", "screen coordinates (raw) self._vertices2 = None # dito, but corrected for angle and", "(for sub/super scripts) * width specifies how much space there should be before", "fset(self, value): if value != self._fontname: self._fontname = value self._Invalidate() # force recalculation", "= float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp, (x2-1) / tmp y1 =", "\"\"\" The name is ment as a verb. 
The vertices1 are corrected for", "self._vertices1 = None self._vertices2 = None @Property # Smart draw def text(): \"\"\"Get/Set", "self._xglyph.sizey # set anchor if self._halign < 0: anchorx = x1 elif self._halign", "backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho']))", "width on screen, given the size factor = size / float(info.fontsize) self.sizex =", "self.width = self.width * smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False,", "= 9 self._fontname = fontname self._color = (0,0,0) self._angle = 0 self._halign =", "(used by for example the AxisLabel class) if vertices is not None and", "italic is now realized by printing it skewed rather using the # italic", "get screen position and store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz", "texture stored in the Font object. * sizex and sizey represent the size", "BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject (inherits from box) with text inside.", "y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) # prepare", "array of size's # - fontsize of the font in the data array", "None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self,", "'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1,", "supported # unicode character italic. 
# if style.italic and ac in info.charcodes_i: #", "Box escapes = { # upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395,", "* hell\\bo or hell\\b{ohoo}, makes one or more charactes bold. * hello\\_there, a", "{0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False): self.script = script self.bold", "tmp, (y2-1) / tmp # Define skew factor to handle italics correctly self.skewFactor", "full license can be found in 'license.txt'. \"\"\" Module textRender For rendering text", "c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape = True else: # create", "'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the keys, such that", "styles = [] style = None # Style to set for i in", "Remove style if styles: styles.pop() elif c=='^': style = MiniStyle(2) elif c=='_': style", "self._screenz = 0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set the x position of", "Latex know what they do: * Re Im null infty * int iint", "Note: In case one needs a character that is not in this list,", "= w else: anchorx = w/2.0 # apply vertices[:,0] = vertices[:,0] + anchorx", "value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in tmp: raise ValueError('Invalid value for", "y1 + (y2-y1)/2.0 # apply anchor angle = self._angle if isinstance(self, Text): #", "'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the keys, such", "------------------ Characters are available for the following unicode sets: * u0020 - u003f", "elif c=='^': style = MiniStyle(2) elif c=='_': style = MiniStyle(1) elif c=='\\x06': style", "behaviour), but not z! 
self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText(", "f: return font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = [] self._xglyph =", "script self.dy = (1-smaller) * self.sizey if style.script: # super or subscript self.skewFactor", "a character. It is visualized by rendering the proper part from the texture", "tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt", "this glyph self.style = MiniStyle() if styles: for style in styles: self.style +=", "getColor # from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box escapes = {", "Pointset(2) for g in glyphs: x2 = x1 + g.sizex y2 = g.sizey", "textAngle(): \"\"\"Get/Set the angle of the text in degrees. \"\"\" def fget(self): return", "tt.replace(r'\\\\', '\\t') # double backslashes do not escape for c in escapesKeys: tt", "def y(): \"\"\"Get/Set the y position of the text. \"\"\" def fget(self): return", "'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1,", "'sans', 'serif' or None, in which case the vv.settings.defaultFontName is used. \"\"\" def", "= data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE,", "info.width # should and can we display in italic or bold? # Note:", "space there should be before the next char * s1 s2 t1 t2", "and italic. Used when compiling the text. 
script = {0:'normal', 1:'sub', 2:'super'} \"\"\"", "rho varsigma sigma tau * upsilon phi chi psi * omega Note: In", "float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp, (y2-1) / tmp # Define skew", "self._valign = value self._vertices2 = None # force recalculation self.Draw() return locals() def", "style.bold and ac in info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth = (", "# Translate if x or y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) #", "if styles: styles.pop() elif c=='^': style = MiniStyle(2) elif c=='_': style = MiniStyle(1)", "{'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp: raise ValueError('Invalid value for halign.') value", "= 1 def _Invalidate(self): \"\"\" Invalidate this object, such that the text is", "vertices def _PositionText(self, event=None): \"\"\" The name is ment as a verb. The", "self._valign < 0: anchory = y1 elif self._valign > 0: anchory = y2", "otimes oslash Letters from the greek alfabet can be inserted in the same", "Label): angle = -self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] -", "Pointset(3) texCords = Pointset(2) for g in glyphs: x2 = x1 + g.sizex", "# make sure the glyphs are created if self._vertices1 is None or self._texCords", "* hello\\_there, a backslash escapes, thus keeping the _^ or \\ after it.", "< -135: halign, valign = -halign, valign elif self._angle > 45: halign, valign", "script self.bold = bold self.italic = italic def __add__(self, other): # allow None", "of the glyph. * dy represents the offset in y direction (for sub/super", "self._vertices2 = None # force recalculation self.Draw() return locals() def _Compile(self): \"\"\" Create", "# force recalculation self.Draw() return locals() @Property # Smart draw def textAngle(): \"\"\"Get/Set", "self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box Box.OnDraw(self) # Draw the text self._DrawText()", "and one can make any supported # unicode character italic. 
# if style.italic", "'delta':0x03B4, 'epsilon':0x03B5, 'zeta':0x03B6, 'eta':0x03B7, 'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0,", "= -(vertices[:,1] - anchory) elif isinstance(self, Label): angle = -self._angle vertices[:,0] = vertices[:,0]", "fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1}", "value): if value != self._charSpacing: self._charSpacing = value self._Invalidate() # force recalculation self.Draw()", "self.Draw() return locals() @Property # Smart draw def textAngle(): \"\"\"Get/Set the angle of", "def fset(self, value): if value != self._fontname: self._fontname = value self._Invalidate() # force", "starting the name with an uppercase letter, the corresponding upper case greek letter", "} # sort the keys, such that longer names are replaced first escapesKeys", "anchory # apply angle if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle =", "License. # The full license can be found in 'license.txt'. \"\"\" Module textRender", "(1-smaller) * self.sizey if style.script: # super or subscript self.skewFactor *= smaller self.sizex", "font texture can be calculated. Also the relative vertices are calculated, which are", "Produces _vertices1 (and is called when that is None) \"\"\" # make invalid", "self._vertices2#.copy() # init vertex and texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw", "text. 
\"\"\" def fget(self): return self._z def fset(self, value): self._z = value return", "is None) \"\"\" # get figure fig = self.GetFigure() # get vertices if", "def fget(self): return self._valign def fset(self, value): if isinstance(value, int): pass elif isinstance(value,", "the following constructs (which can be mixed): * hello^2 or hello^{there}, makes one", "> 0: anchory = y2 else: anchory = y1 + (y2-y1)/2.0 # apply", "fset(self, value): value = getColor(value,'setting textColor') if value != self._color: self._color = value", "(y2-1) / tmp # Define skew factor to handle italics correctly self.skewFactor =", "value for halign.') value = tmp[value.lower()] else: raise ValueError('halign must be an int", "self.Draw() return locals() def _Compile(self): \"\"\" Create a series of glyphs from the", "def __repr__(self): tmp = self.script, self.bold, self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>' %", "def _Invalidate(self): \"\"\" Invalidate this object, such that the text is recompiled the", "self.width * smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class", "\"\"\" def fget(self): return self._charSpacing def fset(self, value): if value != self._charSpacing: self._charSpacing", "(or not)) g = Glyph(font, c, self._size, styles+[style]) glyphs.append( g ) style =", "psi * omega Note: In case one needs a character that is not", "vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self, Label):", "inserted using the backslash (for example '\\infty'). 
People familiar with Latex know what", "* epsilon zeta eta theta * iota kappa lambda mu * nu xi", "script:%i, bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for", "the AxisLabel class) if vertices is not None and len(vertices): self._deltax = vertices[:,0].min(),", "gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP) class", "the # italic glyphs. The reason is that when using the texture one", "'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297,", "in _PositionText(). -> Produces _vertices1 (and is called when that is None) \"\"\"", "'mono', 'sans' or 'serif'. If not given, the vv.settings.defaultFontName is used. \"\"\" def", "fset(self, value): if value != self._text: self._text = value self._Invalidate() # force recalculation", "from the greek alfabet can be inserted in the same way (By starting", "self # set script script = other.script if script == 0: script =", "the font texture self._vertices1 = None # the coords in screen coordinates (raw)", "0: anchorx = x2 else: anchorx = x1 + (x2-x1)/2.0 # if self._valign", "c, self._size, styles) glyphs.append( g ) escape = False elif c=='{': # Append", "int or string.') value = int(value>0) - int(value<0) if value != self._halign: self._halign", "of edges in pixels. 
(taking angle into account) self._deltax = 0,0 self._deltay =", "data self._texCords = None # coords in the font texture self._vertices1 = None", "'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8,", "if self._valign < 0: anchory = y1 elif self._valign > 0: anchory =", "- vertices[:,1] * sin_angle, vertices[:,0] * sin_angle + vertices[:,1] * cos_angle) # Move", "class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A glyph is a character. It", "and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() #", "gamma delta * epsilon zeta eta theta * iota kappa lambda mu *", "backslash escapes, thus keeping the _^ or \\ after it. Special characters ------------------", "need to know about position changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): #", "scale text according to global text size property vertices *= fig._relativeFontSize # obtain", "data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1)", "data2[:,:,1] = data shape = data.shape gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA,", "omicron pi * rho varsigma sigma tau * upsilon phi chi psi *", "int(value<0) if value != self._halign: self._halign = value self._vertices2 = None # force", "self.GetFigure() if not f: return font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs =", "Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject (inherits from box) with text", "Downarrow * leftceil rightceil leftfloor rightfloor * times cdot pm * oplus ominus", "are both able to produce a single line of text oriented at a", "one or more charactes superscript. 
* hello_2 or hello_{there}, makes one or more", "fontSize(): \"\"\"Get/Set the size of the text. \"\"\" def fget(self): return self._size def", "z! self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz)", "* factor self.width = float(infoWidth[ac]) * factor # is spacing? smaller = 0.6", "font information self.info = info # set data self.SetData(self.info.data) def _UploadTexture(self, data, *args):", "that represents the style of characters (sub/super script, bold, and italic. Used when", "0: anchory = 0 elif valign > 0: anchory = h else: anchory", "= None # build arrays with vertices and coordinates x1, y1, z =", "self._vertices2 = vertices # calculate edges (used by for example the AxisLabel class)", "glyph self.style = MiniStyle() if styles: for style in styles: self.style += style", "channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data shape =", "text is vertical or horizontal halign, valign = self._halign, self._valign if self._angle >", "= None self._vertices1 = None self._vertices2 = None @Property # Smart draw def", "Translate if x or y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make", "the font type by its name. \"\"\" def fget(self): return self._fontname def fset(self,", "= 0, 0, 0 vertices = Pointset(3) texCords = Pointset(2) for g in", "fget(self): return self._z def fset(self, value): self._z = value return locals() def OnDraw(self):", "infoSize[ac,0] * factor self.sizey = infoSize[ac,1] * factor self.width = float(infoWidth[ac]) * factor", "* -1, 0, 1 \"\"\" def fget(self): return self._valign def fset(self, value): if", "from box) with text inside. 
The fontname can be 'mono', 'sans' or 'serif'.", "# upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399,", "glyph is a character. It is visualized by rendering the proper part from", "None) \"\"\" # make invalid first self._Invalidate() # get font instance from figure", "set anchor y if valign < 0: anchory = 0 elif valign >", "ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A glyph is", "0, 1 \"\"\" def fget(self): return self._halign def fset(self, value): if isinstance(value, int):", "gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2])", "with Latex know what they do: * Re Im null infty * int", "not value in tmp: raise ValueError('Invalid value for halign.') value = tmp[value.lower()] else:", "= (1-smaller) * self.sizey if style.script: # super or subscript self.skewFactor *= smaller", "halign.') value = tmp[value.lower()] else: raise ValueError('halign must be an int or string.')", "sort the keys, such that longer names are replaced first escapesKeys = escapes.keys()", "and bold modifiers tt = tt.replace('\\i', '\\x06') # just use some char that", "bold modifiers tt = tt.replace('\\i', '\\x06') # just use some char that is", "Label): w,h = self.position.size # determine whether the text is vertical or horizontal", "2) # store font information self.info = info # set data self.SetData(self.info.data) def", "return locals() @Property def fontSize(): \"\"\"Get/Set the size of the text. \"\"\" def", "g.t2) # set skewing for position skew = self._size * g.skewFactor # append", "given, the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent)", "if self._vertices1 is None or self._texCords is None: self._Compile() if self._vertices2 is None:", "create a glyph, supply an int or character.') # do we have that", "size factor = size / float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey =", "for angle and alignment # relative position of edges in pixels. (taking angle", "get italic and bold modifiers tt = tt.replace('\\i', '\\x06') # just use some", "[] style = None # Style to set for i in range(len(tt)): c", "up its unicode value and use that instead. \"\"\" import OpenGL.GL as gl", "TextureObject from visvis.core.base import Wobject from visvis.core.misc import Property, PropWithDraw from visvis.core.misc import", "corrected for angle and alignment # relative position of edges in pixels. (taking", "255: print \"Warning: Cannot draw character %i! \" % ord(char) ac = 32", "return font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = [] self._xglyph = Glyph(font,", "style = None elif c=='}': # Remove style if styles: styles.pop() elif c=='^':", "a string of charcodes # - an array of origin 's # -", "look up its unicode value and use that instead. \"\"\" import OpenGL.GL as", "= np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle - vertices[:,1] * sin_angle,", "# Append style to the list if style: styles.append(style) style = None elif", "on screen, given the size factor = size / float(info.fontsize) self.sizex = infoSize[ac,0]", "def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font instance. If that font was", "Create a series of glyphs from the given text. 
From these Glyphs the", "ValueError('halign must be an int or string.') value = int(value>0) - int(value<0) if", "self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0):", "* iota kappa lambda mu * nu xi omicron pi * rho varsigma", "np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle - vertices[:,1]", "= 0.0 # normal script if style.script == 1: # sub script self.dy", "value return locals() @PropWithDraw def y(): \"\"\"Get/Set the y position of the text.", "if not value in tmp: raise ValueError('Invalid value for halign.') value = tmp[value.lower()]", "to global text size property vertices *= fig._relativeFontSize # obtain dimensions if len(vertices):", "x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) # store coordinates self._x,", "text. script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False): self.script", "the color of the text. \"\"\" def fget(self): return self._color def fset(self, value):", "script script = other.script if script == 0: script = self.script # done", "t2 represent texture coordinates \"\"\" # the font.info contains # - a string", "is spacing? 
smaller = 0.6 self.dy = 0.0 # normal script if style.script", "self._size, styles+[style]) glyphs.append( g ) style = None # build arrays with vertices", "info.charcodes_i: # # italic text # infoSize, infoOrigin, infoWidth = ( # info.size_i,", "self.sizey * smaller self.width = self.width * smaller#- self.sizex * (1.0-smaller) class MiniStyle:", "# some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd,", "# from visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box escapes = { #", "name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A glyph is a character.", "= np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle -", "make space # default infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width # should", "fget(self): return self._valign def fset(self, value): if isinstance(value, int): pass elif isinstance(value, basestring):", "_Compile(self): \"\"\" Create a series of glyphs from the given text. From these", "self.style # store font self.font = font info = self.font.info # get asci", "for c in escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') #", "Glyph(font, c, self._size, styles) glyphs.append( g ) escape = False elif c=='{': #", "dimensions if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2 = 0,0", "more properties self._size = 9 self._fontname = fontname self._color = (0,0,0) self._angle =", "italic. Used when compiling the text. 
script = {0:'normal', 1:'sub', 2:'super'} \"\"\" def", "def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no edge", "self.fonts: return self.fonts[fontname] elif hasattr(self.s, fontname): tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return", "class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject (inherits from box) with", "# get italic and bold modifiers tt = tt.replace('\\i', '\\x06') # just use", "as gl import OpenGL.GLU as glu import os import numpy as np import", "font.info contains # - a string of charcodes # - an array of", "= value self._Invalidate() # force recalculation self.Draw() return locals() @Property # Smart draw", "varsigma sigma tau * upsilon phi chi psi * omega Note: In case", "x1 + (x2-x1)/2.0 # if self._valign < 0: anchory = y1 elif self._valign", "int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box,", "value != self._size: self._size = value self._Invalidate() # force recalculation self.Draw() return locals()", "def fget(self): return self._halign def fset(self, value): if isinstance(value, int): pass elif isinstance(value,", "* leftceil rightceil leftfloor rightfloor * times cdot pm * oplus ominus otimes", "# unwind the style for this glyph self.style = MiniStyle() if styles: for", "the next time it is drawn. \"\"\" self._texCords = None self._vertices1 = None", "the terms of the (new) BSD License. 
# The full license can be", "can be inserted in the same way (By starting the name with an", "# -*- coding: utf-8 -*- # Copyright (C) 2012, <NAME> # # Visvis", "draw if self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0,", "account) self._deltax = 0,0 self._deltay = 0,0 # store text self._text = text", "without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt =", "the font in the data array def __init__(self, font, char, size=12, styles=None): #", "If not given, the vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='', x=0,", "if self._vertices2 is None: self._PositionText() # get font instance from figure fig =", "= Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else: raise ValueError(\"Invalid font name.\") class", "hell\\i{ohoo}, makes one or more charactes italic. * hell\\bo or hell\\b{ohoo}, makes one", "x1 = infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2", "Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor * times cdot pm * oplus", "letter, the corresponding upper case greek letter is inserted): * alpha beta gamma", "visvis.core.base import Wobject from visvis.core.misc import Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor", "neighbouring characters. 
Additionally, it's now # possible to mix bold and italic text,", "tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp, (y2-1) / tmp #", "0 vertices = Pointset(3) texCords = Pointset(2) for g in glyphs: x2 =", "info.origin_i, info.width_i) if style.bold and ac in info.charcodes_b: # bold text infoSize, infoOrigin,", "'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300,", "anchory = h else: anchory = h/2.0 # set anchor x if halign", "list, one can always look up its unicode value and use that instead.", "'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9,", "string tt = tt.replace('\\b', '\\x07') # build list of glyphs, take sub/super scripting", "else: anchory = h/2.0 # set anchor x if halign < 0: anchorx", "class) if vertices is not None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay", "leftceil rightceil leftfloor rightfloor * times cdot pm * oplus ominus otimes oslash", "# # italic text # infoSize, infoOrigin, infoWidth = ( # info.size_i, info.origin_i,", "!= self._text: self._text = value self._Invalidate() # force recalculation self.Draw() return locals() @Property", "or more charactes subscript. * hell\\io or hell\\i{ohoo}, makes one or more charactes", "set the size) self.position = 10,10,100,16 # we need to know about position", "self.fonts = {} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font instance. 
If", "= self.sizey * smaller self.width = self.width * smaller#- self.sizex * (1.0-smaller) class", "given the size factor = size / float(info.fontsize) self.sizex = infoSize[ac,0] * factor", "depthToZ from visvis.core.baseWibjects import Box escapes = { # upper case greek 'Alpha':0x0391,", "know about position changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the", "both able to produce a single line of text oriented at a certain", "x2 = x1 + g.sizex y2 = g.sizey #y2 = y1 - g.sizey", "an array of size's # - fontsize of the font in the data", "self.fonts[fontname] = tmp return tmp else: raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\"", "self._screeny, self._screenz = 0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set the x position", "double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho',", "draw def text(): \"\"\"Get/Set the text to display. \"\"\" def fget(self): return self._text", "# gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR", "styles.pop() elif c=='^': style = MiniStyle(2) elif c=='_': style = MiniStyle(1) elif c=='\\x06':", "= MiniStyle(2) elif c=='_': style = MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif", "= info # set data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload to", "self._z = value return locals() def OnDraw(self): # get screen position and store", "import Property, PropWithDraw from visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras import depthToZ", "self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents the", "drawn. \"\"\" self._texCords = None self._vertices1 = None self._vertices2 = None @Property #", "time it is drawn. 
\"\"\" self._texCords = None self._vertices1 = None self._vertices2 =", "not z! self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny,", "self._screenx, self._screeny, self._screenz = 0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set the x", "= 0.6 self.dy = 0.0 # normal script if style.script == 1: #", "if value != self._angle: self._angle = value self._vertices2 = None # force recalculation", "def fset(self, value): self._y = value return locals() @PropWithDraw def z(): \"\"\"Get/Set the", "to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box Box.OnDraw(self) # Draw", "has a certain position in the scene. The fontname can be 'mono', 'sans'", "do: * Re Im null infty * int iint iiint forall * leq", "a wobject: Label and Text, which are both able to produce a single", "self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate this object, such that the text", "for the following unicode sets: * u0020 - u003f numbers * u0040 -", "None # force recalculation self.Draw() return locals() @Property def valign(): \"\"\"Get/Set the vertical", "0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1 =", "'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295,", "anchory = y1 + (y2-y1)/2.0 # apply anchor angle = self._angle if isinstance(self,", "0, 0 vertices = Pointset(3) texCords = Pointset(2) for g in glyphs: x2", "from figure fig = self.GetFigure() if not fig: return font = fig._fontManager.GetFont(self._fontname) #", "self.style += style style = 
self.style # store font self.font = font info", "= 0.0 if style.italic: self.skewFactor = 0.5 # calculate width on screen, given", "given, the vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='', x=0, y=0, z=0,", "escape sequences for (mathematical) characters that can be inserted using the backslash (for", "or hell\\i{ohoo}, makes one or more charactes italic. * hell\\bo or hell\\b{ohoo}, makes", "= Pointset(2) for g in glyphs: x2 = x1 + g.sizex y2 =", "= self._size * g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z)", "Note: italic is now realized by printing it skewed rather using the #", "the same way (By starting the name with an uppercase letter, the corresponding", "valign = -halign, valign elif self._angle > 45: halign, valign = valign, -halign", "Text): # Text is a wobject, so must be flipped on y axis", "= infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 =", "the next char * s1 s2 t1 t2 represent texture coordinates \"\"\" #", "self.info = info # set data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload", "# force recalculation self.Draw() return locals() @Property def valign(): \"\"\"Get/Set the vertical alignment.", "= valign, -halign elif self._angle < -45: halign, valign = valign, halign #", "vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] - anchory) elif isinstance(self, Label): angle =", "= None # force recalculation self.Draw() return locals() def _Compile(self): \"\"\" Create a", "text to display. \"\"\" def fget(self): return self._text def fset(self, value): if value", "self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontSize(): \"\"\"Get/Set the size", "return self._x def fset(self, value): self._x = value return locals() @PropWithDraw def y():", "y(): \"\"\"Get/Set the y position of the text. 
\"\"\" def fget(self): return self._y", "uparrow rightarrow downarrow * Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor", "'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111,", "Smart draw def text(): \"\"\"Get/Set the text to display. \"\"\" def fget(self): return", "would # see artifacts from neighbouring characters. Additionally, it's now # possible to", "and store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) #", "charactes italic. * hell\\bo or hell\\b{ohoo}, makes one or more charactes bold. *", "font name.') # more properties self._size = 9 self._fontname = fontname self._color =", "'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F,", "return MiniStyle( script, self.bold or other.bold, self.italic or other.italic ) def __repr__(self): tmp", "or other.italic ) def __repr__(self): tmp = self.script, self.bold, self.italic return '<MiniStyle script:%i,", "tt.replace('\\t', r'\\\\') # get italic and bold modifiers tt = tt.replace('\\i', '\\x06') #", "self._angle < -45: halign, valign = valign, halign # set anchor y if", "names are replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\"", "raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A glyph", "instance. 
If that font was created earlier, that font is returned, otherwise it", "ValueError('valign must be an int or string.') value = int(value>0) - int(value<0) if", "data2) tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2)", "to set for i in range(len(tt)): c = tt[i] if escape: g =", "kappa lambda mu * nu xi omicron pi * rho varsigma sigma tau", "some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e, 'int':0x222b, 'iint':0x222c, 'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248,", "== 0: script = self.script # done return MiniStyle( script, self.bold or other.bold,", "draw def textAngle(): \"\"\"Get/Set the angle of the text in degrees. \"\"\" def", "used. \"\"\" def __init__(self, parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self,", "allow None if other is None: return self # set script script =", "self._angle = 0 self._halign = -1 self._valign = 0 self._charSpacing = 1 def", "z(): \"\"\"Get/Set the z position of the text. \"\"\" def fget(self): return self._z", "depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject (inherits from", "return locals() @Property def fontName(): \"\"\"Get/Set the font type by its name. 
\"\"\"", "tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other", "else: raise ValueError('valign must be an int or string.') value = int(value>0) -", "or y or z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent,", "glyphs.append( g ) style = None # build arrays with vertices and coordinates", "= tmp[value.lower()] else: raise ValueError('valign must be an int or string.') value =", "import depthToZ from visvis.core.baseWibjects import Box escapes = { # upper case greek", "obtain dimensions if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1, x2 =", "texture that contains all the characters. \"\"\" def __init__(self, info): TextureObject.__init__(self, 2) #", "char, size=12, styles=None) A glyph is a character. It is visualized by rendering", "text in degrees. \"\"\" def fget(self): return self._angle def fset(self, value): if value", "more charactes italic. * hell\\bo or hell\\b{ohoo}, makes one or more charactes bold.", "'left', 'center', 'right' * -1, 0, 1 \"\"\" def fget(self): return self._halign def", "- int(value<0) if value != self._halign: self._halign = value self._vertices2 = None #", "def fset(self, value): value = getColor(value,'setting textColor') if value != self._color: self._color =", "The vertices1 are corrected for angle and alignment. -> produces _vertices2 from _vertices1", "wibject. 
fontname may be 'mono', 'sans', 'serif' or None, in which case the", "People familiar with Latex know what they do: * Re Im null infty", "fontname = visvis.settings.defaultFontName fontname = fontname.lower() if fontname not in ['mono', 'sans', 'serif']:", "flipped on y axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] -", "anchorx vertices[:,1] = vertices[:,1] + anchory # store self._vertices2 = vertices # calculate", "basestring): value = value.lower() tmp = {'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp:", "* u00c0 - u037f latin * u0380 - u03ff greek * u2000 -", "vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): # Translate if x or y or", "in styles: self.style += style style = self.style # store font self.font =", "that were given without double backslash tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta',", "\"\"\" BaseText(text='', fontname=None) Base object for the Text wobject and Label wibject. fontname", "for halign.') value = tmp[value.lower()] else: raise ValueError('halign must be an int or", "glitchy behaviour), but not z! self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self):", "# info.size_i, info.origin_i, info.width_i) if style.bold and ac in info.charcodes_b: # bold text", "= self._halign, self._valign if self._angle > 135 or self._angle < -135: halign, valign", "glyphs, take sub/super scripting into account. 
escape = False styles = [] style", "y2+dy, z) vertices.append(x1, y2+dy, z) # prepare for next glyph x1 = x1", "> 45: halign, valign = valign, -halign elif self._angle < -45: halign, valign", "data path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts", "= 0 self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate this object, such that", "if value != self._text: self._text = value self._Invalidate() # force recalculation self.Draw() return", "if style.script == 1: # sub script self.dy = (1-smaller) * self.sizey if", "reason is that when using the texture one would # see artifacts from", "done return MiniStyle( script, self.bold or other.bold, self.italic or other.italic ) def __repr__(self):", "glu import os import numpy as np import visvis from visvis import ssdf", "FontManager() Manager of fonts. There should be only one instance of this class", "the coords in screen coordinates (raw) self._vertices2 = None # dito, but corrected", "vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] + anchory # store self._vertices2 = vertices", "self._screenx, self._screeny, self._screenz = tuple(tmp) # make integer (to prevent glitchy behaviour), but", "anchorx vertices[:,1] = vertices[:,1] - anchory # apply angle if angle != 0.0:", "this class for each figure/context. 
\"\"\" def __init__(self): # load font data path", "MiniStyle( script, self.bold or other.bold, self.italic or other.italic ) def __repr__(self): tmp =", "clear glyphs glyphs = [] self._xglyph = Glyph(font, 'X', self._size) tt = self._text", "Leftarrow Uparrow Rightarrow Downarrow * leftceil rightceil leftfloor rightfloor * times cdot pm", "halign > 0: anchorx = w else: anchorx = w/2.0 # apply vertices[:,0]", "text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no edge self.edgeWidth = 0", "= tt[i] if escape: g = Glyph(font, c, self._size, styles) glyphs.append( g )", "= 0 self._halign = -1 self._valign = 0 self._charSpacing = 1 def _Invalidate(self):", "* hello_2 or hello_{there}, makes one or more charactes subscript. * hell\\io or", "the z position of the text. \"\"\" def fget(self): return self._z def fset(self,", "# infoSize, infoOrigin, infoWidth = ( # info.size_i, info.origin_i, info.width_i) if style.bold and", "def textAngle(): \"\"\"Get/Set the angle of the text in degrees. \"\"\" def fget(self):", "horizontal halign, valign = self._halign, self._valign if self._angle > 135 or self._angle <", "in 'license.txt'. \"\"\" Module textRender For rendering text in visvis. Defines a wibject", "\"\"\"Get/Set the spacing between characters. \"\"\" def fget(self): return self._charSpacing def fset(self, value):", "gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1],", "'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some math 'Re':0x211c, 'Im':0x2111, 'null':0x2300, 'infty':0x221e,", "or \\ after it. Special characters ------------------ Characters are available for the following", "cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle", "integer (to prevent glitchy behaviour), but not z! 
self._screenx = int(self._screenx+0.5) self._screeny =", "= ( vertices[:,0] * cos_angle - vertices[:,1] * sin_angle, vertices[:,0] * sin_angle +", "self.t2 = (y1) / tmp, (y2-1) / tmp # Define skew factor to", "infoSize[ac,1] * factor self.width = float(infoWidth[ac]) * factor # is spacing? smaller =", "The reason is that when using the texture one would # see artifacts", "Box.__init__(self, parent) BaseText.__init__(self, text, fontname) # no edge self.edgeWidth = 0 # init", "<reponame>chiluf/visvis.dev # -*- coding: utf-8 -*- # Copyright (C) 2012, <NAME> # #", "return self._angle def fset(self, value): if value != self._angle: self._angle = value self._vertices2", "= value self._Invalidate() # force recalculation self.Draw() return locals() @Property def textColor(): \"\"\"Get/Set", "'theta':0x03B8, 'iota':0x03B9, 'kappa':0x03BA, 'lambda':0x03BB, 'mu':0x03BC, 'nu':0x03BD, 'xi':0x03BE, 'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4,", "None: self._Compile() if self._vertices2 is None: self._PositionText() # get font instance from figure", "only one instance of this class for each figure/context. \"\"\" def __init__(self): #", "to handle italics correctly self.skewFactor = 0.0 if style.italic: self.skewFactor = 0.5 #", "* u0380 - u03ff greek * u2000 - u23ff symbols There are several", "!= self._angle: self._angle = value self._vertices2 = None # force recalculation self.Draw() return", "None # build arrays with vertices and coordinates x1, y1, z = 0,", "italic. * hell\\bo or hell\\b{ohoo}, makes one or more charactes bold. * hello\\_there,", "None # force recalculation self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set the spacing", "account. escape = False styles = [] style = None # Style to", "fget(self): return self._x def fset(self, value): self._x = value return locals() @PropWithDraw def", "vertices1 are corrected for angle and alignment. 
-> produces _vertices2 from _vertices1 (and", "'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7,", "z) # make sure the glyphs are created if self._vertices1 is None or", "anchory = h/2.0 # set anchor x if halign < 0: anchorx =", "= 0, 0, 0 @PropWithDraw def x(): \"\"\"Get/Set the x position of the", "a certain angle. Formatting ---------- Text can be formatted using the following constructs", "# get vertices if self._vertices1 is None: return vertices = self._vertices1.copy() # scale", "import TextureObject from visvis.core.base import Wobject from visvis.core.misc import Property, PropWithDraw from visvis.core.misc", "infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp, (x2-1) / tmp", "= script self.bold = bold self.italic = italic def __add__(self, other): # allow", "fget(self): return self._size def fset(self, value): if value != self._size: self._size = value", "its unicode value and use that instead. \"\"\" import OpenGL.GL as gl import", "= None @Property # Smart draw def text(): \"\"\"Get/Set the text to display.", ") style = None # build arrays with vertices and coordinates x1, y1,", "'sans' or 'serif'. If not given, the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self,", "set anchor if self._halign < 0: anchorx = x1 elif self._halign > 0:", "elif c=='{': # Append style to the list if style: styles.append(style) style =", "= y1 elif self._valign > 0: anchory = y2 else: anchory = y1", "self._charSpacing: self._charSpacing = value self._Invalidate() # force recalculation self.Draw() return locals() @Property def", "is None: fontname = visvis.settings.defaultFontName fontname = fontname.lower() if fontname not in ['mono',", "elif c=='}': # Remove style if styles: styles.pop() elif c=='^': style = MiniStyle(2)", "# force recalculation self.Draw() return locals() def _Compile(self): \"\"\" Create a series of", "\"\"\" def fget(self): return self._z def fset(self, value): self._z = value return locals()", "Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else: raise ValueError(\"Invalid font name.\") class Glyph(object):", "then corrected for angle and alignment in _PositionText(). -> Produces _vertices1 (and is", "the glyph. * dy represents the offset in y direction (for sub/super scripts)", "a wobject, so must be flipped on y axis vertices[:,0] = vertices[:,0] -", "how much space there should be before the next char * s1 s2", "-1, 0, 1 \"\"\" def fget(self): return self._valign def fset(self, value): if isinstance(value,", "are available for the following unicode sets: * u0020 - u003f numbers *", "fget(self): return self._charSpacing def fset(self, value): if value != self._charSpacing: self._charSpacing = value", "see artifacts from neighbouring characters. Additionally, it's now # possible to mix bold", "= fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare texCords = self._texCords#.copy() vertices =", "in which case the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, text='', fontname=None): #", "delta * epsilon zeta eta theta * iota kappa lambda mu * nu", "gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0, z=0, fontname=None) A", "styles=None) A glyph is a character. It is visualized by rendering the proper", "sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle - vertices[:,1] *", "to set the size) self.position = 10,10,100,16 # we need to know about", "recalculation self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set the font type by its", "value self._vertices2 = None # force recalculation self.Draw() return locals() @Property def valign():", "# enable texture font.Enable() # prepare texCords = self._texCords#.copy() vertices = self._vertices2#.copy() #", "def z(): \"\"\"Get/Set the z position of the text. \"\"\" def fget(self): return", "= int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\"", "value = int(value>0) - int(value<0) if value != self._valign: self._valign = value self._vertices2", "set skewing for position skew = self._size * g.skewFactor # append vertices vertices.append(x1+skew,", "-> produces _vertices2 from _vertices1 (and is called when the first is None)", "x if halign < 0: anchorx = 0 elif halign > 0: anchorx", "* g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy,", "the horizontal alignment. 
Specify as: * 'left', 'center', 'right' * -1, 0, 1", "'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8,", "c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt)", "'right' * -1, 0, 1 \"\"\" def fget(self): return self._halign def fset(self, value):", "z = 0, 0, 0 vertices = Pointset(3) texCords = Pointset(2) for g", "Glyph(font, char, size=12, styles=None) A glyph is a character. It is visualized by", "get font instance from figure fig = self.GetFigure() if not fig: return font", "Copyright (C) 2012, <NAME> # # Visvis is distributed under the terms of", "import numpy as np import visvis from visvis import ssdf from visvis.pypoints import", "'omicron':0x03BF, 'pi':0x03C0, 'rho':0x03C1, 'varsigma':0x03C2, 'sigma':0x03C3, 'tau':0x03C4, 'upsilon':0x03C5, 'phi':0x03C6, 'chi':0x03C7, 'psi':0x03C8, 'omega':0x03C9, # some", "z) # prepare for next glyph x1 = x1 + g.width + self._charSpacing", "= value return locals() @PropWithDraw def y(): \"\"\"Get/Set the y position of the", "( vertices[:,0] * cos_angle - vertices[:,1] * sin_angle, vertices[:,0] * sin_angle + vertices[:,1]", "self._vertices1 = vertices def _PositionText(self, event=None): \"\"\" The name is ment as a", "self._color = value self.Draw() return locals() @Property def halign(): \"\"\"Get/Set the horizontal alignment.", "(and is called when that is None) \"\"\" # make invalid first self._Invalidate()", "value): if value != self._text: self._text = value self._Invalidate() # force recalculation self.Draw()", "be mixed): * hello^2 or hello^{there}, makes one or more charactes superscript. 
*", "0: anchorx = w else: anchorx = w/2.0 # apply vertices[:,0] = vertices[:,0]", "x1 + g.width + self._charSpacing # store self._texCords = texCords self._vertices1 = vertices", "char that is no string tt = tt.replace('\\b', '\\x07') # build list of", "halign # set anchor y if valign < 0: anchory = 0 elif", "def OnDraw(self): # get screen position and store tmp = glu.gluProject(self._x, self._y, self._z)", "= self.width * smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False)", "there should be before the next char * s1 s2 t1 t2 represent", "locals() @Property def fontSize(): \"\"\"Get/Set the size of the text. \"\"\" def fget(self):", "next char * s1 s2 t1 t2 represent texture coordinates \"\"\" # the", "use that instead. \"\"\" import OpenGL.GL as gl import OpenGL.GLU as glu import", "\"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents the style of characters (sub/super script,", "one needs a character that is not in this list, one can always", "* dy represents the offset in y direction (for sub/super scripts) * width", "display in italic or bold? # Note: italic is now realized by printing", "characters. Additionally, it's now # possible to mix bold and italic text, and", "self._Invalidate() # force recalculation self.Draw() return locals() @Property # Smart draw def textAngle():", "# the coords in screen coordinates (raw) self._vertices2 = None # dito, but", "backslash (for example '\\infty'). People familiar with Latex know what they do: *", "= Glyph(font, c, self._size, styles) glyphs.append( g ) escape = False elif c=='{':", "# set skewing for position skew = self._size * g.skewFactor # append vertices", "(mathematical) characters that can be inserted using the backslash (for example '\\infty'). People", "we have that char? 
if ac not in info.charcodes:#ac < 32 or ac", "= vertices[:,0] + anchorx vertices[:,1] = vertices[:,1] + anchory # store self._vertices2 =", "if vertices is not None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay =", "0: script = self.script # done return MiniStyle( script, self.bold or other.bold, self.italic", "- anchory) elif isinstance(self, Label): angle = -self._angle vertices[:,0] = vertices[:,0] - anchorx", "must be flipped on y axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] =", "texCords.append(g.s1, g.t2) # set skewing for position skew = self._size * g.skewFactor #", "array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices): clr =", "# italic text # infoSize, infoOrigin, infoWidth = ( # info.size_i, info.origin_i, info.width_i)", "* oplus ominus otimes oslash Letters from the greek alfabet can be inserted", "is visualized by rendering the proper part from the texture stored in the", "f = self.GetFigure() if not f: return font = f._fontManager.GetFont(self._fontname) # clear glyphs", "dito, but corrected for angle and alignment # relative position of edges in", "if x or y or z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure", "- fontsize of the font in the data array def __init__(self, font, char,", "z position of the text. 
\"\"\" def fget(self): return self._z def fset(self, value):", "gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices): clr = self.textColor", "* factor self.sizey = infoSize[ac,1] * factor self.width = float(infoWidth[ac]) * factor #", "z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) # store coordinates self._x, self._y, self._z", "10,10,100,16 # we need to know about position changes to update alignment self.eventPosition.Bind(self._PositionText)", "the texture that contains all the characters. \"\"\" def __init__(self, info): TextureObject.__init__(self, 2)", "= gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S,", "self._texCords is None: self._Compile() if self._vertices2 is None: self._PositionText() # get font instance", "before the next char * s1 s2 t1 t2 represent texture coordinates \"\"\"", "character. It is visualized by rendering the proper part from the texture stored", "that instead. \"\"\" import OpenGL.GL as gl import OpenGL.GLU as glu import os", "font is returned, otherwise it is created and stored for reuse. \"\"\" if", "vertices = self._vertices1.copy() # scale text according to global text size property vertices", "angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0) sin_angle = np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = (", "vertices[:,1] = ( vertices[:,0] * cos_angle - vertices[:,1] * sin_angle, vertices[:,0] * sin_angle", "the text. \"\"\" def fget(self): return self._x def fset(self, value): self._x = value", "text. 
\"\"\" def fget(self): return self._x def fset(self, value): self._x = value return", "path = getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts =", "of the text. \"\"\" def fget(self): return self._size def fset(self, value): if value", "'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3,", "an int or string.') value = int(value>0) - int(value<0) if value != self._halign:", "invalid first self._Invalidate() # get font instance from figure f = self.GetFigure() if", "@PropWithDraw def z(): \"\"\"Get/Set the z position of the text. \"\"\" def fget(self):", "value != self._angle: self._angle = value self._vertices2 = None # force recalculation self.Draw()", "style.italic: self.skewFactor = 0.5 # calculate width on screen, given the size factor", "# do we have that char? if ac not in info.charcodes:#ac < 32", "fontname.lower() if fontname not in ['mono', 'sans', 'serif']: raise ValueError('Invalid font name.') #", "style = MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and", "is that when using the texture one would # see artifacts from neighbouring", "self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText): \"\"\" Label(parent, text='', fontname=None) A wibject (inherits", "value self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set the", "y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1) / tmp, (y2-1)", "are corrected for angle and alignment. -> produces _vertices2 from _vertices1 (and is", "class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents the style of characters", "text. 
\"\"\" def fget(self): return self._y def fset(self, value): self._y = value return", "other.bold, self.italic or other.italic ) def __repr__(self): tmp = self.script, self.bold, self.italic return", "fonts. There should be only one instance of this class for each figure/context.", "# for internal use self._screenx, self._screeny, self._screenz = 0, 0, 0 @PropWithDraw def", "/ float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey = infoSize[ac,1] * factor self.width", "int(value>0) - int(value<0) if value != self._valign: self._valign = value self._vertices2 = None", "text. \"\"\" def fget(self): return self._size def fset(self, value): if value != self._size:", "letter is inserted): * alpha beta gamma delta * epsilon zeta eta theta", "next time it is drawn. \"\"\" self._texCords = None self._vertices1 = None self._vertices2", "text, fontname) # store coordinates self._x, self._y, self._z = x, y, z #", "position in the scene. The fontname can be 'mono', 'sans' or 'serif'. 
If", "return self._size def fset(self, value): if value != self._size: self._size = value self._Invalidate()", "raise ValueError('Invalid font name.') # more properties self._size = 9 self._fontname = fontname", "* leq geq approx approxeq ne in * leftarrow uparrow rightarrow downarrow *", "for g in glyphs: x2 = x1 + g.sizex y2 = g.sizey #y2", "fontname is None: fontname = visvis.settings.defaultFontName fontname = fontname.lower() if fontname not in", "some char that is no string tt = tt.replace('\\b', '\\x07') # build list", "'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E, 'Omicron':0x039F, 'Pi':0x03A0, 'Rho':0x03A1, 'Sigma':0x03A3, 'Tau':0x03A4,", "None self._vertices1 = None self._vertices2 = None @Property # Smart draw def text():", "transform other chars tt = tt.replace(r'\\\\', '\\t') # double backslashes do not escape", "---------- Text can be formatted using the following constructs (which can be mixed):", "rather using the # italic glyphs. The reason is that when using the", "< 0: anchory = 0 elif valign > 0: anchory = h else:", "that font was created earlier, that font is returned, otherwise it is created", "g.skewFactor # append vertices vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z)", "a font instance. 
If that font was created earlier, that font is returned,", "style = None # Style to set for i in range(len(tt)): c =", "self._Invalidate() # get font instance from figure f = self.GetFigure() if not f:", "bold:%i, italic:%i>' % tmp class BaseText(object): \"\"\" BaseText(text='', fontname=None) Base object for the", "Pointset # from visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject from visvis.core.misc import", "leq geq approx approxeq ne in * leftarrow uparrow rightarrow downarrow * Leftarrow", "g.t2) texCords.append(g.s1, g.t2) # set skewing for position skew = self._size * g.skewFactor", "def __init__(self, parent, text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname)", "value != self._valign: self._valign = value self._vertices2 = None # force recalculation self.Draw()", "__init__(self, font, char, size=12, styles=None): # unwind the style for this glyph self.style", "h/2.0 # set anchor x if halign < 0: anchorx = 0 elif", "italic text, and one can make any supported # unicode character italic. #", "always look up its unicode value and use that instead. \"\"\" import OpenGL.GL", "return font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable() # prepare texCords = self._texCords#.copy()", "of text oriented at a certain angle. Formatting ---------- Text can be formatted", "MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']: escape = True else:", "are then corrected for angle and alignment in _PositionText(). -> Produces _vertices1 (and", "but not z! 
self._screenx = int(self._screenx+0.5) self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx,", "np.sin(angle*np.pi/180.0) vertices[:,0], vertices[:,1] = ( vertices[:,0] * cos_angle - vertices[:,1] * sin_angle, vertices[:,0]", "# get font instance from figure f = self.GetFigure() if not f: return", "A wibject (inherits from box) with text inside. The fontname can be 'mono',", "subscript. * hell\\io or hell\\i{ohoo}, makes one or more charactes italic. * hell\\bo", "if style.bold and ac in info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth =", "the texture one would # see artifacts from neighbouring characters. Additionally, it's now", "In case one needs a character that is not in this list, one", "= 10,10,100,16 # we need to know about position changes to update alignment", "'X', self._size) tt = self._text # transform greek characters that were given without", "rendering text in visvis. Defines a wibject and a wobject: Label and Text,", "value): self._z = value return locals() def OnDraw(self): # get screen position and", "char? if ac not in info.charcodes:#ac < 32 or ac > 255: print", "italic def __add__(self, other): # allow None if other is None: return self", "= vertices[:,1] + anchory # store self._vertices2 = vertices # calculate edges (used", "y, z # for internal use self._screenx, self._screeny, self._screenz = 0, 0, 0", "vertices[:,1] * sin_angle, vertices[:,0] * sin_angle + vertices[:,1] * cos_angle) # Move anchor", "recalculation self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set the spacing between characters. 
\"\"\"", "y1 = infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2", "int): pass elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if not value in", "= 0, self._xglyph.sizey # set anchor if self._halign < 0: anchorx = x1", "normal script if style.script == 1: # sub script self.dy = (1-smaller) *", "hell\\io or hell\\i{ohoo}, makes one or more charactes italic. * hell\\bo or hell\\b{ohoo},", "of size's # - fontsize of the font in the data array def", "proper part from the texture stored in the Font object. * sizex and", "if self.textColor and len(vertices): clr = self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices))", "glyphs. The reason is that when using the texture one would # see", "Text wobject and Label wibject. fontname may be 'mono', 'sans', 'serif' or None,", "OpenGL.GL as gl import OpenGL.GLU as glu import os import numpy as np", "(raw) self._vertices2 = None # dito, but corrected for angle and alignment #", "= value return locals() @PropWithDraw def z(): \"\"\"Get/Set the z position of the", "# clear glyphs glyphs = [] self._xglyph = Glyph(font, 'X', self._size) tt =", "not f: return font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = [] self._xglyph", "\"\"\"Get/Set the color of the text. \"\"\" def fget(self): return self._color def fset(self,", "on y axis vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = -(vertices[:,1] - anchory)", "space # default infoSize, infoOrigin, infoWidth = info.size, info.origin, info.width # should and", "There should be only one instance of this class for each figure/context. \"\"\"", "in the scene. The fontname can be 'mono', 'sans' or 'serif'. If not", "self._y = value return locals() @PropWithDraw def z(): \"\"\"Get/Set the z position of", "tuple(tmp) # make integer (to prevent glitchy behaviour), but not z! 
self._screenx =", "one would # see artifacts from neighbouring characters. Additionally, it's now # possible", "_^ or \\ after it. Special characters ------------------ Characters are available for the", "Specify as: * 'left', 'center', 'right' * -1, 0, 1 \"\"\" def fget(self):", "return vertices = self._vertices1.copy() # scale text according to global text size property", "= tt.replace('\\i', '\\x06') # just use some char that is no string tt", "locals() @Property def textColor(): \"\"\"Get/Set the color of the text. \"\"\" def fget(self):", "# set script script = other.script if script == 0: script = self.script", "float(info.fontsize) self.sizex = infoSize[ac,0] * factor self.sizey = infoSize[ac,1] * factor self.width =", "PropWithDraw from visvis.core.misc import getResourceDir, getColor # from visvis.core.cameras import depthToZ from visvis.core.baseWibjects", "texture can be calculated. Also the relative vertices are calculated, which are then", "tmp return tmp else: raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font, char,", "= h else: anchory = h/2.0 # set anchor x if halign <", "tt[i+1] in ['_^\\x06\\x07']: escape = True else: # create glyph (with new style", "value != self._charSpacing: self._charSpacing = value self._Invalidate() # force recalculation self.Draw() return locals()", "s2 t1 t2 represent texture coordinates \"\"\" # the font.info contains # -", "'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298, } # sort the", "x position of the text. 
\"\"\" def fget(self): return self._x def fset(self, value):", "vertices.append(x1+skew, y1+dy, z) vertices.append(x2+skew, y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) #", "z: gl.glPopMatrix() font.Disable() gl.glDisableClientState(gl.GL_VERTEX_ARRAY) gl.glDisableClientState(gl.GL_TEXTURE_COORD_ARRAY) class Text(Wobject, BaseText): \"\"\" Text(parent, text='', x=0, y=0,", "elif isinstance(self, Label): angle = -self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] =", "recalculation self.Draw() return locals() @Property # Smart draw def textAngle(): \"\"\"Get/Set the angle", "fontname=None) A wibject (inherits from box) with text inside. The fontname can be", "= x1 + g.width + self._charSpacing # store self._texCords = texCords self._vertices1 =", "Font object. * sizex and sizey represent the size of the glyph. *", "tmp[value.lower()] else: raise ValueError('halign must be an int or string.') value = int(value>0)", "is None: return vertices = self._vertices1.copy() # scale text according to global text", "an int or string.') value = int(value>0) - int(value<0) if value != self._valign:", "string.') value = int(value>0) - int(value<0) if value != self._halign: self._halign = value", "calculated. Also the relative vertices are calculated, which are then corrected for angle", "y1+dy, z) vertices.append(x2, y2+dy, z) vertices.append(x1, y2+dy, z) # prepare for next glyph", "self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set the font", "smaller#- self.sizex * (1.0-smaller) class MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents", "in tmp: raise ValueError('Invalid value for valign.') value = tmp[value.lower()] else: raise ValueError('valign", "\"\"\" def __init__(self, text='', fontname=None): # init drawing data self._texCords = None #", "position of the text. 
\"\"\" def fget(self): return self._z def fset(self, value): self._z", "# no edge self.edgeWidth = 0 # init position (this is to set", "getResourceDir() self.s = ssdf.load(os.path.join(path, 'fonts.ssdf')) # list of fonts self.fonts = {} def", "gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, 2, shape[1],shape[0], 0, # gl.GL_ALPHA, gl.GL_UNSIGNED_BYTE, data) gl.GL_LUMINANCE_ALPHA, gl.GL_UNSIGNED_BYTE, data2) tmp1", "as: * 'up', 'center', 'down' * 'top', 'center', 'bottom' * -1, 0, 1", "for next glyph x1 = x1 + g.width + self._charSpacing # store self._texCords", "elif valign > 0: anchory = h else: anchory = h/2.0 # set", "def fset(self, value): self._z = value return locals() def OnDraw(self): # get screen", "y2 = y1 + infoSize[ac,1] tmp = float(info.data.shape[0]) self.t1, self.t2 = (y1) /", "= vertices[:,1] - anchory # apply angle if angle != 0.0: cos_angle =", "import OpenGL.GLU as glu import os import numpy as np import visvis from", "was created earlier, that font is returned, otherwise it is created and stored", "value): if isinstance(value, int): pass elif isinstance(value, basestring): value = value.lower() tmp={'up':-1,'top':-1,'center':0,'centre':0,'down':1,'bottom':1} if", "info.width_i) if style.bold and ac in info.charcodes_b: # bold text infoSize, infoOrigin, infoWidth", "< 0: anchory = y1 elif self._valign > 0: anchory = y2 else:", "None if other is None: return self # set script script = other.script", "MiniStyle: \"\"\" MiniStyle(script=0, bold=False, italic=False) Class that represents the style of characters (sub/super", "oplus ominus otimes oslash Letters from the greek alfabet can be inserted in", "ominus otimes oslash Letters from the greek alfabet can be inserted in the", "fset(self, value): if value != self._size: self._size = value self._Invalidate() # force recalculation", "edges in pixels. 
(taking angle into account) self._deltax = 0,0 self._deltay = 0,0", "coordinates x1 = infoOrigin[ac,0] x2 = x1 + infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1,", "of charcodes # - an array of origin 's # - an array", "vertices are calculated, which are then corrected for angle and alignment in _PositionText().", "- u003f numbers * u0040 - u00bf alphabet * u00c0 - u037f latin", "return locals() @Property def textSpacing(): \"\"\"Get/Set the spacing between characters. \"\"\" def fget(self):", "fontname): \"\"\" GetFont(fontname) Get a font instance. If that font was created earlier,", "script if style.script == 1: # sub script self.dy = (1-smaller) * self.sizey", "tt = tt.replace(r'\\\\', '\\t') # double backslashes do not escape for c in", "object holds the texture that contains all the characters. \"\"\" def __init__(self, info):", "isinstance(self, Text): # Text is a wobject, so must be flipped on y", "skew factor to handle italics correctly self.skewFactor = 0.0 if style.italic: self.skewFactor =", "vertical or horizontal halign, valign = self._halign, self._valign if self._angle > 135 or", "\"\"\" def fget(self): return self._size def fset(self, value): if value != self._size: self._size", "self._y, self._z = x, y, z # for internal use self._screenx, self._screeny, self._screenz", "mix bold and italic text, and one can make any supported # unicode", "0, 1 \"\"\" def fget(self): return self._valign def fset(self, value): if isinstance(value, int):", "style = MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False)", "is not None and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max()", "is created and stored for reuse. \"\"\" if fontname in self.fonts: return self.fonts[fontname]", "\"\"\" # get figure fig = self.GetFigure() # get vertices if self._vertices1 is", "an int or character.') # do we have that char? 
if ac not", "tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get italic and bold modifiers tt", "font = f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = [] self._xglyph = Glyph(font, 'X',", "= self.GetFigure() # get vertices if self._vertices1 is None: return vertices = self._vertices1.copy()", "visvis.core.cameras import depthToZ from visvis.core.baseWibjects import Box escapes = { # upper case", "z: gl.glPushMatrix() gl.glTranslatef(x, y, z) # make sure the glyphs are created if", "characters ------------------ Characters are available for the following unicode sets: * u0020 -", "it is drawn. \"\"\" self._texCords = None self._vertices1 = None self._vertices2 = None", "self._size def fset(self, value): if value != self._size: self._size = value self._Invalidate() #", "else: raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12, styles=None) A", "called when the first is None) \"\"\" # get figure fig = self.GetFigure()", "array def __init__(self, font, char, size=12, styles=None): # unwind the style for this", "2:'super'} \"\"\" def __init__(self, script=0, bold=False, italic=False): self.script = script self.bold = bold", "g in glyphs: x2 = x1 + g.sizex y2 = g.sizey #y2 =", "edges (used by for example the AxisLabel class) if vertices is not None", "'center', 'right' * -1, 0, 1 \"\"\" def fget(self): return self._halign def fset(self,", "spacing between characters. \"\"\" def fget(self): return self._charSpacing def fset(self, value): if value", "is not in this list, one can always look up its unicode value", "t1 t2 represent texture coordinates \"\"\" # the font.info contains # - a", "a single line of text oriented at a certain angle. 
Formatting ---------- Text", "position changes to update alignment self.eventPosition.Bind(self._PositionText) def OnDraw(self): # Draw the box Box.OnDraw(self)", "self.style = MiniStyle() if styles: for style in styles: self.style += style style", "texture array gl.glEnableClientState(gl.GL_VERTEX_ARRAY) gl.glEnableClientState(gl.GL_TEXTURE_COORD_ARRAY) gl.glVertexPointerf(vertices.data) gl.glTexCoordPointerf(texCords.data) # draw if self.textColor and len(vertices): clr", "= value self._Invalidate() # force recalculation self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set", "'\\t') # double backslashes do not escape for c in escapesKeys: tt =", "= f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = [] self._xglyph = Glyph(font, 'X', self._size)", "get vertices if self._vertices1 is None: return vertices = self._vertices1.copy() # scale text", "# from visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject from visvis.core.misc import Property,", "can be formatted using the following constructs (which can be mixed): * hello^2", "{} def GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font instance. If that font", "self._halign < 0: anchorx = x1 elif self._halign > 0: anchorx = x2", "BaseText.__init__(self, text, fontname) # store coordinates self._x, self._y, self._z = x, y, z", "import Pointset # from visvis.core.baseTexture import TextureObject from visvis.core.base import Wobject from visvis.core.misc", "info # set data self.SetData(self.info.data) def _UploadTexture(self, data, *args): \"\"\" Overload to make", "scripting into account. 
escape = False styles = [] style = None #", "self.textColor gl.glColor(clr[0], clr[1], clr[2]) gl.glDrawArrays(gl.GL_QUADS, 0, len(vertices)) gl.glFlush() # disable texture and clean", "fig._relativeFontSize # obtain dimensions if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max() else: x1,", "= int(value>0) - int(value<0) if value != self._valign: self._valign = value self._vertices2 =", "0, 0, 0 vertices = Pointset(3) texCords = Pointset(2) for g in glyphs:", "that longer names are replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class", "or other.bold, self.italic or other.italic ) def __repr__(self): tmp = self.script, self.bold, self.italic", "width specifies how much space there should be before the next char *", "anchory = 0 elif valign > 0: anchory = h else: anchory =", "= -self._angle vertices[:,0] = vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] - anchory #", "return locals() @PropWithDraw def z(): \"\"\"Get/Set the z position of the text. 
\"\"\"", "replaced first escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A", "script = self.script # done return MiniStyle( script, self.bold or other.bold, self.italic or", "created if self._vertices1 is None or self._texCords is None: self._Compile() if self._vertices2 is", "# store coordinates self._x, self._y, self._z = x, y, z # for internal", "fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) # store coordinates self._x, self._y, self._z =", "text='', x=0, y=0, z=0, fontname=None): Wobject.__init__(self, parent) BaseText.__init__(self, text, fontname) # store coordinates", "style to the list if style: styles.append(style) style = None elif c=='}': #", "= g.sizey #y2 = y1 - g.sizey dy = g.dy # append texture", "= vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0, y=0, z=0): # Translate if x or", "= MiniStyle(1) elif c=='\\x06': style = MiniStyle(0,False,True) elif c=='\\x07': style = MiniStyle(0,True,False) elif", "fget(self): return self._angle def fset(self, value): if value != self._angle: self._angle = value", "tmp: raise ValueError('Invalid value for halign.') value = tmp[value.lower()] else: raise ValueError('halign must", "= self.GetFigure() if not fig: return font = fig._fontManager.GetFont(self._fontname) # enable texture font.Enable()", "and a wobject: Label and Text, which are both able to produce a", "= glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz = tuple(tmp) # make integer (to", "\"\"\" # make invalid first self._Invalidate() # get font instance from figure f", "Formatting ---------- Text can be formatted using the following constructs (which can be", "of characters (sub/super script, bold, and italic. Used when compiling the text. script", "relative position of edges in pixels. 
(taking angle into account) self._deltax = 0,0", "vertices[:,0] - anchorx vertices[:,1] = vertices[:,1] - anchory # apply angle if angle", "unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt = tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars", "zeta eta theta * iota kappa lambda mu * nu xi omicron pi", "from the given text. From these Glyphs the textureCords in the font texture", "the text. \"\"\" def fget(self): return self._z def fset(self, value): self._z = value", "\"\"\" Module textRender For rendering text in visvis. Defines a wibject and a", "'Sigma':0x03A3, 'Tau':0x03A4, 'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2,", "return tmp else: raise ValueError(\"Invalid font name.\") class Glyph(object): \"\"\" Glyph(font, char, size=12,", "style in styles: self.style += style style = self.style # store font self.font", "text, fontname) # no edge self.edgeWidth = 0 # init position (this is", "Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2), dtype=np.uint8) data2[:,:,0] = 255 data2[:,:,1] = data", "elif c=='\\x07': style = MiniStyle(0,True,False) elif c=='\\\\' and i+1<len(tt) and tt[i+1] in ['_^\\x06\\x07']:", "tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp, (x2-1) / tmp y1", "text according to global text size property vertices *= fig._relativeFontSize # obtain dimensions", "tmp = self.script, self.bold, self.italic return '<MiniStyle script:%i, bold:%i, italic:%i>' % tmp class", "the keys, such that longer names are replaced first escapesKeys = escapes.keys() escapesKeys.sort(", "0 # init position (this is to set the size) self.position = 10,10,100,16", "info.origin_b, info.width_b) # Find position in texture, normalized to texture coordinates x1 =", "found in 'license.txt'. \"\"\" Module textRender For rendering text in visvis. 
Defines a", "0 self._halign = -1 self._valign = 0 self._charSpacing = 1 def _Invalidate(self): \"\"\"", "# Smart draw def text(): \"\"\"Get/Set the text to display. \"\"\" def fget(self):", "= 0.5 # calculate width on screen, given the size factor = size", "store font self.font = font info = self.font.info # get asci code and", "gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S, gl.GL_CLAMP) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T, gl.GL_CLAMP)", "way (By starting the name with an uppercase letter, the corresponding upper case", "rightfloor * times cdot pm * oplus ominus otimes oslash Letters from the", "as glu import os import numpy as np import visvis from visvis import", "s1 s2 t1 t2 represent texture coordinates \"\"\" # the font.info contains #", "< 32 or ac > 255: print \"Warning: Cannot draw character %i! \"", "italic text # infoSize, infoOrigin, infoWidth = ( # info.size_i, info.origin_i, info.width_i) if", "skewed rather using the # italic glyphs. The reason is that when using", "# the font.info contains # - a string of charcodes # - an", "Font object holds the texture that contains all the characters. \"\"\" def __init__(self,", "screen position and store tmp = glu.gluProject(self._x, self._y, self._z) self._screenx, self._screeny, self._screenz =", "# just use some char that is no string tt = tt.replace('\\b', '\\x07')", "def __init__(self, script=0, bold=False, italic=False): self.script = script self.bold = bold self.italic =", "font type by its name. \"\"\" def fget(self): return self._fontname def fset(self, value):", "= x2 else: anchorx = x1 + (x2-x1)/2.0 # if self._valign < 0:", "def _PositionText(self, event=None): \"\"\" The name is ment as a verb. 
The vertices1", "tmp = Font(self.s[fontname]) self.fonts[fontname] = tmp return tmp else: raise ValueError(\"Invalid font name.\")", "vertices[:,1] - anchory # apply angle if angle != 0.0: cos_angle = np.cos(angle*np.pi/180.0)", "None @Property # Smart draw def text(): \"\"\"Get/Set the text to display. \"\"\"", "\"\"\"Get/Set the angle of the text in degrees. \"\"\" def fget(self): return self._angle", "/ tmp, (x2-1) / tmp y1 = infoOrigin[ac,1] y2 = y1 + infoSize[ac,1]", "pm * oplus ominus otimes oslash Letters from the greek alfabet can be", "Get a font instance. If that font was created earlier, that font is", "self._angle > 135 or self._angle < -135: halign, valign = -halign, valign elif", "= x1 elif self._halign > 0: anchorx = x2 else: anchorx = x1", "* u0020 - u003f numbers * u0040 - u00bf alphabet * u00c0 -", "'up', 'center', 'down' * 'top', 'center', 'bottom' * -1, 0, 1 \"\"\" def", "or subscript self.skewFactor *= smaller self.sizex = self.sizex * smaller self.sizey = self.sizey", "the Font object. * sizex and sizey represent the size of the glyph.", "iiint forall * leq geq approx approxeq ne in * leftarrow uparrow rightarrow", "and len(vertices): self._deltax = vertices[:,0].min(), vertices[:,0].max() self._deltay = vertices[:,1].min(), vertices[:,1].max() def _DrawText(self, x=0,", "vv.settings.defaultFontName is used. \"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self, text,", "'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396, 'Eta':0x0397, 'Theta':0x0398, 'Iota':0x0399, 'Kappa':0x039A, 'Lambda':0x039B, 'Mu':0x039C, 'Nu':0x039D, 'Xi':0x039E,", "0 elif valign > 0: anchory = h else: anchory = h/2.0 #", "be found in 'license.txt'. \"\"\" Module textRender For rendering text in visvis. 
Defines", "valign = valign, halign # set anchor y if valign < 0: anchory", "# force recalculation self.Draw() return locals() @Property def textSpacing(): \"\"\"Get/Set the spacing between", "is a wobject, so must be flipped on y axis vertices[:,0] = vertices[:,0]", "gl.glFlush() # disable texture and clean up if x or y or z:", "tmp1 = gl.GL_LINEAR tmp2 = gl.GL_LINEAR gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER, tmp1) gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER, tmp2) gl.glTexParameteri(gl.GL_TEXTURE_2D,", "# force recalculation self.Draw() return locals() @Property def fontName(): \"\"\"Get/Set the font type", "hello^{there}, makes one or more charactes superscript. * hello_2 or hello_{there}, makes one", "represents the style of characters (sub/super script, bold, and italic. Used when compiling", "corresponding upper case greek letter is inserted): * alpha beta gamma delta *", "which case the vv.settings.defaultFontName is used. \"\"\" def __init__(self, text='', fontname=None): # init", "= Pointset(3) texCords = Pointset(2) for g in glyphs: x2 = x1 +", "+ g.width + self._charSpacing # store self._texCords = texCords self._vertices1 = vertices def", "halign, valign = self._halign, self._valign if self._angle > 135 or self._angle < -135:", "= {'left':-1,'center':0,'centre':0,'right':1 } if not value in tmp: raise ValueError('Invalid value for halign.')", "in ['mono', 'sans', 'serif']: raise ValueError('Invalid font name.') # more properties self._size =", "value self._vertices2 = None # force recalculation self.Draw() return locals() @Property def textSpacing():", "-*- # Copyright (C) 2012, <NAME> # # Visvis is distributed under the", "tt.replace('\\theta', unichr(escapes['theta'])) # transform other chars tt = tt.replace(r'\\\\', '\\t') # double backslashes", "the vv.settings.defaultFontName is used. 
\"\"\" def __init__(self, parent, text='', fontname=None): Box.__init__(self, parent) BaseText.__init__(self,", "return locals() @Property # Smart draw def textAngle(): \"\"\"Get/Set the angle of the", "if value != self._color: self._color = value self.Draw() return locals() @Property def halign():", "It is visualized by rendering the proper part from the texture stored in", "several escape sequences for (mathematical) characters that can be inserted using the backslash", "keeping the _^ or \\ after it. Special characters ------------------ Characters are available", "tmp, (x2-1) / tmp y1 = infoOrigin[ac,1] y2 = y1 + infoSize[ac,1] tmp", "if fontname is None: fontname = visvis.settings.defaultFontName fontname = fontname.lower() if fontname not", "f._fontManager.GetFont(self._fontname) # clear glyphs glyphs = [] self._xglyph = Glyph(font, 'X', self._size) tt", "'iiint':0x222d, 'forall':0x2200, 'leq':0x22dc, 'geq':0x22dd, 'approx':0x2248, 'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219,", "self._screeny = int(self._screeny+0.5) def OnDrawScreen(self): self._DrawText( self._screenx, self._screeny, depthToZ(self._screenz) ) class Label(Box, BaseText):", "*args): \"\"\" Overload to make it an alpha map. \"\"\" # Add lumincance", "'approxeq':0x2243, 'ne':0x2260, 'in':0x22f9, 'leftarrow':0x2190,'uparrow':0x2191,'rightarrow':0x2192,'downarrow':0x2193, 'Leftarrow':0x21D0,'Uparrow':0x21D1,'Rightarrow':0x21D2,'Downarrow':0x21D3, 'leftceil':0x2308,'rightceil':0x2309,'leftfloor':0x230A,'rightfloor':0x230B, 'times':0x2217, 'cdot':0x2219, 'pm':0x00b1, 'oplus':0x2295, 'ominus':0x2296, 'otimes':0x2297, 'oslash':0x2298,", "int or character.') # do we have that char? 
if ac not in", "'Upsilon':0x03A5, 'Phi':0x03A6, 'Chi':0x03A7, 'Psi':0x03A8, 'Omega':0x03A9, # lower case greek 'alpha':0x03B1, 'beta':0x03B2, 'gamma':0x03B3, 'delta':0x03B4,", "- int(value<0) if value != self._valign: self._valign = value self._vertices2 = None #", "escapesKeys = escapes.keys() escapesKeys.sort( lambda x,y:len(y)-len(x)) class Font(TextureObject): \"\"\" Font(info) A Font object", "ac > 255: print \"Warning: Cannot draw character %i! \" % ord(char) ac", "vertices *= fig._relativeFontSize # obtain dimensions if len(vertices): x1, x2 = vertices[:,0].min(), vertices[:,0].max()", "A Font object holds the texture that contains all the characters. \"\"\" def", "make it an alpha map. \"\"\" # Add lumincance channel data2 = np.zeros((data.shape[0],data.shape[1],2),", "anchorx = w else: anchorx = w/2.0 # apply vertices[:,0] = vertices[:,0] +", "GetFont(self, fontname): \"\"\" GetFont(fontname) Get a font instance. If that font was created", "bold, and italic. Used when compiling the text. script = {0:'normal', 1:'sub', 2:'super'}", "def _Compile(self): \"\"\" Create a series of glyphs from the given text. From", "-1 self._valign = 0 self._charSpacing = 1 def _Invalidate(self): \"\"\" Invalidate this object,", "the text. \"\"\" def fget(self): return self._color def fset(self, value): value = getColor(value,'setting", "fset(self, value): self._y = value return locals() @PropWithDraw def z(): \"\"\"Get/Set the z", "( info.size_b, info.origin_b, info.width_b) # Find position in texture, normalized to texture coordinates", "the following unicode sets: * u0020 - u003f numbers * u0040 - u00bf", "in this list, one can always look up its unicode value and use", "'\\infty'). 
People familiar with Latex know what they do: * Re Im null", "= bold self.italic = italic def __add__(self, other): # allow None if other", "position (this is to set the size) self.position = 10,10,100,16 # we need", "tt = tt.replace('\\alpha', unichr(escapes['alpha'])) tt = tt.replace('\\beta', unichr(escapes['beta'])) tt = tt.replace('\\rho', unichr(escapes['rho'])) tt", "None or self._texCords is None: self._Compile() if self._vertices2 is None: self._PositionText() # get", "escapes = { # upper case greek 'Alpha':0x0391, 'Beta':0x0392, 'Gamma':0x0393, 'Delta':0x0394, 'Epsilon':0x0395, 'Zeta':0x0396,", "may be 'mono', 'sans', 'serif' or None, in which case the vv.settings.defaultFontName is", "font instance from figure fig = self.GetFigure() if not fig: return font =", "such that the text is recompiled the next time it is drawn. \"\"\"", "+ infoSize[ac,0] tmp = float(info.data.shape[1]) self.s1, self.s2 = (x1) / tmp, (x2-1) /", "For rendering text in visvis. Defines a wibject and a wobject: Label and", "escapesKeys: tt = tt.replace('\\\\'+c, unichr(escapes[c])) tt = tt.replace('\\t', r'\\\\') # get italic and" ]
[ "'profile', ['profile_id'], ['id']) # ### end Alembic commands ### def downgrade(): # ###", "commands auto generated by Alembic - please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task',", "generated by Alembic - please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id') #", "- please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id') # ### end Alembic", "### commands auto generated by Alembic - please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey')", "downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_constraint(None,", "alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic.", "sa # revision identifiers, used by Alembic. revision = '<KEY>' down_revision = 'c2889ee965a5'", "59564f63b0ae Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import op import", "down_revision = 'c2889ee965a5' branch_labels = None depends_on = None def upgrade(): # ###", "\"\"\"empty message Revision ID: 59564f63b0ae Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from", "23:03:10.493772 \"\"\" from alembic import op import sqlalchemy as sa # revision identifiers,", "used by Alembic. revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels = None depends_on", "ID: 59564f63b0ae Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import op", "op import sqlalchemy as sa # revision identifiers, used by Alembic. revision =", "def downgrade(): # ### commands auto generated by Alembic - please adjust! ###", "Revision ID: 59564f63b0ae Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import", "as sa # revision identifiers, used by Alembic. 
revision = '<KEY>' down_revision =", "op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ### end Alembic", "# ### commands auto generated by Alembic - please adjust! ### op.add_column('task', sa.Column('profile_id',", "import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision", "adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id') # ### end Alembic commands ###", "### def downgrade(): # ### commands auto generated by Alembic - please adjust!", "None depends_on = None def upgrade(): # ### commands auto generated by Alembic", "revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels = None depends_on = None def", "please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) #", "'<KEY>' down_revision = 'c2889ee965a5' branch_labels = None depends_on = None def upgrade(): #", "by Alembic - please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id') # ###", "['profile_id'], ['id']) # ### end Alembic commands ### def downgrade(): # ### commands", "= '<KEY>' down_revision = 'c2889ee965a5' branch_labels = None depends_on = None def upgrade():", "by Alembic. revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels = None depends_on =", "Alembic commands ### def downgrade(): # ### commands auto generated by Alembic -", "branch_labels = None depends_on = None def upgrade(): # ### commands auto generated", "\"\"\" from alembic import op import sqlalchemy as sa # revision identifiers, used", "nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ### end Alembic commands ### def", "end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic", "Alembic - please adjust! 
### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id') # ### end", "adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ###", "<filename>migrations/versions/59564f63b0ae_.py \"\"\"empty message Revision ID: 59564f63b0ae Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\"", "def upgrade(): # ### commands auto generated by Alembic - please adjust! ###", "commands ### def downgrade(): # ### commands auto generated by Alembic - please", "Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import op import sqlalchemy", "### commands auto generated by Alembic - please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(),", "Alembic. revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels = None depends_on = None", "= None depends_on = None def upgrade(): # ### commands auto generated by", "# ### commands auto generated by Alembic - please adjust! ### op.drop_constraint(None, 'task',", "import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '<KEY>'", "sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ### end Alembic commands ###", "# revision identifiers, used by Alembic. revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels", "= None def upgrade(): # ### commands auto generated by Alembic - please", "by Alembic - please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile',", "Alembic - please adjust! 
### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'],", "c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import op import sqlalchemy as", "commands auto generated by Alembic - please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True))", "'c2889ee965a5' branch_labels = None depends_on = None def upgrade(): # ### commands auto", "['id']) # ### end Alembic commands ### def downgrade(): # ### commands auto", "### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ### end", "None def upgrade(): # ### commands auto generated by Alembic - please adjust!", "depends_on = None def upgrade(): # ### commands auto generated by Alembic -", "generated by Alembic - please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task',", "please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id') # ### end Alembic commands", "sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ### end Alembic commands", "'task', 'profile', ['profile_id'], ['id']) # ### end Alembic commands ### def downgrade(): #", "auto generated by Alembic - please adjust! ### op.drop_constraint(None, 'task', type_='foreignkey') op.drop_column('task', 'profile_id')", "# ### end Alembic commands ### def downgrade(): # ### commands auto generated", "upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('task',", "identifiers, used by Alembic. revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels = None", "revision identifiers, used by Alembic. 
revision = '<KEY>' down_revision = 'c2889ee965a5' branch_labels =", "auto generated by Alembic - please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None,", "from alembic import op import sqlalchemy as sa # revision identifiers, used by", "2020-06-10 23:03:10.493772 \"\"\" from alembic import op import sqlalchemy as sa # revision", "Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import op import sqlalchemy as sa", "op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id']) # ### end Alembic commands ### def downgrade():", "### end Alembic commands ### def downgrade(): # ### commands auto generated by", "Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic import op import sqlalchemy as sa #", "message Revision ID: 59564f63b0ae Revises: c2<PASSWORD>ee965a5 Create Date: 2020-06-10 23:03:10.493772 \"\"\" from alembic", "- please adjust! ### op.add_column('task', sa.Column('profile_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'task', 'profile', ['profile_id'], ['id'])", "sqlalchemy as sa # revision identifiers, used by Alembic. revision = '<KEY>' down_revision", "= 'c2889ee965a5' branch_labels = None depends_on = None def upgrade(): # ### commands" ]
[ "'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model =", "= test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id =", "self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'],", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another", "in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42", "200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) 
self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group)", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context)", "class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id': None} def setUp(self): super().setUp() self.task", "'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context)", "Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member',", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id': 1} def", "setUp(self): super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response =", "response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks',", "= 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "= 
test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1", "test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response", "= test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id", "302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id': 1}", "models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args", "1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] =", "response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class 
NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "reverse from groups import models from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import", "kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class", "response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list)", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code,", "1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] =", "self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save()", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list',", "kwargs=self.args)) class 
FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id': None} def setUp(self): super().setUp()", "] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper", "test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key),", "value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "= 'future_group_tasks' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id']", "name = 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list'])", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url,", "reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in 
response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "name = 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name']", "42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response", "response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self):", "self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail'", "group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self):", "value) return response @LoginMethods.login_wrapper def 
test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data()", "test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'task_id': None}", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args =", "def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response =", "self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args =", "self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response =", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': 
self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data()", "_test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username' response = self.client.post(self.get_url(),", "= self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group'", "self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id': 1} def", "= {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html')", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file'", "value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = 
{'task_id': None} def setUp(self): super().setUp()", "'1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context)", "self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group)", "def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id}))", "self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response =", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302)", "302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save()", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = 
self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code,", "test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response", "'edit_member_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task =", "class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task", "args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group(", "args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group(", "self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args = {'task_id': None}", "self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response", "self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] +=", "= 
self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args =", "def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args =", "= test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "= 'new_member_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group", "response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args", "= 'edit_task_group' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group", "= 
self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'],", "self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members'", "response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list)", "self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description'] = 'Description' response = self.client.post(self.get_url(), data)", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args = {'task_id': 1}", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model", "self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', 
response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'],", "self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id': 1} def setUp(self): super().setUp()", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args", "response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def", "'new_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items():", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class", "self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members'", "data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response = self.client.post(self.get_url(), data)", "response = 
self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data()", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup)", "= self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list)", "= models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class", "self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group', reverse('groups:new_task_group',", "'groups/form_base_site.html') @LoginMethods.login_wrapper def 
test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args))", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200)", "def setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'],", "EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task =", "= self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper", "setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id", "setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items():", 
"self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet)", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id':", "for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context)", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'],", "def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = 
self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))", "groups.tests import models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)", "= self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items():", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key),", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data})", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task)", "from django.urls import reverse from groups import models from groups.sidebar import GroupSidebar, 
FutureGroupSidebar", "self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def", "= {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html')", "'1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context)", "= self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404)", "super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def", "self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'],", "response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet)", "response 
= self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def", "objects from core.tests.test_view import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet", "name = 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] =", "model = models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html')", "= {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html')", "class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task", "value in 
test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args =", "test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper", "self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks'", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args))", "self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id': '1'}", "import reverse from groups import models from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests", "{'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) 
self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group',", "self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username)", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model =", "@LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data", "in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html')", "self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id': 1} def setUp(self): super().setUp()", "setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id", "QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class 
MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper", "def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data =", "data['name'] = 'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name'])", "self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index'", "NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key,", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context)", "django.db.models import QuerySet from django.urls import reverse from groups import models from groups.sidebar", "def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': 
self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'],", "'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet)", "reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link", "def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key,", "key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response =", "self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id': None} def setUp(self):", "= 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "def 
_test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response =", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar',", "test_forms from groups.tests import models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name =", "self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id':", "= self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id']))", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, 
reverse('groups:future_group_tasks',", "name = 'edit_task_group' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks',", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context)", "reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task',", "test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id']", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))", "self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = 
self.task_group.id @LoginMethods.login_wrapper def", "from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from groups.tests import models", "models from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from groups.tests import", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value", "self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value)", "for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "data['username'] = 'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username'])", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302)", "'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description']", "FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class 
FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id':", "FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task =", "= 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "= {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id':", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data})", "'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 
302)", "'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] =", "value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data)", "kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups'", "parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response =", "model = models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list',", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, 
reverse('groups:detail',", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context)", "EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task =", "self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self):", "kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args))", "model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", ") self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url())", "args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task(", "super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper", "def setUp(self): super().setUp() self.parent_task = 
test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id", "'1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context)", "= test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response", "{**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id':", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url,", "_test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value", "self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id 
@LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] +=", "value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id =", "= 'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list',", "name = 'new_member_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task()", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'],", "test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key),", "302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id': None}", "'new_project_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group =", "for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id': '1'}", "'sidebar.html') self.assertIn('group', response.context) 
self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "**test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args =", "200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list'])", "test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task'", "test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response", "super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "@LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def", "self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) 
self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id =", "self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url,", "from django.db.models import QuerySet from django.urls import reverse from groups import models from", "self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response", "test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response =", "= 
self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key,", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup)", "self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'],", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args", 
"self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id': 1} def", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in", "test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self):", "= 'future_group_members' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id']", "MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response =", "self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "= self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file'", 
"NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response =", "test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper", "'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup)", "= 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model,", "= {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task)", "Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "'new_subgroup_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group =", 
"'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return", "key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id", "self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, 
reverse('groups:tasks', kwargs=self.args)) class", "= 'edit_project_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task", "self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def", "test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key),", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks',", "test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url())", "setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response =", "302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'}", "self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class 
FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id': None}", "self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'],", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id':", "objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) all(self.assertIsInstance(project, objects.GroupProject) for project in response.context['project_list'])", "self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'],", "404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context)", "self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], 
GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet from django.urls import reverse", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def", "self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class", "self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db()", "test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data()", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group,", "def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): 
name =", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context)", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class", "self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class", "= test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def", "= 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model", "from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet from django.urls import reverse from", 
"value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id =", "args = {'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0)", "self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response =", "self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username'", "def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id':", "@LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self):", "class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task", "'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() 
for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items():", "Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper", "test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id': 1} def setUp(self): super().setUp()", "test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper", "name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context)", "in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): 
value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302)", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list',", "= test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302)", "response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in 
response.context['group_list']) self.assertIn('project_list', response.context)", "response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response", "new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args = {'task_id': None} def setUp(self): super().setUp() self.task =", "group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) all(self.assertIsInstance(project, objects.GroupProject) for project", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class 
EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id': 1} def", "self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group)", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code,", "**test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args =", "in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id':", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group)", "value in 
test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url())", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html')", "self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id", "FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args = {'task_id': None} def", "all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) all(self.assertIsInstance(project, objects.GroupProject) for project in", "= self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id': None} def setUp(self): super().setUp() self.task", "= {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html')", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = 
self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model =", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'],", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class", "name = 'future_group_detail' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task()", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url,", "self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id':", "= 
models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper", "reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self):", "username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper", "test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response", "class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task", "name = 'edit_project_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task()", "= test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def", "in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html')", "self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) 
self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task',", "= 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description'])", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id':", "models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New", "self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) all(self.assertIsInstance(project, objects.GroupProject) for", "kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id': '1'} def setUp(self):", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list',", "super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0)", 
"('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response =", "Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for", "QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self):", "self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task',", "class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42", "'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for", "def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another 
username' response =", "{'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task )", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks',", "self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args =", "response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links =", "response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) all(self.assertIsInstance(project, objects.GroupProject)", "in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class 
EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args = {'task_id': 1} def", "'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for", "models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def", "self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id': '1'}", "{'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id']", "('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "name = 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "= self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', 
reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)),", "1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id']", "kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self):", "self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302)", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] =", "= {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper", "in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id':", "'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response", "self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def 
test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index'))", "self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for", "= 'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id']", "= test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self):", "self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db()", "200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar)", "= self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items():", "self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( 
parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self):", "= self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task'", "value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url())", "self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items():", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id': None} def", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id':", "data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id =", "self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response =", 
"'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username' response", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name)", "self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response =", "'1'} def setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self):", "key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data)", "name = 'new_project_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task()", "import SimpleUrlsTestsCases from django.db.models import QuerySet from django.urls import reverse from groups import", "QuerySet from django.urls import reverse from groups import models from groups.sidebar import GroupSidebar,", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class 
FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id':", "{'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group',", "@LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def", "data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id =", "42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context)", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') 
@LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url())", "{**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args", "for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response", "name = 'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def", "name = 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def", "= 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self):", "test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': 
self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project'", "response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet)", "= {'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper", "args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task(", "302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id': None}", "response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self):", "QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 
'future_group_tasks' args = {'task_id': None} def setUp(self): super().setUp()", "all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args =", "data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description'] = 'Description' response", "self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'}", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'],", "response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper", "response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': 
self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args =", "Name' data['description'] = 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name'])", "self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args", "self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response =", "_test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response = self.client.post(self.get_url(),", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'],", "test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class 
NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args =", "def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response", "= models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "= 'new_project_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group", "self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from groups.tests import models as test_models", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code,", "self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class 
NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file'", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args =", "test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper", "models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def", "self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id': 1} def", "class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task", "EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task =", "{**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args", "kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in 
response.context['new_group_links']: self.assertIn(group_link, new_group_links) class", "def test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self):", "for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response", "class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302)", "'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "= 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "**test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class 
FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args =", "= 'new_subgroup_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group", "= {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task)", "'edit_task_group' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group =", "self.task.username) data['username'] = 'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username,", "self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args = {'task_group_id': 1} def setUp(self): super().setUp()", "self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'],", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' 
args = {'task_group_id': 1} def", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context)", "response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list)", "{'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id']", "1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id']", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list',", "test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class 
NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task'", "[ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task',", "response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list)", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "name = 'new_task_group' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()", "kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class", "= {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task)", "1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = 
test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id']", "name = 'edit_member_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task()", "super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0)", "= self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list',", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks',", "reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))", "setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() 
self.args['task_id'] = self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items():", "NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task =", "302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model,", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list)", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context)", "test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id']", "@LoginMethods.login_wrapper def test_page_found(self): response = 
self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar',", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items():", "kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class", "name = 'new_subgroup_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()", "response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], [])", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url,", "= 'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return", "self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): 
self.task.gitlab_group.gitlab_id = 42", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group,", "= 'new_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key, value in", "response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links =", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list',", "key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "{'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list',", "class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response", "key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = 
self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "= 'future_group_detail' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id']", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list',", "def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] =", "302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return", "self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp()", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id':", "response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = 
{'group_id':", "class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task", "FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id': None} def setUp(self): super().setUp() self.task =", "return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url,", "name = 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'],", "'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'],", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), 
{**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302)", "@LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self):", "self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links", "('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']:", "kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args = {'task_id': 1} def setUp(self):", "def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id", "**test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args =", "setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) 
self.args['task_id'] = self.task.id @LoginMethods.login_wrapper", "args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return", "test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404)", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html')", "import objects from core.tests.test_view import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import", "group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group',", "response = self._test_page_post_valid_data() 
self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data()", "self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'],", "None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value", "response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id': 1} def setUp(self):", "import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet from django.urls import", "super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def", "self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] +=", "self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): 
name = 'edit_member_task' args = {'task_id': 1} def setUp(self): super().setUp()", "'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return", "test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id']", "self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'],", "self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail'", 
"self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [", "test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key,", "= self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404)", "self.task.name) data['name'] = 'Another Name' data['description'] = 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code,", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id':", "self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args =", "in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'task_id':", "self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, 
reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value", "302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id': '1'}", "{'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def", "InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response =", "class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response", "objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response =", "objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self):", "for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "kwargs={'group_id': 
self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class", "NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response =", "for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects'", "self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [", "data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data()", "{'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def", "response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper", "self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class 
IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def", "= {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for", "'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key, value", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html')", "302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id': 1}", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model =", "self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, 
reverse('groups:tasks', kwargs=self.args))", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'],", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class", "reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args", "test_page_post_valid_data_redirect_to_future_tasks(self): 
response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file'", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class", "= self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args))", "new_group_links = [ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New", "302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper", "= self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description'] = 'Description' response =", "= test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "setUp(self): 
super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id': 1} def setUp(self):", "test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "= 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'],", "test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id':", "response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def", "test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper", 
"self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description'] = 'Description' response = self.client.post(self.get_url(),", "'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet)", "= test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self):", "def setUp(self): super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response", "'1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context)", "class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response", "test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member'", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': 
self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args =", "= test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1", "test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response", "return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url,", "'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response", "= 'edit_member_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task", "1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "FutureGroupSidebar from groups.tests import test_forms from groups.tests import models as test_models class GitlabWrapperAppNameCase:", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks',", "self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class 
TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description'] =", "super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id': None} def", "QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args = {'task_id': None} def setUp(self): super().setUp()", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list)", "200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar)", "self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) 
self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group',", "self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response =", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id': 1}", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args", "class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail',", "class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response", "self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) 
self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_members' args =", "args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id", "self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self):", "self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value)", "self.args['task_id'] = self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code,", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) 
self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links',", "= 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get(", "= 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "{**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args", "name = 'future_group_members' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task()", "self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link,", "self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], 
models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class", "'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def", "super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response =", "reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id': 1} def setUp(self):", "GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context)", "reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper 
def test_page_get(self):", "kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key),", "gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save()", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id':", "args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data)", "response.context) self.assertIn('sidebar', response.context) 
self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar)", "list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from groups.tests import models as", "{'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group',", "class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task", "self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response =", "TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response =", "self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': 
self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def", "name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html')", "models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data,", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group", "def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] =", "_test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name' data['description'] = 'Description'", "response.context) 
self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args = {'task_id':", "[]) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self):", "import models from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from groups.tests", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) 
self.assertEqual(response.status_code,", "name = 'future_group_tasks' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task()", "self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args", "kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id': 1} def setUp(self):", "in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42", "self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list',", "= test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "= test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def 
test_page_not_found(self): self.args['task_id'] += 1 response", "self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id':", "GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from groups.tests import models as test_models class", "self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data,", "= 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(),", "self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): 
self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar',", "from core.tests.test_view import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet from", "for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group'", "response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)", "name = 'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()", "def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "import models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) 
self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet)", "self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302)", "models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "from groups import models from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms", "class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id':", "reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value)", "GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper", "test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links',", "self.client.get(self.get_url()) 
self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name']", "302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id': None}", "] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args", "test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args =", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302)", "def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "@LoginMethods.login_wrapper def 
test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar',", "self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper", "objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def", "self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'],", "data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data()", "reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self):", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 
'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name)", "def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task", "from GitLabApi import objects from core.tests.test_view import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from", "200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value in", "kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id': 1} def setUp(self): super().setUp()", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], 
objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class", "self.args['task_id'] = self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_task_group' args = {'task_group_id': 1}", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args))", "= self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items():", "list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)),", "core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet from django.urls import reverse from groups", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar',", "kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id': 1} def setUp(self): super().setUp()", 
"@LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response", "GitLabApi import objects from core.tests.test_view import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models", "{'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task )", "groups.tests import test_forms from groups.tests import models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests):", "self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42", "setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper", "self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task", "'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response 
= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "'new_task_group' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] =", "gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id']", "name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html')", "NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key,", "class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task", "response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response", "response.context) self.assertIn('unfinished_task_list', 
response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar)", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id':", "for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args =", "self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response =", "{'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task )", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args))", "200) self.assertTemplateUsed(response, 
'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context)", "name = 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save()", "setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task = test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper", "self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper", "key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def 
test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self):", "class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response", "{'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper", "key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'task_id': None} def setUp(self):", "42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response", "name = 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data, **test_forms.MembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) 
self.assertEqual(response.status_code,", "'1'} def setUp(self): super().setUp() for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self):", "kwargs=self.args)) class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp()", "self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model,", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context)", "in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id': '1'}", "self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task',", "IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def 
test_page_found(self):", "NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response =", "kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self): super().setUp()", "('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ]", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class", "test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response =", "self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data()", "self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group)", "= [ ('New Task Group', 
reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project',", "'1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context)", "class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response", "for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response =", "test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list',", "= {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper", "response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data)", "self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): 
name = 'future_group_tasks' args = {'task_id': None} def setUp(self):", "GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id':", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in", "response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model =", "self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id': '1'}", "kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args = {'task_group_id': 1} def setUp(self):", "test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(),", "new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id': 
'1'} @LoginMethods.login_wrapper def test_page_found(self):", "kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "models.TaskGroup.objects.get(parent_task=self.parent_task) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group)", "self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'],", "class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args = {'task_id': None} def setUp(self): super().setUp() self.task", "EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task =", 
"None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self):", "test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url())", "super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper", "test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args =", "self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def", "def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key,", "test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class 
EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args =", "self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'],", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args))", "response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar)", "self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links", "response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args", "= {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = 
test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task", "'future_group_tasks' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] =", "core.tests.test_view import LoginMethods from core.tests.test_view import SimpleUrlsTestsCases from django.db.models import QuerySet from django.urls", "{'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def", "reverse('groups:new_project_task', kwargs=self.args)), ('New Member', reverse('groups:new_member_task', kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links)", "{'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar)", "list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup',", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class", "reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( 
gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model,", "1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] =", "data['name'] = 'Another Name' data['description'] = 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302)", "from groups.tests import test_forms from groups.tests import models as test_models class GitlabWrapperAppNameCase: class", "302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in", "response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "= 'new_subgroup_and_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key, value in", "+= 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url())", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args", "test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) 
self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args)) class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project_task' args =", "= 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args = {'task_group_id':", "'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "'groups/index.html') self.assertIn('group_list', response.context) all(self.assertIsInstance(group, objects.Group) for group in response.context['group_list']) class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper", "self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name, data['name']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) 
self.assertEqual(response.status_code,", "all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks',", "test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404)", "data['description'] = 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description,", "None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self):", "'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html') self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group", "200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') 
self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context)", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] =", "= self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code,", "key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args", "self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self):", "def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in", "= test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self):", "args = {'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0)", "= self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code,", "name = 
'new_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for key, value", "import test_forms from groups.tests import models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name", "def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'],", "objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'tasks' args = {'group_id':", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context)", "GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id': '1'}", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() 
self.assertEqual(response.url, reverse('groups:future_group_tasks',", "'tasks' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "= models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper", "import QuerySet from django.urls import reverse from groups import models from groups.sidebar import", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id': '1'} def", "class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id': '1'} def setUp(self): super().setUp() for", "in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) 
self.assertIsInstance(response.context['task'],", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks',", "self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = test_models.AddProjectCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id @LoginMethods.login_wrapper def", "groups import models from groups.sidebar import GroupSidebar, FutureGroupSidebar from groups.tests import test_forms from", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code,", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task' args", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "args = {'group_id': 
'1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))", "test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save()", "QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self):", "404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def", "self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) model", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class 
NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id': '1'}", "404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context)", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model", "self.assertIn('group_list', response.context) self.assertIsInstance(response.context['group_list'], list) all(self.assertIsInstance(group, objects.GroupSubgroup) for group in response.context['group_list']) self.assertIn('project_list', response.context) all(self.assertIsInstance(project,", "response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url,", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'],", "in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class 
NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group' @LoginMethods.login_wrapper def test_page_get(self): response", "reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args = {'task_group_id': 1}", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html')", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar',", "'1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context)", "def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self):", 
"django.urls import reverse from groups import models from groups.sidebar import GroupSidebar, FutureGroupSidebar from", "name = 'edit_subgroup_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()", "{'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id']", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model", "kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id': None} def setUp(self): super().setUp()", "test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self):", "key, value in test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id", "models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks' args = {'task_id':", "args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task_group = 
test_models.AddProjectCreateMethods().create_task_group(", "self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task_group.name) data['name'] = 'Another Name'", "response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value)", "list) new_group_links = [ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)),", "= 'Another Name' data['description'] = 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db()", "NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task =", "'new_member_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group =", "def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self):", "from groups.tests import models as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 
'groups'", "self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id': None} def", "self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task',", "@LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))", "DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response =", "_test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper", "models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def", "= 'edit_subgroup_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task", "self.assertEqual(getattr(model, 
key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'task_id': None} def", "= self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list',", "= {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html')", "self.task_group.name) data['name'] = 'Another Name' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task_group.refresh_from_db() self.assertEqual(self.task_group.name,", "name = 'new_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()", "= 'detail' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model,", "class 
IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'index' @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args", "response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddMember.objects.get(task_group=self.task_group) for key, value in", "= 'Another username' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return", "reverse('groups:tasks', kwargs=self.args)) class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id': None} def setUp(self):", "self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper", "self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task' args", "SimpleUrlsTestsCases 
from django.db.models import QuerySet from django.urls import reverse from groups import models", "self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def", "1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url,", "self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_and_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task =", "response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data)", "response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks',", 
"self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'group_id': '1'}", "'members' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200)", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id': '1'}", "= {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task", "test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_member_task'", "'new_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] =", "key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id': None} def setUp(self): super().setUp() self.task =", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar', 
response.context) self.assertIn('unfinished_task_list', response.context)", "self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['name'], self.task.name) data['name'] = 'Another Name'", "AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response =", "reverse('groups:future_group_tasks', kwargs=self.args)) class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_detail' args = {'task_id': None} def setUp(self):", "self.assertIn('unfinished_task_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], QuerySet) class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'future_group_tasks'", "self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response", "as test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper", "'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = 
models.AddSubgroup.objects.get(task_group=self.task_group) for", "test_page_post_valid_data_redirect_to_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task'", "test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response =", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'],", "self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username']", "200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "= self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.username, data['username']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id': 1} 
def setUp(self): super().setUp() self.parent_task", "= self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save() response", "self.assertIsInstance(response.context['finished_task_list'], list) self.assertIsInstance(response.context['new_group_links'], list) new_group_links = [ ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New", "key, value in test_forms.AddProjectFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id", "'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class", "response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_add_subgroup_list'], QuerySet) self.assertIsInstance(response.context['unfinished_add_project_list'], QuerySet) class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name =", "= {'group_id': '1'} def setUp(self): super().setUp() for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper", "self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class 
FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args = {'task_id': None} def", "200) self.assertTemplateUsed(response, 'groups/tasks/detail.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup)", "= 42 self.task.gitlab_group.save() response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self):", "'Another Name' data['description'] = 'Description' response = self.client.post(self.get_url(), data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name,", "= {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html')", "GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args =", "+= 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url())", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/members.html') self.assertIn('task', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context)", "404) @LoginMethods.login_wrapper def test_page_found(self): response = 
self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks/tasks.html') self.assertIn('task', response.context)", "= 'new_members_from_file' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id']", "'edit_subgroup_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task =", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url,", "setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task_group = test_models.AddSubgroupCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] = self.task_group.id", "= self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code,", "def setUp(self): super().setUp() self.parent_task = test_models.AddMemberCreateMethods().create_parent_task() self.task_group = test_models.AddMemberCreateMethods().create_task_group( parent_task=self.parent_task ) self.args['task_group_id'] =", "def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): 
name", "response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args", "data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task.gitlab_group.gitlab_id = 42 self.task.gitlab_group.save() response", "response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIsInstance(response.context['group'], objects.Group)", "data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another username' response = self.client.post(self.get_url(), data)", "NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup_task' args = {'task_group_id': 1} def setUp(self): super().setUp() self.parent_task =", "'edit_project_task' args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task =", "data) self.assertEqual(response.status_code, 302) self.task.refresh_from_db() self.assertEqual(self.task.name, data['name']) self.assertEqual(self.task.description, data['description']) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self):", "response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data, **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data}) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args)) class 
FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "kwargs=self.args)) ] for group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_group'", "self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'], list) self.assertIsInstance(response.context['finished_task_list'],", "reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member' args = {'group_id': '1'} @LoginMethods.login_wrapper", "self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302)", "self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_subgroup_task' args = {'task_id': 1}", "'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['group'], objects.Group)", "= models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in 
test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper", "= self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members', kwargs=self.args)) class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_member_task'", "= self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_members_from_file' args =", "for key, value in test_forms.MembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url,", "self.assertEqual(response.url, reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id':", "test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] +=", "response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) 
self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:index')) class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup'", "Task Group', reverse('groups:new_task_group', kwargs=self.args)), ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)), ('New Project', reverse('groups:new_project_task', kwargs=self.args)), ('New", "self.task_group.gitlab_group.gitlab_id})) @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):", "= self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "= test_models.AddMemberCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response", "app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar' args = {'group_id': '1'} @LoginMethods.login_wrapper", "test_models class GitlabWrapperAppNameCase: class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests): app_name = 'groups' class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'init_sidebar'", "group_link in response.context['new_group_links']: self.assertIn(group_link, new_group_links) 
class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id':", "404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self):", "model = models.TaskGroup.objects.get( gitlab_group=models.GitlabGroup.objects.get( gitlab_id=self.args['group_id'])) for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value)", "test_forms.AddMemberFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response @LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_tasks(self): self.task_group.gitlab_group.gitlab_id = 42 self.task_group.gitlab_group.save()", "self.parent_task.id for key, value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.items(): value.file.seek(0) @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1", "_test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddProject.objects.get(task_group=self.task_group) for key, value", "self.assertIn('sidebar', response.context) self.assertIn('unfinished_task_list', response.context) self.assertIn('finished_task_list', response.context) self.assertIn('new_group_links', response.context) self.assertIsInstance(response.context['task'], models.AddSubgroup) self.assertIsInstance(response.context['sidebar'], FutureGroupSidebar) self.assertIsInstance(response.context['unfinished_task_list'],", "args = {'task_id': 1} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.task = test_models.AddSubgroupCreateMethods().create_task(", "in 
response.context['group_list']) self.assertIn('project_list', response.context) self.assertEqual(response.context['project_list'], []) class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups_and_projects' args =", "{'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() self.args['task_id'] = self.parent_task.id for key,", "self.assertIn(group_link, new_group_links) class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper def", "self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:members',", "self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper def test_page_get(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper", "self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id':", "= 'ajax_load_subgroups' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code,", "@LoginMethods.login_wrapper def test_page_post_valid_data_redirect_to_future_tasks(self): response = self._test_page_post_valid_data() self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})) class 
NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name", "model = models.AddSubgroup.objects.get(task_group=self.task_group) for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) return response", "parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url())", "'future_group_members' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] =", "self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/form_base_site.html') @LoginMethods.login_wrapper def test_page_post_not_valid_data(self): response = self.client.post(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response,", "self.task.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404) @LoginMethods.login_wrapper", "response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, reverse('groups:future_group_tasks', kwargs=self.args)) model = models.TaskGroup.objects.get(parent_task=self.parent_task) for", "kwargs={'task_id': self.parent_task.id})) class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'edit_project_task' args = {'task_id': 1} def setUp(self):", "FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task =", "= 'new_task_group' args = {'task_id': None} def setUp(self): super().setUp() self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task() 
self.args['task_id']", "200) self.assertTemplateUsed(response, 'groups/form_base_site.html') def _test_page_post_valid_data(self): data = self.get_initial_form_data() self.assertEqual(data['username'], self.task.username) data['username'] = 'Another", "def _test_page_post_valid_data(self): response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data) self.assertEqual(response.status_code, 302) model = models.AddSubgroup.objects.get(task_group=self.task_group) for key,", "'groups/detail.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context) self.assertIn('unfinished_add_subgroup_list', response.context) self.assertIn('unfinished_add_project_list', response.context) self.assertIsInstance(response.context['group'], objects.Group) self.assertIsInstance(response.context['sidebar'], GroupSidebar)", "= self.task_group.id @LoginMethods.login_wrapper def test_page_not_found(self): self.args['task_group_id'] += 1 response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 404)", "def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/tasks.html') self.assertIn('group', response.context) self.assertIn('sidebar', response.context)", "{'group_id': '1'} @LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'sidebar.html') self.assertIn('group',", "class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_project' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response", "for key, value in test_forms.TaskGroupFormTests.valid_form_data.items(): self.assertEqual(getattr(model, key), value) class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_task_group'", "class 
NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest): name = 'new_subgroup' args = {'group_id': '1'} @LoginMethods.login_wrapper def test_page_get(self): response", "setUp(self): super().setUp() self.parent_task = test_models.AddProjectCreateMethods().create_parent_task() self.task = test_models.AddProjectCreateMethods().create_task( parent_task=self.parent_task) self.args['task_id'] = self.task.id @LoginMethods.login_wrapper", "@LoginMethods.login_wrapper def test_page_found(self): response = self.client.get(self.get_url()) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'groups/members.html') self.assertIn('group', response.context) self.assertIn('sidebar',", "'future_group_detail' args = {'task_id': None} def setUp(self): super().setUp() self.task = test_models.AddSubgroupCreateMethods().create_task() self.args['task_id'] =" ]
[ "subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo:", "session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id)", "trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials = session.execute(stmt).scalars().all()", "lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence", ") logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as session: ehr = session.execute(stmt).scalars().unique().all() return", "logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as session: ehr = session.execute(stmt).scalars().unique().all() return ehr", "self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self):", "get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for", "Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str =", ") logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials = session.execute(stmt).scalars().all() return", "{stmt}\") with self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id):", "stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == 
trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with", "conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def", "class Repo: def __init__(self, conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session =", "joinedload, subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class", "class CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\")", "return trials class EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence))", "self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt =", "EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str", "= conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt", "funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion, Person,", ".options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as session: ehr =", ") logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return", "def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) 
.options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query", "session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure))", "import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str):", "def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for", "session: trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self): stmt = (", "get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id:", "for get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def", "as session: trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self): stmt =", ".filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials", "= session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence))", "EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) )", "( select(Person) 
.options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with", "DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str = conn_str self.engine", "select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with self.session()", "ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session", "{stmt}\") with self.session() as session: trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def", "self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt = (", "logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids", "from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def", "def __init__(self, conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class", "Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str)", "= ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session()", 
".options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as", "func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion,", "= ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\")", "== trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials =", "select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids = session.execute(stmt).scalars().unique()", "get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo):", "trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\")", "= create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id)", "conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt =", "with self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt", "class EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) 
.options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement))", "get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self,", "self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query", "sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure,", "from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence,", "sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids:", "logging from sqlalchemy import create_engine, select, func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload,", "select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as session:", "import logging from sqlalchemy import create_engine, select, func, funcfilter from sqlalchemy.orm import sessionmaker,", "stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as session:", "__init__(self, conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo):", "Repo: def __init__(self, conn_str): self.conn_str = conn_str self.engine = create_engine(self.conn_str) self.session = sessionmaker(self.engine)", "select, func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from 
gist.entities import", "( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as", "( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids =", "= session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id ==", "trials class EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation))", "CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with", "sqlalchemy import create_engine, select, func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload", "Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str = conn_str self.engine =", "stmt = ( select(Person) .options(subqueryload(Person.condition_occurrence)) .options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr:", "logging.debug(f\"query for get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials = session.execute(stmt).scalars().all() return trials", "get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as", "gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self,", "= ( 
select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session() as session: trial_ids", "trial_ids def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) ) logging.debug(f\"query", ".options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as session: ehr = session.execute(stmt).scalars().unique().all()", ".options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as session: ehr", "trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self): stmt = ( select(Person)", "= sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for", "import create_engine, select, func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from", "trial_ids = session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id", "from sqlalchemy import create_engine, select, func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload,", "<reponame>thisisibrahimd/gist<filename>gist/repo.py import logging from sqlalchemy import create_engine, select, func, funcfilter from sqlalchemy.orm import", "with self.session() as session: trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self):", "create_engine, select, func, funcfilter from sqlalchemy.orm import sessionmaker, lazyload, joinedload, subqueryload from gist.entities", "as session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def 
get_criteria_by_trial_id(self, trial_id): stmt = (", "return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion) .filter(EligibilityCriterion.nct_id == trial_id) )", "ConditionOccurrence, DrugExposure, Measurement, Observation, ProcedureOccurrence class Repo: def __init__(self, conn_str): self.conn_str = conn_str", "for get_criteria_by_trial_id: {stmt}\") with self.session() as session: trials = session.execute(stmt).scalars().all() return trials class", "sessionmaker, lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement, Observation,", "def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) ) logging.debug(f\"query for get_all_trial_ids: {stmt}\") with self.session()", "import sessionmaker, lazyload, joinedload, subqueryload from gist.entities import EligibilityCriterion, Person, ConditionOccurrence, DrugExposure, Measurement,", "create_engine(self.conn_str) self.session = sessionmaker(self.engine) class CritRepo(Repo): def get_all_trial_ids(self): stmt = ( select(EligibilityCriterion.nct_id) )", "session: trial_ids = session.execute(stmt).scalars().unique() return trial_ids def get_criteria_by_trial_id(self, trial_id): stmt = ( select(EligibilityCriterion)", ".options(subqueryload(Person.drug_exposure)) .options(subqueryload(Person.procedure_occurrence)) .options(subqueryload(Person.observation)) .options(subqueryload(Person.measurement)) ) logging.debug(f\"query for get_ehr: {stmt}\") with self.session() as session:", "self.session() as session: trials = session.execute(stmt).scalars().all() return trials class EhrRepo(Repo): def get_ehr(self): stmt" ]
[ "import Listing def get_all_queries(): \"\"\" Returns all stored listing queries. \"\"\" return list(Listing.query.all())", "python3 from api import DB from api.models.listing import Listing def get_all_queries(): \"\"\" Returns", "api import DB from api.models.listing import Listing def get_all_queries(): \"\"\" Returns all stored", "from api import DB from api.models.listing import Listing def get_all_queries(): \"\"\" Returns all", "DB from api.models.listing import Listing def get_all_queries(): \"\"\" Returns all stored listing queries.", "import DB from api.models.listing import Listing def get_all_queries(): \"\"\" Returns all stored listing", "#!/usr/bin/env python3 from api import DB from api.models.listing import Listing def get_all_queries(): \"\"\"", "api.models.listing import Listing def get_all_queries(): \"\"\" Returns all stored listing queries. \"\"\" return", "from api.models.listing import Listing def get_all_queries(): \"\"\" Returns all stored listing queries. \"\"\"" ]
[ "self.name = name self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If", "self: if name in contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this class is", "Contact: '''this class is responsible for maintaining a list of all contacts in", "contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this class is responsible for maintaining a", "name in contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this class is responsible for", "= email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If this were a real", "class Supplier(Contact): def order(self, order): print(\"If this were a real system we would", "Mon Jun 15 18:15:37 2020 @author: krishan \"\"\" class ContactList(list): def search(self, name):", "in contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this class is responsible for maintaining", "in their name.''' matching_contacts = [] for contact in self: if name in", "class Contact: '''this class is responsible for maintaining a list of all contacts", "name): '''Return all contacts that contain the search value in their name.''' matching_contacts", "that contain the search value in their name.''' matching_contacts = [] for contact", "utf-8 -*- \"\"\" Created on Mon Jun 15 18:15:37 2020 @author: krishan \"\"\"", "for maintaining a list of all contacts in a class variable all_contacts''' all_contacts", "order): print(\"If this were a real system we would send \" \"'{}' order", "matching_contacts.append(contact) return matching_contacts class Contact: '''this class is responsible for maintaining a list", "print(\"If this were a real system we would send \" \"'{}' order to", "15 18:15:37 2020 @author: krishan \"\"\" class ContactList(list): def search(self, name): '''Return all", "name, email): self.name = 
name self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def order(self,", "matching_contacts = [] for contact in self: if name in contact.name: matching_contacts.append(contact) return", "contacts in a class variable all_contacts''' all_contacts = ContactList() def __init__(self, name, email):", "__init__(self, name, email): self.name = name self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def", "email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If this were a real system", "ContactList() def __init__(self, name, email): self.name = name self.email = email Contact.all_contacts.append(self) class", "all_contacts''' all_contacts = ContactList() def __init__(self, name, email): self.name = name self.email =", "order(self, order): print(\"If this were a real system we would send \" \"'{}'", "-*- coding: utf-8 -*- \"\"\" Created on Mon Jun 15 18:15:37 2020 @author:", "18:15:37 2020 @author: krishan \"\"\" class ContactList(list): def search(self, name): '''Return all contacts", "all_contacts = ContactList() def __init__(self, name, email): self.name = name self.email = email", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on Mon Jun 15", "list of all contacts in a class variable all_contacts''' all_contacts = ContactList() def", "'''this class is responsible for maintaining a list of all contacts in a", "all contacts in a class variable all_contacts''' all_contacts = ContactList() def __init__(self, name,", "def __init__(self, name, email): self.name = name self.email = email Contact.all_contacts.append(self) class Supplier(Contact):", "maintaining a list of all contacts in a class variable all_contacts''' all_contacts =", "class ContactList(list): def search(self, name): '''Return all contacts that contain the search value", "for contact in self: if name in contact.name: matching_contacts.append(contact) return matching_contacts class Contact:", "name 
self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If this were", "in self: if name in contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this class", "search(self, name): '''Return all contacts that contain the search value in their name.'''", "\"\"\" class ContactList(list): def search(self, name): '''Return all contacts that contain the search", "of all contacts in a class variable all_contacts''' all_contacts = ContactList() def __init__(self,", "contacts that contain the search value in their name.''' matching_contacts = [] for", "matching_contacts class Contact: '''this class is responsible for maintaining a list of all", "in a class variable all_contacts''' all_contacts = ContactList() def __init__(self, name, email): self.name", "responsible for maintaining a list of all contacts in a class variable all_contacts'''", "Jun 15 18:15:37 2020 @author: krishan \"\"\" class ContactList(list): def search(self, name): '''Return", "Created on Mon Jun 15 18:15:37 2020 @author: krishan \"\"\" class ContactList(list): def", "krishan \"\"\" class ContactList(list): def search(self, name): '''Return all contacts that contain the", "their name.''' matching_contacts = [] for contact in self: if name in contact.name:", "contact in self: if name in contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this", "= name self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If this", "ContactList(list): def search(self, name): '''Return all contacts that contain the search value in", "return matching_contacts class Contact: '''this class is responsible for maintaining a list of", "<filename>oops/#009.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on Mon Jun", "coding: utf-8 -*- \"\"\" Created on Mon Jun 15 18:15:37 2020 @author: krishan", "contain the search value in their 
name.''' matching_contacts = [] for contact in", "-*- \"\"\" Created on Mon Jun 15 18:15:37 2020 @author: krishan \"\"\" class", "name.''' matching_contacts = [] for contact in self: if name in contact.name: matching_contacts.append(contact)", "Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If this were a real system we", "variable all_contacts''' all_contacts = ContactList() def __init__(self, name, email): self.name = name self.email", "on Mon Jun 15 18:15:37 2020 @author: krishan \"\"\" class ContactList(list): def search(self,", "value in their name.''' matching_contacts = [] for contact in self: if name", "the search value in their name.''' matching_contacts = [] for contact in self:", "@author: krishan \"\"\" class ContactList(list): def search(self, name): '''Return all contacts that contain", "2020 @author: krishan \"\"\" class ContactList(list): def search(self, name): '''Return all contacts that", "email): self.name = name self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order):", "class is responsible for maintaining a list of all contacts in a class", "a class variable all_contacts''' all_contacts = ContactList() def __init__(self, name, email): self.name =", "def order(self, order): print(\"If this were a real system we would send \"", "python3 # -*- coding: utf-8 -*- \"\"\" Created on Mon Jun 15 18:15:37", "[] for contact in self: if name in contact.name: matching_contacts.append(contact) return matching_contacts class", "is responsible for maintaining a list of all contacts in a class variable", "Supplier(Contact): def order(self, order): print(\"If this were a real system we would send", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Jun 15 18:15:37 2020", "'''Return all contacts that contain the search value in their name.''' matching_contacts =", "a list of all contacts in a class variable all_contacts''' all_contacts = ContactList()", "= [] for contact in 
self: if name in contact.name: matching_contacts.append(contact) return matching_contacts", "all contacts that contain the search value in their name.''' matching_contacts = []", "= ContactList() def __init__(self, name, email): self.name = name self.email = email Contact.all_contacts.append(self)", "\"\"\" Created on Mon Jun 15 18:15:37 2020 @author: krishan \"\"\" class ContactList(list):", "search value in their name.''' matching_contacts = [] for contact in self: if", "def search(self, name): '''Return all contacts that contain the search value in their", "if name in contact.name: matching_contacts.append(contact) return matching_contacts class Contact: '''this class is responsible", "self.email = email Contact.all_contacts.append(self) class Supplier(Contact): def order(self, order): print(\"If this were a", "this were a real system we would send \" \"'{}' order to '{}'\".format(order,", "class variable all_contacts''' all_contacts = ContactList() def __init__(self, name, email): self.name = name", "were a real system we would send \" \"'{}' order to '{}'\".format(order, self.name))" ]
[ "tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for task in tasks: await", "\"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price,", "import ClientResponse from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session,", "response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status != 201: payload = json.dumps(await response.json(),", "prices\" ) logger.debug(\"creating tasks: load price\") tasks = [] for actual_at, price in", "in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel,", "settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status != 201: payload", "aiohttp import ClientResponse from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger,", "sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices( logger, session, prices: Dict[date, Decimal], currency:", "f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak price history API:\"", "currency: int, fuel: int ): logger.debug( f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\"", "} task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load", "): logger.debug( f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating", "{len(prices)} prices\" ) logger.debug(\"creating tasks: load price\") tasks = [] for actual_at, price", "async def 
load_prices( logger, session, prices: Dict[date, Decimal], currency: int, fuel: int ):", "= json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices( logger, session,", "decimal import Decimal from typing import Dict from aiohttp import ClientResponse from dynaconf", "json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices( logger, session, prices:", "price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse", "{ \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger,", "import Dict from aiohttp import ClientResponse from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\"", "_PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status", "price in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\":", "{payload}\") async def load_prices( logger, session, prices: Dict[date, Decimal], currency: int, fuel: int", "load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"'", "logger.debug(\"awaiting tasks: load price\") for task in tasks: await task logger.debug(f\"loaded {len(prices)} prices\")", "= asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for", "logger.debug( f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating tasks:", "fuel: int ): logger.debug( 
f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\"", "await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG", "datetime import date from decimal import Decimal from typing import Dict from aiohttp", "{len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for task in tasks: await task logger.debug(f\"loaded", "= [] for actual_at, price in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\":", "session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and", "indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices( logger, session, prices: Dict[date, Decimal],", "response: {payload}\") async def load_prices( logger, session, prices: Dict[date, Decimal], currency: int, fuel:", "API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse = await", "response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status}", "import date from decimal import Decimal from typing import Dict from aiohttp import", "prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating tasks: load price\")", "logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for task in tasks: await task", "Dict from aiohttp import ClientResponse from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async", "from datetime import date from decimal import Decimal from typing import Dict from", "{response.reason}]\") 
if settings.DEBUG and response.status != 201: payload = json.dumps(await response.json(), indent=2, sort_keys=True)", "Decimal], currency: int, fuel: int ): logger.debug( f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\"", "logger, session, prices: Dict[date, Decimal], currency: int, fuel: int ): logger.debug( f\"loading prices\"", ") logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status != 201: payload =", "f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating tasks: load price\") tasks", "prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel, }", "_PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak price", "ClientResponse from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price:", "fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating tasks: load price\") tasks = [] for", "session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for task in", "from aiohttp import ClientResponse from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def", ") logger.debug(\"creating tasks: load price\") tasks = [] for actual_at, price in prices.items():", "= f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak price history", "for actual_at, price in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\":", "Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response:", "f\" 
json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\":", "= { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel, } task =", "import Decimal from typing import Dict from aiohttp import ClientResponse from dynaconf import", ") response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response:", "\"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session,", "currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\")", "json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN},", "import json from datetime import date from decimal import Decimal from typing import", "201: payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices(", "\"price\": str(price), \"currency\": currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task)", "from typing import Dict from aiohttp import ClientResponse from dynaconf import settings _PRICE_HISTORY_API", "POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse = await session.post( _PRICE_HISTORY_API,", "date from decimal import Decimal from typing import Dict from aiohttp import ClientResponse", "task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load 
price\")", "price\") tasks = [] for actual_at, price in prices.items(): payload = { \"at\":", "tasks\") logger.debug(\"awaiting tasks: load price\") for task in tasks: await task logger.debug(f\"loaded {len(prices)}", "typing import Dict from aiohttp import ClientResponse from dynaconf import settings _PRICE_HISTORY_API =", "Dict[date, Decimal], currency: int, fuel: int ): logger.debug( f\"loading prices\" f\" for currency={currency},", "settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak", "ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\")", "f\"calling Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" )", "import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict): logger.debug( f\"calling", "payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for task in tasks:", "actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session, payload))", "\"currency\": currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)}", "f\" {len(prices)} prices\" ) logger.debug(\"creating tasks: load price\") tasks = [] for actual_at,", "tasks = [] for actual_at, price in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"),", "payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices( logger,", "currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" 
) logger.debug(\"creating tasks: load price\") tasks = []", "fuel, } task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks:", "sort_keys=True)}\" ) response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got", "response.status != 201: payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async", "[{response.status} {response.reason}]\") if settings.DEBUG and response.status != 201: payload = json.dumps(await response.json(), indent=2,", "Dict): logger.debug( f\"calling Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2,", "history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse =", "for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating tasks: load price\") tasks =", "logger.debug(f\"API response: {payload}\") async def load_prices( logger, session, prices: Dict[date, Decimal], currency: int,", "f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" ) logger.debug(\"creating tasks: load", "Decimal from typing import Dict from aiohttp import ClientResponse from dynaconf import settings", "load_prices( logger, session, prices: Dict[date, Decimal], currency: int, fuel: int ): logger.debug( f\"loading", "json from datetime import date from decimal import Decimal from typing import Dict", "[] for actual_at, price in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price),", "async def load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak price history API:\" f'", "load price\") tasks = [] for actual_at, price in prices.items(): payload = {", "dynaconf import 
settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict): logger.debug(", "def load_prices( logger, session, prices: Dict[date, Decimal], currency: int, fuel: int ): logger.debug(", "f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\" ) response: ClientResponse = await session.post(", "tasks: load price\") tasks = [] for actual_at, price in prices.items(): payload =", "def load_price(logger, session, price: Dict): logger.debug( f\"calling Benzak price history API:\" f' POST", "and response.status != 201: payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\")", "prices: Dict[date, Decimal], currency: int, fuel: int ): logger.debug( f\"loading prices\" f\" for", "payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency, \"fuel\": fuel, } task", "logger.debug(\"creating tasks: load price\") tasks = [] for actual_at, price in prices.items(): payload", "from dynaconf import settings _PRICE_HISTORY_API = f\"{settings.BENZAK_API_URL}/price-history/\" async def load_price(logger, session, price: Dict):", "asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting tasks: load price\") for task", "settings.DEBUG and response.status != 201: payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response:", "logger.debug( f\"calling Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price, indent=2, sort_keys=True)}\"", "response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def load_prices( logger, session, prices: Dict[date,", "actual_at, price in prices.items(): payload = { \"at\": actual_at.strftime(\"%Y-%m-%d\"), \"price\": str(price), \"currency\": currency,", "session, price: 
Dict): logger.debug( f\"calling Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\"", "asyncio import json from datetime import date from decimal import Decimal from typing", "!= 201: payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API response: {payload}\") async def", "logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status != 201: payload = json.dumps(await", "\"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created {len(tasks)} tasks\") logger.debug(\"awaiting", "str(price), \"currency\": currency, \"fuel\": fuel, } task = asyncio.create_task(load_price(logger, session, payload)) tasks.append(task) logger.debug(f\"created", "if settings.DEBUG and response.status != 201: payload = json.dumps(await response.json(), indent=2, sort_keys=True) logger.debug(f\"API", "int ): logger.debug( f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)} prices\" )", "headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status != 201:", "int, fuel: int ): logger.debug( f\"loading prices\" f\" for currency={currency}, fuel={fuel}:\" f\" {len(prices)}", "price: Dict): logger.debug( f\"calling Benzak price history API:\" f' POST \"{_PRICE_HISTORY_API}\"' f\" json={json.dumps(price,", "session, prices: Dict[date, Decimal], currency: int, fuel: int ): logger.debug( f\"loading prices\" f\"", "from decimal import Decimal from typing import Dict from aiohttp import ClientResponse from", "json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) logger.debug(f\"got response: [{response.status} {response.reason}]\") if settings.DEBUG and response.status !=", "= await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, ) 
logger.debug(f\"got response: [{response.status} {response.reason}]\") if", "import asyncio import json from datetime import date from decimal import Decimal from", "indent=2, sort_keys=True)}\" ) response: ClientResponse = await session.post( _PRICE_HISTORY_API, json=price, headers={\"AUTHORIZATION\": settings.BENZAK_API_TOKEN}, )" ]
[ "if invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is", "😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other than WebP for image to", "Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\",", "embed def command_canceled_embed(message: str = \"The command was canceled.\", color: discord.Color = discord.Color.dark_orange())", "f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow())", "inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True)", "is not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id, message_id) def", "old! 
\\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if", "\"\"\"Returns an embed formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\",", "// 3600 minutes = (account_age.seconds % 3600) // 60 if hours > 0:", "was banned from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Banned By:\",", "== \"\": # Make sure we don't end up throwing an error due", "else: account_age_name = \"**New Account!**\" hours = account_age.seconds // 3600 minutes = (account_age.seconds", "icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other than WebP for image to display", "Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name, message.author.discriminator, message.author.id, guild_id, message.channel.id)) return embed", "**Warning!** Account is only **{hours}** hours and **{minutes}** minutes old! 
\\N{WARNING SIGN}\" else:", "invite tracking by giving Gabby Gums the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID:", "Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted in", "embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other than WebP for", "= f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None:", "value=\"{} has joined the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at if account_age.days", "str) -> (str, str): # TODO: Make better msg1 = message[:1000] msg2 =", "inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information.", "embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\",", "in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def member_join(member: discord.Member, invite:", "f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted", "member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other than", "author of the message. 
if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is NOT", "None) if cached: pk_id_msg = \"\" if webhook_info.member_pkid is not None or webhook_info.system_pkid", "age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\",", "by using the Audit Log.\\n\" \"Additionally, you can greatly improve the reliability of", "= split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False)", "# We have NO info on the author of the message. if webhook_info.webhook_author_name", "if cached: pk_id_msg = \"\" if webhook_info.member_pkid is not None or webhook_info.system_pkid is", "TODO: Make better msg1 = message[:1000] msg2 = message[1000:] return msg1, msg2 def", "\"\"\" import discord from discord.ext import commands from datetime import datetime from typing", "embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False) if audit_log is not", "Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural", "= True) -> discord.Embed: # If the webhook_info is none, create dummy object", "SIGN} **Warning!** Account is only **{minutes}** minutes and **{seconds}** seconds old! \\N{WARNING SIGN}\"", "if invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\",", "value=\"Unable to determine invite information. 
It's possible the invite was a one time", "color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited in <#{channel_id}>\\n\"", "%d, %Y, %I:%M:%S %p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server", "timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other than WebP for", "embed def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed = discord.Embed( description=\"<@{}> -", "own line. if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'],", "= f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text = info_author", "for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth", "None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: if not manage_guild:", "= discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need", "Add a blank embed to force the PK info onto it's own line.", "command was canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted", "value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) 
return embed def user_avatar_update(before:", "embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d,", "discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\",", "is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: embed.add_field(name=\"Created on\",", "audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow())", "is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: if", "embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id,", "- {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to", "embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") #", "given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def 
member_ban(member: discord.Member, audit_log:", "timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\",", "if hours > 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}**", "0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}** hours and **{minutes}**", "minutes old! \\N{WARNING SIGN}\" else: seconds = account_age.seconds % 60 account_age_value = f\"\\N{WARNING", "before.name != after.name and before.discriminator == after.discriminator: # Name changed, discriminator did not", "color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New", "embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the cache was deleted in <#{}>\".format(channel_id), inline=False)", "not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\"", "else: seconds = account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is", "account_age.seconds // 3600 minutes = (account_age.seconds % 3600) // 60 if hours >", "discord.ext import commands from datetime import datetime from typing import Optional, Dict, Union", "embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id))", "before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) 
embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID:", "was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def member_join(member:", "was a one time use invite.\" \" You may be able to determine", "value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not", "{}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format", "int = -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] = None, cached: bool", "is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is", "channel_id, before_msg: str, after_msg: str, message_id: str, guild_id) -> discord.Embed: before_msg = before_msg", "pk_system_owner: Optional[discord.Member] = None, cached: bool = True) -> discord.Embed: # If the", "not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\",", "message[:1000] msg2 = message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id,", "not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\":", "value. 
message_content = \"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\",", "format other than WebP for image to display on iOS. (I think this", "be able to determine the inviter by using the Audit Log.\\n\" \"Additionally, you", "Permissions** needed for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\",", "# Need to use format other than WebP for image to display on", "gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str) -> (str, str): # TODO: Make", "0: account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}** days old\" else: account_age_name =", "None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text", "‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to", "embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name), inline=False) if audit_log is not None:", "value=\"{} has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def", "ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str = \"The command has timed out.\",", "value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True)", "webhook_info is None: webhook_info = CachedMessage(None, None, None, None, None, None, None, None,", "info onto it's own line. 
if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True)", "return embed else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed =", "member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other than", "value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is", "value=after_msg1, inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else:", "embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before: discord.User, after:", "ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None: log.info(\"Author is None\") # We", "def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name,", "\"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def", "inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before: discord.User,", "none, create dummy object to make if's neater if webhook_info is None: webhook_info", "discriminator did not changed_txt = \"Username\" elif before.name == after.name and before.discriminator !=", "embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_avatar_update(before: 
discord.User, after: discord.User, embed_image_filename: str) ->", "audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason", "{}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int =", "value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id,", "embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: # embed.add_field(name=\"Need `View Audit Log` Permissions to show", "# If the webhook_info is none, create dummy object to make if's neater", "embed.colour = 0xa50000 embed.title = message.content guild_id = message.guild.id if message.guild else \"DM", "= \"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False)", "value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not None:", "description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author", "elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite", "author is None: log.info(\"Author is None\") # We have NO info on the", "color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", 
icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other than", "{}\".format(after.id)) return embed def user_name_update(before: discord.User, after: discord.User) -> discord.Embed: if before.name !=", "%p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for", "message_id=message_id), inline=False) if len(before_msg) > 1024 or len(after_msg) > 1024: # To simplify", "0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if", "elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text =", "%p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id))", "{author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\",", "gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str) -> (str, str): #", "embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is", "<reponame>ayman2598/GabbyGums<filename>src/embeds.py \"\"\" \"\"\" import discord from discord.ext import commands from datetime import datetime", "embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) 
embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\")", "ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name), inline=False) account_age =", "Need to use format other than WebP for image to display on iOS.", "3600) // 60 if hours > 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account", "dummy object to make if's neater if webhook_info is None: webhook_info = CachedMessage(None,", "logging.getLogger(__name__) def split_message(message: str) -> (str, str): # TODO: Make better msg1 =", "inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator),", "force the PK info onto it's own line. if \"name\" in pk_info: embed.add_field(name=\"System", "discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to", "other than WebP for image to display on iOS. 
(I think this is", "str, message_id: str, guild_id) -> discord.Embed: before_msg = before_msg if before_msg else \"Message", "= split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip())", "str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False) if audit_log is", "ID: {}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed =", "embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S", "embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed", "changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id))", "None: s = '\\u205f' # Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}**", "inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not None: embed.set_footer(text=f\"User ID: {author.id}\")", "value=\"\\N{Zero Width Space}\", inline=True) # Add a blank embed to force the PK", "ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None: log.info(\"Author is", 
"Audit Log` Permissions to show more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID:", "message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> -", "bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name), inline=False)", "this is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was", "space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed:", "None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if", "return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> -", "😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) ->", "def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}>", "value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is", "\" You may be able to determine the inviter by using the Audit", "None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id, 
message_id) def unknown_deleted_message(channel_id, message_id)", "f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID:", "3600 minutes = (account_age.seconds % 3600) // 60 if hours > 0: account_age_value", "\"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) > 1024 or len(after_msg)", "\"Username\" elif before.name == after.name and before.discriminator != after.discriminator: # Discrim changed, Name", "if one is greater split both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 =", "(account_age.seconds % 3600) // 60 if hours > 0: account_age_value = f\"\\N{WARNING SIGN}", "SIGN}\" else: seconds = account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account", "audit_log is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason", "<#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> -", "message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name,", "value=\"A message not in the cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\",", "Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\")", "to show more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: 
{}\".format(member.id)) return embed", "- {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use", "value=\"{} was kicked from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Kicked", "to determine invite information. It's possible the invite was a one time use", "greatly improve the reliability of invite tracking by giving Gabby Gums the **Manage", "-> discord.Embed: before_msg = before_msg if before_msg else \"Message not in the cache.\"", "field value. message_content = \"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content)", "embed.add_field(name=\"Need `View Audit Log` Permissions to show more information\", # value=\"\\N{zero width space}\")", "was unbanned from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned By:\",", "= discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need", "left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member,", "discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name !=", "is not None: log.info(\"Webhook Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author =", "inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) 
embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_avatar_update(before: discord.User,", "Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if", "# Compute the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() -", "Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other than WebP for image", "log.info(\"pk_id_msg set\") if author is None: log.info(\"Author is None\") # We have NO", "- {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old", "message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str, after_msg:", "color=color) return embed def exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour =", "1024 or len(after_msg) > 1024: # To simplify things, if one is greater", "# value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log:", "was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_ban(member: discord.Member,", "before_msg: str, after_msg: str, message_id: str, guild_id) -> discord.Embed: before_msg = before_msg if", "(I think this is a recent discord bug.) 
ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\",", "after.discriminator: # Name changed, discriminator did not changed_txt = \"Username\" elif before.name ==", "member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other", "gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str) -> (str, str): # TODO:", "len(after_msg) > 1024: # To simplify things, if one is greater split both", "not None or webhook_info.system_pkid is not None: s = '\\u205f' # Medium Mathematical", "Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str = \"The command", "> 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False)", "Both changed changed_txt = \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed", "Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2,", "able to determine the inviter by using the Audit Log.\\n\" \"Additionally, you can", "changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\",", "if webhook_info is None: webhook_info = CachedMessage(None, None, None, None, None, None, None,", "message. 
if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is NOT None\") description_text =", "discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff,", "Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other than WebP for image to", "or len(after_msg) > 1024: # To simplify things, if one is greater split", "!= after.discriminator: # Discrim changed, Name did not changed_txt = \"Discriminator\" else: #", "embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ')", "value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": # Make sure we don't", "= f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message", "value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine", "= discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\")", "account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}** hours and **{minutes}** minutes", "before_msg = before_msg if before_msg else \"Message not in the cache.\" embed =", "from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str) ->", "Continued:\", value=after_msg2, inline=False) else: 
embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg,", "After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], author:", "embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other than WebP for image", "discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for command time outs\"\"\"", "embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason", "was edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg)", "account_age_value = f\"**{account_age.days}** days old\" else: account_age_name = \"**New Account!**\" hours = account_age.seconds", "embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other than WebP for image", "= discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname", "out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for command", "end up throwing an error due to an empty field value. message_content =", "recent discord bug.) 
ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server", "embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str = \"The", "import Optional, Dict, Union from db import StoredInvite, CachedMessage import logging from utils.moreColors", "None, None, None, None) if cached: pk_id_msg = \"\" if webhook_info.member_pkid is not", "discord.Embed: \"\"\"Returns an embed formatted for command time outs\"\"\" embed = discord.Embed(title=\"Command Timed", "their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name,", "value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) ->", "ID: {}\".format(member.id)) return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed =", "Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the cache", "value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not None: embed.set_footer(text=f\"User ID:", "embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name), inline=False) if audit_log is not", "discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name, 
after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow())", "after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick,", "was canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for", "inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero", "a recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from", "webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] = None, cached: bool = True) ->", "value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y,", "To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) > 1024 or len(after_msg) >", "value=\"**Manage Server Permissions** needed for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\",", "icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other than WebP for image to display", "not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: 
embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b", "audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\"", "Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id))", "on iOS. (I think this is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\")", "nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New", "audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\",", "and **{minutes}** minutes old! 
\\N{WARNING SIGN}\" else: seconds = account_age.seconds % 60 account_age_value", "= discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need", "Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return", "= f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author =", "embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at if", "old\" else: account_age_name = \"**New Account!**\" hours = account_age.seconds // 3600 minutes =", "member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other", "timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other than WebP", "account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}** minutes and **{seconds}** seconds", "You may be able to determine the inviter by using the Audit Log.\\n\"", "timed out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for", "# Name changed, discriminator did not changed_txt = \"Username\" elif before.name == after.name", "Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, 
member.discriminator),", "server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name,", "# Add a blank embed to force the PK info onto it's own", "= discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for canceled commands\"\"\" embed =", "\"\"\"Returns an embed formatted for command time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\",", "invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id))", "discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for command time outs\"\"\" embed =", "def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str, after_msg: str, message_id: str, guild_id)", "embed def command_timed_out_embed(message: str = \"The command has timed out.\", color: discord.Color =", "time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return embed def", "for command time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return", "= \"\" if webhook_info.member_pkid is not None or webhook_info.system_pkid is not None: s", "invite.\" \" You may be able to determine the inviter by using the", "color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other than WebP", "msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str, after_msg: str, message_id:", "discord.Embed: # If the webhook_info is none, create dummy object to make if's", "-1, webhook_info: Optional[CachedMessage] = None, 
pk_system_owner: Optional[discord.Member] = None, cached: bool = True)", "inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message", "By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else", "bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False)", "f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}** minutes and **{seconds}** seconds old! \\N{WARNING", "invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name,", "= f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6,", "author: Optional[discord.Member], channel_id: int, message_id: int = -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner:", "from typing import Optional, Dict, Union from db import StoredInvite, CachedMessage import logging", "return embed def command_canceled_embed(message: str = \"The command was canceled.\", color: discord.Color =", "if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before", "message_content == \"\": # Make sure we don't end up throwing an error", "Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information. 
It's possible the invite", "color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other than WebP", "discord.Embed: before_msg = before_msg if before_msg else \"Message not in the cache.\" embed", "str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name), inline=False) if audit_log is", "msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str, after_msg: str, message_id: str,", "# embed.add_field(name=\"Need `View Audit Log` Permissions to show more information\", # value=\"\\N{zero width", "value=message_content, inline=False) if author is not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else:", "invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses))", "in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}>", "-> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\",", "`View Audit Log` Permissions to show more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User", "inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After", "given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: 
# embed.add_field(name=\"Need `View Audit Log` Permissions to", "= info_author = \"Uncached User\" elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author", "member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator),", "from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> -", "= (account_age.seconds % 3600) // 60 if hours > 0: account_age_value = f\"\\N{WARNING", "is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason =", "changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick,", "embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) ->", "command has timed out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed", "is not None: s = '\\u205f' # Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem", "embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed", "Account is only **{minutes}** minutes and **{seconds}** seconds old! 
\\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value,", "User\" elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text", "-> discord.Embed: # If the webhook_info is none, create dummy object to make", "in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) > 1024", "We have NO info on the author of the message. if webhook_info.webhook_author_name is", "return embed def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed = discord.Embed( description=\"<@{}>", "if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> - {}#{}\".format(", "= account_age.seconds // 3600 minutes = (account_age.seconds % 3600) // 60 if hours", "if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\",", "split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if", "- pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is not", "Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True)", "Out!\", description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message: str = \"The command was", "if audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - 
{}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False)", "than WebP for image to display on iOS. (I think this is a", "= discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\",", "f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\"", "Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_avatar_update(before: discord.User, after: discord.User,", "\"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\"", "can greatly improve the reliability of invite tracking by giving Gabby Gums the", "else: # Both changed changed_txt = \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> -", "invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator))", "one is greater split both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg)", "bug.) 
ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name), inline=False) account_age", "embed def user_name_update(before: discord.User, after: discord.User) -> discord.Embed: if before.name != after.name and", "after.name and before.discriminator != after.discriminator: # Discrim changed, Name did not changed_txt =", "- {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else:", "on iOS. (I think this is a recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\"))", "60 if hours > 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only", "%Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created", "webhook_info is none, create dummy object to make if's neater if webhook_info is", "the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at if account_age.days > 0: account_age_name", "the cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed", "Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other than WebP for image to", "not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\"", "embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner", 
"command_timed_out_embed(message: str = \"The command has timed out.\", color: discord.Color = discord.Color.dark_orange()) ->", "{s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None: log.info(\"Author is None\") # We have", "value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) #", "not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\"", "def command_canceled_embed(message: str = \"The command was canceled.\", color: discord.Color = discord.Color.dark_orange()) ->", "this is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has", "timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the cache was deleted in <#{}>\".format(channel_id),", "if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: #", "discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\")", "bool = True) -> discord.Embed: # If the webhook_info is none, create dummy", "Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\",", "- {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( 
url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>,", "None, None, None, None, None) if cached: pk_id_msg = \"\" if webhook_info.member_pkid is", "ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name), inline=False) if", "datetime from typing import Optional, Dict, Union from db import StoredInvite, CachedMessage import", "= -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] = None, cached: bool =", "{message}\", color=color) return embed def exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour", "think this is a recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}**", "\"Account Age\" account_age_value = f\"**{account_age.days}** days old\" else: account_age_name = \"**New Account!**\" hours", "{}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\",", "Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message", "\"Uncached User\" elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else:", "embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed def exception_w_message(message: discord.Message) ->", "Edit:\", value=after_msg1, inline=False) if 
len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False)", "inline=True) # Compute the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow()", "after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return", "Make sure we don't end up throwing an error due to an empty", "only **{minutes}** minutes and **{seconds}** seconds old! \\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current", "after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator != after.discriminator:", "None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": #", "discord bug.) 
ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name),", "0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True)", "timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted in <#{}>\".format(info_author, channel_id), inline=False)", "information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_nick_update(before: discord.Member,", "embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}>", "embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str = \"The command has timed", "inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the account age pk_created_date = datetime.strptime(pk_info['created'],", "not in the cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False)", "value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message", "value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit", "msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, 
inline=False) else: embed.add_field(name=\"Message:\",", "Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def", "pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}**", "message by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id),", "Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) >", "member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\",", "= 0xa50000 embed.title = message.content guild_id = message.guild.id if message.guild else \"DM Message\"", "bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name), inline=False)", "information. 
It's possible the invite was a one time use invite.\" \" You", "pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content", "& Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt}", "None\") # We have NO info on the author of the message. if", "not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id,", "import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message:", "None\") description_text = info_author = \"Uncached User\" elif author.discriminator == \"0000\": description_text =", "color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other than", "the reliability of invite tracking by giving Gabby Gums the **Manage Channels** permission.\",", "inviter by using the Audit Log.\\n\" \"Additionally, you can greatly improve the reliability", "message not in the cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id,", "if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if", "changed, discriminator did not changed_txt = \"Username\" elif before.name == after.name and before.discriminator", "invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if 
invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses))", "= message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}>", "message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6,", "Author is None\") description_text = info_author = \"Uncached User\" elif author.discriminator == \"0000\":", "discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name),", "description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted in <#{}>\".format(info_author,", "\"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message", "Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int = -1, webhook_info: Optional[CachedMessage] = None,", "deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def member_join(member: discord.Member,", "discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command", "author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}>", "Timed Out!\", description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message: str = \"The command", "to use format other than WebP 
for image to display on iOS. (I", "f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text = info_author = \"Uncached User\" elif", "-> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member", "if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\"", "pk_id_msg = \"\" if webhook_info.member_pkid is not None or webhook_info.system_pkid is not None:", "Account is only **{hours}** hours and **{minutes}** minutes old! \\N{WARNING SIGN}\" else: seconds", "embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> -", "author_discrim, channel_id, before_msg: str, after_msg: str, message_id: str, guild_id) -> discord.Embed: before_msg =", "minutes = (account_age.seconds % 3600) // 60 if hours > 0: account_age_value =", "value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before: discord.User, after: discord.User) ->", "True) -> discord.Embed: # If the webhook_info is none, create dummy object to", "this is a recent discord bug.) 
ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was", "= message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str,", "None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero", "Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite", "log = logging.getLogger(__name__) def split_message(message: str) -> (str, str): # TODO: Make better", "Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed", "their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True)", "value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\"))", "discord from discord.ext import commands from datetime import datetime from typing import Optional,", "return msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str, after_msg: str,", "invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if", "if author is 
None: log.info(\"Author is None\") # We have NO info on", "\"Message not in the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name,", "is only **{minutes}** minutes and **{seconds}** seconds old! \\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True)", "if's neater if webhook_info is None: webhook_info = CachedMessage(None, None, None, None, None,", "Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other than WebP for image to", "len(before_msg) > 1024 or len(after_msg) > 1024: # To simplify things, if one", "Log` Permissions to show more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id))", "= discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in", "str = \"The command has timed out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed:", "<#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) > 1024 or", "info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A", "= discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") #", "timestamp=datetime.utcnow()) embed.set_thumbnail( 
url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go", "member.created_at if account_age.days > 0: account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}** days", "int, message_id: int = -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] = None,", "- {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to", "description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text,", "member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name), inline=False) if audit_log is", "timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other than WebP for", "inline=True) # Add a blank embed to force the PK info onto it's", "the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id,", "embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is not None: embed.add_field(name=\"", "SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is", "if invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if 
invite.created_ts is not None:", "iOS. (I think this is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url)", "account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}** minutes", "before.discriminator != after.discriminator: # Discrim changed, Name did not changed_txt = \"Discriminator\" else:", "embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed =", "we don't end up throwing an error due to an empty field value.", "value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p", "**{seconds}** seconds old! \\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count),", "= str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False) if audit_log", "inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str)", "{}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}>", "discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(),", "is none, create dummy object to make if's neater if webhook_info is None:", "Account Age\", 
value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is not None: embed.add_field(name=\" ‌‌‌\",", "split both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\",", "import StoredInvite, CachedMessage import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log =", "CachedMessage(None, None, None, None, None, None, None, None, None, None) if cached: pk_id_msg", "account_age.days > 0: account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}** days old\" else:", "log.info(\"Author is None\") # We have NO info on the author of the", "embed formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return", "info_author = \"Uncached User\" elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author =", "embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message", "# else: # embed.add_field(name=\"Need `View Audit Log` Permissions to show more information\", #", "if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id))", "None, None) if cached: pk_id_msg = \"\" if webhook_info.member_pkid is not None or", "(I think this is a recent discord bug.) 
ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\",", "> 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False)", "if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) #", "is a recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned", "**{hours}** hours and **{minutes}** minutes old! \\N{WARNING SIGN}\" else: seconds = account_age.seconds %", "Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member", "line. 
if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True)", "# Both changed changed_txt = \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator}", "embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\")", "changed changed_txt = \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their", "embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return", "you can greatly improve the reliability of invite tracking by giving Gabby Gums", "the invite was a one time use invite.\" \" You may be able", "timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str =", "= \"Account Age\" account_age_value = f\"**{account_age.days}** days old\" else: account_age_name = \"**New Account!**\"", "icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other than WebP for image to display", "inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User", "is None: webhook_info = CachedMessage(None, None, None, None, None, None, None, None, None,", "ID: {}\".format(after.id)) return embed def user_name_update(before: 
discord.User, after: discord.User) -> discord.Embed: if before.name", "**Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member) ->", "Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other than WebP for image to", "discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(),", "on the author of the message. if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author", "invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not", "inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member) -> discord.Embed: embed =", "value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def", "continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not None: embed.set_footer(text=f\"User", "= f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}** hours and **{minutes}** minutes old!", "server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at if account_age.days > 0: account_age_name =", "== after.name and before.discriminator != after.discriminator: # Discrim changed, Name did not changed_txt", "after.name and before.discriminator == after.discriminator: # Name changed, discriminator did not changed_txt =", "commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed def 
exception_w_message(message: discord.Message)", "None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b", "embed def member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator),", "value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information. It's possible", "embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id,", "both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1,", "not changed_txt = \"Discriminator\" else: # Both changed changed_txt = \"Username & Discriminator\"", "color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the cache was deleted in", "the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log:", "value=after.name, inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator,", "unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) 
embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\")", "Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) > 1024 or len(after_msg) > 1024:", "the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member)", "f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by", "Server Permissions** needed for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False)", "-> discord.Embed: if before.name != after.name and before.discriminator == after.discriminator: # Name changed,", "invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: embed.add_field(name=\"Created", "if pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) #", "canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed def exception_w_message(message:", "\"\": # Make sure we don't end up throwing an error due to", "discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need", "embed def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed: embed = 
discord.Embed(description=\"<@{}>", "not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite", "inline=False) return embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed:", "{}\".format(member.id)) return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}>", "if account_age.days > 0: account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}** days old\"", "embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable", "str = \"The command was canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns", "datetime import datetime from typing import Optional, Dict, Union from db import StoredInvite,", "channel_id), inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\",", "embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the account age", "after: discord.Member) -> discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id,", "the webhook_info is none, create dummy object to make if's neater if webhook_info", "# embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add a blank embed", "have NO info on the author of the message. 
if webhook_info.webhook_author_name is not", "determine invite information. It's possible the invite was a one time use invite.\"", "> 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}** hours and", "timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User", "things, if one is greater split both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2", "-> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left", "invite was a one time use invite.\" \" You may be able to", "space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed:", "value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator,", "None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is", "discord.Member) -> discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name,", "Compute the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date", "value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], 
author: Optional[discord.Member], channel_id:", "To simplify things, if one is greater split both before_msg1, before_msg2 = split_message(before_msg)", "= split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before", "discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id,", "<#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def member_join(member: discord.Member, invite: Optional[StoredInvite],", "\\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None: log.info(\"Author is None\") #", "was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: # embed.add_field(name=\"Need `View Audit Log` Permissions", "author is not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id, message_id)", "'%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True)", "utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str) -> (str,", "= message.content guild_id = message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel:", "server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry])", "Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] = None, cached: bool = True) -> discord.Embed:", "discord.Member, after: discord.Member) -> 
discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{} changed their", "up throwing an error due to an empty field value. message_content = \"None\"", "is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created", "in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the", "has timed out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted", "else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts", "{}, Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name, message.author.discriminator, message.author.id, guild_id, message.channel.id)) return", "inline=False) if message_content == \"\": # Make sure we don't end up throwing", "return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> -", "edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) >", "the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id,", "inline=False) if author is not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return", "after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before 
Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message", "None, pk_system_owner: Optional[discord.Member] = None, cached: bool = True) -> discord.Embed: # If", "Make better msg1 = message[:1000] msg2 = message[1000:] return msg1, msg2 def edited_message_embed(author_id,", "embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions", "tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else:", "Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False)", "value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else \"No", "msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content,", "Gums the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member:", "def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name,", "after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0:", "avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") 
embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed", "embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id", "f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text = info_author =", "the inviter by using the Audit Log.\\n\" \"Additionally, you can greatly improve the", "**{minutes}** minutes and **{seconds}** seconds old! \\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member", "is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason =", "embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str],", "color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for canceled commands\"\"\"", "value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage", "has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member:", "cached: pk_id_msg = \"\" if webhook_info.member_pkid is not None or webhook_info.system_pkid is not", "did not changed_txt = \"Username\" elif before.name == after.name and before.discriminator != after.discriminator:", "embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator != 
after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New", "from datetime import datetime from typing import Optional, Dict, Union from db import", "channel_id: int, message_id: int = -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] =", "len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\",", "user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}", "= \"**New Account!**\" hours = account_age.seconds // 3600 minutes = (account_age.seconds % 3600)", "value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\",", "msg1 = message[:1000] msg2 = message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name: str,", "member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other", "embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not None:", "is None: log.info(\"Author is None\") # We have NO info on the author", "invite.created_ts is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else:", "the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id,", "cached: bool = True) -> discord.Embed: # If the webhook_info is none, create", "\"Additionally, you can greatly improve the reliability of 
invite tracking by giving Gabby", "member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator),", "= logging.getLogger(__name__) def split_message(message: str) -> (str, str): # TODO: Make better msg1", "time use invite.\" \" You may be able to determine the inviter by", "has joined the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at if account_age.days >", "member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id))", "after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message:", "db import StoredInvite, CachedMessage import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log", "due to an empty field value. 
message_content = \"None\" if len(message_content) > 1024:", "is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created", "Log.\\n\" \"Additionally, you can greatly improve the reliability of invite tracking by giving", "def command_timed_out_embed(message: str = \"The command has timed out.\", color: discord.Color = discord.Color.dark_orange())", "kicked from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}>", "if before.name != after.name and before.discriminator == after.discriminator: # Name changed, discriminator did", "{}\".format(member.id)) return embed def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed = discord.Embed(", "embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int = -1, webhook_info:", "# Make sure we don't end up throwing an error due to an", "is a recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned", "invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\")", "discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to", "empty field value. message_content = \"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2 =", "invite information. 
It's possible the invite was a one time use invite.\" \"", "member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{} changed", "<@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if", "Name changed, discriminator did not changed_txt = \"Username\" elif before.name == after.name and", "Permissions to show more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return", "unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\",", "= \"Discriminator\" else: # Both changed changed_txt = \"Username & Discriminator\" embed =", "timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other than WebP for", "**Warning!** Account is only **{minutes}** minutes and **{seconds}** seconds old! 
\\N{WARNING SIGN}\" embed.add_field(name=account_age_name,", "embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") #", "Optional, Dict, Union from db import StoredInvite, CachedMessage import logging from utils.moreColors import", "After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2,", "None or webhook_info.system_pkid is not None: s = '\\u205f' # Medium Mathematical Space", "Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member],", "outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message:", "str, after_msg: str, message_id: str, guild_id) -> discord.Embed: before_msg = before_msg if before_msg", "member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow())", "embed_image_filename: str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name,", "Width Space}\", inline=True) # Add a blank embed to force the PK info", "split_message(message: str) -> (str, str): # TODO: Make better msg1 = message[:1000] msg2", "description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by", "Edit Continued:\", value=before_msg2, inline=False) 
embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) > 0:", "audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow())", "error due to an empty field value. message_content = \"None\" if len(message_content) >", "embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To", "inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed:", "before_msg if before_msg else \"Message not in the cache.\" embed = discord.Embed(title=\"Edited Message\",", "Union from db import StoredInvite, CachedMessage import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green,", "in the cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return", "don't end up throwing an error due to an empty field value. 
message_content", "Dict, Union from db import StoredInvite, CachedMessage import logging from utils.moreColors import gabby_gums_dark_green,", "was kicked from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Kicked By:\",", "Space}\", inline=True) # Add a blank embed to force the PK info onto", "import discord from discord.ext import commands from datetime import datetime from typing import", "None, None, None) if cached: pk_id_msg = \"\" if webhook_info.member_pkid is not None", "an embed formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color)", "discord.Embed: embed = discord.Embed() embed.colour = 0xa50000 embed.title = message.content guild_id = message.guild.id", "Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_ban(member:", "before.name == after.name and before.discriminator != after.discriminator: # Discrim changed, Name did not", "= member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User ID:", "embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed", "to make if's neater if webhook_info is None: webhook_info = CachedMessage(None, None, None,", "is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if", "%d, %Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None:", "if audit_log 
is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False)", "else: log.info(\"Webhook Author is None\") description_text = info_author = \"Uncached User\" elif author.discriminator", "{pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": # Make sure we don't end up", "icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other than WebP for image to display", "None: log.info(\"Author is None\") # We have NO info on the author of", "a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the", "\"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: # embed.add_field(name=\"Need `View Audit", "Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member) -> discord.Embed:", "discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed", "else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for invite tracking.\")", "return embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int = -1,", "blank embed to force the PK info onto it's own line. 
if \"name\"", "if len(before_msg) > 1024 or len(after_msg) > 1024: # To simplify things, if", "-> discord.Embed: \"\"\"Returns an embed formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\",", "discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message", "the message. if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is NOT None\") description_text", "embed = discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow())", "// 60 if hours > 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is", "possible the invite was a one time use invite.\" \" You may be", "embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed", "value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not", "Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author", "value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the account age pk_created_date =", "Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def", "= f\"**{account_age.days}** days old\" else: account_age_name = \"**New Account!**\" hours = account_age.seconds //", "after.name, after.discriminator), color=0x00ffff, 
timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\",", "\"The command has timed out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an", "import commands from datetime import datetime from typing import Optional, Dict, Union from", "audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow())", "account_age_name = \"**New Account!**\" hours = account_age.seconds // 3600 minutes = (account_age.seconds %", "embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information. It's possible the invite was a", "discord.Embed() embed.colour = 0xa50000 embed.title = message.content guild_id = message.guild.id if message.guild else", "embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before: discord.User, after: discord.User) -> discord.Embed: if", "msg2 = message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg:", "hours and **{minutes}** minutes old! 
\\N{WARNING SIGN}\" else: seconds = account_age.seconds % 60", "inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info:", "None: log.info(\"Webhook Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else:", "else \"Message not in the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id,", "improve the reliability of invite tracking by giving Gabby Gums the **Manage Channels**", "CachedMessage import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def", "embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": # Make", "a blank embed to force the PK info onto it's own line. 
if", "message_id: int = -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member] = None, cached:", "joined the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at if account_age.days > 0:", "days old\", inline=True) if invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False)", "% 3600) // 60 if hours > 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!**", "\"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name, message.author.discriminator, message.author.id,", "None, None, None, None, None, None) if cached: pk_id_msg = \"\" if webhook_info.member_pkid", "None, None, None, None, None, None, None) if cached: pk_id_msg = \"\" if", "1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else:", "Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\",", "Age\" account_age_value = f\"**{account_age.days}** days old\" else: account_age_name = \"**New Account!**\" hours =", "embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other than WebP for", "embed = discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar", "== \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}> -", "not None: s = '\\u205f' # Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID:", "color: discord.Color = 
discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for command time", "inline=False) account_age = datetime.utcnow() - member.created_at if account_age.days > 0: account_age_name = \"Account", "value=reason, inline=False) # else: # embed.add_field(name=\"Need `View Audit Log` Permissions to show more", "value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After", "audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return", "simplify things, if one is greater split both before_msg1, before_msg2 = split_message(before_msg) after_msg1,", "-> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa,", "User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the cache was deleted", "f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None: log.info(\"Author", "!= after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator !=", "ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed:", "-> (str, str): # TODO: Make better msg1 = message[:1000] msg2 = message[1000:]", "account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account", "Gabby Gums the **Manage 
Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def", "embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int, message_id:", "recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the", "!= after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id))", "an error due to an empty field value. message_content = \"None\" if len(message_content)", "pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is not None:", "f\"**{account_age.days}** days old\" else: account_age_name = \"**New Account!**\" hours = account_age.seconds // 3600", "else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID:", "minutes and **{seconds}** seconds old! \\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\",", "ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> -", "embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted in <#{}>\".format(info_author, channel_id), inline=False) if", "to force the PK info onto it's own line. 
if \"name\" in pk_info:", "if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User", "width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_nick_update(before: discord.Member, after: discord.Member) ->", "embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") #", "not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\",", "- {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use", "{}#{} changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID:", "return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown", "ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name), inline=False) embed.set_footer(text=\"User", "embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name), inline=False) 
if audit_log is not", "description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message: str = \"The command was canceled.\",", "NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\")", "Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author", "is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason =", "Account!**\" hours = account_age.seconds // 3600 minutes = (account_age.seconds % 3600) // 60", "return embed def exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour = 0xa50000", "discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the", "= CachedMessage(None, None, None, None, None, None, None, None, None, None) if cached:", "inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before: discord.User, after: discord.User) -> discord.Embed:", "= member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name), inline=False) if audit_log", "{}\".format(after.id)) return embed def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed: embed", "discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) 
embed.set_author(name=\"Nickname Changed\")", "embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason", "message_content = \"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1,", "None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if", "color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str", "timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other than WebP", "embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add a blank embed to", "for image to display on iOS. 
(I think this is a recent discord", "{}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was", "Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": # Make sure we", "% 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}** minutes and", "if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None: embed.add_field(name=\"Uses\",", "Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member", "= None, pk_system_owner: Optional[discord.Member] = None, cached: bool = True) -> discord.Embed: #", "the author of the message. if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is", "deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\",", "By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S", "PK info onto it's own line. if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'],", "embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False) if audit_log is not None:", "if webhook_info.member_pkid is not None or webhook_info.system_pkid is not None: s = '\\u205f'", "log.info(\"Webhook Author is None\") description_text = info_author = \"Uncached User\" elif author.discriminator ==", "an empty field value. 
message_content = \"None\" if len(message_content) > 1024: msg_cont_1, msg_cont_2", "set\") if author is None: log.info(\"Author is None\") # We have NO info", "= '\\u205f' # Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID:", "def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{}", "def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> -", "audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason", "webhook_info.system_pkid is not None: s = '\\u205f' # Medium Mathematical Space pk_id_msg =", "embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict],", "> 0: account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}** days old\" else: account_age_name", "to an empty field value. 
message_content = \"None\" if len(message_content) > 1024: msg_cont_1,", "may be able to determine the inviter by using the Audit Log.\\n\" \"Additionally,", "StoredInvite, CachedMessage import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__)", "= \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(),", "logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str)", "datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if invite is", "discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(),", "= datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if invite", "of invite tracking by giving Gabby Gums the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User", "message_id: str, guild_id) -> discord.Embed: before_msg = before_msg if before_msg else \"Message not", "Audit Log.\\n\" \"Additionally, you can greatly improve the reliability of invite tracking by", "Warning!\", value=\"**Manage Server Permissions** needed for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite", "message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\",", "their avatar.\".format(after.id, after.name, 
after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return", "ID: {}\".format(after.id)) return embed def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed:", "embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author", "discord.User, embed_image_filename: str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id,", "greater split both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before", "discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the server 😭.\".format(member.display_name),", "days old\" else: account_age_name = \"**New Account!**\" hours = account_age.seconds // 3600 minutes", "color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use format other than WebP", "a one time use invite.\" \" You may be able to determine the", "discord.User) -> discord.Embed: if before.name != after.name and before.discriminator == after.discriminator: # Name", "0xa50000 embed.title = message.content guild_id = message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server:", "discord bug.) 
ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name), inline=False)", "iOS. (I think this is a recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url)", "else: # embed.add_field(name=\"Need `View Audit Log` Permissions to show more information\", # value=\"\\N{zero", "value=\"**{}** was banned from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Banned", "not in the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim),", "typing import Optional, Dict, Union from db import StoredInvite, CachedMessage import logging from", "only **{hours}** hours and **{minutes}** minutes old! \\N{WARNING SIGN}\" else: seconds = account_age.seconds", "s = '\\u205f' # Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember", "command time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return embed", "else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed", "def member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125,", "embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name, message.author.discriminator, message.author.id, guild_id, message.channel.id))", "after: discord.User, embed_image_filename: str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed their", "= \"The command has 
timed out.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns", "\"\" if webhook_info.member_pkid is not None or webhook_info.system_pkid is not None: s =", "permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_leave(member: discord.Member) -> discord.Embed: embed", "if audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False)", "after_msg: str, message_id: str, guild_id) -> discord.Embed: before_msg = before_msg if before_msg else", "on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\",", "canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for canceled", "more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member:", "{after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old", "a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left the", "before_msg else \"Message not in the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> -", "- member.created_at if account_age.days > 0: account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}**", "onto it's own line. 
if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System", "(str, str): # TODO: Make better msg1 = message[:1000] msg2 = message[1000:] return", "> 1024 or len(after_msg) > 1024: # To simplify things, if one is", "'\\u205f' # Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\"", "value=\"A message by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id,", "embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\")", "%I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created By\",", "embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message: str", "-> discord.Embed: embed = discord.Embed() embed.colour = 0xa50000 embed.title = message.content guild_id =", "description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text =", "formatted for command time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color)", "bug.) 
ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name), inline=False)", "\\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info", "is greater split both before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message", "cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail(", "str): # TODO: Make better msg1 = message[:1000] msg2 = message[1000:] return msg1,", "color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted in <#{}>\".format(info_author, channel_id),", "command_canceled_embed(message: str = \"The command was canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed:", "After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After", "None, None, None, None, None, None, None, None, None) if cached: pk_id_msg =", "server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name,", "{s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is None: log.info(\"Author is None\")", "ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) 
embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False) if", "\"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute", "inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int,", "guild_id=guild_id, message_id=message_id), inline=False) if len(before_msg) > 1024 or len(after_msg) > 1024: # To", "audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: # embed.add_field(name=\"Need", "Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message", "and before.discriminator == after.discriminator: # Name changed, discriminator did not changed_txt = \"Username\"", "= \"Uncached User\" elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\" info_author = f\"**{author.name}**\"", "Username:\", value=after.name, inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\",", "embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not", "guild_id) -> discord.Embed: before_msg = before_msg if before_msg else \"Message not in the", "account_age_name = \"Account Age\" account_age_value = f\"**{account_age.days}** days old\" else: account_age_name = \"**New", "def 
exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour = 0xa50000 embed.title =", "str, guild_id) -> discord.Embed: before_msg = before_msg if before_msg else \"Message not in", "= f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text = info_author = \"Uncached User\"", "{}\".format(member.id)) return embed def member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id,", "icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other than WebP for image to display", "embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def command_timed_out_embed(message: str = \"The command has", "{message}\", color=color) return embed def command_canceled_embed(message: str = \"The command was canceled.\", color:", "inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2, inline=False) else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not", "Canceled**\", description=f\"❌ {message}\", color=color) return embed def exception_w_message(message: discord.Message) -> discord.Embed: embed =", "1024: # To simplify things, if one is greater split both before_msg1, before_msg2", "message.content guild_id = message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {},", "= \"Username\" elif before.name == after.name and before.discriminator != after.discriminator: # Discrim changed,", "Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite", "embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the 
server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() - member.created_at", "cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message ID:\", value=message_id, inline=False) return embed def", "recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name),", "giving Gabby Gums the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed", "Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow())", "\"**New Account!**\" hours = account_age.seconds // 3600 minutes = (account_age.seconds % 3600) //", "embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID:", "By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p", "discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was deleted", "{}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p 
UTC\")) else: embed.add_field(name=\"Uses\",", "discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member", "= discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for command time outs\"\"\" embed", "from discord.ext import commands from datetime import datetime from typing import Optional, Dict,", "embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old", "pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the account", "WebP for image to display on iOS. (I think this is a recent", "member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow() -", "is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not None:", "embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero Width", "color=color) return embed def command_canceled_embed(message: str = \"The command was canceled.\", color: discord.Color", "= discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name", "changed_txt = \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - 
{after.name}#{after.discriminator} changed their {changed_txt}.\",", "else: embed.add_field(name=\"Message:\", value=message_content, inline=False) if author is not None: embed.set_footer(text=f\"User ID: {author.id}\") return", "manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for invite tracking.\") elif member.bot: embed.add_field(name=\"", "not None: log.info(\"Webhook Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\"", "None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if", "Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: # embed.add_field(name=\"Need `View Audit Log`", "Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add", "Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format other than WebP for image", "after.discriminator: # Discrim changed, Name did not changed_txt = \"Discriminator\" else: # Both", "embed.add_field(name=\"Code\", value=\"Bot OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information. It's", "before.discriminator == after.discriminator: # Name changed, discriminator did not changed_txt = \"Username\" elif", "needed for invite tracking.\") elif member.bot: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) embed.add_field(name=\"Code\", value=\"Bot", "is not None or webhook_info.system_pkid is not None: s = '\\u205f' # Medium", "discord bug.) 
ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name),", "ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name), inline=False) if", "None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d,", "edited_message_embed(author_id, author_name: str, author_discrim, channel_id, before_msg: str, after_msg: str, message_id: str, guild_id) ->", "is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content ==", "not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for invite tracking.\") elif member.bot:", "author_name: str, author_discrim, channel_id, before_msg: str, after_msg: str, message_id: str, guild_id) -> discord.Embed:", "def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int = -1, webhook_info: Optional[CachedMessage]", "value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry])", "discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to", "and **{seconds}** seconds old! 
\\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}**", "seconds = account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only", "log.info(\"Webhook Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook", "or webhook_info.system_pkid is not None: s = '\\u205f' # Medium Mathematical Space pk_id_msg", "# value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_nick_update(before: discord.Member, after:", "= datetime.utcnow() - member.created_at if account_age.days > 0: account_age_name = \"Account Age\" account_age_value", "webhook_info.member_pkid is not None or webhook_info.system_pkid is not None: s = '\\u205f' #", "-> discord.Embed: \"\"\"Returns an embed formatted for command time outs\"\"\" embed = discord.Embed(title=\"Command", "# To simplify things, if one is greater split both before_msg1, before_msg2 =", "= member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined the server!!!\".format(member.display_name), inline=False) account_age = datetime.utcnow()", "Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\")", "f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}** hours and **{minutes}** minutes old! 
\\N{WARNING", "embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id,", "neater if webhook_info is None: webhook_info = CachedMessage(None, None, None, None, None, None,", "message by {}, was deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not", "recent discord bug.) ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was unbanned from the", "import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple log = logging.getLogger(__name__) def split_message(message: str) -> (str, str):", "embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if invite.created_ts is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y,", "embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return", "UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for invite", "if before_msg else \"Message not in the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}>", "= str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the server.\".format(member.display_name), inline=False) if audit_log", "inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason,", "else: 
embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information. It's possible the invite was", "embed else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted", "split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit", "if len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\",", "more information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_nick_update(before:", "# Discrim changed, Name did not changed_txt = \"Discriminator\" else: # Both changed", "Information**__\", inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id is not", "embed.title = message.content guild_id = message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {},", "discord.User, after: discord.User) -> discord.Embed: if before.name != after.name and before.discriminator == after.discriminator:", "value=\"**{}** was unbanned from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned", "return embed def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename: str) -> discord.Embed: embed =", "from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> -", "inline=False) if len(before_msg) > 1024 or len(after_msg) > 1024: # To simplify things,", "recent discord bug.) 
ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from the", "inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed:", "inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False)", "If the webhook_info is none, create dummy object to make if's neater if", "reason = f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False)", "= None, cached: bool = True) -> discord.Embed: # If the webhook_info is", "to display on iOS. (I think this is a recent discord bug.) ios_compatible_avatar_url", "{}\".format(after.id)) return embed def command_timed_out_embed(message: str = \"The command has timed out.\", color:", "inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add a blank", "= message[:1000] msg2 = message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name: str, author_discrim,", "hours > 0: account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{hours}** hours", "inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False)", "pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00,", "exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour = 0xa50000 embed.title = message.content", "value=after.discriminator, inline=True) 
embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_avatar_update(before: discord.User, after: discord.User, embed_image_filename:", "formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed", "= discord.Embed() embed.colour = 0xa50000 embed.title = message.content guild_id = message.guild.id if message.guild", "embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width", "for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed def", "is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has joined", "author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited in", "it's own line. 
if \"name\" in pk_info: embed.add_field(name=\"System Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\",", "= account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}**", "embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {},", "tracking by giving Gabby Gums the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id))", "discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour = 0xa50000 embed.title = message.content guild_id", "datetime.utcnow() - member.created_at if account_age.days > 0: account_age_name = \"Account Age\" account_age_value =", "= discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message by {}, was", "len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1,", "inline=False) if audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator),", "from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> -", "member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator),", "not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - 
{}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name, invite.actual_invite.inviter.discriminator)) embed.add_field(name=\"Created on\",", "Name did not changed_txt = \"Discriminator\" else: # Both changed changed_txt = \"Username", "%I:%M:%S %p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed", "= datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days", "if message_content == \"\": # Make sure we don't end up throwing an", "embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other than WebP for image", "- {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\", description=description_text, color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\")", "info_author = f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed", "a recent discord bug.) 
ios_compatible_avatar_url = str(member.avatar_url_as(static_format=\"png\")) embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"**{}** was banned from", "else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed = discord.Embed(title=\"Deleted Message\",", "Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True)", "== after.discriminator: # Name changed, discriminator did not changed_txt = \"Username\" elif before.name", "embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit Continued:\",", "inline=True) if before.discriminator != after.discriminator: embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True)", "guild_id = message.guild.id if message.guild else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender:", "= \"The command was canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an", "# Medium Mathematical Space pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg", "discord.Embed: \"\"\"Returns an embed formatted for canceled commands\"\"\" embed = discord.Embed(title=\"**Command Canceled**\", description=f\"❌", "NO info on the author of the message. 
if webhook_info.webhook_author_name is not None:", "determine the inviter by using the Audit Log.\\n\" \"Additionally, you can greatly improve", "discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed def exception_w_message(message: discord.Message) -> discord.Embed: embed", "pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\", inline=True) if", "commands from datetime import datetime from typing import Optional, Dict, Union from db", "{}, was deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked", "info_author = f\"**{webhook_info.webhook_author_name}**\" else: log.info(\"Webhook Author is None\") description_text = info_author = \"Uncached", "**{minutes}** minutes old! \\N{WARNING SIGN}\" else: seconds = account_age.seconds % 60 account_age_value =", "value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before:", "= discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") #", "member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format other", "is None\") # We have NO info on the author of the message.", "member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format other", 
"embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed", "elif before.name == after.name and before.discriminator != after.discriminator: # Discrim changed, Name did", "changed_txt = \"Discriminator\" else: # Both changed changed_txt = \"Username & Discriminator\" embed", "from db import StoredInvite, CachedMessage import logging from utils.moreColors import gabby_gums_dark_green, gabby_gums_light_green, gabby_gums_purple", "is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width", "Optional[discord.Member], channel_id: int, message_id: int = -1, webhook_info: Optional[CachedMessage] = None, pk_system_owner: Optional[discord.Member]", "- {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason else \"No Reason", "discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message: str = \"The", "hours = account_age.seconds // 3600 minutes = (account_age.seconds % 3600) // 60 if", "= discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\", color=color) return embed def command_canceled_embed(message: str =", "embed.add_field(name=\"Info:\", value=\"A message not in the cache was deleted in <#{}>\".format(channel_id), inline=False) embed.add_field(name=\"Message", "len(message_content) > 1024: msg_cont_1, msg_cont_2 = split_message(message_content) embed.add_field(name=\"Message:\", value=msg_cont_1, inline=False) embed.add_field(name=\"Message continued:\", value=msg_cont_2,", "None, cached: bool = True) -> discord.Embed: # If the webhook_info is none,", "inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\", 
value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None:", "by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id, channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False)", "make if's neater if webhook_info is None: webhook_info = CachedMessage(None, None, None, None,", "after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def", "user_name_update(before: discord.User, after: discord.User) -> discord.Embed: if before.name != after.name and before.discriminator ==", "description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not in the cache was", "else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else: # embed.add_field(name=\"Need `View", "SIGN} **Warning!** Account is only **{hours}** hours and **{minutes}** minutes old! 
\\N{WARNING SIGN}\"", "Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": # Make sure", "description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator), color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\")", "and before.discriminator != after.discriminator: # Discrim changed, Name did not changed_txt = \"Discriminator\"", "discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A", "def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow())", "def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name,", "if author is not None: embed.set_footer(text=f\"User ID: {author.id}\") return embed else: return unknown_deleted_message(channel_id,", "ID: {}\".format(member.id)) return embed def member_nick_update(before: discord.Member, after: discord.Member) -> discord.Embed: embed =", "ID:\", value=message_id, inline=False) return embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True)", "to determine the inviter by using the Audit Log.\\n\" \"Additionally, you can greatly", "60 account_age_value = f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}** minutes and **{seconds}**", "author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( 
url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited", "embed.add_field(name=\"Info:\", value=\"{} was kicked from the server.\".format(member.display_name), inline=False) if audit_log is not None:", "using the Audit Log.\\n\" \"Additionally, you can greatly improve the reliability of invite", "else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed = discord.Embed(title=\"Deleted Message\",", "embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator), inline=False) reason = f\"{audit_log.reason}\" if audit_log.reason", "{}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}>", "description_text = info_author = \"Uncached User\" elif author.discriminator == \"0000\": description_text = f\"{author.name}{pk_id_msg}\"", "else \"DM Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name, message.author.discriminator,", "member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> -", "is a recent discord bug.) 
ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} has left", "embed.add_field(name=\"Message Before Edit Continued:\", value=before_msg2, inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip())", "on\", value=invite.actual_invite.created_at.strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is", "discord.Embed: if before.name != after.name and before.discriminator == after.discriminator: # Name changed, discriminator", "manage_guild=True) -> discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New", "inline=False) if audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name, audit_log.user.discriminator),", "an embed formatted for command time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌", "\"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow())", "None, None, None, None, None, None, None, None) if cached: pk_id_msg = \"\"", "if pk_system_owner is not None: embed.add_field(name=\"Linked Discord Account:\", value=f\"<@{pk_system_owner.id}> - {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if", "color=0x00ffff, timestamp=datetime.utcnow()) embed.set_author(name=\"Nickname Changed\") embed.set_thumbnail(url=\"https://i.imgur.com/HtQ53lx.png\") embed.add_field(name=\"Old Nickname\", value=before.nick, inline=True) embed.add_field(name=\"New Nickname\", value=after.nick, inline=True)", "show more 
information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def", "Information**__\", value=\"Unable to determine invite information. It's possible the invite was a one", "value=\"A message by {}, was deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is", "\"\"\" \"\"\" import discord from discord.ext import commands from datetime import datetime from", "think this is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{}", "None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}>", "def user_name_update(before: discord.User, after: discord.User) -> discord.Embed: if before.name != after.name and before.discriminator", "changed_txt = \"Username\" elif before.name == after.name and before.discriminator != after.discriminator: # Discrim", "str) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name, after.discriminator),", "object to make if's neater if webhook_info is None: webhook_info = CachedMessage(None, None,", "if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions** needed for invite tracking.\") elif", "{}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Kicked\", icon_url=\"https://i.imgur.com/o96t3cV.png\") # Need to use format", "after: discord.User) -> discord.Embed: if before.name != after.name and before.discriminator == after.discriminator: #", "embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", 
color=0x9b59b6, timestamp=datetime.utcnow()) embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A message not", "unbanned from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Unbanned By:\", value=\"<@{}>", "image to display on iOS. (I think this is a recent discord bug.)", "changed, Name did not changed_txt = \"Discriminator\" else: # Both changed changed_txt =", "description=f\"❌ {message}\", color=color) return embed def exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed()", "the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow())", "use invite.\" \" You may be able to determine the inviter by using", "member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use format", "-> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\",", "discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed formatted for canceled commands\"\"\" embed", "embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True) if pk_info is not", "It's possible the invite was a one time use invite.\" \" You may", "{}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", 
icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use", "the PK info onto it's own line. if \"name\" in pk_info: embed.add_field(name=\"System Name\",", "if invite.created_ts is not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\"))", "embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_avatar_update(before: discord.User, after:", "by giving Gabby Gums the **Manage Channels** permission.\", inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return", "embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed", "embed to force the PK info onto it's own line. if \"name\" in", "f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) # else:", "use format other than WebP for image to display on iOS. 
(I think", "embed = discord.Embed() embed.colour = 0xa50000 embed.title = message.content guild_id = message.guild.id if", "!= after.name and before.discriminator == after.discriminator: # Name changed, discriminator did not changed_txt", "- {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name:", "ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content: Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int", "Name\", value=pk_info['name'], inline=True) embed.add_field(name=\"System ID\", value=pk_info['id'], inline=True) # Compute the account age pk_created_date", "UTC\")) else: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.uses)) if invite.inviter_id is not None: embed.add_field(name=\"Created By\", value=\"<@{}>\".format(invite.inviter_id)) if", "value=message_id, inline=False) return embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) ->", "None: webhook_info = CachedMessage(None, None, None, None, None, None, None, None, None, None)", "embed formatted for command time outs\"\"\" embed = discord.Embed(title=\"Command Timed Out!\", description=f\"❌ {message}\",", "- {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\") # Need to use", "Discrim changed, Name did not changed_txt = \"Discriminator\" else: # Both changed changed_txt", "str, author_discrim, channel_id, before_msg: str, after_msg: str, message_id: str, guild_id) -> discord.Embed: before_msg", "-> discord.Embed: embed = discord.Embed(title=\"Deleted Message\", description=\"Unknown User\", color=0x9b59b6, timestamp=datetime.utcnow()) 
embed.set_thumbnail(url=\"http://i.imgur.com/fJpAFgN.png\") embed.add_field(name=\"Info:\", value=\"A", "Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add a", "before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name, inline=True) if before.discriminator", "= discord.Embed(title=\"**Command Canceled**\", description=f\"❌ {message}\", color=color) return embed def exception_w_message(message: discord.Message) -> discord.Embed:", "old\", inline=True) if invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if", "is None\") description_text = info_author = \"Uncached User\" elif author.discriminator == \"0000\": description_text", "before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if", "> 1024: # To simplify things, if one is greater split both before_msg1,", "discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_red(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Banned\", icon_url=\"http://i.imgur.com/Imx0Znm.png\")", "Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add a blank embed to force the", "embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry])", "Optional[discord.Member] = None, cached: bool = True) -> discord.Embed: # If the webhook_info", "inline=True) if invite is not None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name", "deleted_message_embed(message_content: 
Optional[str], author: Optional[discord.Member], channel_id: int, message_id: int = -1, webhook_info: Optional[CachedMessage] =", "sure we don't end up throwing an error due to an empty field", "a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked from", "is only **{hours}** hours and **{minutes}** minutes old! \\N{WARNING SIGN}\" else: seconds =", "member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need to use format", "-> discord.Embed: embed = discord.Embed( description=\"<@{}> - {}#{} changed their nickname.\".format(after.id, after.name, after.discriminator),", "\"The command was canceled.\", color: discord.Color = discord.Color.dark_orange()) -> discord.Embed: \"\"\"Returns an embed", "= before_msg if before_msg else \"Message not in the cache.\" embed = discord.Embed(title=\"Edited", "- {}#{} changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\") embed.set_footer(text=\"User", "{}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format", "= f\"**{author.name}**\" else: description_text = f\"<@{author.id}> - {author.name}#{author.discriminator}\" info_author = f\"<@{author.id}>\" embed =", "inline=True) embed.add_field(name=\"Message After Edit:\", value=after_msg, inline=True) embed.set_footer(text=\"User ID: {}\".format(author_id)) return embed def deleted_message_embed(message_content:", 
"Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_orange(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member", "embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) > 0: embed.add_field(name=\"Message Before Edit Continued:\",", "is a recent discord bug.) ios_compatible_avatar_url = member.avatar_url_as(static_format=\"png\") embed.set_thumbnail(url=ios_compatible_avatar_url) embed.add_field(name=\"Info:\", value=\"{} was kicked", "Width Space}‌‌‌\", value=\"\\N{Zero Width Space}\", inline=True) # Add a blank embed to force", "did not changed_txt = \"Discriminator\" else: # Both changed changed_txt = \"Username &", "embed def exception_w_message(message: discord.Message) -> discord.Embed: embed = discord.Embed() embed.colour = 0xa50000 embed.title", "in the cache.\" embed = discord.Embed(title=\"Edited Message\", description=\"<@{}> - {}#{}\".format(author_id, author_name, author_discrim), color=0x61cd72,", "return embed def user_name_update(before: discord.User, after: discord.User) -> discord.Embed: if before.name != after.name", "- {pk_system_owner.name}#{pk_system_owner.discriminator}\", inline=False) if message_content == \"\": # Make sure we don't end", "value=pk_info['id'], inline=True) # Compute the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age =", "better msg1 = message[:1000] msg2 = message[1000:] return msg1, msg2 def edited_message_embed(author_id, author_name:", "ID: {}\".format(member.id)) return embed def member_kick(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed =", "was deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not None: embed.add_field(name=\"Linked Discord", "# TODO: Make better msg1 = message[:1000] 
msg2 = message[1000:] return msg1, msg2", "Message\" embed.set_footer(text=\"Server: {}, Channel: {}, Sender: <@{}> - {}#{}\".format( message.author.name, message.author.discriminator, message.author.id, guild_id,", "return embed def member_ban(member: discord.Member, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> -", "embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if", "throwing an error due to an empty field value. message_content = \"None\" if", "embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id,", "info on the author of the message. if webhook_info.webhook_author_name is not None: log.info(\"Webhook", "value=\"{}\".format(invite.invite_name)) if invite.invite_id is not None: embed.add_field(name=\"Code\", value=\"{}\".format(invite.invite_id)) if invite.actual_invite is not None:", "not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\",", "embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) -> discord.Embed: embed", "= f\"\\N{WARNING SIGN} **Warning!** Account is only **{minutes}** minutes and **{seconds}** seconds old!", "webhook_info = CachedMessage(None, None, None, None, None, None, None, None, None, None) if", "server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Kicked By:\", value=\"<@{}> - {}#{}\".format(audit_log.user.id, audit_log.user.name,", "value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def 
member_nick_update(before: discord.Member, after: discord.Member)", "old! \\N{WARNING SIGN}\" else: seconds = account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN}", "return embed def command_timed_out_embed(message: str = \"The command has timed out.\", color: discord.Color", "OAuth Link\") else: embed.add_field(name=\"__**Invite Information**__\", value=\"Unable to determine invite information. It's possible the", "seconds old! \\N{WARNING SIGN}\" embed.add_field(name=account_age_name, value=account_age_value, inline=True) embed.add_field(name=\"Current Member Count\", value=\"**{}** Members\".format(member.guild.member_count), inline=True)", "webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is NOT None\") description_text = f\"{webhook_info.webhook_author_name}{pk_id_msg}\" info_author", "ID\", value=pk_info['id'], inline=True) # Compute the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age", "-> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\",", "{changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True)", "width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member: discord.User, audit_log: Optional[discord.AuditLogEntry]) ->", "return embed def member_leave(member: discord.Member) -> discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name,", "pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\", inline=False) # embed.add_field(name=\"\\N{Zero", "{}#{}\".format(author_id, 
author_name, author_discrim), color=0x61cd72, timestamp=datetime.utcnow()) embed.set_thumbnail( url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was", "Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg, inline=True) embed.add_field(name=\"Message After Edit:\",", "the account age pk_created_date = datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK", "import datetime from typing import Optional, Dict, Union from db import StoredInvite, CachedMessage", "{author.id}\") return embed else: return unknown_deleted_message(channel_id, message_id) def unknown_deleted_message(channel_id, message_id) -> discord.Embed: embed", "one time use invite.\" \" You may be able to determine the inviter", "by {}, was deleted in <#{}>\".format(info_author, channel_id), inline=False) if pk_system_owner is not None:", "def split_message(message: str) -> (str, str): # TODO: Make better msg1 = message[:1000]", "\\N{WARNING SIGN}\" else: seconds = account_age.seconds % 60 account_age_value = f\"\\N{WARNING SIGN} **Warning!**", "datetime.strptime(pk_info['created'], '%Y-%m-%dT%H:%M:%S.%fZ') pk_account_age = datetime.utcnow() - pk_created_date embed.add_field(name=\"PK Account Age\", value=f\"**{pk_account_age.days}** days old\",", "pk_id_msg = f\"{s}\\n{s}\\nSystem ID: {s}{s}{s}**{webhook_info.system_pkid}** \\nMember ID: {s}**{webhook_info.member_pkid}**\" log.info(\"pk_id_msg set\") if author is", "discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\", icon_url=\"https://www.emoji.co.uk/files/twitter-emojis/objects-twitter/11031-inbox-tray.png\") # Need", "channel_id=channel_id, guild_id=guild_id, message_id=message_id), inline=False) if 
len(before_msg) > 1024 or len(after_msg) > 1024: #", "discord.Embed: embed = discord.Embed(description=\"<@{}> - {}#{}\".format(member.id, member.name, member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\")", "member.discriminator), color=discord.Color.dark_green(), timestamp=datetime.utcnow()) embed.set_author(name=\"Member Unbanned\", icon_url=\"https://i.imgur.com/OCcebCO.png\") # Need to use format other than", "= f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) #", "= f\"{audit_log.reason}\" if audit_log.reason else \"No Reason was given.\" embed.add_field(name=\"Reason:\", value=reason, inline=False) embed.set_footer(text=\"User", "> 0: embed.add_field(name=\"Message After Edit Continued:\", value=after_msg2, inline=False) else: embed.add_field(name=\"Message Before Edit:\", value=before_msg,", "Members\".format(member.guild.member_count), inline=True) if pk_info is not None: embed.add_field(name=\"\\N{Zero Width Space}‌‌‌\", value=\"\\n__**Plural Kit Information**__\",", "reliability of invite tracking by giving Gabby Gums the **Manage Channels** permission.\", inline=False)", "not None: embed.add_field(name=\"Created on\", value=invite.created_at().strftime(\"%b %d, %Y, %I:%M:%S %p UTC\")) else: if not", "account_age = datetime.utcnow() - member.created_at if account_age.days > 0: account_age_name = \"Account Age\"", "if invite.actual_invite is not None: embed.add_field(name=\"Uses\", value=\"{}\".format(invite.actual_invite.uses)) embed.add_field(name=\"Created By\", value=\"<@!{}> - {}#{}\".format(invite.actual_invite.inviter.id, invite.actual_invite.inviter.name,", "url=\"https://i.imgur.com/Q8SzUdG.png\") embed.add_field(name=\"Info:\", value=\"A message by <@{author_id}>, was edited in <#{channel_id}>\\n\" \"[Go To 
Message](https://discordapp.com/channels/{guild_id}/{channel_id}/{message_id})\".format(author_id=author_id,", "‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name)) if invite.invite_id", "banned from the server.\".format(member.display_name), inline=False) if audit_log is not None: embed.add_field(name=\"Banned By:\", value=\"<@{}>", "before_msg1, before_msg2 = split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False)", "{}#{}\".format(member.id, member.name, member.discriminator), color=0xf82125, timestamp=datetime.utcnow()) embed.set_author(name=\"Member Left 😭\", icon_url=\"https://www.emoji.co.uk/files/mozilla-emojis/objects-mozilla/11928-outbox-tray.png\") # Need to use", "discord.Embed(description=\"<@{}> - {}#{} changed their avatar.\".format(after.id, after.name, after.discriminator), color=0x00aaaa, timestamp=datetime.utcnow()) embed.set_author(name=\"Avatar Changed\") embed.set_image(url=f\"attachment://{embed_image_filename}\")", "not changed_txt = \"Username\" elif before.name == after.name and before.discriminator != after.discriminator: #", "create dummy object to make if's neater if webhook_info is None: webhook_info =", "inline=False) embed.add_field(name=\"Message After Edit:\", value=after_msg1, inline=False) if len(after_msg2.strip()) > 0: embed.add_field(name=\"Message After Edit", "Nickname\", value=after.nick, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed def user_name_update(before: discord.User, after: discord.User)", "display on iOS. (I think this is a recent discord bug.) ios_compatible_avatar_url =", "inline=False) # else: # embed.add_field(name=\"Need `View Audit Log` Permissions to show more information\",", "of the message. 
if webhook_info.webhook_author_name is not None: log.info(\"Webhook Author is NOT None\")", "information\", # value=\"\\N{zero width space}\") embed.set_footer(text=\"User ID: {}\".format(member.id)) return embed def member_unban(member: discord.User,", "None: embed.add_field(name=\" ‌‌‌\", value=\"\\n__**Invite Information**__\", inline=False) if invite.invite_name is not None: embed.add_field(name=\"Name:\", value=\"{}\".format(invite.invite_name))", "the Audit Log.\\n\" \"Additionally, you can greatly improve the reliability of invite tracking", "return embed def member_join(member: discord.Member, invite: Optional[StoredInvite], pk_info: Optional[Dict], manage_guild=True) -> discord.Embed: embed", "split_message(before_msg) after_msg1, after_msg2 = split_message(after_msg) embed.add_field(name=\"Message Before Edit:\", value=before_msg1, inline=False) if len(before_msg2.strip()) >", "\"Discriminator\" else: # Both changed changed_txt = \"Username & Discriminator\" embed = discord.Embed(description=f\"<@{after.id}>", "embed.add_field(name=\"Old Discriminator:\", value=before.discriminator, inline=True) embed.add_field(name=\"New Discriminator:\", value=after.discriminator, inline=True) embed.set_footer(text=\"User ID: {}\".format(after.id)) return embed", "embed = discord.Embed(description=f\"<@{after.id}> - {after.name}#{after.discriminator} changed their {changed_txt}.\", color=discord.Color.teal(), timestamp=datetime.utcnow()) embed.set_author(name=f\"{changed_txt} Changed\") if", "embed.set_author(name=f\"{changed_txt} Changed\") if before.name != after.name: embed.add_field(name=\"Old Username:\", value=before.name, inline=True) embed.add_field(name=\"New Username:\", value=after.name,", "%Y, %I:%M:%S %p UTC\")) else: if not manage_guild: embed.add_field(name=\"Permissions Warning!\", value=\"**Manage Server Permissions**", "discord.Embed: embed = discord.Embed(description=\"<@!{}> - {}#{}\".format(member.id, member.name, member.discriminator), 
color=0x00ff00, timestamp=datetime.utcnow()) embed.set_author(name=\"New Member Joined!!!\"," ]
[ "if \"children\" in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\"", "-1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" )", "hosts = inventory.json() for key in sorted(hosts): if key == \"all\": continue if", "Ansible AWX Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\")", "if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\"", "parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\",", "parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password)", "in AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u admin \\ -p", "print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for", "https://awx.domain.com \\ -u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import", "parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args()", "print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\") if", "if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory", "inventory_url = ( 
f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts =", ") inventory_id = -1 for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id", "host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for child in", "import argparse import sys import requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory", "help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username,", "name\") args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id =", "in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]:", "from an inventory in AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u", "== args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not", "python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\"", "parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True,", "f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for key", "for child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for var", "inventory.json() for key in sorted(hosts): if key == \"all\": 
continue if key ==", "= requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1 for inventory in all_inventories.json()[\"results\"]:", "= inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1)", "if key == \"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host in", "= argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base", "hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child)", "\\ -u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys", "in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]:", "in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]:", "\"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for key in", "\\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys import requests parser", "in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for var in hosts[key][\"vars\"]:", "<filename>get_inventory_from_awx.py #!/usr/bin/env python3 \"\"\" Creates a local inventory file from an inventory in", "of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args", "args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found", 
"f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1 for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"]", "parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories = requests.get(", "file from an inventory in AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\", "print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory", "hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\")", "print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\")", "nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) )", "\"\"\" import argparse import sys import requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX", "args.password) ) inventory_id = -1 for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]:", "sorted(hosts): if key == \"all\": continue if key == \"_meta\": continue if \"hosts\"", "\"hosts\" in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in", "hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\")", "\"children\" in hosts[key]: print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in", "inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of 
AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\",", "key == \"all\": continue if key == \"_meta\": continue if \"hosts\" in hosts[key]:", "hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for var in hosts[key][\"vars\"]: print(\"{var}={hosts[key]['vars'][var]}\")", "\"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys import requests parser = argparse.ArgumentParser(", "child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for var in", "print(\"[{key}:children]\") for child in hosts[key][\"children\"]: print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for", "Creates a local inventory file from an inventory in AWX Usage: python ../get_inventory_from_awx.py", "AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u admin \\ -p \"topsecret\"", "break if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url =", "if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url = (", "description=\"Convert Ansible AWX Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of", "inventory in AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u admin \\", "not found \") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url,", "all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id == -1:", "AWX Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\",", "../get_inventory_from_awx.py \\ --url 
https://awx.domain.com \\ -u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\"", "= -1 for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"]", "-1 for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break", "admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys import requests", "AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args =", "url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\")", "\\ --url https://awx.domain.com \\ -u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import", "\") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password))", "\\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys import requests parser = argparse.ArgumentParser( description=\"Convert", "inventory_id = inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found \")", "args.password)) hosts = inventory.json() for key in sorted(hosts): if key == \"all\": continue", "= requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for key in sorted(hosts): if key", "import requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard inventory\" )", "-u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys import", "auth=(args.username, args.password) ) inventory_id = -1 
for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] ==", "python3 \"\"\" Creates a local inventory file from an inventory in AWX Usage:", "= inventory.json() for key in sorted(hosts): if key == \"all\": continue if key", "key in sorted(hosts): if key == \"all\": continue if key == \"_meta\": continue", "import sys import requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard", "\"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\",", "for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break if", "--url https://awx.domain.com \\ -u admin \\ -p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse", "\"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories", "inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url", "if key == \"all\": continue if key == \"_meta\": continue if \"hosts\" in", "argparse import sys import requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to", ") inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for key in sorted(hosts):", "help=\"inventory name\") args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id", "for host in hosts[key][\"hosts\"]: print(host) print(\"\") if \"children\" in hosts[key]: print(\"[{key}:children]\") for child", "requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard 
inventory\" ) parser.add_argument(\"--url\",", "to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\")", "sys import requests parser = argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard inventory\"", "requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1 for inventory in all_inventories.json()[\"results\"]: if", "inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id == -1: print(f\"Inventory {args.inventory[0]}", "== \"all\": continue if key == \"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\")", "continue if key == \"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host", "help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory", "sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts", "== -1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\"", "found \") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username,", "\"my-ec2-dev-inventory\" \"\"\" import argparse import sys import requests parser = argparse.ArgumentParser( description=\"Convert Ansible", "an inventory in AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u admin", "print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for var in 
hosts[key][\"vars\"]: print(\"{var}={hosts[key]['vars'][var]}\") print(\"\") print(\"\")", "-p \"topsecret\" \\ \"my-ec2-dev-inventory\" \"\"\" import argparse import sys import requests parser =", "print(child) print(\"\") if \"vars\" in hosts[key]: print(\"[{key}:vars]\") for var in hosts[key][\"vars\"]: print(\"{var}={hosts[key]['vars'][var]}\") print(\"\")", "== \"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host)", "= parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1 for", "inventory file from an inventory in AWX Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com", "required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1,", "in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id ==", "for key in sorted(hosts): if key == \"all\": continue if key == \"_meta\":", "all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1 for inventory in", "( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for", "requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for key in sorted(hosts): if key ==", "inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id = inventory[\"id\"] break if inventory_id", "Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\",", ") 
parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\")", "in sorted(hosts): if key == \"all\": continue if key == \"_meta\": continue if", "{args.inventory[0]} not found \") sys.exit(1) inventory_url = ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory =", "inventory_id = -1 for inventory in all_inventories.json()[\"results\"]: if inventory[\"name\"] == args.inventory[0]: inventory_id =", "inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json() for key in sorted(hosts): if", "key == \"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]:", "help=\"username\") parser.add_argument(\"-p\", \"--password\", help=\"password\") parser.add_argument(\"inventory\", nargs=1, help=\"inventory name\") args = parser.parse_args() all_inventories =", "standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url of AWX/Tower\") parser.add_argument(\"-u\", \"--username\", help=\"username\") parser.add_argument(\"-p\",", "args = parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1", "\"\"\" Creates a local inventory file from an inventory in AWX Usage: python", "argparse.ArgumentParser( description=\"Convert Ansible AWX Inventory to standard inventory\" ) parser.add_argument(\"--url\", required=True, help=\"base url", "\"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\")", "local inventory file from an inventory in AWX Usage: python ../get_inventory_from_awx.py \\ --url", "inventory_id == -1: print(f\"Inventory {args.inventory[0]} not found \") sys.exit(1) inventory_url = ( 
f\"{args.url}/api/v2/inventories/{inventory_id}\"", "continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for host in hosts[key][\"hosts\"]: print(host) print(\"\") if", "#!/usr/bin/env python3 \"\"\" Creates a local inventory file from an inventory in AWX", "auth=(args.username, args.password)) hosts = inventory.json() for key in sorted(hosts): if key == \"all\":", "= ( f\"{args.url}/api/v2/inventories/{inventory_id}\" \"/script/?hostvars=1&towervars=1&all=1\" ) inventory = requests.get(inventory_url, auth=(args.username, args.password)) hosts = inventory.json()", "Usage: python ../get_inventory_from_awx.py \\ --url https://awx.domain.com \\ -u admin \\ -p \"topsecret\" \\", "parser.parse_args() all_inventories = requests.get( f\"{args.url}/api/v2/inventories/\", auth=(args.username, args.password) ) inventory_id = -1 for inventory", "\"all\": continue if key == \"_meta\": continue if \"hosts\" in hosts[key]: print(f\"[{key}]\") for", "a local inventory file from an inventory in AWX Usage: python ../get_inventory_from_awx.py \\" ]
[ "model\"\"\" from oslo_utils import versionutils from oslo_versionedobjects import base from playnetmano_rm import objects", "an object is registered, this function will be called for maintaining playnetmano_rm.objects.$OBJECT as", "base from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for", "given object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls)", "Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This is the base class for all", "or instantiated via RPC. Simply defining a sub-class of this class would make", "for all objects that can be remoted or instantiated via RPC. Simply defining", "== 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes()", "for field in obj.fields: if field == 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field]", "method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context, obj, db_obj):", "from oslo_utils import versionutils from oslo_versionedobjects import base from playnetmano_rm import objects VersionedObjectDictCompat", "is the base class for all objects that can be remoted or instantiated", "class would make it remotely instantiatable. Objects should implement the \"get\" class method", "obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for object registration.", "When an object is registered, this function will be called for maintaining playnetmano_rm.objects.$OBJECT", "\"\"\"Callback for object registration. 
When an object is registered, this function will be", "version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version =", "import base from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class", "registration_hook(self, cls, index): \"\"\"Callback for object registration. When an object is registered, this", "base class for all objects that can be remoted or instantiated via RPC.", "class for all objects that can be remoted or instantiated via RPC. Simply", "of a given object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects,", "objects that can be remoted or instantiated via RPC. Simply defining a sub-class", "if field == 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context =", "_from_db_object(context, obj, db_obj): if db_obj is None: return None for field in obj.fields:", "objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This is", "make it remotely instantiatable. Objects should implement the \"get\" class method and the", "be remoted or instantiated via RPC. Simply defining a sub-class of this class", "Objects should implement the \"get\" class method and the \"save\" object method. \"\"\"", "common internal object model\"\"\" from oslo_utils import versionutils from oslo_versionedobjects import base from", "'1.0' @staticmethod def _from_db_object(context, obj, db_obj): if db_obj is None: return None for", "field in obj.fields: if field == 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] =", "called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a given object. 
\"\"\"", "\"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context, obj, db_obj): if", "Simply defining a sub-class of this class would make it remotely instantiatable. Objects", "for object registration. When an object is registered, this function will be called", "return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for object registration. When", "obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for object registration. When an", "implement the \"get\" class method and the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE =", "sub-class of this class would make it remotely instantiatable. Objects should implement the", "object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context, obj,", "else: obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def", "'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context, obj, db_obj): if db_obj is None:", "object model\"\"\" from oslo_utils import versionutils from oslo_versionedobjects import base from playnetmano_rm import", "oslo_utils import versionutils from oslo_versionedobjects import base from playnetmano_rm import objects VersionedObjectDictCompat =", "be called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a given object.", "playnetmano_rm objects. This is the base class for all objects that can be", "\"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version", "instantiated via RPC. Simply defining a sub-class of this class would make it", "RPC. 
Simply defining a sub-class of this class would make it remotely instantiatable.", "class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This is the base class for", "db_obj[field] obj._context = context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index):", "= context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for", "registered, this function will be called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation", "import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This", "it remotely instantiatable. Objects should implement the \"get\" class method and the \"save\"", "= db_obj[field] obj._context = context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls,", "this class would make it remotely instantiatable. Objects should implement the \"get\" class", "= base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This is the base", "= versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple(", "can be remoted or instantiated via RPC. Simply defining a sub-class of this", "playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a given object. 
\"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION)", "else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >= curr_version: setattr(objects, cls.obj_name(), cls)", "for playnetmano_rm objects. This is the base class for all objects that can", "\"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context,", "VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This is the", "should implement the \"get\" class method and the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE", "class method and the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION =", "for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a given object. \"\"\" version", "of this class would make it remotely instantiatable. Objects should implement the \"get\"", "via RPC. Simply defining a sub-class of this class would make it remotely", "this function will be called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of", "versionutils from oslo_versionedobjects import base from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class", "return None for field in obj.fields: if field == 'metadata': obj['metadata'] = db_obj['meta_data']", "VERSION = '1.0' @staticmethod def _from_db_object(context, obj, db_obj): if db_obj is None: return", "and the \"save\" object method. 
\"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod", "db_obj is None: return None for field in obj.fields: if field == 'metadata':", "from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm", "the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod def", "a sub-class of this class would make it remotely instantiatable. Objects should implement", "highest-versioned implementation of a given object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects,", "context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for object", "= 'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context, obj, db_obj): if db_obj is", "will be called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a given", "versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects,", "would make it remotely instantiatable. Objects should implement the \"get\" class method and", "defining a sub-class of this class would make it remotely instantiatable. 
Objects should", "cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >= curr_version: setattr(objects,", "is registered, this function will be called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned", "from oslo_versionedobjects import base from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject):", "internal object model\"\"\" from oslo_utils import versionutils from oslo_versionedobjects import base from playnetmano_rm", "the highest-versioned implementation of a given object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not", "a given object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(),", "@staticmethod def _from_db_object(context, obj, db_obj): if db_obj is None: return None for field", "def _from_db_object(context, obj, db_obj): if db_obj is None: return None for field in", "the base class for all objects that can be remoted or instantiated via", "not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if", "cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >= curr_version: setattr(objects, cls.obj_name(),", "obj, db_obj): if db_obj is None: return None for field in obj.fields: if", "obj._context = context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback", "obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self,", "if not hasattr(objects, 
cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION)", "in obj.fields: if field == 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field]", "hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version", "None for field in obj.fields: if field == 'metadata': obj['metadata'] = db_obj['meta_data'] else:", "def registration_hook(self, cls, index): \"\"\"Callback for object registration. When an object is registered,", "method and the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0'", "index): \"\"\"Callback for object registration. When an object is registered, this function will", "maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a given object. \"\"\" version =", "obj.fields: if field == 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context", "registration. When an object is registered, this function will be called for maintaining", "playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects.", "\"get\" class method and the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION", "all objects that can be remoted or instantiated via RPC. Simply defining a", "field == 'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context = context", "object is registered, this function will be called for maintaining playnetmano_rm.objects.$OBJECT as the", "implementation of a given object. 
\"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()):", "remoted or instantiated via RPC. Simply defining a sub-class of this class would", "db_obj): if db_obj is None: return None for field in obj.fields: if field", "obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes() return obj", "that can be remoted or instantiated via RPC. Simply defining a sub-class of", "the \"get\" class method and the \"save\" object method. \"\"\" OBJ_PROJECT_NAMESPACE = 'playnetmano_rm'", "= db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes() return obj class", "OBJ_PROJECT_NAMESPACE = 'playnetmano_rm' VERSION = '1.0' @staticmethod def _from_db_object(context, obj, db_obj): if db_obj", "cls, index): \"\"\"Callback for object registration. When an object is registered, this function", "object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else:", "\"\"\"Base class for playnetmano_rm objects. This is the base class for all objects", "objects. This is the base class for all objects that can be remoted", "if db_obj is None: return None for field in obj.fields: if field ==", "db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes() return obj class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry):", "This is the base class for all objects that can be remoted or", "= '1.0' @staticmethod def _from_db_object(context, obj, db_obj): if db_obj is None: return None", "\"\"\"playnetmano_rm common internal object model\"\"\" from oslo_utils import versionutils from oslo_versionedobjects import base", "base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base class for playnetmano_rm objects. This is the base class", "remotely instantiatable. 
Objects should implement the \"get\" class method and the \"save\" object", "'metadata': obj['metadata'] = db_obj['meta_data'] else: obj[field] = db_obj[field] obj._context = context obj.obj_reset_changes() return", "class Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for object registration. When an object", "class for playnetmano_rm objects. This is the base class for all objects that", "function will be called for maintaining playnetmano_rm.objects.$OBJECT as the highest-versioned implementation of a", "instantiatable. Objects should implement the \"get\" class method and the \"save\" object method.", "None: return None for field in obj.fields: if field == 'metadata': obj['metadata'] =", "is None: return None for field in obj.fields: if field == 'metadata': obj['metadata']", "object registration. When an object is registered, this function will be called for", "import versionutils from oslo_versionedobjects import base from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat", "setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >= curr_version:", "Playnetmano_rmObjectRegistry(base.VersionedObjectRegistry): def registration_hook(self, cls, index): \"\"\"Callback for object registration. When an object is", "as the highest-versioned implementation of a given object. \"\"\" version = versionutils.convert_version_to_tuple(cls.VERSION) if", "cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: curr_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >=", "oslo_versionedobjects import base from playnetmano_rm import objects VersionedObjectDictCompat = base.VersionedObjectDictCompat class Playnetmano_rmObject(base.VersionedObject): \"\"\"Base" ]
[ "type, usually using the faker python library. :param faker: The ``Faker`` class from", "\"\"\"Generates an example of this ``Filth`` type, usually using the faker python library.", "16 21:30:00 1988 (en_US); locale dependant '%x', # 08/16/1988 (en_US); locale dependant '%a", "\"\"\"Check to see if the found filth is valid.\"\"\" found_date = dateparser.parse(self.text) if", "of this ``Filth`` :rtype: str \"\"\" formats = [ '%c', # Tue Aug", "%d %b %Y', # Sun 19 Jan 1999 '%A %d %B %Y', #", "see if the found filth is valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is", "DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18 max_age_years = 100 @staticmethod def generate(faker:", "Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to", "1999 '%A %d %B %Y', # Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999", "dependant '%a %d %b %Y', # Sun 19 Jan 1999 '%A %d %B", "'%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y', # Monday 08th, January, 1973 ]", "str: \"\"\"Generates an example of this ``Filth`` type, usually using the faker python", "found filth is valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is None: return False", "'%a %d %b %Y', # Sun 19 Jan 1999 '%A %d %B %Y',", "Faker) -> str: \"\"\"Generates an example of this ``Filth`` type, usually using the", "``faker`` library :type faker: Faker :return: An example of this ``Filth`` :rtype: str", "return False years_since_identified_date = datetime.date.today().year - found_date.year return DateOfBirthFilth.min_age_years <= years_since_identified_date <= DateOfBirthFilth.max_age_years", "this ``Filth`` type, usually using the faker python library. 
:param faker: The ``Faker``", "found_date is None: return False years_since_identified_date = datetime.date.today().year - found_date.year return DateOfBirthFilth.min_age_years <=", "import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18 max_age_years = 100", "-> str: \"\"\"Generates an example of this ``Filth`` type, usually using the faker", "``Filth`` :rtype: str \"\"\" formats = [ '%c', # Tue Aug 16 21:30:00", "Sun 19 Jan 1999 '%A %d %B %Y', # Sunday 19 January 1999", ":rtype: str \"\"\" formats = [ '%c', # Tue Aug 16 21:30:00 1988", "found_date = dateparser.parse(self.text) if found_date is None: return False years_since_identified_date = datetime.date.today().year -", "08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to see", "= 18 max_age_years = 100 @staticmethod def generate(faker: Faker) -> str: \"\"\"Generates an", "return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to see if the found filth", "-> bool: \"\"\"Check to see if the found filth is valid.\"\"\" found_date =", "import datetime import dateparser from faker import Faker from .base import Filth class", ".base import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18 max_age_years =", "# 08/16/1988 (en_US); locale dependant '%a %d %b %Y', # Sun 19 Jan", "'%x', # 08/16/1988 (en_US); locale dependant '%a %d %b %Y', # Sun 19", "18 max_age_years = 100 @staticmethod def generate(faker: Faker) -> str: \"\"\"Generates an example", "%d %B %Y', # Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth,", "usually using the faker python library. 
:param faker: The ``Faker`` class from the", "dateparser.parse(self.text) if found_date is None: return False years_since_identified_date = datetime.date.today().year - found_date.year return", "%Y', # Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y',", "] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to see if the found", "'%A %dth, %B, %Y', # Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def", "@staticmethod def generate(faker: Faker) -> str: \"\"\"Generates an example of this ``Filth`` type,", "import Faker from .base import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years =", "%Y', # Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool:", "(en_US); locale dependant '%x', # 08/16/1988 (en_US); locale dependant '%a %d %b %Y',", "An example of this ``Filth`` :rtype: str \"\"\" formats = [ '%c', #", "locale dependant '%a %d %b %Y', # Sun 19 Jan 1999 '%A %d", "'date_of_birth' min_age_years = 18 max_age_years = 100 @staticmethod def generate(faker: Faker) -> str:", "using the faker python library. 
:param faker: The ``Faker`` class from the ``faker``", "1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y', # Monday 08th, January, 1973", "1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to see if the", "class from the ``faker`` library :type faker: Faker :return: An example of this", "January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to see if", "Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y', # Monday", "%B, %Y', # Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) ->", "%b %Y', # Sun 19 Jan 1999 '%A %d %B %Y', # Sunday", ":param faker: The ``Faker`` class from the ``faker`` library :type faker: Faker :return:", "= [ '%c', # Tue Aug 16 21:30:00 1988 (en_US); locale dependant '%x',", "%Y', # Sun 19 Jan 1999 '%A %d %B %Y', # Sunday 19", "from faker import Faker from .base import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth'", ":return: An example of this ``Filth`` :rtype: str \"\"\" formats = [ '%c',", "dependant '%x', # 08/16/1988 (en_US); locale dependant '%a %d %b %Y', # Sun", "an example of this ``Filth`` type, usually using the faker python library. :param", "January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y', # Monday 08th, January,", "'%A %d %B %Y', # Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A", "faker import Faker from .base import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years", "example of this ``Filth`` type, usually using the faker python library. 
:param faker:", "19 Jan 1999 '%A %d %B %Y', # Sunday 19 January 1999 '%d-%m-%Y',", "Faker from .base import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18", "(en_US); locale dependant '%a %d %b %Y', # Sun 19 Jan 1999 '%A", "# Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check", "import random import datetime import dateparser from faker import Faker from .base import", "\"\"\" formats = [ '%c', # Tue Aug 16 21:30:00 1988 (en_US); locale", "= 'date_of_birth' min_age_years = 18 max_age_years = 100 @staticmethod def generate(faker: Faker) ->", "21:30:00 1988 (en_US); locale dependant '%x', # 08/16/1988 (en_US); locale dependant '%a %d", "str \"\"\" formats = [ '%c', # Tue Aug 16 21:30:00 1988 (en_US);", "faker python library. :param faker: The ``Faker`` class from the ``faker`` library :type", "formats = [ '%c', # Tue Aug 16 21:30:00 1988 (en_US); locale dependant", "# 15-01-1999 '%A %dth, %B, %Y', # Monday 08th, January, 1973 ] return", "the faker python library. 
:param faker: The ``Faker`` class from the ``faker`` library", "# Tue Aug 16 21:30:00 1988 (en_US); locale dependant '%x', # 08/16/1988 (en_US);", "'%c', # Tue Aug 16 21:30:00 1988 (en_US); locale dependant '%x', # 08/16/1988", "filth is valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is None: return False years_since_identified_date", "valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is None: return False years_since_identified_date = datetime.date.today().year", "random import datetime import dateparser from faker import Faker from .base import Filth", "def generate(faker: Faker) -> str: \"\"\"Generates an example of this ``Filth`` type, usually", "bool: \"\"\"Check to see if the found filth is valid.\"\"\" found_date = dateparser.parse(self.text)", "from .base import Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18 max_age_years", "if found_date is None: return False years_since_identified_date = datetime.date.today().year - found_date.year return DateOfBirthFilth.min_age_years", "python library. :param faker: The ``Faker`` class from the ``faker`` library :type faker:", "the ``faker`` library :type faker: Faker :return: An example of this ``Filth`` :rtype:", "class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18 max_age_years = 100 @staticmethod def", "None: return False years_since_identified_date = datetime.date.today().year - found_date.year return DateOfBirthFilth.min_age_years <= years_since_identified_date <=", "faker: The ``Faker`` class from the ``faker`` library :type faker: Faker :return: An", "generate(faker: Faker) -> str: \"\"\"Generates an example of this ``Filth`` type, usually using", "this ``Filth`` :rtype: str \"\"\" formats = [ '%c', # Tue Aug 16", "min_age_years = 18 max_age_years = 100 @staticmethod def generate(faker: Faker) -> str: \"\"\"Generates", "``Filth`` type, usually using the faker python library. 
:param faker: The ``Faker`` class", "is None: return False years_since_identified_date = datetime.date.today().year - found_date.year return DateOfBirthFilth.min_age_years <= years_since_identified_date", "1988 (en_US); locale dependant '%x', # 08/16/1988 (en_US); locale dependant '%a %d %b", "= dateparser.parse(self.text) if found_date is None: return False years_since_identified_date = datetime.date.today().year - found_date.year", "is_valid(self) -> bool: \"\"\"Check to see if the found filth is valid.\"\"\" found_date", "[ '%c', # Tue Aug 16 21:30:00 1988 (en_US); locale dependant '%x', #", "Jan 1999 '%A %d %B %Y', # Sunday 19 January 1999 '%d-%m-%Y', #", "19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y', # Monday 08th,", "library. :param faker: The ``Faker`` class from the ``faker`` library :type faker: Faker", "to see if the found filth is valid.\"\"\" found_date = dateparser.parse(self.text) if found_date", "``Faker`` class from the ``faker`` library :type faker: Faker :return: An example of", "The ``Faker`` class from the ``faker`` library :type faker: Faker :return: An example", "def is_valid(self) -> bool: \"\"\"Check to see if the found filth is valid.\"\"\"", "max_age_years = 100 @staticmethod def generate(faker: Faker) -> str: \"\"\"Generates an example of", "locale dependant '%x', # 08/16/1988 (en_US); locale dependant '%a %d %b %Y', #", ":type faker: Faker :return: An example of this ``Filth`` :rtype: str \"\"\" formats", "# Sun 19 Jan 1999 '%A %d %B %Y', # Sunday 19 January", "dateparser from faker import Faker from .base import Filth class DateOfBirthFilth(Filth): type =", "of this ``Filth`` type, usually using the faker python library. 
:param faker: The", "the found filth is valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is None: return", "Filth class DateOfBirthFilth(Filth): type = 'date_of_birth' min_age_years = 18 max_age_years = 100 @staticmethod", "from the ``faker`` library :type faker: Faker :return: An example of this ``Filth``", "%B %Y', # Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B,", "faker: Faker :return: An example of this ``Filth`` :rtype: str \"\"\" formats =", "08/16/1988 (en_US); locale dependant '%a %d %b %Y', # Sun 19 Jan 1999", "library :type faker: Faker :return: An example of this ``Filth`` :rtype: str \"\"\"", "datetime import dateparser from faker import Faker from .base import Filth class DateOfBirthFilth(Filth):", "100 @staticmethod def generate(faker: Faker) -> str: \"\"\"Generates an example of this ``Filth``", "%dth, %B, %Y', # Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self)", "Tue Aug 16 21:30:00 1988 (en_US); locale dependant '%x', # 08/16/1988 (en_US); locale", "is valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is None: return False years_since_identified_date =", "type = 'date_of_birth' min_age_years = 18 max_age_years = 100 @staticmethod def generate(faker: Faker)", "= 100 @staticmethod def generate(faker: Faker) -> str: \"\"\"Generates an example of this", "Faker :return: An example of this ``Filth`` :rtype: str \"\"\" formats = [", "# Sunday 19 January 1999 '%d-%m-%Y', # 15-01-1999 '%A %dth, %B, %Y', #", "Aug 16 21:30:00 1988 (en_US); locale dependant '%x', # 08/16/1988 (en_US); locale dependant", "15-01-1999 '%A %dth, %B, %Y', # Monday 08th, January, 1973 ] return faker.date_of_birth().strftime(random.choice(formats))", "faker.date_of_birth().strftime(random.choice(formats)) def is_valid(self) -> bool: \"\"\"Check to see if the found filth is", "example of this ``Filth`` :rtype: str \"\"\" formats = [ '%c', # Tue", "if the found filth is 
valid.\"\"\" found_date = dateparser.parse(self.text) if found_date is None:", "import dateparser from faker import Faker from .base import Filth class DateOfBirthFilth(Filth): type" ]
[]
[ "self._skip, ) async def __anext__(self): deque = self._deque if not deque: future =", "name): self._client = client self._name = name self._collections = collections = {} self._collections_proxy", "(colleciones) sistemas de comunicacion (cursores esperando datos) ''' # batch_size = 100 def", "= bson_encode(query) self._projection = projection self._encoded_projection = projection and bson_encode(projection) or b'' self._skip", "+= reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection,", "## process item item = self._process_item(item) return item def _process_item(self, item): return item", "def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc, list) and doc or [doc]", "= False self._exhaust = False async def __aiter__(self): ''' returns a cursor. '''", "self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip", "connection = self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or", "return _bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in docs )", "future except: ## make reconection and request new query raise ## try items", "self._limit = query._limit self._connection = connection = self._client.connection() self._future = future = connection.OP_QUERY(", "self: self._client) name = property(lambda self: self._name) collections = property(lambda self: self._collections_proxy) def", "def connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion debe estar", ") def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc, list) and doc or", "{} 
self._databases_proxy = MappingProxy(databases) self._is_connected = False self._cursors = set() self._server_version = None", "= True def _connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return", "query._encoded_projection self._batch_length = 25 self._skip = query._skip self._limit = query._limit self._connection = connection", "25 self._skip = query._skip self._limit = query._limit self._connection = connection = self._client.connection() self._future", "esta query ''' return asyncio.Future() # getitem -> future # setitem -> future(error)", "= MappingProxy(databases) self._is_connected = False self._cursors = set() self._server_version = None @property def", "sesiones (colleciones) sistemas de comunicacion (cursores esperando datos) ''' # batch_size = 100", "self._client = client self._name = name self._collections = collections = {} self._collections_proxy =", "Query(self, query, projection, 0, 1) def __getitem__(self, id): return Query(self, {'_id': id}, _empty_doc,", "is None: database = Database(self, name) self._databases[name] = database return database class Database:", "for doc in docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc,", "doc or [doc] class Client: def __init__(self, loop=None, host=None, port=None, connections=8): ''' '''", "Collection(self, name) self._collections[name] = collection return collection # # def __getattr__(self, command): #", "if database is None: database = Database(self, name) self._databases[name] = database return database", "= name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return Query(self, query,", "cmd(**parameters): # odict((command, 1) + parameters.items()) # return cmd class Collection: ''' contempla:", "await asyncio.gather( *(c.connect() for c in self._connection_pool), loop = self._loop ) for 
disconnection_future", "print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def", "and bson_encode(projection) or b'' self._skip = skip self._limit = limit self._tailable = False", "_bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw):", "''' ''' self._loop = loop = loop or asyncio.get_event_loop() self._host = host =", "isinstance(doc, list) and doc or [doc] class Client: def __init__(self, loop=None, host=None, port=None,", "MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): #", "in range(connections) ] self._databases = databases = {} self._databases_proxy = MappingProxy(databases) self._is_connected =", "min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future = None ## stop the", "collections = property(lambda self: self._collections_proxy) def collection(self, name): collection = self._collections.get(name, None) if", "self._databases[name] = database return database class Database: def __init__(self, client, name): self._client =", "for c in self._connection_pool), loop = self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost)", "= client self._name = name self._collections = collections = {} self._collections_proxy = MappingProxy(collections)", "asyncio.gather( *(c.connect() for c in self._connection_pool), loop = self._loop ) for disconnection_future in", "cursor_id ) else: self._future = None ## stop the cursor here item =", "False self._exhaust = False async def __aiter__(self): ''' returns a cursor. 
''' return", "deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id =", "projection) def find_one(self, query, projection=_empty_doc): return Query(self, query, projection, 0, 1) def __getitem__(self,", "or 0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self): deque = self._deque if not", "1) def __getitem__(self, id): return Query(self, {'_id': id}, _empty_doc, 0, 1) class Query:", "id}, _empty_doc, 0, 1) class Query: ''' Representa una consulta, cachea la codificacion", "skip self._limit = limit self._tailable = False self._exhaust = False async def __aiter__(self):", "collections, asyncio, bson, random from .connection import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError,", "= False async def __aiter__(self): ''' returns a cursor. ''' return Cursor(self) def", "return asyncio.Future() # getitem -> future # setitem -> future(error) # slice ->", "collection, query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection = collection self._query =", "= database return database class Database: def __init__(self, client, name): self._client = client", "self._batch_length), cursor_id ) else: self._future = None ## stop the cursor here item", "def bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in", "getitem -> future # setitem -> future(error) # slice -> cursor class Cursor:", "__init__(self, query): self._client = query._client self._query = query self._deque = collections.deque() self._cursor_id =", "= self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect():", "database class Database: def 
__init__(self, client, name): self._client = client self._name = name", "self._skip = query._skip self._limit = query._limit self._connection = connection = self._client.connection() self._future =", "la conexion debe estar activa, #self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool) #return", "# return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc)", "= connection = self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit", "0, 1) class Query: ''' Representa una consulta, cachea la codificacion para futuros", "host or connection.default_host self._port = port = port or connection.default_port self._next_connection = 0", "self._name) collections = property(lambda self: self._collections_proxy) def collection(self, name): collection = self._collections.get(name, None)", "made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False raise NotImplementedError def", "connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion debe estar activa,", "= query self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection = projection and bson_encode(projection)", "random from .connection import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class", "''' def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection =", "self._batch_length = 25 self._skip = query._skip self._limit = query._limit self._connection = connection =", "# # def __getattr__(self, command): # async def cmd(**parameters): # odict((command, 1) +", 
"loop = self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def", "self._connection_pool), loop = self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True", "query._client self._query = query self._deque = collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name", "self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip = query._skip self._limit", "self._port = port = port or connection.default_port self._next_connection = 0 self._connection_pool = [", "self._future = None ## stop the cursor here item = deque.popleft() ## process", ".connection import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__", "def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False", "conexion debe estar activa, #self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool) #return connection", "None @property def databases(self): return self._databases_proxy async def connect(self): disconnection_futures = await asyncio.gather(", "connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for c in self._connection_pool), loop = self._loop", "_bson_encode(doc) for doc in docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return", "c in self._connection_pool), loop = self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected", "(next_connection_idx + 1) % 
len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def database(self, name):", "= 0 self._connection_pool = [ Connection(loop, host, port) for n in range(connections) ]", "self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self): deque", "self: self._name) collections = property(lambda self: self._collections_proxy) def collection(self, name): collection = self._collections.get(name,", "self._collection = collection self._query = query self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection", "future # setitem -> future(error) # slice -> cursor class Cursor: ''' '''", "collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode", "future que se resolverá con el numero de elementos que alcanza esta query", "def databases(self): return self._databases_proxy async def connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for", "= future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, )", "= _bson_decode_all(raw, odict) return isinstance(doc, list) and doc or [doc] class Client: def", "self._encoded_projection = projection and bson_encode(projection) or b'' self._skip = skip self._limit = limit", "collection return collection # # def __getattr__(self, command): # async def cmd(**parameters): #", "collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode", "self._projection = projection self._encoded_projection = projection and bson_encode(projection) or b'' self._skip = skip", "_bson_decode_all(raw, odict) return 
isinstance(doc, list) and doc or [doc] class Client: def __init__(self,", "property(lambda self: self._client) name = property(lambda self: self._name) collections = property(lambda self: self._collections_proxy)", "(protocol) classes ''' #cybson import collections, asyncio, bson, random from .connection import Connection", "connection.default_port self._next_connection = 0 self._connection_pool = [ Connection(loop, host, port) for n in", "query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip = query._skip self._limit = query._limit", "query._skip self._limit = query._limit self._connection = connection = self._client.connection() self._future = future =", "self._batch_length), self._skip, ) async def __anext__(self): deque = self._deque if not deque: future", "host, port) for n in range(connections) ] self._databases = databases = {} self._databases_proxy", "not deque: future = self._future if future is None: raise StopAsyncIteration try: reply", "future(error) # slice -> cursor class Cursor: ''' ''' def __init__(self, query): self._client", "= reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id", "query, projection=_empty_doc): return Query(self, query, projection, 0, 1) def __getitem__(self, id): return Query(self,", "class Collection: ''' contempla: sesiones (colleciones) sistemas de comunicacion (cursores esperando datos) '''", "self._next_connection = 0 self._connection_pool = [ Connection(loop, host, port) for n in range(connections)", "= Collection(self, name) self._collections[name] = collection return collection # # def __getattr__(self, command):", "= collection._client self._collection = collection self._query = query self._encoded_query = bson_encode(query) self._projection =", "def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in docs ) def bson_decode_multi(raw): doc", 
"return self._databases_proxy async def connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for c in", "= property(lambda self: self._collections_proxy) def collection(self, name): collection = self._collections.get(name, None) if collection", "self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future =", "_bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in docs ) def", "''' # batch_size = 100 def __init__(self, database, name): self._client = database._client self._database", "try items = bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR deque.extend(items) if self._limit: self._limit", "return database class Database: def __init__(self, client, name): self._client = client self._name =", "(cursores esperando datos) ''' # batch_size = 100 def __init__(self, database, name): self._client", "None: database = Database(self, name) self._databases[name] = database return database class Database: def", "None: collection = Collection(self, name) self._collections[name] = collection return collection # # def", "future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async", "elementos que alcanza esta query ''' return asyncio.Future() # getitem -> future #", "0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future = None ## stop the cursor here", "KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__", "__anext__(self): deque = self._deque if not deque: future = self._future if future is", "connection.default_host self._port = port = port or connection.default_port self._next_connection = 0 self._connection_pool =", "1) class Query: 
''' Representa una consulta, cachea la codificacion para futuros usos.", "= await future except: ## make reconection and request new query raise ##", "collection = Collection(self, name) self._collections[name] = collection return collection # # def __getattr__(self,", "DECODE ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id =", "property(lambda self: self._name) collections = property(lambda self: self._collections_proxy) def collection(self, name): collection =", "''' returna un future que se resolverá con el numero de elementos que", "consulta, cachea la codificacion para futuros usos. ''' def __init__(self, collection, query, projection=_empty_doc,", "= (next_connection_idx + 1) % len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def database(self,", "= collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key)", "## raise BSON DECODE ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip +=", "if collection is None: collection = Collection(self, name) self._collections[name] = collection return collection", "port = port or connection.default_port self._next_connection = 0 self._connection_pool = [ Connection(loop, host,", "self._query = query self._deque = collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query", "self._client = query._client self._query = query self._deque = collections.deque() self._cursor_id = None self._cstr_collection", "collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection", "raise ## try items = bson_decode_multi(reply.bson_payload) ## raise BSON 
DECODE ERROR deque.extend(items) if", "self._collections = collections = {} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd') client", "debe estar activa, #self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool) #return connection return", "self._deque = collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection", "self._connection_pool.remove(connection) if not self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made',", "disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False raise NotImplementedError def connection(self): #next_connection_idx =", "= 100 def __init__(self, database, name): self._client = database._client self._database = database #", "= collection self._query = query self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection =", "for n in range(connections) ] self._databases = databases = {} self._databases_proxy = MappingProxy(databases)", "None) if database is None: database = Database(self, name) self._databases[name] = database return", "def find(self, query, projection=_empty_doc): return Query(self, query, projection) def find_one(self, query, projection=_empty_doc): return", "def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection = collection", "reconection and request new query raise ## try items = bson_decode_multi(reply.bson_payload) ## raise", "self._collections[name] = collection return collection # # def __getattr__(self, command): # async def", "list) and doc or [doc] class Client: def __init__(self, loop=None, host=None, port=None, connections=8):", "para futuros 
usos. ''' def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client =", "1)[0] def database(self, name): database = self._databases.get(name, None) if database is None: database", "find(self, query, projection=_empty_doc): return Query(self, query, projection) def find_one(self, query, projection=_empty_doc): return Query(self,", "self._limit = limit self._tailable = False self._exhaust = False async def __aiter__(self): '''", "= Collection(self, '$cmd') client = property(lambda self: self._client) name = property(lambda self: self._name)", "def __anext__(self): deque = self._deque if not deque: future = self._future if future", "# def __getattr__(self, command): # async def cmd(**parameters): # odict((command, 1) + parameters.items())", "reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id )", "n in range(connections) ] self._databases = databases = {} self._databases_proxy = MappingProxy(databases) self._is_connected", "parameters.items()) # return cmd class Collection: ''' contempla: sesiones (colleciones) sistemas de comunicacion", "new query raise ## try items = bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR", "reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if cursor_id: self._future =", "= set() self._server_version = None @property def databases(self): return self._databases_proxy async def connect(self):", "self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return Query(self,", "@property def databases(self): return self._databases_proxy async def connect(self): disconnection_futures = await asyncio.gather( *(c.connect()", "loop=None, host=None, port=None, connections=8): ''' ''' self._loop = loop = loop or asyncio.get_event_loop()", "loop or 
asyncio.get_event_loop() self._host = host = host or connection.default_host self._port = port", "= bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): # return _bson_encode(doc)", "async def connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for c in self._connection_pool), loop", "return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for", "bson_encode(query) self._projection = projection self._encoded_projection = projection and bson_encode(projection) or b'' self._skip =", ") async def __anext__(self): deque = self._deque if not deque: future = self._future", "= False self._cursors = set() self._server_version = None @property def databases(self): return self._databases_proxy", "deque.popleft() ## process item item = self._process_item(item) return item def _process_item(self, item): return", "database = Database(self, name) self._databases[name] = database return database class Database: def __init__(self,", "odict((command, 1) + parameters.items()) # return cmd class Collection: ''' contempla: sesiones (colleciones)", "async def cmd(**parameters): # odict((command, 1) + parameters.items()) # return cmd class Collection:", "= self._collections.get(name, None) if collection is None: collection = Collection(self, name) self._collections[name] =", "= MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc):", "cachea la codificacion para futuros usos. 
''' def __init__(self, collection, query, projection=_empty_doc, skip=0,", "{'_id': id}, _empty_doc, 0, 1) class Query: ''' Representa una consulta, cachea la", "query._limit self._connection = connection = self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query,", "bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc, list) and doc or [doc] class", "asyncio.get_event_loop() self._host = host = host or connection.default_host self._port = port = port", "el numero de elementos que alcanza esta query ''' return asyncio.Future() # getitem", "docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc, list) and doc", "__setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc =", "the cursor here item = deque.popleft() ## process item item = self._process_item(item) return", "= query._skip self._limit = query._limit self._connection = connection = self._client.connection() self._future = future", "self._database = database # proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def", "self._databases_proxy = MappingProxy(databases) self._is_connected = False self._cursors = set() self._server_version = None @property", "query self._deque = collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query", "range(connections) ] self._databases = databases = {} self._databases_proxy = MappingProxy(databases) self._is_connected = False", "class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self,", "con el numero de elementos que alcanza esta query ''' return asyncio.Future() 
#", "returna un future que se resolverá con el numero de elementos que alcanza", "_empty_doc, 0, 1) class Query: ''' Representa una consulta, cachea la codificacion para", "projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection = collection self._query = query self._encoded_query", "def __missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode =", "self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if cursor_id:", "or asyncio.get_event_loop() self._host = host = host or connection.default_host self._port = port =", "port=None, connections=8): ''' ''' self._loop = loop = loop or asyncio.get_event_loop() self._host =", "self._is_connected = True def _connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected:", "self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost)", "async def __aiter__(self): ''' returns a cursor. 
''' return Cursor(self) def __len__(self): '''", "0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self): deque = self._deque if not deque:", "self._name = name self._collections = collections = {} self._collections_proxy = MappingProxy(collections) self._cmd =", "from .connection import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict):", "def __init__(self, loop=None, host=None, port=None, connections=8): ''' ''' self._loop = loop = loop", "return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async", "raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion", "Database(self, name) self._databases[name] = database return database class Database: def __init__(self, client, name):", "property(lambda self: self._collections_proxy) def collection(self, name): collection = self._collections.get(name, None) if collection is", "query, projection, 0, 1) def __getitem__(self, id): return Query(self, {'_id': id}, _empty_doc, 0,", "slice -> cursor class Cursor: ''' ''' def __init__(self, query): self._client = query._client", "Collection(self, '$cmd') client = property(lambda self: self._client) name = property(lambda self: self._name) collections", "reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def", "b''.join( _bson_encode(doc) for doc in docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw, 
odict)", "= {} self._databases_proxy = MappingProxy(databases) self._is_connected = False self._cursors = set() self._server_version =", "def __getitem__(self, id): return Query(self, {'_id': id}, _empty_doc, 0, 1) class Query: '''", "bson_encode(projection) or b'' self._skip = skip self._limit = limit self._tailable = False self._exhaust", "self._collections_proxy) def collection(self, name): collection = self._collections.get(name, None) if collection is None: collection", "def collection(self, name): collection = self._collections.get(name, None) if collection is None: collection =", "query self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection = projection and bson_encode(projection) or", "host = host or connection.default_host self._port = port = port or connection.default_port self._next_connection", "self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self): deque = self._deque", "or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future = None ## stop the cursor", "type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__", "name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return Query(self, query, projection)", "= {} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd') client = property(lambda self:", "future is None: raise StopAsyncIteration try: reply = await future except: ## make", "return Query(self, {'_id': id}, _empty_doc, 0, 1) class Query: ''' Representa una consulta,", "name) self._databases[name] = database return database class Database: def __init__(self, client, name): self._client", "disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def 
_connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if not", "doc in docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc, list)", "self._databases_proxy async def connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for c in self._connection_pool),", "# batch_size = 100 def __init__(self, database, name): self._client = database._client self._database =", "or b'' self._skip = skip self._limit = limit self._tailable = False self._exhaust =", "LowLevel (protocol) classes ''' #cybson import collections, asyncio, bson, random from .connection import", "self._databases.get(name, None) if database is None: database = Database(self, name) self._databases[name] = database", "= bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return", "self._client) name = property(lambda self: self._name) collections = property(lambda self: self._collections_proxy) def collection(self,", "= collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection =", "databases(self): return self._databases_proxy async def connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for c", "estar activa, #self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool) #return connection return random.sample(self._connection_pool,", "self._deque if not deque: future = self._future if future is None: raise StopAsyncIteration", "comunicacion (cursores esperando datos) ''' # batch_size = 100 def __init__(self, database, name):", "def __len__(self): ''' returna un future que se resolverá con el numero de", "usos. 
''' def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection", "self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future = None ## stop", "if self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id = reply.cursor_id", "self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd') client = property(lambda self: self._client) name", "= query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip =", "__init__(self, database, name): self._client = database._client self._database = database # proxy(database) self._name =", "self._server_version = None @property def databases(self): return self._databases_proxy async def connect(self): disconnection_futures =", "name self._collections = collections = {} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd')", "def cmd(**parameters): # odict((command, 1) + parameters.items()) # return cmd class Collection: '''", "return Cursor(self) def __len__(self): ''' returna un future que se resolverá con el", "= None @property def databases(self): return self._databases_proxy async def connect(self): disconnection_futures = await", "self._connection_pool[next_connection_idx] # la conexion debe estar activa, #self._next_connection_idx = (next_connection_idx + 1) %", "cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future", "Query(self, query, projection) def find_one(self, query, projection=_empty_doc): return Query(self, query, projection, 0, 1)", "connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection = 
self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection", "# la conexion debe estar activa, #self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool)", "resolverá con el numero de elementos que alcanza esta query ''' return asyncio.Future()", "una consulta, cachea la codificacion para futuros usos. ''' def __init__(self, collection, query,", "__getitem__(self, id): return Query(self, {'_id': id}, _empty_doc, 0, 1) class Query: ''' Representa", "-> LowLevel (protocol) classes ''' #cybson import collections, asyncio, bson, random from .connection", "## try items = bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR deque.extend(items) if self._limit:", "__delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode =", "cursor_id = reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length),", "is None: raise StopAsyncIteration try: reply = await future except: ## make reconection", "bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in docs ) def bson_decode_multi(raw): doc =", "future = self._future if future is None: raise StopAsyncIteration try: reply = await", "def __init__(self, query): self._client = query._client self._query = query self._deque = collections.deque() self._cursor_id", "database(self, name): database = self._databases.get(name, None) if database is None: database = Database(self,", "__aiter__(self): ''' returns a cursor. 
''' return Cursor(self) def __len__(self): ''' returna un", "= query._encoded_projection self._batch_length = 25 self._skip = query._skip self._limit = query._limit self._connection =", "asyncio, bson, random from .connection import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError):", "skip=0, limit=None): self._client = collection._client self._collection = collection self._query = query self._encoded_query =", "self._client = database._client self._database = database # proxy(database) self._name = name self._cstr_name =", "or connection.default_host self._port = port = port or connection.default_port self._next_connection = 0 self._connection_pool", "= query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip = query._skip self._limit =", "''' contempla: sesiones (colleciones) sistemas de comunicacion (cursores esperando datos) ''' # batch_size", "client self._name = name self._collections = collections = {} self._collections_proxy = MappingProxy(collections) self._cmd", "collection self._query = query self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection = projection", "+ 1) % len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def database(self, name): database", "## make reconection and request new query raise ## try items = bson_decode_multi(reply.bson_payload)", "= None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length =", "is None: collection = Collection(self, name) self._collections[name] = collection return collection # #", "raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all =", "self._future if future is None: raise StopAsyncIteration try: reply = await future 
except:", "= type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ =", "[ Connection(loop, host, port) for n in range(connections) ] self._databases = databases =", "-> cursor class Cursor: ''' ''' def __init__(self, query): self._client = query._client self._query", ") else: self._future = None ## stop the cursor here item = deque.popleft()", "= self._future if future is None: raise StopAsyncIteration try: reply = await future", "except: ## make reconection and request new query raise ## try items =", "que se resolverá con el numero de elementos que alcanza esta query '''", "de comunicacion (cursores esperando datos) ''' # batch_size = 100 def __init__(self, database,", "id): return Query(self, {'_id': id}, _empty_doc, 0, 1) class Query: ''' Representa una", "= collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode", "query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection = collection self._query = query", "self._is_connected = False raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx]", "activa, #self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0]", "def __getattr__(self, command): # async def cmd(**parameters): # odict((command, 1) + parameters.items()) #", "= query self._deque = collections.deque() self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query =", "self._query = query self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection = projection and", "se resolverá con el numero de elementos que alcanza esta query ''' 
return", "100 def __init__(self, database, name): self._client = database._client self._database = database # proxy(database)", "self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self): deque =", "limit=None): self._client = collection._client self._collection = collection self._query = query self._encoded_query = bson_encode(query)", "print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False raise NotImplementedError", "@reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected =", "= self._connection_pool[next_connection_idx] # la conexion debe estar activa, #self._next_connection_idx = (next_connection_idx + 1)", "self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length),", "projection=_empty_doc): return Query(self, query, projection, 0, 1) def __getitem__(self, id): return Query(self, {'_id':", "Database: def __init__(self, client, name): self._client = client self._name = name self._collections =", "1) + parameters.items()) # return cmd class Collection: ''' contempla: sesiones (colleciones) sistemas", "make reconection and request new query raise ## try items = bson_decode_multi(reply.bson_payload) ##", "if not deque: future = self._future if future is None: raise StopAsyncIteration try:", "None) if collection is None: collection = Collection(self, name) self._collections[name] = collection return", "__missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode 
_bson_decode = bson.BSON.decode", "database, name): self._client = database._client self._database = database # proxy(database) self._name = name", "''' return Cursor(self) def __len__(self): ''' returna un future que se resolverá con", "databases = {} self._databases_proxy = MappingProxy(databases) self._is_connected = False self._cursors = set() self._server_version", "raise StopAsyncIteration try: reply = await future except: ## make reconection and request", "for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self, connection): print('connection lost',", "Representa una consulta, cachea la codificacion para futuros usos. ''' def __init__(self, collection,", "odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in docs ) def bson_decode_multi(raw):", "__init__(self, loop=None, host=None, port=None, connections=8): ''' ''' self._loop = loop = loop or", "'$cmd') client = property(lambda self: self._client) name = property(lambda self: self._name) collections =", "self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if", "None ## stop the cursor here item = deque.popleft() ## process item item", "client, name): self._client = client self._name = name self._collections = collections = {}", "= collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict())", "= False raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] #", "''' def __init__(self, query): self._client = query._client self._query = query self._deque = collections.deque()", "self._is_connected = False self._cursors = set() self._server_version = None @property def databases(self): 
return", "key): raise AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all", "projection, 0, 1) def __getitem__(self, id): return Query(self, {'_id': id}, _empty_doc, 0, 1)", "import collections, asyncio, bson, random from .connection import Connection MappingProxy = type(type.__dict__) class", "random.sample(self._connection_pool, 1)[0] def database(self, name): database = self._databases.get(name, None) if database is None:", "collection is None: collection = Collection(self, name) self._collections[name] = collection return collection #", "= query._client self._query = query self._deque = collections.deque() self._cursor_id = None self._cstr_collection =", "client = property(lambda self: self._client) name = property(lambda self: self._name) collections = property(lambda", "self._cmd = Collection(self, '$cmd') client = property(lambda self: self._client) name = property(lambda self:", "raise BSON DECODE ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned", "__init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client self._collection = collection self._query", "port or connection.default_port self._next_connection = 0 self._connection_pool = [ Connection(loop, host, port) for", "= [ Connection(loop, host, port) for n in range(connections) ] self._databases = databases", "Collection: ''' contempla: sesiones (colleciones) sistemas de comunicacion (cursores esperando datos) ''' #", "self._exhaust = False async def __aiter__(self): ''' returns a cursor. ''' return Cursor(self)", "bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): # return", "def __aiter__(self): ''' returns a cursor. 
''' return Cursor(self) def __len__(self): ''' returna", "True def _connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection", "HighLevel -> LowLevel (protocol) classes ''' #cybson import collections, asyncio, bson, random from", "database is None: database = Database(self, name) self._databases[name] = database return database class", "else: self._future = None ## stop the cursor here item = deque.popleft() ##", "def __init__(self, database, name): self._client = database._client self._database = database # proxy(database) self._name", "bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join(", "= await asyncio.gather( *(c.connect() for c in self._connection_pool), loop = self._loop ) for", "deque = self._deque if not deque: future = self._future if future is None:", "disconnection_futures = await asyncio.gather( *(c.connect() for c in self._connection_pool), loop = self._loop )", "disconnect(): self._is_connected = False raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection =", "''' #cybson import collections, asyncio, bson, random from .connection import Connection MappingProxy =", "cmd class Collection: ''' contempla: sesiones (colleciones) sistemas de comunicacion (cursores esperando datos)", "-= reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if cursor_id: self._future", "port) for n in range(connections) ] self._databases = databases = {} self._databases_proxy =", "## stop the cursor here item = deque.popleft() ## process item item =", "la codificacion para futuros usos. 
''' def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None):", "__init__(self, client, name): self._client = client self._name = name self._collections = collections =", "if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else:", "= MappingProxy(collections) self._cmd = Collection(self, '$cmd') client = property(lambda self: self._client) name =", "= cursor_id = reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF,", "= connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async def", "# getitem -> future # setitem -> future(error) # slice -> cursor class", "query raise ## try items = bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR deque.extend(items)", "self._collections.get(name, None) if collection is None: collection = Collection(self, name) self._collections[name] = collection", "= port or connection.default_port self._next_connection = 0 self._connection_pool = [ Connection(loop, host, port)", "-> future(error) # slice -> cursor class Cursor: ''' ''' def __init__(self, query):", "return b''.join( _bson_encode(doc) for doc in docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw,", "*(c.connect() for c in self._connection_pool), loop = self._loop ) for disconnection_future in disconnection_futures:", "self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self, connection):", "__len__(self): ''' returna un future que se resolverá con el numero de elementos", "MappingProxy(databases) self._is_connected = False self._cursors = set() self._server_version = None @property def databases(self):", "MappingProxy = type(type.__dict__) 
class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__", "bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip", "item = deque.popleft() ## process item item = self._process_item(item) return item def _process_item(self,", "odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key):", "database._client self._database = database # proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode()))", "alcanza esta query ''' return asyncio.Future() # getitem -> future # setitem ->", "loop = loop or asyncio.get_event_loop() self._host = host = host or connection.default_host self._port", "class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__", "self: self._collections_proxy) def collection(self, name): collection = self._collections.get(name, None) if collection is None:", "Client: def __init__(self, loop=None, host=None, port=None, connections=8): ''' ''' self._loop = loop =", "self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip = query._skip self._limit = query._limit self._connection", "= None ## stop the cursor here item = deque.popleft() ## process item", "sistemas de comunicacion (cursores esperando datos) ''' # batch_size = 100 def __init__(self,", "cursor class Cursor: ''' ''' def __init__(self, query): self._client = query._client self._query =", "''' ''' def __init__(self, query): self._client = query._client self._query = query self._deque =", "return Query(self, query, projection, 0, 1) def 
__getitem__(self, id): return Query(self, {'_id': id},", "#next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion debe estar activa, #self._next_connection_idx", "database return database class Database: def __init__(self, client, name): self._client = client self._name", "self._host = host = host or connection.default_host self._port = port = port or", "= self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion debe estar activa, #self._next_connection_idx =", "= bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned", "self._databases = databases = {} self._databases_proxy = MappingProxy(databases) self._is_connected = False self._cursors =", "''' returns a cursor. ''' return Cursor(self) def __len__(self): ''' returna un future", "self._loop = loop = loop or asyncio.get_event_loop() self._host = host = host or", "self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip,", "collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise AttributeKeyError(key) _empty_doc", "codificacion para futuros usos. ''' def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client", "self._encoded_query = bson_encode(query) self._projection = projection self._encoded_projection = projection and bson_encode(projection) or b''", "-> future # setitem -> future(error) # slice -> cursor class Cursor: '''", "''' Representa una consulta, cachea la codificacion para futuros usos. 
''' def __init__(self,", "await future except: ## make reconection and request new query raise ## try", "= b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return Query(self, query, projection) def find_one(self,", "pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def", "bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def", "= loop or asyncio.get_event_loop() self._host = host = host or connection.default_host self._port =", "classes ''' #cybson import collections, asyncio, bson, random from .connection import Connection MappingProxy", "_connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection = self._loop.create_task(connection.connect())", "cursor. ''' return Cursor(self) def __len__(self): ''' returna un future que se resolverá", "disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self, connection): print('connection lost', connection)", "futuros usos. ''' def __init__(self, collection, query, projection=_empty_doc, skip=0, limit=None): self._client = collection._client", "def _connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection =", "len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def database(self, name): database = self._databases.get(name, None)", "#return connection return random.sample(self._connection_pool, 1)[0] def database(self, name): database = self._databases.get(name, None) if", "returns a cursor. 
''' return Cursor(self) def __len__(self): ''' returna un future que", "% len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def database(self, name): database = self._databases.get(name,", "= port = port or connection.default_port self._next_connection = 0 self._connection_pool = [ Connection(loop,", "MappingProxy(collections) self._cmd = Collection(self, '$cmd') client = property(lambda self: self._client) name = property(lambda", "set() self._server_version = None @property def databases(self): return self._databases_proxy async def connect(self): disconnection_futures", "= loop = loop or asyncio.get_event_loop() self._host = host = host or connection.default_host", "= host or connection.default_host self._port = port = port or connection.default_port self._next_connection =", "Query(self, {'_id': id}, _empty_doc, 0, 1) class Query: ''' Representa una consulta, cachea", "async def __anext__(self): deque = self._deque if not deque: future = self._future if", "collection = self._collections.get(name, None) if collection is None: collection = Collection(self, name) self._collections[name]", "''' return asyncio.Future() # getitem -> future # setitem -> future(error) # slice", "in self._connection_pool), loop = self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected =", "Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__", "self._tailable = False self._exhaust = False async def __aiter__(self): ''' returns a cursor.", "reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False raise", "= databases = {} self._databases_proxy = 
MappingProxy(databases) self._is_connected = False self._cursors = set()", "collection._client self._collection = collection self._query = query self._encoded_query = bson_encode(query) self._projection = projection", "StopAsyncIteration try: reply = await future except: ## make reconection and request new", "] self._databases = databases = {} self._databases_proxy = MappingProxy(databases) self._is_connected = False self._cursors", "<reponame>jdavidls/aiomongodb<gh_stars>0 ''' HighLevel -> LowLevel (protocol) classes ''' #cybson import collections, asyncio, bson,", "#cybson import collections, asyncio, bson, random from .connection import Connection MappingProxy = type(type.__dict__)", "async def disconnect(): self._is_connected = False raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx", "None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25", "bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs):", "= database # proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self,", "query ''' return asyncio.Future() # getitem -> future # setitem -> future(error) #", "0, 1) def __getitem__(self, id): return Query(self, {'_id': id}, _empty_doc, 0, 1) class", "host=None, port=None, connections=8): ''' ''' self._loop = loop = loop or asyncio.get_event_loop() self._host", "class Client: def __init__(self, loop=None, host=None, port=None, connections=8): ''' ''' self._loop = loop", "not self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection)", "numero de elementos que alcanza 
esta query ''' return asyncio.Future() # getitem ->", "self._connection = connection = self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection,", "NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion debe", "return cmd class Collection: ''' contempla: sesiones (colleciones) sistemas de comunicacion (cursores esperando", "AttributeKeyError(key) _empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all", "# proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc):", "bson.BSON.decode _bson_decode_all = bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw,", "Query: ''' Representa una consulta, cachea la codificacion para futuros usos. 
''' def", "BSON DECODE ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id", "self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected", "self._cursor_id = None self._cstr_collection = query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length", "min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self): deque = self._deque if", "# slice -> cursor class Cursor: ''' ''' def __init__(self, query): self._client =", "''' self._loop = loop = loop or asyncio.get_event_loop() self._host = host = host", "name.encode())) def find(self, query, projection=_empty_doc): return Query(self, query, projection) def find_one(self, query, projection=_empty_doc):", "collections = {} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd') client = property(lambda", "asyncio.Future() # getitem -> future # setitem -> future(error) # slice -> cursor", "or [doc] class Client: def __init__(self, loop=None, host=None, port=None, connections=8): ''' ''' self._loop", "contempla: sesiones (colleciones) sistemas de comunicacion (cursores esperando datos) ''' # batch_size =", "if not self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future): print('reconnection made', connection)", "import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ =", "connection return random.sample(self._connection_pool, 1)[0] def database(self, 
name): database = self._databases.get(name, None) if database", "items = bson_decode_multi(reply.bson_payload) ## raise BSON DECODE ERROR deque.extend(items) if self._limit: self._limit -=", "self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future = None ##", "= self._loop ) for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self,", "= self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF,", "self._client = collection._client self._collection = collection self._query = query self._encoded_query = bson_encode(query) self._projection", "here item = deque.popleft() ## process item item = self._process_item(item) return item def", "connections=8): ''' ''' self._loop = loop = loop or asyncio.get_event_loop() self._host = host", "cursor here item = deque.popleft() ## process item item = self._process_item(item) return item", "AttributeKeyError(AttributeError, KeyError): pass class odict(collections.OrderedDict): __getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ =", "= bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def", "self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False raise NotImplementedError def connection(self): #next_connection_idx", "class Database: def __init__(self, client, name): self._client = client self._name = name self._collections", "def find_one(self, query, projection=_empty_doc): return Query(self, query, projection, 0, 1) def __getitem__(self, id):", "odict) return isinstance(doc, list) and doc or [doc] class Client: 
def __init__(self, loop=None,", "database # proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query,", "proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return", "stop the cursor here item = deque.popleft() ## process item item = self._process_item(item)", "in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection)", "or connection.default_port self._next_connection = 0 self._connection_pool = [ Connection(loop, host, port) for n", "[doc] class Client: def __init__(self, loop=None, host=None, port=None, connections=8): ''' ''' self._loop =", "Cursor: ''' ''' def __init__(self, query): self._client = query._client self._query = query self._deque", "return isinstance(doc, list) and doc or [doc] class Client: def __init__(self, loop=None, host=None,", "#self._next_connection_idx = (next_connection_idx + 1) % len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def", "def __init__(self, client, name): self._client = client self._name = name self._collections = collections", "and request new query raise ## try items = bson_decode_multi(reply.bson_payload) ## raise BSON", "and doc or [doc] class Client: def __init__(self, loop=None, host=None, port=None, connections=8): '''", "connection) self._connection_pool.add(connection) disconnection_future.add_done_callback(self._connection_lost) async def disconnect(): self._is_connected = False raise NotImplementedError def connection(self):", "self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la conexion debe estar activa, #self._next_connection_idx = (next_connection_idx", "Connection(loop, host, port) for n in 
range(connections) ] self._databases = databases = {}", "query, projection=_empty_doc): return Query(self, query, projection) def find_one(self, query, projection=_empty_doc): return Query(self, query,", "= query._limit self._connection = connection = self._client.connection() self._future = future = connection.OP_QUERY( self._cstr_collection,", "b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return Query(self, query, projection) def find_one(self, query,", "False self._cursors = set() self._server_version = None @property def databases(self): return self._databases_proxy async", "''' HighLevel -> LowLevel (protocol) classes ''' #cybson import collections, asyncio, bson, random", "projection=_empty_doc): return Query(self, query, projection) def find_one(self, query, projection=_empty_doc): return Query(self, query, projection,", "query): self._client = query._client self._query = query self._deque = collections.deque() self._cursor_id = None", "if future is None: raise StopAsyncIteration try: reply = await future except: ##", "= 25 self._skip = query._skip self._limit = query._limit self._connection = connection = self._client.connection()", "doc = _bson_decode_all(raw, odict) return isinstance(doc, list) and doc or [doc] class Client:", "def disconnect(): self._is_connected = False raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection", "= limit self._tailable = False self._exhaust = False async def __aiter__(self): ''' returns", "name): database = self._databases.get(name, None) if database is None: database = Database(self, name)", "name): self._client = database._client self._database = database # proxy(database) self._name = name self._cstr_name", "collection(self, name): collection = self._collections.get(name, None) if collection is None: collection = Collection(self,", "= collection return collection # # def __getattr__(self, command): # async def 
cmd(**parameters):", "b'' self._skip = skip self._limit = limit self._tailable = False self._exhaust = False", "1) % len(self._connection_pool) #return connection return random.sample(self._connection_pool, 1)[0] def database(self, name): database =", "try: reply = await future except: ## make reconection and request new query", "= self._deque if not deque: future = self._future if future is None: raise", "__getattr__ = collections.OrderedDict.__getitem__ __setattr__ = collections.OrderedDict.__setitem__ __delattr__ = collections.OrderedDict.__delitem__ def __missing__(self, key): raise", "lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback def reconnection_made(disconnection_future):", "= database._client self._database = database # proxy(database) self._name = name self._cstr_name = b'.'.join((database._name.encode(),", "bson, random from .connection import Connection MappingProxy = type(type.__dict__) class AttributeKeyError(AttributeError, KeyError): pass", "def connect(self): disconnection_futures = await asyncio.gather( *(c.connect() for c in self._connection_pool), loop =", "= self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or 0xFFFFFFFF, self._batch_length), cursor_id ) else: self._future = None", "datos) ''' # batch_size = 100 def __init__(self, database, name): self._client = database._client", "deque: future = self._future if future is None: raise StopAsyncIteration try: reply =", "= property(lambda self: self._name) collections = property(lambda self: self._collections_proxy) def collection(self, name): collection", "self._cursors = set() self._server_version = None @property def databases(self): return self._databases_proxy async def", "command): # async def cmd(**parameters): # odict((command, 1) + parameters.items()) # return cmd", "class Query: ''' Representa una consulta, cachea la codificacion 
para futuros usos. '''", "#def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs): return", "# odict((command, 1) + parameters.items()) # return cmd class Collection: ''' contempla: sesiones", "a cursor. ''' return Cursor(self) def __len__(self): ''' returna un future que se", "un future que se resolverá con el numero de elementos que alcanza esta", "disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self, connection): print('connection lost', connection) self._connection_pool.remove(connection) if", "name = property(lambda self: self._name) collections = property(lambda self: self._collections_proxy) def collection(self, name):", "reply = await future except: ## make reconection and request new query raise", "False raise NotImplementedError def connection(self): #next_connection_idx = self._next_connection_idx #connection = self._connection_pool[next_connection_idx] # la", "self._connection_pool = [ Connection(loop, host, port) for n in range(connections) ] self._databases =", "None: raise StopAsyncIteration try: reply = await future except: ## make reconection and", "# async def cmd(**parameters): # odict((command, 1) + parameters.items()) # return cmd class", "{} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd') client = property(lambda self: self._client)", "__getattr__(self, command): # async def cmd(**parameters): # odict((command, 1) + parameters.items()) # return", "in docs ) def bson_decode_multi(raw): doc = _bson_decode_all(raw, odict) return isinstance(doc, list) and", "= skip self._limit = limit self._tailable = False self._exhaust = False async def", "find_one(self, query, projection=_empty_doc): return Query(self, query, projection, 0, 1) def __getitem__(self, id): return", "_bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict) def 
bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc", "= projection self._encoded_projection = projection and bson_encode(projection) or b'' self._skip = skip self._limit", "esperando datos) ''' # batch_size = 100 def __init__(self, database, name): self._client =", "= host = host or connection.default_host self._port = port = port or connection.default_port", "de elementos que alcanza esta query ''' return asyncio.Future() # getitem -> future", "self._skip = skip self._limit = limit self._tailable = False self._exhaust = False async", "que alcanza esta query ''' return asyncio.Future() # getitem -> future # setitem", "batch_size = 100 def __init__(self, database, name): self._client = database._client self._database = database", "ERROR deque.extend(items) if self._limit: self._limit -= reply.number_returned self._skip += reply.number_returned self._cursor_id = cursor_id", "= property(lambda self: self._client) name = property(lambda self: self._name) collections = property(lambda self:", "return random.sample(self._connection_pool, 1)[0] def database(self, name): database = self._databases.get(name, None) if database is", "0 self._connection_pool = [ Connection(loop, host, port) for n in range(connections) ] self._databases", "name) self._collections[name] = collection return collection # # def __getattr__(self, command): # async", "# return cmd class Collection: ''' contempla: sesiones (colleciones) sistemas de comunicacion (cursores", "self._cstr_name = b'.'.join((database._name.encode(), name.encode())) def find(self, query, projection=_empty_doc): return Query(self, query, projection) def", "projection and bson_encode(projection) or b'' self._skip = skip self._limit = limit self._tailable =", "query, projection) def find_one(self, query, projection=_empty_doc): return Query(self, query, projection, 0, 1) def", "return Query(self, query, projection) def find_one(self, query, projection=_empty_doc): return Query(self, query, projection, 0,", 
"class Cursor: ''' ''' def __init__(self, query): self._client = query._client self._query = query", "+ parameters.items()) # return cmd class Collection: ''' contempla: sesiones (colleciones) sistemas de", "= deque.popleft() ## process item item = self._process_item(item) return item def _process_item(self, item):", "#connection = self._connection_pool[next_connection_idx] # la conexion debe estar activa, #self._next_connection_idx = (next_connection_idx +", ") for disconnection_future in disconnection_futures: disconnection_future.add_done_callback(self._connection_lost) self._is_connected = True def _connection_lost(self, connection): print('connection", "projection self._encoded_projection = projection and bson_encode(projection) or b'' self._skip = skip self._limit =", "query._collection._cstr_name self._encoded_query = query._encoded_query self._encoded_projection = query._encoded_projection self._batch_length = 25 self._skip = query._skip", "False async def __aiter__(self): ''' returns a cursor. 
''' return Cursor(self) def __len__(self):", "name): collection = self._collections.get(name, None) if collection is None: collection = Collection(self, name)", "# setitem -> future(error) # slice -> cursor class Cursor: ''' ''' def", "return collection # # def __getattr__(self, command): # async def cmd(**parameters): # odict((command,", "self._cursor_id = cursor_id = reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit or", "bson_decode(raw): return _bson_decode(raw, odict) def bson_encode_multi(docs): return b''.join( _bson_encode(doc) for doc in docs", "= name self._collections = collections = {} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self,", "request new query raise ## try items = bson_decode_multi(reply.bson_payload) ## raise BSON DECODE", "database = self._databases.get(name, None) if database is None: database = Database(self, name) self._databases[name]", "= Database(self, name) self._databases[name] = database return database class Database: def __init__(self, client,", "Cursor(self) def __len__(self): ''' returna un future que se resolverá con el numero", "connection): print('connection lost', connection) self._connection_pool.remove(connection) if not self._is_connected: return reconnection = self._loop.create_task(connection.connect()) @reconnection.add_done_callback", "self._skip += reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE(", "reply.number_returned self._cursor_id = cursor_id = reply.cursor_id if cursor_id: self._future = self._connection.OP_GET_MORE( self._cstr_collection, min(self._limit", "connection.OP_QUERY( self._cstr_collection, self._encoded_query, self._encoded_projection, min(self._limit or 0xFFFFFFFF, self._batch_length), self._skip, ) async def __anext__(self):", "collection # # def __getattr__(self, command): # async def cmd(**parameters): # odict((command, 
1)", "= self._databases.get(name, None) if database is None: database = Database(self, name) self._databases[name] =", "setitem -> future(error) # slice -> cursor class Cursor: ''' ''' def __init__(self,", "_bson_decode_all = bson.decode_all #def bson_encode(doc): # return _bson_encode(doc) def bson_decode(raw): return _bson_decode(raw, odict)", "= projection and bson_encode(projection) or b'' self._skip = skip self._limit = limit self._tailable", "_empty_doc = MappingProxy(odict()) bson_encode = bson.BSON.encode _bson_decode = bson.BSON.decode _bson_decode_all = bson.decode_all #def", "= collections = {} self._collections_proxy = MappingProxy(collections) self._cmd = Collection(self, '$cmd') client =", "limit self._tailable = False self._exhaust = False async def __aiter__(self): ''' returns a", "def database(self, name): database = self._databases.get(name, None) if database is None: database =" ]
[ "help=\"Output file with sensor embeddings. (e.g. data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of", "write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks", "Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f:", "open(edgelist_file, 'w') as f: n_nodes = adj.shape[0] for i in range(n_nodes): for j", "+ '\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G", "walk in walks] model = Word2Vec( walks, size = dimensions, window = 10,", "walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ == \"__main__\": parser", "= iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings", "data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str, walk))", "file with sensor embeddings. (e.g. 
data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of random", "q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p,", "nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks,", "scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file)", "random walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args = parser.parse_args()", "write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes =", "dimensions, iter, output_file): walks = [list(map(str, walk)) for walk in walks] model =", "np import networkx as nx from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj", "type=int, default=1000, help='Number of iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file =", "data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number", "default=1000, help='Number of iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath,", "networkx as nx from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file,", "parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build", "p = 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G =", "adjacency 
matrix to build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor", "window = 10, min_count = 0, sg = 1, workers = 4, iter", "f.write(str(i) + ' ' + str(j) + ' ' + str(w) + '\\n')", "main(args): # Author settings is_directed = True dimensions = 64 window_size = 10", "G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ == \"__main__\": parser = argparse.ArgumentParser()", "1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs()", "np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes = adj.shape[0] for i in", "args.SE_file) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input", "\"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build graph.\" ) parser.add_argument( \"--SE_file\",", "import Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as", "read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks,", "dimensions, args.iter, args.SE_file) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str,", "model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed = True dimensions = 64 window_size", "= scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter,", "'w') as f: n_nodes = adj.shape[0] for i in range(n_nodes): for j in", "= 1 
write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q)", "iter, output_file): walks = [list(map(str, walk)) for walk in walks] model = Word2Vec(", "edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions, iter, output_file): walks =", "for i in range(n_nodes): for j in range(n_nodes): w = adj[i][j] f.write(str(i) +", "walks = [list(map(str, walk)) for walk in walks] model = Word2Vec( walks, size", "output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed = True dimensions = 64", "import os import scripts import numpy as np import networkx as nx from", "q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ ==", "output_file): walks = [list(map(str, walk)) for walk in walks] model = Word2Vec( walks,", "graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor embeddings. (e.g. 
data/sensor_graph/SE.txt)\", )", "= 0, sg = 1, workers = 4, iter = iter) print ('Writing", "== \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix", "settings is_directed = True dimensions = 64 window_size = 10 p = 2", "window_size = 10 p = 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G =", "min_count = 0, sg = 1, workers = 4, iter = iter) print", "sg = 1, workers = 4, iter = iter) print ('Writing embedding to',", "64 window_size = 10 p = 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G", "workers = 4, iter = iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def", "f: n_nodes = adj.shape[0] for i in range(n_nodes): for j in range(n_nodes): w", "as np import networkx as nx from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file):", "walks] model = Word2Vec( walks, size = dimensions, window = 10, min_count =", "help=\"Input file adjacency matrix to build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file", "= argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build graph.\"", "type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks", "help='Number of iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt')", "2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed,", "if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file", "G.preprocess_transition_probs() walks = 
G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ == \"__main__\":", "basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if not args.SE_file: args.SE_file = os.path.join(basepath,", "in walks] model = Word2Vec( walks, size = dimensions, window = 10, min_count", "help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks per iteration')", "+ str(j) + ' ' + str(w) + '\\n') def read_graph(edgelist_file): G =", "argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build graph.\" )", "('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed = True", "Word2Vec( walks, size = dimensions, window = 10, min_count = 0, sg =", "numpy as np import networkx as nx from gensim.models import Word2Vec def write_edgelist(adj_file,", "= np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes = adj.shape[0] for i", "with sensor embeddings. (e.g. 
data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks')", "as f: n_nodes = adj.shape[0] for i in range(n_nodes): for j in range(n_nodes):", "type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build graph.\" ) parser.add_argument( \"--SE_file\", type=str,", "' + str(w) + '\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),),", "for j in range(n_nodes): w = adj[i][j] f.write(str(i) + ' ' + str(j)", "' ' + str(j) + ' ' + str(w) + '\\n') def read_graph(edgelist_file):", "gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w')", "print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed =", "= 4, iter = iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args):", "= True dimensions = 64 window_size = 10 p = 2 q =", "build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor embeddings. (e.g. 
data/sensor_graph/SE.txt)\",", "of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks per iteration') parser.add_argument('--iter',", "of random walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args =", "edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes = adj.shape[0]", "__name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency", "nx from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with", "args.iter, args.SE_file) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\",", "iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed", "= nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions, iter, output_file):", "is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if", "matrix to build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor embeddings.", "of iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if", "dimensions, window = 10, min_count = 0, sg = 1, workers = 4,", "def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes", "import scripts import numpy as np import networkx as nx from gensim.models import", "file adjacency 
matrix to build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file with", "to build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor embeddings. (e.g.", "walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args = parser.parse_args() basepath", "parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if not args.SE_file: args.SE_file =", "nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str,", "' ' + str(w) + '\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int,", "adj.shape[0] for i in range(n_nodes): for j in range(n_nodes): w = adj[i][j] f.write(str(i)", "sensor embeddings. (e.g. data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks',", "args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if not args.SE_file:", "True dimensions = 64 window_size = 10 p = 2 q = 1", "os import scripts import numpy as np import networkx as nx from gensim.models", "argparse import os import scripts import numpy as np import networkx as nx", "in range(n_nodes): for j in range(n_nodes): w = adj[i][j] f.write(str(i) + ' '", "type=str, help=\"Output file with sensor embeddings. (e.g. 
data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length", "10 p = 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G", "str(w) + '\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return", "Author settings is_directed = True dimensions = 64 window_size = 10 p =", ") parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of", "parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file", "i in range(n_nodes): for j in range(n_nodes): w = adj[i][j] f.write(str(i) + '", "parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random", "read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions,", "dimensions = 64 window_size = 10 p = 2 q = 1 write_edgelist(args.adj_file,", "create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str, walk)) for", "# Author settings is_directed = True dimensions = 64 window_size = 10 p", "to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed = True dimensions =", "range(n_nodes): w = adj[i][j] f.write(str(i) + ' ' + str(j) + ' '", "def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks,", "nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return 
G def learn_embeddings(walks, dimensions, iter, output_file): walks", "from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file,", "= adj.shape[0] for i in range(n_nodes): for j in range(n_nodes): w = adj[i][j]", "= 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G,", "= 10 p = 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file) nx_G = read_graph(args.edgelist_file)", "\"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to", "parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor embeddings. (e.g. data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int,", "random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks per iteration') parser.add_argument('--iter', type=int,", "with open(edgelist_file, 'w') as f: n_nodes = adj.shape[0] for i in range(n_nodes): for", "1, workers = 4, iter = iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file)", "def learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str, walk)) for walk in walks]", "def main(args): # Author settings is_directed = True dimensions = 64 window_size =", "walks, size = dimensions, window = 10, min_count = 0, sg = 1,", "G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions,", "import networkx as nx from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj =", "allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes = adj.shape[0] for i in range(n_nodes):", "os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 
'edgelist.txt') if not args.SE_file: args.SE_file = os.path.join(basepath, 'SE.txt') main(args)", "0, sg = 1, workers = 4, iter = iter) print ('Writing embedding", "as nx from gensim.models import Word2Vec def write_edgelist(adj_file, edgelist_file): adj = np.load(adj_file, allow_pickle=True)[2]", "import argparse import os import scripts import numpy as np import networkx as", "w = adj[i][j] f.write(str(i) + ' ' + str(j) + ' ' +", "+ ' ' + str(w) + '\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file,", "learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument( \"--adj_file\",", "per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args = parser.parse_args() basepath =", "= os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if not args.SE_file: args.SE_file = os.path.join(basepath, 'SE.txt')", "4, iter = iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): #", "in range(n_nodes): w = adj[i][j] f.write(str(i) + ' ' + str(j) + '", "= read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length)", "iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file)", "= adj[i][j] f.write(str(i) + ' ' + str(j) + ' ' + str(w)", "[list(map(str, walk)) for walk in walks] model = Word2Vec( walks, size = dimensions,", "range(n_nodes): for j in range(n_nodes): w = adj[i][j] f.write(str(i) + ' ' +", "' + str(j) + ' ' + str(w) + '\\n') def read_graph(edgelist_file): G", "for walk in walks] model = Word2Vec( walks, size = dimensions, window =", "n_nodes = adj.shape[0] for i in range(n_nodes): for j in range(n_nodes): w =", "= 
G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ == \"__main__\": parser =", "iterations') args = parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if not", "return G def learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str, walk)) for walk", "adj[i][j] f.write(str(i) + ' ' + str(j) + ' ' + str(w) +", "learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str, walk)) for walk in walks] model", "args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument(", "adj = np.load(adj_file, allow_pickle=True)[2] with open(edgelist_file, 'w') as f: n_nodes = adj.shape[0] for", "+ ' ' + str(j) + ' ' + str(w) + '\\n') def", "p, q) G.preprocess_transition_probs() walks = G.simulate_walks(args.num_walks, args.walk_length) learn_embeddings(walks, dimensions, args.iter, args.SE_file) if __name__", "size = dimensions, window = 10, min_count = 0, sg = 1, workers", "embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author settings is_directed = True dimensions", "parser.add_argument( \"--adj_file\", type=str, default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build graph.\" ) parser.add_argument(", "help='Number of random walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations') args", "= Word2Vec( walks, size = dimensions, window = 10, min_count = 0, sg", "= 10, min_count = 0, sg = 1, workers = 4, iter =", ") parser.add_argument( \"--SE_file\", type=str, help=\"Output file with sensor embeddings. (e.g. 
data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length',", "is_directed = True dimensions = 64 window_size = 10 p = 2 q", "str(j) + ' ' + str(w) + '\\n') def read_graph(edgelist_file): G = nx.read_edgelist(", "= dimensions, window = 10, min_count = 0, sg = 1, workers =", "\"--SE_file\", type=str, help=\"Output file with sensor embeddings. (e.g. data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80,", "type=int, default=100, help='Number of random walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of", "= parser.parse_args() basepath = os.path.dirname(args.adj_file) args.edgelist_file = os.path.join(basepath, 'edgelist.txt') if not args.SE_file: args.SE_file", "= 64 window_size = 10 p = 2 q = 1 write_edgelist(args.adj_file, args.edgelist_file)", "default=100, help='Number of random walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number of iterations')", "j in range(n_nodes): w = adj[i][j] f.write(str(i) + ' ' + str(j) +", "'\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def", "import numpy as np import networkx as nx from gensim.models import Word2Vec def", "embeddings. (e.g. data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int,", "args.edgelist_file) nx_G = read_graph(args.edgelist_file) G = scripts.Graph(nx_G, is_directed, p, q) G.preprocess_transition_probs() walks =", "walk)) for walk in walks] model = Word2Vec( walks, size = dimensions, window", "G def learn_embeddings(walks, dimensions, iter, output_file): walks = [list(map(str, walk)) for walk in", "(e.g. 
data/sensor_graph/SE.txt)\", ) parser.add_argument('--walk_length', type=int, default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100,", "<filename>scripts/generateSE.py import argparse import os import scripts import numpy as np import networkx", "model = Word2Vec( walks, size = dimensions, window = 10, min_count = 0,", "G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph()) return G def learn_embeddings(walks, dimensions, iter,", "default=80, help='Length of random walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks per", "10, min_count = 0, sg = 1, workers = 4, iter = iter)", "= 1, workers = 4, iter = iter) print ('Writing embedding to', output_file)", "default=\"data/sensor_graph/adj_mx.pkl\", help=\"Input file adjacency matrix to build graph.\" ) parser.add_argument( \"--SE_file\", type=str, help=\"Output", "walks') parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks per iteration') parser.add_argument('--iter', type=int, default=1000,", "= [list(map(str, walk)) for walk in walks] model = Word2Vec( walks, size =", "parser.add_argument('--num_walks', type=int, default=100, help='Number of random walks per iteration') parser.add_argument('--iter', type=int, default=1000, help='Number", "+ str(w) + '\\n') def read_graph(edgelist_file): G = nx.read_edgelist( edgelist_file, nodetype=int, data=(('weight',float),), create_using=nx.DiGraph())", "scripts import numpy as np import networkx as nx from gensim.models import Word2Vec", "iter = iter) print ('Writing embedding to', output_file) model.wv.save_word2vec_format(output_file) def main(args): # Author" ]
[ "get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables TODO -> parallelize queries :param", "using a context manager. :param params: database connection parameters dictionary :return: psycopg2 connection", "changes should be committed :return: psycopg2 cursor object \"\"\" with get_connection(params) as conn:", "src.log.logger import logger from contextlib import contextmanager @contextmanager def get_connection(params: Dict[str, str]) ->", "conn = psycopg2.connect(**params) yield conn except Exception as e: logger.error(f\"{str(type(e))} during database operation:", "psycopg2 cursor object \"\"\" with get_connection(params) as conn: # Acquire cursor from connection", "Acquire cursor from connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur", "should be committed :return: psycopg2 cursor object \"\"\" with get_connection(params) as conn: #", "JSON formatted results \"\"\" data = { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\":", "connection cursor using a context manager. :param params: database connection parameters dictionary :param", "= psycopg2.connect(**params) yield conn except Exception as e: logger.error(f\"{str(type(e))} during database operation: {e}\")", "whether changes should be committed :return: psycopg2 cursor object \"\"\" with get_connection(params) as", "Close cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from", "import DictCursor from typing import Dict from src.log.logger import logger from contextlib import", "connection if defined. 
logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError: pass @contextmanager def", "TODO -> parallelize queries :param cur: database cursor :return: JSON formatted results \"\"\"", "def get_cursor(params: Dict[str, str], commit: bool = True) -> cursor: \"\"\" Get a", "def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables TODO -> parallelize queries", "get_connection(params: Dict[str, str]) -> connection: \"\"\" Get a connection using a context manager.", "Dict[str, str], commit: bool = True) -> cursor: \"\"\" Get a connection cursor", "commit: conn.commit() finally: # Close cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor):", "sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT * FROM sensors.light\").fetchall(), } return", "logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit() finally:", "during database operation: {e}\") raise e finally: # Close database connection if defined.", "database connection parameters dictionary :param commit: boolean determining whether changes should be committed", "\"\"\" data = { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM", "try: yield cur if commit: conn.commit() finally: # Close cursor logger.debug(\"Closing database cursor.\")", "conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str], commit: bool = True)", "str], commit: bool = True) -> cursor: \"\"\" Get a connection cursor using", "-> connection: \"\"\" Get a connection using a context manager. 
:param params: database", "committed :return: psycopg2 cursor object \"\"\" with get_connection(params) as conn: # Acquire cursor", "a connection using a context manager. :param params: database connection parameters dictionary :return:", "Dict[str, str]) -> connection: \"\"\" Get a connection using a context manager. :param", "Exception as e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise e finally: # Close", "except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str], commit: bool = True) ->", "manager. :param params: database connection parameters dictionary :param commit: boolean determining whether changes", "from sensors' tables TODO -> parallelize queries :param cur: database cursor :return: JSON", "sensors' tables TODO -> parallelize queries :param cur: database cursor :return: JSON formatted", "import logger from contextlib import contextmanager @contextmanager def get_connection(params: Dict[str, str]) -> connection:", "typing import Dict from src.log.logger import logger from contextlib import contextmanager @contextmanager def", "data = { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(),", ":param params: database connection parameters dictionary :return: psycopg2 connection object \"\"\" try: conn", ":param params: database connection parameters dictionary :param commit: boolean determining whether changes should", "context manager. :param params: database connection parameters dictionary :return: psycopg2 connection object \"\"\"", "DictCursor from typing import Dict from src.log.logger import logger from contextlib import contextmanager", "-> cursor: \"\"\" Get a connection cursor using a context manager. 
:param params:", "data from sensors' tables TODO -> parallelize queries :param cur: database cursor :return:", ":param cur: database cursor :return: JSON formatted results \"\"\" data = { \"temperature\":", "conn except Exception as e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise e finally:", "context manager. :param params: database connection parameters dictionary :param commit: boolean determining whether", "parameters dictionary :param commit: boolean determining whether changes should be committed :return: psycopg2", "Dict from src.log.logger import logger from contextlib import contextmanager @contextmanager def get_connection(params: Dict[str,", "bool = True) -> cursor: \"\"\" Get a connection cursor using a context", "cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit() finally: # Close cursor", "UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str], commit: bool = True) -> cursor:", "pass @contextmanager def get_cursor(params: Dict[str, str], commit: bool = True) -> cursor: \"\"\"", "psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables TODO -> parallelize queries :param cur:", ":return: JSON formatted results \"\"\" data = { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(),", "from psycopg2.extensions import connection, cursor from psycopg2.extras import DictCursor from typing import Dict", "using a context manager. 
:param params: database connection parameters dictionary :param commit: boolean", "determining whether changes should be committed :return: psycopg2 cursor object \"\"\" with get_connection(params)", "cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT * FROM", "params: database connection parameters dictionary :param commit: boolean determining whether changes should be", "connection parameters dictionary :return: psycopg2 connection object \"\"\" try: conn = psycopg2.connect(**params) yield", ":param commit: boolean determining whether changes should be committed :return: psycopg2 cursor object", "cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables TODO -> parallelize", ":return: psycopg2 cursor object \"\"\" with get_connection(params) as conn: # Acquire cursor from", "logger.error(f\"{str(type(e))} during database operation: {e}\") raise e finally: # Close database connection if", "{ \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT", "logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables", "parameters dictionary :return: psycopg2 connection object \"\"\" try: conn = psycopg2.connect(**params) yield conn", "a context manager. :param params: database connection parameters dictionary :return: psycopg2 connection object", "finally: # Close database connection if defined. logger.debug(\"Closing database connection\") try: conn.close() except", "import psycopg2 from psycopg2.extensions import connection, cursor from psycopg2.extras import DictCursor from typing", "connection using a context manager. 
:param params: database connection parameters dictionary :return: psycopg2", "import Dict from src.log.logger import logger from contextlib import contextmanager @contextmanager def get_connection(params:", "yield cur if commit: conn.commit() finally: # Close cursor logger.debug(\"Closing database cursor.\") cur.close()", "if defined. logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params:", "= conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit() finally: # Close cursor logger.debug(\"Closing", "cur if commit: conn.commit() finally: # Close cursor logger.debug(\"Closing database cursor.\") cur.close() def", "Get a connection cursor using a context manager. :param params: database connection parameters", "cursor from psycopg2.extras import DictCursor from typing import Dict from src.log.logger import logger", "\"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT *", "commit: bool = True) -> cursor: \"\"\" Get a connection cursor using a", "FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT * FROM sensors.light\").fetchall(), }", "except Exception as e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise e finally: #", "cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables TODO ->", "# Acquire cursor from connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield", "queries :param cur: database cursor :return: JSON formatted results \"\"\" data = {", "raise e finally: # Close database connection if defined. 
logger.debug(\"Closing database connection\") try:", "\"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT * FROM sensors.light\").fetchall(), } return data", "\"\"\" Fetches data from sensors' tables TODO -> parallelize queries :param cur: database", "defined. logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str,", "database cursor :return: JSON formatted results \"\"\" data = { \"temperature\": cur.execute(\"SELECT *", "operation: {e}\") raise e finally: # Close database connection if defined. logger.debug(\"Closing database", "\"\"\" with get_connection(params) as conn: # Acquire cursor from connection logger.debug(\"Obtaining database cursor.\")", "database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors' tables TODO", "yield conn except Exception as e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise e", "Fetches data from sensors' tables TODO -> parallelize queries :param cur: database cursor", "@contextmanager def get_connection(params: Dict[str, str]) -> connection: \"\"\" Get a connection using a", "logger from contextlib import contextmanager @contextmanager def get_connection(params: Dict[str, str]) -> connection: \"\"\"", "from psycopg2.extras import DictCursor from typing import Dict from src.log.logger import logger from", "\"\"\" try: conn = psycopg2.connect(**params) yield conn except Exception as e: logger.error(f\"{str(type(e))} during", "with get_connection(params) as conn: # Acquire cursor from connection logger.debug(\"Obtaining database cursor.\") cur", "Close database connection if defined. 
logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError: pass", "connection object \"\"\" try: conn = psycopg2.connect(**params) yield conn except Exception as e:", "def get_connection(params: Dict[str, str]) -> connection: \"\"\" Get a connection using a context", "get_connection(params) as conn: # Acquire cursor from connection logger.debug(\"Obtaining database cursor.\") cur =", "database connection parameters dictionary :return: psycopg2 connection object \"\"\" try: conn = psycopg2.connect(**params)", "boolean determining whether changes should be committed :return: psycopg2 cursor object \"\"\" with", "object \"\"\" with get_connection(params) as conn: # Acquire cursor from connection logger.debug(\"Obtaining database", "conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit() finally: # Close cursor logger.debug(\"Closing database", "cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit() finally: # Close", "psycopg2.extras import DictCursor from typing import Dict from src.log.logger import logger from contextlib", "@contextmanager def get_cursor(params: Dict[str, str], commit: bool = True) -> cursor: \"\"\" Get", "\"\"\" Get a connection cursor using a context manager. :param params: database connection", "a connection cursor using a context manager. 
:param params: database connection parameters dictionary", "connection parameters dictionary :param commit: boolean determining whether changes should be committed :return:", "from contextlib import contextmanager @contextmanager def get_connection(params: Dict[str, str]) -> connection: \"\"\" Get", "psycopg2 connection object \"\"\" try: conn = psycopg2.connect(**params) yield conn except Exception as", "cursor from connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if", "dictionary :return: psycopg2 connection object \"\"\" try: conn = psycopg2.connect(**params) yield conn except", "import contextmanager @contextmanager def get_connection(params: Dict[str, str]) -> connection: \"\"\" Get a connection", "if commit: conn.commit() finally: # Close cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur:", "finally: # Close cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches", "results \"\"\" data = { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT *", "database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit() finally: #", "# Close cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data", "formatted results \"\"\" data = { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT", "= True) -> cursor: \"\"\" Get a connection cursor using a context manager.", "psycopg2.connect(**params) yield conn except Exception as e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise", "parallelize queries :param cur: database cursor :return: JSON formatted results \"\"\" data =", "commit: boolean determining whether changes should be 
committed :return: psycopg2 cursor object \"\"\"", "import connection, cursor from psycopg2.extras import DictCursor from typing import Dict from src.log.logger", "connection, cursor from psycopg2.extras import DictCursor from typing import Dict from src.log.logger import", "connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if commit: conn.commit()", "connection\") try: conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str], commit: bool", "cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\" Fetches data from sensors'", "True) -> cursor: \"\"\" Get a connection cursor using a context manager. :param", "logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str],", "cursor: \"\"\" Get a connection cursor using a context manager. :param params: database", "e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise e finally: # Close database connection", "{e}\") raise e finally: # Close database connection if defined. logger.debug(\"Closing database connection\")", "manager. :param params: database connection parameters dictionary :return: psycopg2 connection object \"\"\" try:", "be committed :return: psycopg2 cursor object \"\"\" with get_connection(params) as conn: # Acquire", "contextlib import contextmanager @contextmanager def get_connection(params: Dict[str, str]) -> connection: \"\"\" Get a", "-> parallelize queries :param cur: database cursor :return: JSON formatted results \"\"\" data", "params: database connection parameters dictionary :return: psycopg2 connection object \"\"\" try: conn =", "cursor object \"\"\" with get_connection(params) as conn: # Acquire cursor from connection logger.debug(\"Obtaining", "# Close database connection if defined. 
logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError:", "database operation: {e}\") raise e finally: # Close database connection if defined. logger.debug(\"Closing", "e finally: # Close database connection if defined. logger.debug(\"Closing database connection\") try: conn.close()", "contextmanager @contextmanager def get_connection(params: Dict[str, str]) -> connection: \"\"\" Get a connection using", "psycopg2.extensions import connection, cursor from psycopg2.extras import DictCursor from typing import Dict from", "try: conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str], commit: bool =", "cursor using a context manager. :param params: database connection parameters dictionary :param commit:", "a context manager. :param params: database connection parameters dictionary :param commit: boolean determining", "cur: database cursor :return: JSON formatted results \"\"\" data = { \"temperature\": cur.execute(\"SELECT", "str]) -> connection: \"\"\" Get a connection using a context manager. :param params:", "conn: # Acquire cursor from connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try:", "* FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\": cur.execute(\"SELECT * FROM sensors.light\").fetchall(),", ":return: psycopg2 connection object \"\"\" try: conn = psycopg2.connect(**params) yield conn except Exception", "\"\"\" Get a connection using a context manager. :param params: database connection parameters", "from connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor) try: yield cur if commit:", "conn.commit() finally: # Close cursor logger.debug(\"Closing database cursor.\") cur.close() def get_sensors_data(cur: psycopg2.extensions.cursor): \"\"\"", "Get a connection using a context manager. 
:param params: database connection parameters dictionary", "= { \"temperature\": cur.execute(\"SELECT * FROM sensors.temperature\").fetchall(), \"humidity\": cur.execute(\"SELECT * FROM sensors.humidity\").fetchall(), \"light\":", "object \"\"\" try: conn = psycopg2.connect(**params) yield conn except Exception as e: logger.error(f\"{str(type(e))}", "psycopg2 from psycopg2.extensions import connection, cursor from psycopg2.extras import DictCursor from typing import", "from typing import Dict from src.log.logger import logger from contextlib import contextmanager @contextmanager", "connection: \"\"\" Get a connection using a context manager. :param params: database connection", "database connection\") try: conn.close() except UnboundLocalError: pass @contextmanager def get_cursor(params: Dict[str, str], commit:", "get_cursor(params: Dict[str, str], commit: bool = True) -> cursor: \"\"\" Get a connection", "as e: logger.error(f\"{str(type(e))} during database operation: {e}\") raise e finally: # Close database", "try: conn = psycopg2.connect(**params) yield conn except Exception as e: logger.error(f\"{str(type(e))} during database", "tables TODO -> parallelize queries :param cur: database cursor :return: JSON formatted results", "dictionary :param commit: boolean determining whether changes should be committed :return: psycopg2 cursor", "from src.log.logger import logger from contextlib import contextmanager @contextmanager def get_connection(params: Dict[str, str])", "database connection if defined. logger.debug(\"Closing database connection\") try: conn.close() except UnboundLocalError: pass @contextmanager", "cursor :return: JSON formatted results \"\"\" data = { \"temperature\": cur.execute(\"SELECT * FROM", "as conn: # Acquire cursor from connection logger.debug(\"Obtaining database cursor.\") cur = conn.cursor(cursor_factory=DictCursor)" ]
class ConferenceFactory(factory.django.DjangoModelFactory):
    """Build Conference fixtures with a sequenced name and derived slug."""

    class Meta:
        model = Conference

    # Sequence guarantees a unique name per generated conference.
    name = factory.Sequence(lambda n: 'TestCon {}'.format(n))

    @factory.lazy_attribute
    def slug(self):
        """Slug is derived from the generated name."""
        return slugify(self.name)
class DayFactory(factory.django.DjangoModelFactory):
    """Build a single convention Day (2020-01-17, 10:00-19:00)."""

    class Meta:
        model = Day

    conference = factory.SubFactory(ConferenceFactory)
    # Fixed date and opening hours keep generated schedules deterministic.
    day = datetime.date(2020, 1, 17)
    start_time = datetime.time(10)
    end_time = datetime.time(19)
class RoomFactory(factory.django.DjangoModelFactory):
    """Build a 50-seat panel Room without AV equipment."""

    class Meta:
        model = Room

    conference = factory.SubFactory(ConferenceFactory)
    capacity = 50
    category = Room.PANEL
    av = False

    @factory.lazy_attribute
    def name(self):
        """Room names are random city names."""
        return factory.Faker('city').generate()
class TrackFactory(factory.django.DjangoModelFactory):
    """Build a Track spanning the 2020-01-17 17:00 to 2020-01-19 16:00 window."""

    class Meta:
        model = Track

    conference = factory.SubFactory(ConferenceFactory)
    start = datetime.datetime(2020, 1, 17, 17)
    end = datetime.datetime(2020, 1, 19, 16)

    @factory.lazy_attribute
    def name(self):
        """Track names are single random words."""
        return factory.Faker('word').generate()

    @factory.lazy_attribute
    def slug(self):
        """Slug is derived from the generated name."""
        return slugify(self.name)
class PanelistFactory(factory.django.DjangoModelFactory):
    """Build a Panelist who has requested a reading and a signing."""

    class Meta:
        model = Panelist

    conference = factory.SubFactory(ConferenceFactory)
    # Maximum consecutive panels this panelist will sit on.
    inarow = 2
    reading_requested = True
    signing_requested = True

    @factory.lazy_attribute
    def badge_name(self):
        """Badge names are random personal names."""
        return factory.Faker('name').generate()

    @factory.lazy_attribute
    def email(self):
        """Contact address is a random fake email."""
        return factory.Faker('email').generate()

    @factory.lazy_attribute
    def program_name(self):
        """Program listing reuses the badge name verbatim."""
        return self.badge_name

    @factory.lazy_attribute
    def pronouns(self):
        """Pick one pronoun set at random from a fixed pool."""
        return random.choice(['She/Her', 'He/Him', 'They/Them', 'She/They', 'E/Em'])
class PanelFactory(factory.django.DjangoModelFactory):
    """Build a published Panel with randomized panelist and track assignment."""

    class Meta:
        model = Panel

    conference = factory.SubFactory(ConferenceFactory)
    publish = True

    @factory.lazy_attribute
    def description(self):
        """Random six-sentence paragraph, unique per generated panel.

        FIX: previously `.generate()` ran once at class-definition time, so
        every Panel shared the same description text; making it a
        lazy_attribute matches how `title` is generated.
        """
        return factory.Faker('paragraph', nb_sentences=6).generate()

    @factory.lazy_attribute
    def title(self):
        """Random short text title (max 50 characters)."""
        return factory.Faker('text', max_nb_chars=50).generate()

    @factory.post_generation
    def assign_panelists(self, create, extracted, **kwargs):
        """Attach interested (and sometimes required) panelists from `extracted`.

        Assumes `extracted` holds at least 8 panelists, otherwise the
        randrange() bounds below form an empty range and raise ValueError
        -- TODO confirm with callers.
        """
        if not create:
            # Simple build, do nothing.
            return
        if extracted:
            # select interested panelists at random
            intpan = random.randrange(3, len(extracted) // 2)
            intmod = random.randrange(1, len(extracted) // 4)
            for panelist in random.sample(extracted, intpan):
                self.interested_panelists.add(panelist)
            for panelist in random.sample(extracted, intmod):
                self.interested_moderators.add(panelist)
            # roll a d10 to see if this one has required panelists
            # BUG FIX: randrange(1, 10) yields 1-9, so `required == 10` was
            # dead code and no panel ever got required panelists;
            # randint(1, 10) is inclusive and actually rolls a d10.
            required = random.randint(1, 10)
            # if yes, add them.
            if required == 10:
                reqmod = random.randrange(1, 4)
                for panelist in random.sample(extracted, reqmod):
                    # NOTE(review): singular `required_panelist` vs plural
                    # `interested_panelists` above -- confirm against the
                    # Panel model's field names.
                    self.required_panelist.add(panelist)

    @factory.post_generation
    def assign_track(self, create, extracted, **kwargs):
        """Assign this panel to one track chosen by weighted random draw."""
        if not create:
            # Simple build, do nothing.
            return
        if extracted:
            # assign track via a weighted random
            # NOTE(review): weights assume exactly 6 tracks in `extracted`;
            # random.choices raises ValueError on a length mismatch.
            track = random.choices(extracted, weights=[45, 5, 5, 10, 10, 5])
            track[0].panels.add(self)
[ "size, iterations) def test_1(): \"\"\" Runs some tests with all extensions and exclude", "compression param \"list\": [\"zlib\", \"blosc\", None], }, } def clean(): \"\"\" Clean previously", "Runs some tests with all extensions and exclude big dataframe \"\"\" full_test(0, iterations=100)", "comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name,", "tables. There are 3 files with different sizes: small: bike_sharing_daily (64 KB) medium:", "in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func,", "not test_compress or extension not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] =", "out def store_results(data, size, iterations): \"\"\" Store results as a yaml \"\"\" with", "comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list,", "iterations, exclude_formats, test_compress): \"\"\" Test read for one file Args: size: size of", "file sizes out[\"file_size\"] = {} for file in os.listdir(PATH_DATA): name, extension = file.split(\".\")", "[\"zlib\", \"blosc\", None], }, } def clean(): \"\"\" Clean previously created files \"\"\"", "else: if extension not in COMPRESSIONS: continue # Get name of compression parameter", "extension not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args,", "args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all", "= [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all", "keyworded arguments \"\"\" out = [] for _ in tqdm(range(iterations), desc=f\"- 
{extension:8}\", leave=True):", "\"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\": pd.read_csv,", "dataframe and trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__", "size, iterations): \"\"\" Store results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as", "without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test", "arguments \"\"\" out = [] for _ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try:", "} # Also get file sizes out[\"file_size\"] = {} for file in os.listdir(PATH_DATA):", "{ \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), } #", "\"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do some iterations for", "pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\":", "Do some iterations for some function Args: size: size of the file to", "if not test_compress or extension not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension]", "comp} if use_param else {}, ) return out def store_results(data, size, iterations): \"\"\"", "checkouts-by-title (6,62 GB) \"\"\" import os from time import time import yaml import", "COMPRESSIONS: continue # Get name of compression parameter and list of extensions comp_list", "name of compression parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name =", "Test writting for one file Args: size: size of the file to test", "for name in os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0] == \"data\": 
os.remove(f\"{PATH_DATA}{name}\")", "func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def test_read(size, iterations, exclude_formats, test_compress):", "Returns: dictionary with out \"\"\" out = {} for extension, func in tqdm(FUNCS[\"read\"].items(),", "def store_results(data, size, iterations): \"\"\" Store results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\",", "\"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\":", "args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def test_read(size, iterations, exclude_formats, test_compress): \"\"\"", "in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension if extension in exclude_formats: continue", "continue if not test_compress or extension not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension]", "compression parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param", "list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list,", "(64 KB) medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB) \"\"\" import os", "\"xz\", None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\",", "sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB)", "test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read for one file Args: size: size", "desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"],", 
"this extension if extension in exclude_formats: continue if not test_compress or extension not", "PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS =", "full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test with the big dataframe", "}, } def clean(): \"\"\" Clean previously created files \"\"\" for name in", "[df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all compressions", "\"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": False, # Read function", "test_compress), } # Also get file sizes out[\"file_size\"] = {} for file in", "\"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle,", "{ \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet,", "test_compress=True) if __name__ == \"__main__\": # Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"], test_compress=True)", "cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB) \"\"\" import os from time import", "Run tests trying all compressions without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True)", "# Get name of compression parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"]", "a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data", "and trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ ==", "\"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\": { \"param_name\":", 
"test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test writting for one file Args: size: size", "use compression param \"list\": [\"zlib\", \"blosc\", None], }, } def clean(): \"\"\" Clean", "\"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None],", "both tests and store the results\"\"\" clean() print(f\"\\nFULL TEST. size: {size}, iterations: {iterations}\")", "\"read\": test_read(size, iterations, exclude_formats, test_compress), } # Also get file sizes out[\"file_size\"] =", "with {extension}: {e}\") return out def test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test writting", "for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations,", "name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\" Runs", "in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 = time() func(*args, **kwargs) # Store", "== \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\" Runs some", "2: big) iterations: number of times to run the test exclude_formats: formats to", "pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\", \"read_with_param\":", "exclude_formats, test_compress), } # Also get file sizes out[\"file_size\"] = {} for file", "of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp", "= COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] =", "}, \"pickle\": { \"param_name\": 
\"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\",", "Store time out.append(time() - t0) except Exception as e: print(f\"- Error with {extension}:", "= time() func(*args, **kwargs) # Store time out.append(time() - t0) except Exception as", "file.split(\".\") if name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1():", "- t0) except Exception as e: print(f\"- Error with {extension}: {e}\") return out", "desc=f\"- {extension:8}\", leave=True): try: t0 = time() func(*args, **kwargs) # Store time out.append(time()", "from time import time import yaml import pandas as pd from tqdm import", "GB) \"\"\" import os from time import time import yaml import pandas as", "\"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": False, #", "iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test with the big dataframe and", "test args: arguments for that function kwargs: extra keyworded arguments \"\"\" out =", "try: t0 = time() func(*args, **kwargs) # Store time out.append(time() - t0) except", "def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests and store the results\"\"\"", "tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\",", "name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if", "FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\": pd.read_csv, \"xlsx\":", "iterations) def test_1(): \"\"\" Runs some tests with all extensions and exclude big", "{}) # Try all compressions else: if extension not in COMPRESSIONS: continue #", "}, \"msgpack\": { 
\"param_name\": \"compress\", \"read_with_param\": False, # Read function don't use compression", "Test read for one file Args: size: size of the file to test", "to run the test func: function to test args: arguments for that function", "in this test test_compress: if True it will try all compressions Returns: dictionary", "KB) medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB) \"\"\" import os from", "name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp},", "import yaml import pandas as pd from tqdm import tqdm PATH_DATA = \"data/\"", "== \"__main__\": # Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"], test_compress=True) # test_1() #", "PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": {", "def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do some iterations for some function", "Try all compressions else: if extension not in COMPRESSIONS: continue # Get name", "time() func(*args, **kwargs) # Store time out.append(time() - t0) except Exception as e:", "try all compressions Returns: dictionary with out \"\"\" out = {} for extension,", "extensions and exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\"", "iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param else {}, ) return", "with different sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB) big: checkouts-by-title", "created files \"\"\" for name in os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0]", "yaml import pandas as pd from tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS", "\"__main__\": # Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"], 
test_compress=True) # test_1() # test_2()", "}, } COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\",", "\"\"\" import os from time import time import yaml import pandas as pd", "False, # Read function don't use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None],", "df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this", "extension, func, args, {}) # Try all compressions else: if extension not in", "1: mediumn, 2: big) iterations: number of times to run the test func:", "writting for one file Args: size: size of the file to test (0:", "all compressions Returns: dictionary with out \"\"\" out = {} for extension, func", "extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension if extension in", "except Exception as e: print(f\"- Error with {extension}: {e}\") return out def test_write(size,", "of compression parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"]", "stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests and store the", "leave=True): try: t0 = time() func(*args, **kwargs) # Store time out.append(time() - t0)", "\"\"\" Test different file formats for storing tables. There are 3 files with", "formats for storing tables. There are 3 files with different sizes: small: bike_sharing_daily", "to exclude in this test test_compress: if True it will try all compressions", "<filename>0012-tables_format/test_formats.py \"\"\" Test different file formats for storing tables. 
There are 3 files", "dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run tests trying all", "small, 1: mediumn, 2: big) iterations: number of times to run the test", "os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\" Runs some tests with all extensions", "tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension if extension in exclude_formats: continue if", "times to run the test func: function to test args: arguments for that", "} def clean(): \"\"\" Clean previously created files \"\"\" for name in os.listdir(PATH_DATA):", "# Read function don't use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], },", "out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all compressions else: if", "[\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True,", "for _ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 = time() func(*args, **kwargs)", "iterations for some function Args: size: size of the file to test (0:", "= f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param", "as e: print(f\"- Error with {extension}: {e}\") return out def test_write(size, iterations, exclude_formats,", "leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name:", "{PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests and store", "and store the results\"\"\" clean() print(f\"\\nFULL TEST. 
size: {size}, iterations: {iterations}\") out =", "None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, # Read function don't use", "of the file to test (0: small, 1: mediumn, 2: big) iterations: number", "\"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, #", "\"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": False, # Read function don't use compression param", "store_results(data, size, iterations): \"\"\" Store results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\")", "= { \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\",", "def test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test writting for one file Args: size:", "t0) except Exception as e: print(f\"- Error with {extension}: {e}\") return out def", "medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB) \"\"\" import os from time", "function to test args: arguments for that function kwargs: extra keyworded arguments \"\"\"", "args, {}) # Try all compressions else: if extension not in COMPRESSIONS: continue", "extra keyworded arguments \"\"\" out = [] for _ in tqdm(range(iterations), desc=f\"- {extension:8}\",", "iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def test_read(size, iterations,", "pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\":", "function don't use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": {", "with out \"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in", "COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, 
\"list\": [\"infer\", \"gzip\", \"bz2\",", "{iterations}\") out = { \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats,", "as pd from tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES", "test_compress): \"\"\" Test writting for one file Args: size: size of the file", "test_compress: if True it will try all compressions Returns: dictionary with out \"\"\"", "Read function don't use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\":", "{} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension if", "pandas as pd from tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\"", "\"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, },", "dictionary with out \"\"\" out = {} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\",", "exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test with the big dataframe and trying", "= {} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension", "are 3 files with different sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns (233", "\"zip\", \"xz\", None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\",", "\"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\":", "out = { \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress),", "= {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): #", "True, \"list\": 
[\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\": { \"param_name\": \"compression\",", "\"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), } # Also", "don't use compression param \"list\": [\"zlib\", \"blosc\", None], }, } def clean(): \"\"\"", "don't use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\":", "= iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def", "\"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\": { \"param_name\":", "for that function kwargs: extra keyworded arguments \"\"\" out = [] for _", "compressions Returns: dictionary with out \"\"\" out = {} for extension, func in", "else {}, ) return out def store_results(data, size, iterations): \"\"\" Store results as", "extension in exclude_formats: continue if not test_compress or extension not in COMPRESSIONS: args", "that function kwargs: extra keyworded arguments \"\"\" out = [] for _ in", "[f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all compressions else:", "\"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None],", "None], }, } def clean(): \"\"\" Clean previously created files \"\"\" for name", "# Store time out.append(time() - t0) except Exception as e: print(f\"- Error with", "def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read for one file Args: size:", "func, args, {}) # Try all compressions else: if extension not in COMPRESSIONS:", "exclude_formats=[], test_compress=False): \"\"\" Do both tests and store the results\"\"\" clean() print(f\"\\nFULL TEST.", "iterations, exclude_formats, 
test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), } # Also get file", "iterations: {iterations}\") out = { \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations,", "f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test", "__name__ == \"__main__\": # Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"], test_compress=True) # test_1()", "\"compression\", \"read_with_param\": False, # Read function don't use compression param \"list\": [\"snappy\", \"gzip\",", "extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def test_read(size, iterations, exclude_formats,", "TEST. size: {size}, iterations: {iterations}\") out = { \"write\": test_write(size, iterations, exclude_formats, test_compress),", "extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test with the", "use_param else {}, ) return out def store_results(data, size, iterations): \"\"\" Store results", "There are 3 files with different sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns", "\"\"\" Clean previously created files \"\"\" for name in os.listdir(PATH_DATA): if \".\" in", "\"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\" Runs some tests", "True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\": { \"param_name\": \"compression\",", "for storing tables. 
There are 3 files with different sizes: small: bike_sharing_daily (64", "\"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\":", "\"compress\", \"read_with_param\": False, # Read function don't use compression param \"list\": [\"zlib\", \"blosc\",", "f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all compressions else:", "func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension if extension in exclude_formats:", "args: arguments for that function kwargs: extra keyworded arguments \"\"\" out = []", "iterations: number of times to run the test exclude_formats: formats to exclude in", "\"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\")", "extension = file.split(\".\") if name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations)", "default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both", "\"zip\", \"xz\", None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, # Read function", "None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\",", "pd from tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES =", "= os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\" Runs some tests with all", "= {} for file in os.listdir(PATH_DATA): name, extension = file.split(\".\") if name ==", "xlsx extension \"\"\" full_test(1, iterations=5, 
exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test with", "the results\"\"\" clean() print(f\"\\nFULL TEST. size: {size}, iterations: {iterations}\") out = { \"write\":", "\"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run test with the big", "pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\":", "all extensions and exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2():", "exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), } # Also get file sizes", "if True it will try all compressions Returns: dictionary with out \"\"\" out", "name, extension = file.split(\".\") if name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size,", "\"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\":", "\"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\": {", "store the results\"\"\" clean() print(f\"\\nFULL TEST. 
size: {size}, iterations: {iterations}\") out = {", "COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\"", "exclude_formats: continue if not test_compress or extension not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"]", "(0: small, 1: mediumn, 2: big) iterations: number of times to run the", "func(*args, **kwargs) # Store time out.append(time() - t0) except Exception as e: print(f\"-", "exclude_formats, test_compress): \"\"\" Test writting for one file Args: size: size of the", "size of the file to test (0: small, 1: mediumn, 2: big) iterations:", "function Args: size: size of the file to test (0: small, 1: mediumn,", "= COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\",", "full_test(1, iterations=10) def test_2(): \"\"\" Run tests trying all compressions without xlsx extension", "\"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\":", "f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return", "iterate_one_test(iterations, extension, func, args, {}) # Try all compressions else: if extension not", "\"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\",", "the test exclude_formats: formats to exclude in this test test_compress: if True it", "# Skip this extension if extension in exclude_formats: continue if not test_compress or", "if \".\" in name and name.split(\".\")[0] == \"data\": 
os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func,", "\"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel,", "def test_3(): \"\"\" Run test with the big dataframe and trying the compressions", "= pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension", "results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False)", "t0 = time() func(*args, **kwargs) # Store time out.append(time() - t0) except Exception", "full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run tests trying all compressions without", "size: size of the file to test (0: small, 1: mediumn, 2: big)", "if extension not in COMPRESSIONS: continue # Get name of compression parameter and", "extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param else {}, ) return out def", "to run the test exclude_formats: formats to exclude in this test test_compress: if", "with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def", "use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name]", "Returns: dictionary with out \"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension,", "= iterate_one_test(iterations, extension, func, args, {}) # Try all compressions else: if extension", "it will try all compressions Returns: dictionary with out \"\"\" out = {}", "return out def test_write(size, 
iterations, exclude_formats, test_compress): \"\"\" Test writting for one file", "{ \"param_name\": \"compression\", \"read_with_param\": False, # Read function don't use compression param \"list\":", "\"param_name\": \"compression\", \"read_with_param\": False, # Read function don't use compression param \"list\": [\"snappy\",", "and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"]", "test_read(size, iterations, exclude_formats, test_compress), } # Also get file sizes out[\"file_size\"] = {}", "previously created files \"\"\" for name in os.listdir(PATH_DATA): if \".\" in name and", "\"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": {", "extension not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func,", "Do both tests and store the results\"\"\" clean() print(f\"\\nFULL TEST. 
size: {size}, iterations:", "with out \"\"\" out = {} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True):", "param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": False,", "desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name:", "\"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\": {", "Get name of compression parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name", "\"\"\" Test writting for one file Args: size: size of the file to", "exclude_formats: continue if not test_compress or extension not in COMPRESSIONS: args = [df,", "if __name__ == \"__main__\": # Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"], test_compress=True) #", "func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param else {}, ) return out def store_results(data,", "True it will try all compressions Returns: dictionary with out \"\"\" out =", "\"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, # Read function don't use compression param", "iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests and store the results\"\"\" clean() print(f\"\\nFULL", "\"\"\" Test read for one file Args: size: size of the file to", "test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), } # Also get file sizes out[\"file_size\"]", "store_results(out, size, iterations) def test_1(): \"\"\" Runs some tests with all extensions and", "\"bz2\", \"zip\", \"xz\", None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\",", 
"desc=f\"{'write':10}\", leave=True): # Skip this extension if extension in exclude_formats: continue if not", "Error with {extension}: {e}\") return out def test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test", "pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\":", "out = {} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this", "[\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False,", "in os.listdir(PATH_DATA): name, extension = file.split(\".\") if name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\")", "COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test(", "yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\"", "\"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": False, # Read function don't", "continue if not test_compress or extension not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"]", "_ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 = time() func(*args, **kwargs) #", "(6,62 GB) \"\"\" import os from time import time import yaml import pandas", "function don't use compression param \"list\": [\"zlib\", \"blosc\", None], }, } def clean():", "\"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\": # Dummy test", "time out.append(time() - t0) except Exception as e: print(f\"- Error with {extension}: {e}\")", "or extension not in 
COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func,", "trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\":", "big dataframe and trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if", "def test_1(): \"\"\" Runs some tests with all extensions and exclude big dataframe", "}, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, # Read function don't use compression", "\"\"\" for name in os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0] == \"data\":", "run the test exclude_formats: formats to exclude in this test test_compress: if True", "out \"\"\" out = {} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): #", "pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\":", "as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[],", "not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {})", "extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in", "get file sizes out[\"file_size\"] = {} for file in os.listdir(PATH_DATA): name, extension =", "param \"list\": [\"zlib\", \"blosc\", None], }, } def clean(): \"\"\" Clean previously created", "\"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\":", "args=[f\"{PATH_DATA}data.{extension}_{comp}\"], 
kwargs={comp_param_name: comp} if use_param else {}, ) return out def store_results(data, size,", "\"param_name\": \"compress\", \"read_with_param\": False, # Read function don't use compression param \"list\": [\"zlib\",", "some tests with all extensions and exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1,", "tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 = time() func(*args, **kwargs) # Store time", "= [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try all compressions", "{extension:8}\", leave=True): try: t0 = time() func(*args, **kwargs) # Store time out.append(time() -", "comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name]", "file formats for storing tables. There are 3 files with different sizes: small:", "\"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\":", "# Also get file sizes out[\"file_size\"] = {} for file in os.listdir(PATH_DATA): name,", "= COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name =", "tests trying all compressions without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def", "some function Args: size: size of the file to test (0: small, 1:", "= file.split(\".\") if name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def", "\"list\": [\"zlib\", \"blosc\", None], }, } def clean(): \"\"\" Clean previously created files", "= f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, 
)", "outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False):", "test exclude_formats: formats to exclude in this test test_compress: if True it will", "= [] for _ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 = time()", "iterations, exclude_formats, test_compress): \"\"\" Test writting for one file Args: size: size of", "{ \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], },", "sizes out[\"file_size\"] = {} for file in os.listdir(PATH_DATA): name, extension = file.split(\".\") if", "# Try all compressions else: if extension not in COMPRESSIONS: continue # Get", "test_1(): \"\"\" Runs some tests with all extensions and exclude big dataframe \"\"\"", "the big dataframe and trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True)", "with the big dataframe and trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"],", "kwargs={comp_param_name: comp} if use_param else {}, ) return out def store_results(data, size, iterations):", "as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n-", "1: mediumn, 2: big) iterations: number of times to run the test exclude_formats:", "= iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param else {}, )", "Args: size: size of the file to test (0: small, 1: mediumn, 2:", "outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do", "\"\"\" Do some 
iterations for some function Args: size: size of the file", "\"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\": {", "Also get file sizes out[\"file_size\"] = {} for file in os.listdir(PATH_DATA): name, extension", "time import time import yaml import pandas as pd from tqdm import tqdm", "number of times to run the test func: function to test args: arguments", "extension not in COMPRESSIONS: continue # Get name of compression parameter and list", "MB) big: checkouts-by-title (6,62 GB) \"\"\" import os from time import time import", "kwargs: extra keyworded arguments \"\"\" out = [] for _ in tqdm(range(iterations), desc=f\"-", "return out def store_results(data, size, iterations): \"\"\" Store results as a yaml \"\"\"", "to test (0: small, 1: mediumn, 2: big) iterations: number of times to", "file to test (0: small, 1: mediumn, 2: big) iterations: number of times", "# Read function don't use compression param \"list\": [\"zlib\", \"blosc\", None], }, }", "exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\": # Dummy test # full_test(0, iterations=20,", "compressions without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\" Run", "args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try", "small: bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB) \"\"\"", "\"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle,", "tests with all extensions and exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10)", "continue # Get name of compression parameter and list of extensions comp_list =", "os.listdir(PATH_DATA): name, extension = file.split(\".\") if 
name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out,", "\"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS", "\"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"pickle\":", "pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\":", "compressions Returns: dictionary with out \"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for", "COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True):", "out[name] = iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param else {},", "func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension if extension in exclude_formats:", "comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name", "import time import yaml import pandas as pd from tqdm import tqdm PATH_DATA", "[\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\":", "leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}", "run the test func: function to test args: arguments for that function kwargs:", "test (0: small, 1: 
mediumn, 2: big) iterations: number of times to run", "\"bz2\", \"zip\", \"xz\", None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, # Read", "pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension if", "= \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\":", "iterations): \"\"\" Store results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile:", "\"\"\" out = [] for _ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0", "iterations, exclude_formats, test_compress), } # Also get file sizes out[\"file_size\"] = {} for", "the file to test (0: small, 1: mediumn, 2: big) iterations: number of", "leave=True): # Skip this extension if extension in exclude_formats: continue if not test_compress", "\"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\",", "will try all compressions Returns: dictionary with out \"\"\" out = {} for", "tests and store the results\"\"\" clean() print(f\"\\nFULL TEST. 
size: {size}, iterations: {iterations}\") out", "e: print(f\"- Error with {extension}: {e}\") return out def test_write(size, iterations, exclude_formats, test_compress):", "for file in os.listdir(PATH_DATA): name, extension = file.split(\".\") if name == \"data\": out[\"file_size\"][extension]", "Clean previously created files \"\"\" for name in os.listdir(PATH_DATA): if \".\" in name", "if name == \"data\": out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\"", "func: function to test args: arguments for that function kwargs: extra keyworded arguments", "Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests and", "pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\":", "\"\"\" Run test with the big dataframe and trying the compressions \"\"\" full_test(2,", "iterations: number of times to run the test func: function to test args:", "if extension in exclude_formats: continue if not test_compress or extension not in COMPRESSIONS:", "number of times to run the test exclude_formats: formats to exclude in this", "read for one file Args: size: size of the file to test (0:", "os from time import time import yaml import pandas as pd from tqdm", "extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension if extension in", "mediumn, 2: big) iterations: number of times to run the test func: function", "\"read_with_param\": False, # Read function don't use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\",", "\"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10,", "pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, 
\"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\": { \"param_name\":", "\"\"\" Do both tests and store the results\"\"\" clean() print(f\"\\nFULL TEST. size: {size},", "in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {})", ") return out def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read for one", "\"\"\" out = {} for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip", "function kwargs: extra keyworded arguments \"\"\" out = [] for _ in tqdm(range(iterations),", "in exclude_formats: continue if not test_compress or extension not in COMPRESSIONS: args =", "or extension not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension,", "tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df,", "in os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations,", "desc=f\"{'read':10}\", leave=True): # Skip this extension if extension in exclude_formats: continue if not", "\"\"\" Store results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data,", "file Args: size: size of the file to test (0: small, 1: mediumn,", "test_compress or extension not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension,", "name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do some", "{}, ) return out def store_results(data, size, iterations): \"\"\" Store results as a", "this test test_compress: if True it will try all compressions Returns: dictionary with", 
"test_compress): \"\"\" Test read for one file Args: size: size of the file", "in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension if extension in exclude_formats: continue", "Store results as a yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile,", "all compressions without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3(): \"\"\"", "return out def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read for one file", "for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip this extension if extension", "test_compress=False): \"\"\" Do both tests and store the results\"\"\" clean() print(f\"\\nFULL TEST. size:", "FUNCS = { \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather,", "Exception as e: print(f\"- Error with {extension}: {e}\") return out def test_write(size, iterations,", "out def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read for one file Args:", "file in os.listdir(PATH_DATA): name, extension = file.split(\".\") if name == \"data\": out[\"file_size\"][extension] =", "big) iterations: number of times to run the test func: function to test", "out[\"file_size\"] = {} for file in os.listdir(PATH_DATA): name, extension = file.split(\".\") if name", "use compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\",", "arguments for that function kwargs: extra keyworded arguments \"\"\" out = [] for", "os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do some iterations for some", "tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension if extension in exclude_formats: continue if", 
"iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out def test_read(size,", "pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\":", "parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp", "not in COMPRESSIONS: continue # Get name of compression parameter and list of", "= \"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = {", "test func: function to test args: arguments for that function kwargs: extra keyworded", "\"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather,", "Test different file formats for storing tables. 
There are 3 files with different", "{e}\") return out def test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test writting for one", "f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp} if use_param else", "\"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\", None], }, \"parquet\":", "and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in", "COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test(", "big) iterations: number of times to run the test exclude_formats: formats to exclude", "[] for _ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 = time() func(*args,", "comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name", "storing tables. 
There are 3 files with different sizes: small: bike_sharing_daily (64 KB)", "print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests", "func, args, kwargs): \"\"\" Do some iterations for some function Args: size: size", "the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\": #", "name and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\"", "COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) # Try", "full_test(size, iterations=10, exclude_formats=[], test_compress=False): \"\"\" Do both tests and store the results\"\"\" clean()", "args, kwargs): \"\"\" Do some iterations for some function Args: size: size of", "size: {size}, iterations: {iterations}\") out = { \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\":", "with all extensions and exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def", "out def test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test writting for one file Args:", "for one file Args: size: size of the file to test (0: small,", "bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62 GB) \"\"\" import", "} COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\",", "import os from time import time import yaml import pandas as pd from", "in name and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs):", "os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def 
iterate_one_test(iterations, extension,", "all compressions Returns: dictionary with out \"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\")", "exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run tests", "extension, func, args, kwargs): \"\"\" Do some iterations for some function Args: size:", "iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\": # Dummy test # full_test(0,", "full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\": # Dummy test #", "pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS =", "if not test_compress or extension not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] =", ") return out def store_results(data, size, iterations): \"\"\" Store results as a yaml", "\"csv\"], test_compress=True) if __name__ == \"__main__\": # Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"],", "out = [] for _ in tqdm(range(iterations), desc=f\"- {extension:8}\", leave=True): try: t0 =", "kwargs={comp_param_name: comp}, ) return out def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read", "time import yaml import pandas as pd from tqdm import tqdm PATH_DATA =", "\"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack, }, \"write\": { \"csv\": pd.DataFrame.to_csv,", "of times to run the test func: function to test args: arguments for", "import pandas as pd from tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS =", "False, # Read function don't use compression param \"list\": [\"zlib\", \"blosc\", None], },", "[\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": 
False, # Read", "\".\" in name and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args,", "dictionary with out \"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func", "Run test with the big dataframe and trying the compressions \"\"\" full_test(2, iterations=1,", "{} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True): # Skip", "different file formats for storing tables. There are 3 files with different sizes:", "{ \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\": pd.read_parquet, \"msgpack\": pd.read_msgpack,", "exclude in this test test_compress: if True it will try all compressions Returns:", "yaml \"\"\" with open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml", "{ \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True, \"list\": [\"infer\", \"gzip\", \"bz2\", \"zip\", \"xz\",", "COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\"", "exclude_formats, test_compress): \"\"\" Test read for one file Args: size: size of the", "\"xz\", None], }, \"parquet\": { \"param_name\": \"compression\", \"read_with_param\": False, # Read function don't", "test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), } # Also get", "out.append(time() - t0) except Exception as e: print(f\"- Error with {extension}: {e}\") return", "tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] = iterate_one_test( 
iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"],", "print(f\"- Error with {extension}: {e}\") return out def test_write(size, iterations, exclude_formats, test_compress): \"\"\"", "clean() print(f\"\\nFULL TEST. size: {size}, iterations: {iterations}\") out = { \"write\": test_write(size, iterations,", "3 files with different sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB)", "None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\": False, # Read function don't use", "\"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, },", "compressions else: if extension not in COMPRESSIONS: continue # Get name of compression", "pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, }", "name in os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def", "extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True):", "files \"\"\" for name in os.listdir(PATH_DATA): if \".\" in name and name.split(\".\")[0] ==", "not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args,", "not test_compress or extension not in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations,", "one file Args: size: size of the file to test (0: small, 1:", "of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for comp in tqdm(comp_list, 
desc=f\"{extension:10}\",", "mediumn, 2: big) iterations: number of times to run the test exclude_formats: formats", "for extension, func in tqdm(FUNCS[\"read\"].items(), desc=f\"{'read':10}\", leave=True): # Skip this extension if extension", "Skip this extension if extension in exclude_formats: continue if not test_compress or extension", "trying all compressions without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"], test_compress=True) def test_3():", "test_2(): \"\"\" Run tests trying all compressions without xlsx extension \"\"\" full_test(1, iterations=5,", "import tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"]", "= [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS = { \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel,", "def clean(): \"\"\" Clean previously created files \"\"\" for name in os.listdir(PATH_DATA): if", "= COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name = f\"{extension}_{str(comp)}\" out[name] =", "of times to run the test exclude_formats: formats to exclude in this test", "results\"\"\" clean() print(f\"\\nFULL TEST. 
size: {size}, iterations: {iterations}\") out = { \"write\": test_write(size,", "\"blosc\", None], }, } def clean(): \"\"\" Clean previously created files \"\"\" for", "from tqdm import tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\",", "compression param \"list\": [\"snappy\", \"gzip\", \"brotli\", None], }, \"msgpack\": { \"param_name\": \"compress\", \"read_with_param\":", "in COMPRESSIONS: args = [f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) #", "big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run tests trying", "{ \"param_name\": \"compress\", \"read_with_param\": False, # Read function don't use compression param \"list\":", "{ \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack,", "if use_param else {}, ) return out def store_results(data, size, iterations): \"\"\" Store", "(233 MB) big: checkouts-by-title (6,62 GB) \"\"\" import os from time import time", "and name.split(\".\")[0] == \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do", "kwargs): \"\"\" Do some iterations for some function Args: size: size of the", "= COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for comp in tqdm(comp_list, desc=f\"{extension:10}\", leave=True): name =", "= { \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size, iterations, exclude_formats, test_compress), }", "{size}, iterations: {iterations}\") out = { \"write\": test_write(size, iterations, exclude_formats, test_compress), \"read\": test_read(size,", "\"\"\" Runs some tests with all extensions and exclude big dataframe \"\"\" full_test(0,", "\"pickle\": 
pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet, \"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = {", "different sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB) big: checkouts-by-title (6,62", "test test_compress: if True it will try all compressions Returns: dictionary with out", "test_compress=True) def test_3(): \"\"\" Run test with the big dataframe and trying the", "out[name] = iterate_one_test( iterations, extension=name, func=func, args=[df, f\"{PATH_DATA}data.{extension}_{comp}\"], kwargs={comp_param_name: comp}, ) return out", "out[\"file_size\"][extension] = os.path.getsize(f\"{PATH_DATA}{file}\") store_results(out, size, iterations) def test_1(): \"\"\" Runs some tests with", "\"read_with_param\": False, # Read function don't use compression param \"list\": [\"zlib\", \"blosc\", None],", "parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param =", "{} for file in os.listdir(PATH_DATA): name, extension = file.split(\".\") if name == \"data\":", "formats to exclude in this test test_compress: if True it will try all", "some iterations for some function Args: size: size of the file to test", "out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in tqdm(FUNCS[\"write\"].items(), desc=f\"{'write':10}\", leave=True):", "list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] use_param = COMPRESSIONS[extension][\"read_with_param\"] for", "= { \"read\": { \"csv\": pd.read_csv, \"xlsx\": pd.read_excel, \"pickle\": pd.read_pickle, \"feather\": pd.read_feather, \"parquet\":", "iterations=10) def test_2(): \"\"\" Run tests trying all compressions without xlsx extension \"\"\"", "out \"\"\" out = {} df = pd.read_csv(f\"{PATH_DATA}{FILES[size]}.csv\") for extension, func in 
tqdm(FUNCS[\"write\"].items(),", "in COMPRESSIONS: continue # Get name of compression parameter and list of extensions", "print(f\"\\nFULL TEST. size: {size}, iterations: {iterations}\") out = { \"write\": test_write(size, iterations, exclude_formats,", "2: big) iterations: number of times to run the test func: function to", "to test args: arguments for that function kwargs: extra keyworded arguments \"\"\" out", "extension if extension in exclude_formats: continue if not test_compress or extension not in", "test_compress or extension not in COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations,", "def test_2(): \"\"\" Run tests trying all compressions without xlsx extension \"\"\" full_test(1,", "{extension}: {e}\") return out def test_write(size, iterations, exclude_formats, test_compress): \"\"\" Test writting for", "COMPRESSIONS: args = [df, f\"{PATH_DATA}data.{extension}\"] out[extension] = iterate_one_test(iterations, extension, func, args, {}) #", "and exclude big dataframe \"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run", "comp}, ) return out def test_read(size, iterations, exclude_formats, test_compress): \"\"\" Test read for", "for some function Args: size: size of the file to test (0: small,", "compression parameter and list of extensions comp_list = COMPRESSIONS[extension][\"list\"] comp_param_name = COMPRESSIONS[extension][\"param_name\"] for", "try all compressions Returns: dictionary with out \"\"\" out = {} df =", "exclude_formats: formats to exclude in this test test_compress: if True it will try", "\"\"\" Run tests trying all compressions without xlsx extension \"\"\" full_test(1, iterations=5, exclude_formats=[\"xlsx\"],", "compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\", \"csv\"], test_compress=True) if __name__ == \"__main__\": # Dummy", "iterations, extension=name, func=func, args=[f\"{PATH_DATA}data.{extension}_{comp}\"], 
kwargs={comp_param_name: comp} if use_param else {}, ) return out", "open(f\"{PATH_RESULTS}results_s{size}_i{iterations}.yaml\", \"w\") as outfile: yaml.dump(data, outfile, default_flow_style=False) print(f\"\\n- Data {PATH_RESULTS}results_s{size}_i{iterations}.yaml stored\") def full_test(size,", "files with different sizes: small: bike_sharing_daily (64 KB) medium: cbg_patterns (233 MB) big:", "clean(): \"\"\" Clean previously created files \"\"\" for name in os.listdir(PATH_DATA): if \".\"", "the test func: function to test args: arguments for that function kwargs: extra", "}, \"write\": { \"csv\": pd.DataFrame.to_csv, \"xlsx\": pd.DataFrame.to_excel, \"pickle\": pd.DataFrame.to_pickle, \"feather\": pd.DataFrame.to_feather, \"parquet\": pd.DataFrame.to_parquet,", "Read function don't use compression param \"list\": [\"zlib\", \"blosc\", None], }, } def", "# Dummy test # full_test(0, iterations=20, exclude_formats=[\"xlsx\"], test_compress=True) # test_1() # test_2() test_3()", "iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run tests trying all compressions without xlsx", "test_3(): \"\"\" Run test with the big dataframe and trying the compressions \"\"\"", "\"\"\" full_test(0, iterations=100) full_test(1, iterations=10) def test_2(): \"\"\" Run tests trying all compressions", "will try all compressions Returns: dictionary with out \"\"\" out = {} df", "all compressions else: if extension not in COMPRESSIONS: continue # Get name of", "tqdm PATH_DATA = \"data/\" PATH_RESULTS = \"results/\" FILES = [\"bike_sharing_daily\", \"cbg_patterns\", \"checkouts-by-title\"] FUNCS", "\"msgpack\": pd.DataFrame.to_msgpack, }, } COMPRESSIONS = { \"csv\": { \"param_name\": \"compression\", \"read_with_param\": True,", "iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do some iterations for some function Args:", "== \"data\": os.remove(f\"{PATH_DATA}{name}\") def iterate_one_test(iterations, extension, func, args, kwargs): \"\"\" Do some 
iterations", "**kwargs) # Store time out.append(time() - t0) except Exception as e: print(f\"- Error", "big: checkouts-by-title (6,62 GB) \"\"\" import os from time import time import yaml", "test with the big dataframe and trying the compressions \"\"\" full_test(2, iterations=1, exclude_formats=[\"xlsx\",", "times to run the test exclude_formats: formats to exclude in this test test_compress:" ]
[ "\"V\", \"length\" : \"1\"}) ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication failure parameter\",", "parameter\", \"type\" : \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\", \"format\"", "\"iei\" : \"30\", \"value\" : \"Authentication failure parameter\", \"type\" : \"Authentication failure parameter\",", "\"9.9.3.1\", \"presence\" : \"O\", \"format\" : \"TLV\", \"length\" : \"16\"}) msg_list[key][\"ies\"] = ies", ": \"9.9.3.9\", \"presence\" : \"M\", \"format\" : \"V\", \"length\" : \"1\"}) ies.append({ \"iei\"", ": \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\", \"format\" : \"TLV\",", "\"EMM cause\", \"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\"", "\"M\", \"format\" : \"V\", \"length\" : \"1\"}) ies.append({ \"iei\" : \"30\", \"value\" :", "\"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\" : \"V\", \"length\" : \"1\"}) ies.append({", "ies = [] ies.append({ \"iei\" : \"\", \"value\" : \"EMM cause\", \"type\" :", "\"9.9.3.9\", \"presence\" : \"M\", \"format\" : \"V\", \"length\" : \"1\"}) ies.append({ \"iei\" :", ": \"1\"}) ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication failure parameter\", \"type\" :", "\"1\"}) ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication failure parameter\", \"type\" : \"Authentication", "parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\", \"format\" : \"TLV\", \"length\" : \"16\"})", "\"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\" : \"V\", \"length\" :", "\"value\" : \"EMM cause\", \"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" :", ": \"9.9.3.1\", \"presence\" : \"O\", \"format\" : \"TLV\", \"length\" : \"16\"}) msg_list[key][\"ies\"] =", "ies.append({ \"iei\" : \"\", \"value\" : \"EMM cause\", \"type\" : \"EMM cause\", \"reference\"", "cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\" : 
\"V\", \"length\" : \"1\"})", "\"iei\" : \"\", \"value\" : \"EMM cause\", \"type\" : \"EMM cause\", \"reference\" :", "\"value\" : \"Authentication failure parameter\", \"type\" : \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\",", ": \"M\", \"format\" : \"V\", \"length\" : \"1\"}) ies.append({ \"iei\" : \"30\", \"value\"", "\"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\" : \"V\",", "\"\", \"value\" : \"EMM cause\", \"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\"", "\"Authentication failure parameter\", \"type\" : \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" :", "\"presence\" : \"M\", \"format\" : \"V\", \"length\" : \"1\"}) ies.append({ \"iei\" : \"30\",", "\"length\" : \"1\"}) ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication failure parameter\", \"type\"", "cause\", \"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\" :", ": \"30\", \"value\" : \"Authentication failure parameter\", \"type\" : \"Authentication failure parameter\", \"reference\"", "failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\", \"format\" : \"TLV\", \"length\" :", "\"30\", \"value\" : \"Authentication failure parameter\", \"type\" : \"Authentication failure parameter\", \"reference\" :", "= [] ies.append({ \"iei\" : \"\", \"value\" : \"EMM cause\", \"type\" : \"EMM", "\"format\" : \"V\", \"length\" : \"1\"}) ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication", ": \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\", \"format\" : \"V\", \"length\"", "[] ies.append({ \"iei\" : \"\", \"value\" : \"EMM cause\", \"type\" : \"EMM cause\",", ": \"EMM cause\", \"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\", \"presence\" : \"M\",", "\"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\", \"format\" : \"TLV\", \"length\"", "\"reference\" : \"9.9.3.1\", \"presence\" : 
\"O\", \"format\" : \"TLV\", \"length\" : \"16\"}) msg_list[key][\"ies\"]", "failure parameter\", \"type\" : \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\",", ": \"V\", \"length\" : \"1\"}) ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication failure", ": \"Authentication failure parameter\", \"type\" : \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\"", "ies.append({ \"iei\" : \"30\", \"value\" : \"Authentication failure parameter\", \"type\" : \"Authentication failure", "\"type\" : \"Authentication failure parameter\", \"reference\" : \"9.9.3.1\", \"presence\" : \"O\", \"format\" :", ": \"\", \"value\" : \"EMM cause\", \"type\" : \"EMM cause\", \"reference\" : \"9.9.3.9\"," ]
[ "a friendly description of the file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model", "Meta: model = CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your", "from uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta: model = Document fields =", "} class LoginForm(AuthenticationForm): class Meta: model = CustomUser fields = ('email','password') widgets={ 'email':", "}) } class LoginForm(AuthenticationForm): class Meta: model = CustomUser fields = ('email','password') widgets={", "}), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): class Meta: model = CustomUser", "('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class':", "file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',)", "= CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control'", "forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly description of the file' }) }", "Document class DocumentForm(forms.ModelForm): class Meta: model = Document fields = ('description', 'document') widgets={", "from uploads.core.models import CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta: model", "'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm):", "class CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields = ('email',) widgets={ # 'username':", "'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email", 
"'placeholder': 'Enter a friendly description of the file' }) } class CustomUserCreationForm(UserCreationForm): class", "your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class Meta:", "} class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',) widgets={ #", "'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): class Meta: model = CustomUser fields", "# 'class': 'form-control', # 'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control'", "'placeholder': 'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } #", "('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly description of", "CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control' }),", "'class':'form-control' }) } class LoginForm(AuthenticationForm): class Meta: model = CustomUser fields = ('email','password')", "class Meta: model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ #", "model = Document fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder':", "friendly description of the file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model =", "'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class", "from django import forms from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import", "forms.EmailInput(attrs={ 'class':'form-control' }) } class 
CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields =", "class Meta: model = CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter", "of the file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields", "'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): class", "forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): class Meta: model = CustomUser fields =", "forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class': 'form-control'})) # password =", "= Document fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter", "}), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser", "# }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): class Meta: model =", "widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly description of the file'", "your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email =", "= ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly description", "'Enter a friendly description of the file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta):", "UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm): class", "'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly 
description of the file' })", "# }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class Meta: model =", "<filename>uploads/core/forms.py from django import forms from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models", "from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models import", "fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly", "username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class Meta: model", "'class': 'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class': 'form-control'})) # password = forms.CharField(widget=forms.PasswordInput(attrs={'class':", "# 'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter your username' # }),", "Document fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a", "class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',) widgets={ # 'username':", "= ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={", "description of the file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser", "email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class':", "username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): 
class Meta: model", "'form-control', 'placeholder': 'Enter a friendly description of the file' }) } class CustomUserCreationForm(UserCreationForm):", "class Meta: model = Document fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class':", "fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control' }), 'password':", "model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control',", "'class': 'form-control', # 'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' })", "CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={", "forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={", "'class': 'form-control', 'placeholder': 'Enter a friendly description of the file' }) } class", "'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields", "# 'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class", "CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={", "CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder':", "model = CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email',", "the file' }) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields =", "uploads.core.models import CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta: model 
=", "'email': forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', })", "uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta: model = Document fields = ('description',", "Meta: model = Document fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control',", "'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class': 'form-control'})) # password", "django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models import Document", "fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter", "widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter your username' #", "class DocumentForm(forms.ModelForm): class Meta: model = Document fields = ('description', 'document') widgets={ 'description':", "= CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control', #", "'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class': 'form-control'}))", "}) } class CustomUserCreationForm(UserCreationForm): class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',) widgets={", "import forms from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from", "'form-control', # 'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) }", "CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta: model = Document fields", 
"'document') widgets={ 'description': forms.TextInput(attrs={ 'class': 'form-control', 'placeholder': 'Enter a friendly description of the", "('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter your username'", "'placeholder': 'Enter your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm):", "'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter your username' # }), 'email':", "}) } class CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields = ('email',) widgets={", "forms from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models", "Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class':", "import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models import Document class", "Meta: model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class':", "import Document class DocumentForm(forms.ModelForm): class Meta: model = Document fields = ('description', 'document')", "}), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class': 'form-control'})) #", "import CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta: model = Document", "= ('email',) widgets={ # 'username': forms.TextInput(attrs={ # 'class': 'form-control', # 'placeholder': 'Enter your", "'form-control', }) } # email = forms.EmailInput(widget=forms.TextInput(attrs={'class': 'form-control'})) # password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'}))", "widgets={ 'email': forms.EmailInput(attrs={ 
'placeholder': 'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control',", "your username' # }), 'email': forms.EmailInput(attrs={ 'class':'form-control' }) } class LoginForm(AuthenticationForm): class Meta:", "class Meta(UserCreationForm.Meta): model = CustomUser fields = ('email',) widgets={ # 'username': forms.TextInput(attrs={ #", "DocumentForm(forms.ModelForm): class Meta: model = Document fields = ('description', 'document') widgets={ 'description': forms.TextInput(attrs={", "AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm): class Meta:", "} class CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields = ('email',) widgets={ #", "class LoginForm(AuthenticationForm): class Meta: model = CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={", "'class':'form-control' }) } class CustomUserChangeForm(UserChangeForm): class Meta: model = CustomUser fields = ('email',)", "forms.EmailInput(attrs={ 'placeholder': 'Enter your email', 'class':'form-control' }), 'password': forms.PasswordInput(attrs={ 'class': 'form-control', }) }", "django import forms from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser", "UserCreationForm, UserChangeForm, AuthenticationForm from uploads.core.models import CustomUser from uploads.core.models import Document class DocumentForm(forms.ModelForm):", "LoginForm(AuthenticationForm): class Meta: model = CustomUser fields = ('email','password') widgets={ 'email': forms.EmailInput(attrs={ 'placeholder':" ]
[ "unittest from test.test01 import soma class TesteSoma(unittest.TestCase): def test_retorno_soma_10_10(self): self .assertEqual(soma(10, 10), 20)", "import unittest from test.test01 import soma class TesteSoma(unittest.TestCase): def test_retorno_soma_10_10(self): self .assertEqual(soma(10, 10)," ]
[ "Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO =", "have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\"", "EWSONPREM_ERR_MESSAGE = \"Error message unavailable. Please check the asset configuration and|or action parameters.\"", "mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE", "EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the Federated Auth", "EWS server. Please check the asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE =", "check the asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a", "agreed to in writing, software distributed under # the License is distributed on", "code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\"", "\"Toggling the impersonation configuration on the asset might help, or login user does", "impersonation configuration on the asset might help, or login user does not have", "\"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC =", "this file except in compliance with the License. 
# You may obtain a", "EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME", "user does not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS", "parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer value in the {key} parameter\"", "seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading the state file due to", "Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}. Message: {message}\"", "License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\"", "connecting to the EWS server. Please check the asset configuration and|or the action", "= \"Error occurred while connecting to the EWS server. Please check the asset", "# the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE =", "\"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\"", "EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL", "= \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID =", "method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\"", "Please check the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE = 
\"Error occurred while", "Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD =", "help, or login user does not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS =", "\"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL", "{0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message", "= \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY =", "EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, #", "EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE", "to parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error", "Copyright (c) 2016-2022 Splunk Inc. # # Licensed under the Apache License, Version", "unexpected format. \" \"Resetting the state file with the default format. Please try", "Unless required by applicable law or agreed to in writing, software distributed under", "EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS", "= \"API failed. Status code: {code}. 
Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL", "configuration and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting to the EWS", "action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting to the EWS server. Please", "TYPE_ERR_MESSAGE = \"Error occurred while connecting to the EWS server. Please check the", "<filename>ewsonprem_consts.py # File: ewsonprem_consts.py # # Copyright (c) 2016-2022 Splunk Inc. # #", "KIND, # either express or implied. See the License for the specific language", "might help, or login user does not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS", "state file due to its unexpected format. \" \"Resetting the state file with", "to its unexpected format. \" \"Resetting the state file with the default format.", "by applicable law or agreed to in writing, software distributed under # the", "\"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\"", "= \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY =", "the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the Federated", "EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP", "EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL", "AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" 
AUTH_TYPE_BASIC = \"Basic\"", "the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL", "or implied. See the License for the specific language governing permissions # and", "while connecting to the EWS server. Please check the asset configuration and|or the", "ewsonprem_consts.py # # Copyright (c) 2016-2022 Splunk Inc. # # Licensed under the", "EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID", "parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error Message:", "\"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on the asset might help, or", "valid integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\",", "\"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL =", "'{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code", "= [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE =", "# # Copyright (c) 2016-2022 Splunk Inc. 
# # Licensed under the Apache", "= \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE =", "Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID =", "\"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\"", "{key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL =", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "language governing permissions # and limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER", "\"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\"", "EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching", "= \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on the asset might help,", "STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading the state file due to its", "\"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID", "AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR = ( \"Error", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = 
\"Unable", "= 30 # in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading the", "EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on the asset might help, or login", "\"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\"", "to the EWS server. Please check the asset configuration and|or the action parameters.\"", "Please check the asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide", "the impersonation configuration on the asset might help, or login user does not", "EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\"", "= \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the Federated Auth Ping", "the asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid", "URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported", "EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID", "\"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\"", "\" \"Resetting the state file with the default format. 
Please try again.\" )", "EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE", "first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE =", "EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE =", "\"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in", "{1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. Please check", "\"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\"", "= \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER =", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "\"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\"", "under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM", "See the License for the specific language governing permissions # and limitations under", "the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM =", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS", "unavailable. 
Please check the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred", "License. # You may obtain a copy of the License at # #", "\"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\"", "= \"Error message unavailable. Please check the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE", "\"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on the asset", "compliance with the License. # You may obtain a copy of the License", "software distributed under # the License is distributed on an \"AS IS\" BASIS,", "= 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation", "\"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE", "= \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR =", "a valid integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\",", "= \"Unable to parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "and limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT =", "{code}. 
Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO", "parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646", "integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\",", "parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting to the EWS server. Please check", "EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST =", "EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST", "EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE =", "\"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the Federated Auth Ping URL\"", "EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID", "for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}.", "asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting to the", "its unexpected format. \" \"Resetting the state file with the default format. Please", "asset might help, or login user does not have privileges to the mailbox.\"", "file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE =", "failed. Status code: {code}. 
Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using", "Status code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url:", "not use this file except in compliance with the License. # You may", "\"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\"", "the specific language governing permissions # and limitations under the License. EWSONPREM_JSON_DEVICE_URL =", "provide a valid integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\",", "= \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS =", "= ( \"Error occurred while loading the state file due to its unexpected", "= \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL =", "License, Version 2.0 (the \"License\"); # you may not use this file except", "specific language governing permissions # and limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\"", "\"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED", "(c) 2016-2022 Splunk Inc. 
# # Licensed under the Apache License, Version 2.0", "\"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished", "or agreed to in writing, software distributed under # the License is distributed", "failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER", "License for the specific language governing permissions # and limitations under the License.", "DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading", "EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\"", "ANY KIND, # either express or implied. See the License for the specific", "= \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID =", "# you may not use this file except in compliance with the License.", "\"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST", "\"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER", "reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error", "File: ewsonprem_consts.py # # Copyright (c) 2016-2022 Splunk Inc. 
# # Licensed under", "= \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\"", "] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling", "login user does not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\"", "\"Error occurred while loading the state file due to its unexpected format. \"", "WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the", "(the \"License\"); # you may not use this file except in compliance with", "2016-2022 Splunk Inc. # # Licensed under the Apache License, Version 2.0 (the", "# File: ewsonprem_consts.py # # Copyright (c) 2016-2022 Splunk Inc. # # Licensed", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "= \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS =", "= \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC", "code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. Please check the asset configuration and|or", "# Unless required by applicable law or agreed to in writing, software distributed", "loading the state file due to its unexpected format. \" \"Resetting the state", "EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER =", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting to", "Inc. 
# # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "\"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw string", "distributed under # the License is distributed on an \"AS IS\" BASIS, WITHOUT", "and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting to the EWS server.", "= \"Parameter validation failed for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API", "\"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\"", "file except in compliance with the License. # You may obtain a copy", "\"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\"", "= \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME =", "\"Error message unavailable. Please check the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE =", "Code: {0}. 
Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error", "\"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\"", "to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT =", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter", "EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on the asset might", "server. Please check the asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please", "= \"Please provide a valid integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES =", "\"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE", "EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL", "= \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\"", "\"Error occurred while connecting to the EWS server. 
Please check the asset configuration", "2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration", "the asset might help, or login user does not have privileges to the", "the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\"", "OR CONDITIONS OF ANY KIND, # either express or implied. See the License", "EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply,", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "\"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION =", "while loading the state file due to its unexpected format. \" \"Resetting the", "# either express or implied. 
See the License for the specific language governing", "EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation", "30 # in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading the state", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "{0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\"", "EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on", "= \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed", "\"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\"", "\"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test", "EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection", "in the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ]", "\"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while", "= \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve", "\"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR = (", "distributed on an \"AS IS\" 
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "( \"Error occurred while loading the state file due to its unexpected format.", "(interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "applicable law or agreed to in writing, software distributed under # the License", "\"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\"", "you may not use this file except in compliance with the License. #", "\"Please provide a valid integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES = [", "\"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\"", "value in the {key} parameter\" EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\"", "AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR", "limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\"", "action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer value in the {key}", "= \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the impersonation configuration on the", "EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER", "EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\"", "use this file except in compliance with the License. 
# You may obtain", "to in writing, software distributed under # the License is distributed on an", "the License for the specific language governing permissions # and limitations under the", "\"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG =", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "= \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\"", "\"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault", "2.0 (the \"License\"); # you may not use this file except in compliance", "url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to", "EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS", "= \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw", "EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER", "EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure", "EWSONPREM_MAIL_TYPES = [ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE", "Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the", "# # Unless required by applicable law or agreed to in writing, software", "CONDITIONS OF ANY 
KIND, # either express or implied. See the License for", "Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable.", "= \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\"", "EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity", "EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID", "# in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading the state file", "EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied.", "= \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER =", "express or implied. See the License for the specific language governing permissions #", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. 
Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE", "required by applicable law or agreed to in writing, software distributed under #", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "= \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE =", "and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer value in", "does not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS =", "[ \"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\"", "{base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse", "vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE", "\"Unable to parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}.", "reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error Message: {1}\"", "= \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test", "the License. # You may obtain a copy of the License at #", "in writing, software distributed under # the License is distributed on an \"AS", "string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error Code: {0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE =", "under # the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "law or agreed to in writing, software distributed under # the License is", "either express or implied. 
See the License for the specific language governing permissions", "EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT", "configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer value", "= \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST =", "first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED =", "EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not", "with the License. # You may obtain a copy of the License at", "configuration on the asset might help, or login user does not have privileges", "OF ANY KIND, # either express or implied. See the License for the", "# Copyright (c) 2016-2022 Splunk Inc. # # Licensed under the Apache License,", "format. \" \"Resetting the state file with the default format. 
Please try again.\"", "= \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT", "\"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT =", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "\"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG", "= \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER =", "= \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP =", "EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER", "or login user does not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest", "message unavailable. Please check the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error", "EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\" EWS_MODIFY_CONFIG = \"Toggling the", "for the specific language governing permissions # and limitations under the License. EWSONPREM_JSON_DEVICE_URL", "validation failed for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. 
Status", "\"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\"", "Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL =", "governing permissions # and limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER =", "unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. Please check the asset configuration and|or action", "\"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\" EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for", "AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30", "{message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could", "check the asset configuration and|or action parameters.\" TYPE_ERR_MESSAGE = \"Error occurred while connecting", "\"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE", "in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred while loading the state file due", "\"t:Message\", \"t:MeetingRequest\", \"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL", "the state file due to its unexpected format. \" \"Resetting the state file", "in compliance with the License. 
# You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS =", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer value in the {key} parameter\" EWSONPREM_MAIL_TYPES", "= \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL =", "privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT", "\"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\"", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either", "= \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY = \"body\" EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE =", "# and limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT", "occurred while loading the state file due to its unexpected format. \" \"Resetting", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "implied. See the License for the specific language governing permissions # and limitations", "due to its unexpected format. 
\" \"Resetting the state file with the default", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "= \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT =", "= \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL =", "\"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\"", "the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}. Message:", "writing, software distributed under # the License is distributed on an \"AS IS\"", "the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer value in the", "= \"Error Code: {0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE", "failed for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code:", "= \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL =", "= \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\" AUTH_TYPE_BASIC = \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 #", "Splunk Inc. 
# # Licensed under the Apache License, Version 2.0 (the \"License\");", "not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw string reply:", "EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID = \"internet_message_id\" EWSONPREM_JSON_EMAIL = \"email\" EWSONPREM_JSON_FOLDER = \"folder\" EWSONPREM_JSON_BODY", "= \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest first\" DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\"", "not have privileges to the mailbox.\" EWS_INGEST_LATEST_EMAILS = \"latest first\" EWS_INGEST_OLDEST_EMAILS = \"oldest", "Version 2.0 (the \"License\"); # you may not use this file except in", "= \"Toggling the impersonation configuration on the asset might help, or login user", "except in compliance with the License. # You may obtain a copy of", "\"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\" EWS_JSON_AUTH_TYPE = \"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\"", "= \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\" EWSONPREM_JSON_SUBJECT = \"subject\" EWSONPREM_JSON_FROM = \"sender\" EWSONPREM_JSON_INT_MSG_ID =", "retrieve vault file\" EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw string reply: '{raw_text}'\"", "= \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\" EWS_JSON_USE_IMPERSONATE = \"use_impersonation\"", "Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. 
Please", "asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE = \"Please provide a valid integer", "= \"container_id\" EWS_JSON_VAULT_ID = \"vault_id\" EWSONPREM_SEARCH_FINISHED_STATUS = \"Finished Searching {0:.0%}\" EWS_JSON_POLL_USER = \"poll_user\"", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "EWSONPREM_USING_BASE_URL = \"Using url: {base_url}\" EWSONPREM_ERR_VAULT_INFO = \"Could not retrieve vault file\" EWSONPREM_ERR_JSON_PARSE", "= \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION", "occurred while connecting to the EWS server. Please check the asset configuration and|or", "on the asset might help, or login user does not have privileges to", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See", "DATETIME_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\" AUTH_TYPE_AZURE = \"Azure\" AUTH_TYPE_AZURE_INTERACTIVE = \"Azure (interactive)\" AUTH_TYPE_FEDERATED = \"Federated\"", "= \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER =", "EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL = \"fed_ping_url\" EWS_JSON_FED_VERIFY_CERT = \"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER", "EWSONPREM_ERR_JSON_PARSE = \"Unable to parse reply, raw string reply: '{raw_text}'\" EWSONPREM_EXCEPTION_ERR_MESSAGE = \"Error", "Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed. Status code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD", "= \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. 
Please check the asset", "\"Parameter validation failed for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER = \"API failed.", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or", "= \"Basic\" DEFAULT_REQUEST_TIMEOUT = 30 # in seconds STATE_FILE_CORRUPT_ERR = ( \"Error occurred", "\"API failed. Status code: {code}. Message: {message}\" EWSONPREM_ERR_API_UNSUPPORTED_METHOD = \"Unsupported method\" EWSONPREM_USING_BASE_URL =", "\"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. Please check the asset configuration", "\"t:MeetingResponse\", \"t:MeetingMessage\", \"t:MeetingCancellation\" ] EWSONPREM_MAX_END_OFFSET_VAL = 2147483646 EWS_O365_RESOURCE = \"https://outlook.office365.com\" EWS_LOGIN_URL = \"https://login.windows.net\"", "= \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity Passed\" EWSONPREM_ERR_SERVER_CONNECTION = \"Connection failed\"", "file due to its unexpected format. \" \"Resetting the state file with the", "\"fed_verify_server_cert\" EWS_JSON_IS_PUBLIC_FOLDER = \"is_public_folder\" EWSONPREM_ERR_CONNECTIVITY_TEST = \"Test Connectivity Failed\" EWSONPREM_SUCC_CONNECTIVITY_TEST = \"Test Connectivity", "permissions # and limitations under the License. EWSONPREM_JSON_DEVICE_URL = \"url\" EWSONPREM_JSON_TEST_USER = \"test_user\"", "= \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\" EWS_JSON_AUTH_URL = \"authority_url\" EWS_JSON_FED_PING_URL =", "EWSONPREM_ERR_FED_PING_URL = \"Parameter validation failed for the Federated Auth Ping URL\" EWSONPREM_ERR_FROM_SERVER =", "EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE = \"Error message unavailable. 
Please check the", "= \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL = \"ingest_email\" EWS_JSON_CONTAINER_ID = \"container_id\" EWS_JSON_VAULT_ID =", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express", "\"auth_type\" EWS_JSON_CLIENT_ID = \"client_id\" EWS_JSON_POLL_FOLDER = \"poll_folder\" EWS_JSON_INGEST_MANNER = \"ingest_manner\" EWS_JSON_INGEST_TIME = \"ingest_time\"", "EWSONPREM_JSON_QUERY = \"query\" EWSONPREM_JSON_RANGE = \"range\" EWSONPREM_JSON_ID = \"id\" EWSONPREM_JSON_GROUP = \"group\" EWSONPREM_JSON_INGEST_EMAIL", "the EWS server. Please check the asset configuration and|or the action parameters.\" EWSONPREM_VALIDATE_INTEGER_MESSAGE", "\"ingest_time\" EWS_JSON_FIRST_RUN_MAX_EMAILS = \"first_run_max_emails\" EWS_JSON_POLL_MAX_CONTAINERS = \"max_containers\" EWS_JSON_DONT_IMPERSONATE = \"dont_impersonate\" EWS_JSON_IMPERSONATE_EMAIL = \"impersonate_email\"", "\"Error Code: {0}. Error Message: {1}\" EWSONPREM_ERR_CODE_MESSAGE = \"Error code unavailable\" EWSONPREM_ERR_MESSAGE =" ]
[ "test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1)", "heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0,", "heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init(): basic_heap = Heap() init_list_heap =", "test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1", "8, 7, 5, 1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push():", "5, 1, 3, 2, 0, 6]) assert heap.get_heap() == [0, 1, 2, 6,", "return heap def test_heap_init(): basic_heap = Heap() init_list_heap = Heap([9, 8, 7, 5,", "9, 5, 1, 3, 2, 0, 6]) assert heap.get_heap() == [0, 1, 2,", "2, 0, 6]) assert heap.get_heap() == [0, 1, 2, 6, 3, 8, 5,", "def test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty(): heap = Heap() assert heap.empty()", "= Heap() assert heap.empty() heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8)", "base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1, 8, 4, 5, 9] def test_heapify():", "base_heap.get_heap() == [0, 3, 1, 8, 4, 5, 9] def test_heapify(): heap =", "assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap() heap.push(2) heap.push(3)", "= Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init(): basic_heap =", "heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop() == 2 def test_heap_peek(base_heap):", "= Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop()", "assert heap.empty() heap.push(1) assert 
not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0)", "def test_heapify(): heap = Heap([8, 9, 5, 1, 3, 2, 0, 6]) assert", "base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1, 8, 4, 5, 9] def", "base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1, 8, 4,", "not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() ==", "heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop() == 2 def", "0, 6]) assert heap.get_heap() == [0, 1, 2, 6, 3, 8, 5, 9]", "assert isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap):", "Heap) def test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop()", "2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap() heap.push(2)", "isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1)", "test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek()", "def test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap):", "base_heap.pop() == 1 assert base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek() == 1", "7, 5, 1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap", "heap = Heap([8, 9, 5, 1, 3, 2, 0, 6]) assert heap.get_heap() ==", "data_structures.heap import Heap 
@pytest.fixture def base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4)", "init_list_heap = Heap([9, 8, 7, 5, 1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap,", "5, 1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap =", "= Heap([8, 9, 5, 1, 3, 2, 0, 6]) assert heap.get_heap() == [0,", "assert base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty(): heap", "= Heap() init_list_heap = Heap([9, 8, 7, 5, 1, 2]) assert isinstance(basic_heap, Heap)", "import pytest from data_structures.heap import Heap @pytest.fixture def base_heap(): heap = Heap() heap.push(1)", "base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init():", "Heap() init_list_heap = Heap([9, 8, 7, 5, 1, 2]) assert isinstance(basic_heap, Heap) assert", "isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert", "base_heap.peek() == 1 def test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1) assert not", "@pytest.fixture def base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap", "heap = Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert", "assert not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap()", "== 2 def test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty(): heap = Heap()", "base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1, 8,", "heap.empty() heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap): 
base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9)", "heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert", "base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1, 8, 4, 5,", "2 def test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty(): heap = Heap() assert", "== 1 def test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1) assert not heap.empty()", "Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init(): basic_heap = Heap()", "basic_heap = Heap() init_list_heap = Heap([9, 8, 7, 5, 1, 2]) assert isinstance(basic_heap,", "assert base_heap.pop() == 1 assert base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek() ==", "= Heap([9, 8, 7, 5, 1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap)", "from data_structures.heap import Heap @pytest.fixture def base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3)", "def base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def", "base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1, 8, 4, 5, 9]", "Heap([8, 9, 5, 1, 3, 2, 0, 6]) assert heap.get_heap() == [0, 1,", "1 assert base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty():", "base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek() == 1 def test_heap_empty(): heap =", "Heap() assert heap.empty() heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1)", "def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop() == 2 def 
test_heap_peek(base_heap): assert", "3, 1, 8, 4, 5, 9] def test_heapify(): heap = Heap([8, 9, 5,", "heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop() == 2", "assert base_heap.get_heap() == [0, 3, 1, 8, 4, 5, 9] def test_heapify(): heap", "Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() == 1 assert base_heap.pop() ==", "1 def test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1) assert not heap.empty() def", "1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap()", "1, 3, 2, 0, 6]) assert heap.get_heap() == [0, 1, 2, 6, 3,", "5, 9] def test_heapify(): heap = Heap([8, 9, 5, 1, 3, 2, 0,", "def test_heap_init(): basic_heap = Heap() init_list_heap = Heap([9, 8, 7, 5, 1, 2])", "4, 5, 9] def test_heapify(): heap = Heap([8, 9, 5, 1, 3, 2,", "Heap([9, 8, 7, 5, 1, 2]) assert isinstance(basic_heap, Heap) assert isinstance(init_list_heap, Heap) def", "assert base_heap.peek() == 1 def test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1) assert", "test_heap_empty(): heap = Heap() assert heap.empty() heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop()", "pytest from data_structures.heap import Heap @pytest.fixture def base_heap(): heap = Heap() heap.push(1) heap.push(2)", "== 1 assert base_heap.pop() == 2 def test_heap_peek(base_heap): assert base_heap.peek() == 1 def", "heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init(): basic_heap = Heap() init_list_heap", "heap = Heap() assert heap.empty() heap.push(1) assert not heap.empty() def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop()", "heap.push(4) heap.push(5) return heap def test_heap_init(): basic_heap = Heap() init_list_heap = Heap([9, 8,", "heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init(): 
basic_heap = Heap() init_list_heap = Heap([9,", "test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3, 1,", "== [0, 3, 1, 8, 4, 5, 9] def test_heapify(): heap = Heap([8,", "8, 4, 5, 9] def test_heapify(): heap = Heap([8, 9, 5, 1, 3,", "1, 8, 4, 5, 9] def test_heapify(): heap = Heap([8, 9, 5, 1,", "def test_heapify_up_and_down(base_heap): base_heap.pop() base_heap.pop() base_heap.push(8) base_heap.push(1) base_heap.push(0) base_heap.push(9) assert base_heap.get_heap() == [0, 3,", "test_heap_init(): basic_heap = Heap() init_list_heap = Heap([9, 8, 7, 5, 1, 2]) assert", "3, 2, 0, 6]) assert heap.get_heap() == [0, 1, 2, 6, 3, 8,", "heap def test_heap_init(): basic_heap = Heap() init_list_heap = Heap([9, 8, 7, 5, 1,", "[0, 3, 1, 8, 4, 5, 9] def test_heapify(): heap = Heap([8, 9,", "Heap) assert isinstance(init_list_heap, Heap) def test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1) def", "def test_heap_push(): heap = Heap() heap.push(2) heap.push(3) heap.push(1) def test_heap_pop(base_heap): assert base_heap.pop() ==", "heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return heap def test_heap_init(): basic_heap", "heap.push(5) return heap def test_heap_init(): basic_heap = Heap() init_list_heap = Heap([9, 8, 7,", "test_heapify(): heap = Heap([8, 9, 5, 1, 3, 2, 0, 6]) assert heap.get_heap()", "import Heap @pytest.fixture def base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5)", "Heap @pytest.fixture def base_heap(): heap = Heap() heap.push(1) heap.push(2) heap.push(3) heap.push(4) heap.push(5) return", "9] def test_heapify(): heap = Heap([8, 9, 5, 1, 3, 2, 0, 6])" ]
[ "print('UPDATED') # print(f\"The following object got updated: {spec}\") # return {'message': 'updated'} <EMAIL>.delete('ore<EMAIL>',", "'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs): # print('UPDATED') # print(f\"The following object", "here we are! Creating: {spec}\") return {'message': 'hello world'} # will be the", "are! Creating: {spec}\") return {'message': 'hello world'} # will be the new status", "# print('UPDATED') # print(f\"The following object got updated: {spec}\") # return {'message': 'updated'}", "will be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs):", "update_fn(old, new, diff, **kwargs): # print('UPDATED') # print(f\"The following object got updated: {spec}\")", "new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs): # print('UPDATED') #", "be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs): #", "{spec}\") return {'message': 'hello world'} # will be the new status <EMAIL>('<EMAIL>', 'v1alpha1',", "following object got updated: {spec}\") # return {'message': 'updated'} <EMAIL>.delete('ore<EMAIL>', 'v1alpha1', 'book') #def", "# print(f\"The following object got updated: {spec}\") # return {'message': 'updated'} <EMAIL>.delete('ore<EMAIL>', 'v1alpha1',", "# will be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff,", "we are! Creating: {spec}\") return {'message': 'hello world'} # will be the new", "Creating: {spec}\") return {'message': 'hello world'} # will be the new status <EMAIL>('<EMAIL>',", "create_fn(spec, **kwargs): print(f\"And here we are! 
Creating: {spec}\") return {'message': 'hello world'} #", "the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs): # print('UPDATED')", "got updated: {spec}\") # return {'message': 'updated'} <EMAIL>.delete('ore<EMAIL>', 'v1alpha1', 'book') #def delete_fn(metadata, **kwargs):", "kopf @kopf.on.create('oreilly.com', 'v1alpha1', 'book') def create_fn(spec, **kwargs): print(f\"And here we are! Creating: {spec}\")", "import kopf @kopf.on.create('oreilly.com', 'v1alpha1', 'book') def create_fn(spec, **kwargs): print(f\"And here we are! Creating:", "@kopf.on.create('oreilly.com', 'v1alpha1', 'book') def create_fn(spec, **kwargs): print(f\"And here we are! Creating: {spec}\") return", "print(f\"And here we are! Creating: {spec}\") return {'message': 'hello world'} # will be", "def create_fn(spec, **kwargs): print(f\"And here we are! Creating: {spec}\") return {'message': 'hello world'}", "**kwargs): # print('UPDATED') # print(f\"The following object got updated: {spec}\") # return {'message':", "'book') def create_fn(spec, **kwargs): print(f\"And here we are! 
Creating: {spec}\") return {'message': 'hello", "status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs): # print('UPDATED') # print(f\"The", "'hello world'} # will be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old,", "<EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new, diff, **kwargs): # print('UPDATED') # print(f\"The following", "object got updated: {spec}\") # return {'message': 'updated'} <EMAIL>.delete('ore<EMAIL>', 'v1alpha1', 'book') #def delete_fn(metadata,", "diff, **kwargs): # print('UPDATED') # print(f\"The following object got updated: {spec}\") # return", "#def update_fn(old, new, diff, **kwargs): # print('UPDATED') # print(f\"The following object got updated:", "<gh_stars>100-1000 import kopf @kopf.on.create('oreilly.com', 'v1alpha1', 'book') def create_fn(spec, **kwargs): print(f\"And here we are!", "print(f\"The following object got updated: {spec}\") # return {'message': 'updated'} <EMAIL>.delete('ore<EMAIL>', 'v1alpha1', 'book')", "world'} # will be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def update_fn(old, new,", "**kwargs): print(f\"And here we are! Creating: {spec}\") return {'message': 'hello world'} # will", "'v1alpha1', 'book') def create_fn(spec, **kwargs): print(f\"And here we are! Creating: {spec}\") return {'message':", "{'message': 'hello world'} # will be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book') #def", "'book') #def update_fn(old, new, diff, **kwargs): # print('UPDATED') # print(f\"The following object got", "new, diff, **kwargs): # print('UPDATED') # print(f\"The following object got updated: {spec}\") #", "return {'message': 'hello world'} # will be the new status <EMAIL>('<EMAIL>', 'v1alpha1', 'book')" ]
[ "= json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response)", "= os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination)", "return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key:", "context): email = event['pathParameters']['email'] name = event['pathParameters']['name'] try: response = _get_vault_item(email, name) del", "decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name", "vault_table_sort_key: name } ) except Exception as e: print(e) raise else: return response['Item']", "_get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200,", "bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name,", "as e: print(e) raise else: return response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email']", "Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY')", "\"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except Exception as e: return { \"statusCode\":", "json.dumps(response) } except Exception as e: return { \"statusCode\": 500, \"headers\": { \"Content-Type\":", "f = 
Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response", "\"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except Exception as e: return", "= vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name } ) except Exception as e:", "lambda_handler(event, context): email = event['pathParameters']['email'] name = event['pathParameters']['name'] try: response = _get_vault_item(email, name)", "dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key", "s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value", "key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def", "key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key =", "= open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\")", "Exception as e: print(e) raise else: return response['Item'] def lambda_handler(event, context): email =", "\"body\": json.dumps(response) } except Exception as e: return { \"statusCode\": 500, \"headers\": {", "return { \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except", "from cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') 
vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME'))", "decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response = vault_table.get_item( Key={", "else: return response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email'] name = event['pathParameters']['name'] try:", "} ) except Exception as e: print(e) raise else: return response['Item'] def lambda_handler(event,", "\"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f", "key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value) return", "vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination", "e: print(e) raise else: return response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email'] name", "200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except Exception as e:", "}, \"body\": json.dumps(response) } except Exception as e: return { \"statusCode\": 500, \"headers\":", "del response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": {", "s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name", "name = event['pathParameters']['name'] try: response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value']", "e: 
return { \"statusCode\": 500, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": str(e) }", "boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY')", "as e: return { \"statusCode\": 500, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": str(e)", "ClientError from cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table =", "def _get_vault_item(email, name): try: response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name }", "+ key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f =", "Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value =", "_decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try:", "response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return {", "import Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key =", "json import boto3 from botocore.exceptions import ClientError from cryptography.fernet import Fernet dynamodb =", "name } ) except Exception as e: print(e) raise else: return response['Item'] def", "return response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email'] name = event['pathParameters']['name'] try: response", "Exception as e: 
return { \"statusCode\": 500, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\":", "Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response = vault_table.get_item(", "{response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" },", "\"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email,", "cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key", "email, vault_table_sort_key: name } ) except Exception as e: print(e) raise else: return", "= os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination,", "response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\":", "= _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\":", "Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value)", "os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name,", "try: response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] = 
json.loads(_decrypt_item_value(response['value'].value)) return", "vault_table_partition_key: email, vault_table_sort_key: name } ) except Exception as e: print(e) raise else:", "= event['pathParameters']['name'] try: response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] =", ") except Exception as e: print(e) raise else: return response['Item'] def lambda_handler(event, context):", "except Exception as e: print(e) raise else: return response['Item'] def lambda_handler(event, context): email", "botocore.exceptions import ClientError from cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3')", "os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\"", "os import json import boto3 from botocore.exceptions import ClientError from cryptography.fernet import Fernet", "response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": { \"Content-Type\":", "raise else: return response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email'] name = event['pathParameters']['name']", "json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) }", "dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY')", "= boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') 
vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name =", "try: response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name } ) except Exception", "= Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response =", "boto3 from botocore.exceptions import ClientError from cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3", "_get_vault_item(email, name): try: response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name } )", "boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME')", "{ \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except Exception as e: return {", "Key={ vault_table_partition_key: email, vault_table_sort_key: name } ) except Exception as e: print(e) raise", "email = event['pathParameters']['email'] name = event['pathParameters']['name'] try: response = _get_vault_item(email, name) del response['email']", "from botocore.exceptions import ClientError from cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3 =", "print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\"", "event['pathParameters']['name'] try: response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value))", "event['pathParameters']['email'] name = event['pathParameters']['name'] try: response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE: {response}\")", "import ClientError from 
cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table", "os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key", "vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name } ) except Exception as e: print(e)", "def lambda_handler(event, context): email = event['pathParameters']['email'] name = event['pathParameters']['name'] try: response = _get_vault_item(email,", "def _decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name):", "vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" +", "import json import boto3 from botocore.exceptions import ClientError from cryptography.fernet import Fernet dynamodb", "import os import json import boto3 from botocore.exceptions import ClientError from cryptography.fernet import", "open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key) decrypted_value = f.decrypt(value) return decrypted_value.decode(\"utf-8\") def", "= f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response = vault_table.get_item( Key={ vault_table_partition_key:", "= os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination =", "= os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name = os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name", "import boto3 from botocore.exceptions 
import ClientError from cryptography.fernet import Fernet dynamodb = boto3.resource('dynamodb')", "= boto3.resource('dynamodb') s3 = boto3.resource('s3') vault_table = dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key =", "key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value): f = Fernet(key)", "} except Exception as e: return { \"statusCode\": 500, \"headers\": { \"Content-Type\": \"application/json\"", "{ \"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except Exception", "\"statusCode\": 200, \"headers\": { \"Content-Type\": \"application/json\" }, \"body\": json.dumps(response) } except Exception as", "= \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read() def _decrypt_item_value(value):", "print(e) raise else: return response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email'] name =", "= event['pathParameters']['email'] name = event['pathParameters']['name'] try: response = _get_vault_item(email, name) del response['email'] print(f\"RESPONSE:", "response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name } ) except Exception as", "name) del response['email'] print(f\"RESPONSE: {response}\") response['value'] = json.loads(_decrypt_item_value(response['value'].value)) return { \"statusCode\": 200, \"headers\":", "= dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name =", "vault_table = 
dynamodb.Table(os.environ.get('VAULT_TABLE_NAME')) vault_table_partition_key = os.environ.get('VAULT_TABLE_KEY') vault_table_sort_key = os.environ.get('VAULT_SORT_KEY') bucket_name = os.environ.get('S3_BUCKET_NAME') key_file_name", "except Exception as e: return { \"statusCode\": 500, \"headers\": { \"Content-Type\": \"application/json\" },", "response['Item'] def lambda_handler(event, context): email = event['pathParameters']['email'] name = event['pathParameters']['name'] try: response =", "name): try: response = vault_table.get_item( Key={ vault_table_partition_key: email, vault_table_sort_key: name } ) except", "f.decrypt(value) return decrypted_value.decode(\"utf-8\") def _get_vault_item(email, name): try: response = vault_table.get_item( Key={ vault_table_partition_key: email,", "\"application/json\" }, \"body\": json.dumps(response) } except Exception as e: return { \"statusCode\": 500,", "os.environ.get('ENCRYPTION_KEY') key_file_destination = \"/tmp/\" + key_file_name s3.meta.client.download_file(Bucket=bucket_name, Key=key_file_name, Filename=key_file_destination) key = open(key_file_destination, \"rb\").read()" ]
[ "version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type ==", "id and hold some reference in workspace??? # dir_path = os.path.dirname(path) # create_repo(dir_path)", "was caused because the source wasn't a directory if e.errno == errno.ENOTDIR: shutil.copy(src,", "Function): # TODO fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function')))", "e: # If the error was caused because the source wasn't a directory", "# add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # # if obj is not None:", "shutil.copy(src, dest) else: print('Directory not copied. Error: %s' % e) NAME = \"code\"", "metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..',", "== str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't", "Create a repo # # Add any untracked files and commit those files", "of a config or similar?) return str(code.id) def list(self, search, offset=0, size=100): if", "code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code =", "digest of a config or similar?) 
return str(code.id) def list(self, search, offset=0, size=100):", "= GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\")", "path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier)", "shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function", "placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory):", "= search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size) def _put(self, obj, *args,", "return code def to_folder_name(self, code): # TODO only name for folder okay? 
(maybe", "= os.path.join(directory, \"code\") # if code.file is not None: # if not os.path.exists(code_dir):", "code_hash = git_hash(path=path) # if code_hash is None and init_repo is True: #", "PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD),", "super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path,", "# dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) #", "the meta information of code object in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE)", "pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository", "config or similar?) return str(code.id) def list(self, search, offset=0, size=100): if search is", "glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier)", "is None: raise ValueError( \"Identifier is not present in the meta information of", "folder name. 
We don't have to search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH]", "repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE,", "'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code", "dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # #", "Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File", "return str(code.id) def list(self, search, offset=0, size=100): if search is not None and", "import re import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier,", "elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) +", "*args, directory: str, **kwargs): code = obj if isinstance(code, Function): # TODO fn", "code): # TODO only name for folder okay? (maybe a uuid, a digest", "or similar?) 
return str(code.id) def list(self, search, offset=0, size=100): if search is not", "'..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code: # if", "# # if there is no repository present in the path, but the", "\"name\" in search: # Shortcut because we know name is the folder name.", "PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH),", "= git_hash(path=path) # if code_hash is None and init_repo is True: # #", "CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def", "== str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0]", "= \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository,", "the folder name. 
We don't have to search in metadata.json name = search.pop(\"name\")", "self.get_file(path, META_FILE) return self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE)", "identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip:", "= PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH)", "identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path", "because the source wasn't a directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else:", "path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return self._create_object(metadata, directory) def _create_object(self,", "fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0]", "git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise ValueError(", "backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return", "# else: # copy(code.path, code_dir) # def get_code_hash(self, 
obj=None, path=None, init_repo=False, **kwargs): #", "# def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # # code_hash = git_hash(path=path) #", "is not present in the meta information of code object in directory \"", "the user wants to create a repo then # # Create a repo", "True: # # if there is no repository present in the path, but", "code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH),", "# Shortcut because we know name is the folder name. We don't have", "search is not None and \"name\" in search: # Shortcut because we know", "name for folder okay? (maybe a uuid, a digest of a config or", "import glob import os import re import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage,", "== RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if", "= identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise ValueError( \"Identifier", "self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code: #", "code_hash is None and init_repo is True: # # if there is no", "= File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def", "because we know name is the folder name. 
We don't have to search", "any untracked files and commit those files # # Get the code_hash of", "pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path =", "directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not None:", "workspace??? # dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path)", "pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest) except OSError as e:", "ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend)", "Add any untracked files and commit those files # # Get the code_hash", "\" couldn't load from type.\") return code def to_folder_name(self, code): # TODO only", "source wasn't a directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not", "(maybe a uuid, a digest of a config or similar?) 
return str(code.id) def", "caused because the source wasn't a directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest)", "of the repo # # TODO give git an id and hold some", "metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version =", "= None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier", "identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash)", "We don't have to search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name)", "CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend", "to search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset,", "if isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\") # if code.file is not", "pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier =", "== RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if", "JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return", "the repo # # TODO give git an id and hold some reference", 
"GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from", "# # Add any untracked files and commit those files # # Get", "reference in workspace??? # dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash", "code_hash of the repo # # TODO give git an id and hold", "IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository", "and commit those files # # Get the code_hash of the repo #", "File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder():", "store_code: # if isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\") # if code.file", "else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\") return code def to_folder_name(self,", "if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else:", "create a repo then # # Create a repo # # Add any", "repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\") return code def", "os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) #", "None: raise ValueError( \"Identifier is not present in the meta information of code", "only name for folder okay? 
(maybe a uuid, a digest of a config", "import os import re import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall,", "import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier,", "import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest) except", "# If the error was caused because the source wasn't a directory if", "code.file is not None: # if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file),", "pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src,", "os.path.join(directory, \"code\", code.file)) # else: # copy(code.path, code_dir) # def get_code_hash(self, obj=None, path=None,", "IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try:", "object in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is", "\\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository", "metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size) def _put(self,", "e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied. 
Error: %s' % e)", "return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path", "'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) ==", "identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE)", "= identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is None:", "directory: str, **kwargs): code = obj if isinstance(code, Function): # TODO fn repository", "def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir),", "== str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file):", "NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class", "fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn,", "files and commit those files # # Get the code_hash of the repo", "# create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # # if obj is", "present in the path, but the user wants to create a repo then", "code 
object in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir", "dest) except OSError as e: # If the error was caused because the", "from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def", "code.file), os.path.join(directory, \"code\", code.file)) # else: # copy(code.path, code_dir) # def get_code_hash(self, obj=None,", "in the path, but the user wants to create a repo then #", "# # Create a repo # # Add any untracked files and commit", "identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise ValueError( \"Identifier is not", "ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE", "init_repo is True: # # if there is no repository present in the", "File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def __init__(self, backend:", "code def to_folder_name(self, code): # TODO only name for folder okay? (maybe a", "Error: %s' % e) NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE", "PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from", "is not None and \"name\" in search: # Shortcut because we know name", "type.\") return code def to_folder_name(self, code): # TODO only name for folder okay?", "dest) else: print('Directory not copied. 
Error: %s' % e) NAME = \"code\" META_FILE", "is not None: # if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory,", "dest): try: shutil.copytree(src, dest) except OSError as e: # If the error was", "not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\")", "code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from", "# if code.file is not None: # if not os.path.exists(code_dir): # os.mkdir(code_dir) #", "# TODO give git an id and hold some reference in workspace??? #", "is the folder name. We don't have to search in metadata.json name =", "def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # # code_hash = git_hash(path=path) # if", "__init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0]", "# if isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\") # if code.file is", "errno import glob import os import re import shutil from pypadre.core.model.code.code_mixin import CodeMixin,", "commit those files # # Get the code_hash of the repo # #", "identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package)", "glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return 
self._create_object(metadata, directory) def _create_object(self, metadata, directory,", "# if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) #", "if isinstance(code, Function): # TODO fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory,", "a repo # # Add any untracked files and commit those files #", "a digest of a config or similar?) return str(code.id) def list(self, search, offset=0,", "File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer", "okay? (maybe a uuid, a digest of a config or similar?) return str(code.id)", "str(code.id) def list(self, search, offset=0, size=100): if search is not None and \"name\"", "metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if", "e) NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer)", "code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code =", "= glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn", "metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, 
path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) ==", "self.write_file(directory, META_FILE, code.metadata) # if store_code: # if isinstance(code, CodeFile): # code_dir =", "repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif", "then # # Create a repo # # Add any untracked files and", "TODO give git an id and hold some reference in workspace??? # dir_path", "directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier", "= glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return self._create_object(metadata, directory) def _create_object(self, metadata,", "DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend):", "not present in the meta information of code object in directory \" +", "= obj if isinstance(code, Function): # TODO fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..',", "= re.escape(name) return super().list(search, offset, size) def _put(self, obj, *args, directory: str, **kwargs):", "= PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = 
PythonFile(metadata=metadata,", "hold some reference in workspace??? # dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path)", "= self.get_file(path, META_FILE) return self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type =", "user wants to create a repo then # # Create a repo #", "add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # # if obj is not None: #", "meta information of code object in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) ==", "_create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None", "== str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file):", "untracked files and commit those files # # Get the code_hash of the", "GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise ValueError( \"Identifier is not present in", "= identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git:", "'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code:", "a config or similar?) 
return str(code.id) def list(self, search, offset=0, size=100): if search", "IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata =", "Shortcut because we know name is the folder name. We don't have to", "= glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata,", "size=100): if search is not None and \"name\" in search: # Shortcut because", "metadata = self.get_file(path, META_FILE) return self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type", "those files # # Get the code_hash of the repo # # TODO", "mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code: # if isinstance(code, CodeFile): # code_dir", "if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path,", "in workspace??? 
# dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash =", "# os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else: # copy(code.path, code_dir)", "if code_hash is None and init_repo is True: # # if there is", "not None and \"name\" in search: # Shortcut because we know name is", "# CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}'", "there is no repository present in the path, but the user wants to", "= identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH)", "in the meta information of code object in directory \" + directory) if", "in search: # Shortcut because we know name is the folder name. We", "couldn't load from type.\") return code def to_folder_name(self, code): # TODO only name", "root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir =", "CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME),", "# code_dir = os.path.join(directory, \"code\") # if code.file is not None: # if", "a repo then # # Create a repo # # Add any untracked", "None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier =", "\"code\", code.file)) # else: # copy(code.path, code_dir) # def get_code_hash(self, obj=None, path=None, init_repo=False,", "code = obj if isinstance(code, Function): # TODO 
fn repository if not os.path.exists(os.path.abspath(os.path.join(directory,", "the error was caused because the source wasn't a directory if e.errno ==", "git_hash=git_hash) if identifier is None: raise ValueError( \"Identifier is not present in the", "path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is", "fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')),", "directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied. Error: %s'", "if root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir", "files # # Get the code_hash of the repo # # TODO give", "except OSError as e: # If the error was caused because the source", "'..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if", "directory))[0] metadata = self.get_file(path, META_FILE) return self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None):", "# copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else: # copy(code.path, code_dir) # def", "import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest):", "'{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path =", "shutil.copytree(src, 
dest) except OSError as e: # If the error was caused because", "# # Get the code_hash of the repo # # TODO give git", "no repository present in the path, but the user wants to create a", "wasn't a directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied.", "elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE)", "similar?) return str(code.id) def list(self, search, offset=0, size=100): if search is not None", "class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir,", "search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size)", "= metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version", "import errno import glob import os import re import shutil from pypadre.core.model.code.code_mixin import", "from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try: shutil.copytree(src,", "\"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository):", "= File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod def placeholder(): return '{CODE_ID}' def __init__(self,", "isinstance(code, Function): # TODO fn repository if not 
os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..',", "_put(self, obj, *args, directory: str, **kwargs): code = obj if isinstance(code, Function): #", "= self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code", "elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE)", "metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) ==", "%s' % e) NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE =", "from type.\") return code def to_folder_name(self, code): # TODO only name for folder", "try: shutil.copytree(src, dest) except OSError as e: # If the error was caused", "variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise", "is None and init_repo is True: # # if there is no repository", "a directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied. Error:", "else: print('Directory not copied. 
Error: %s' % e) NAME = \"code\" META_FILE =", "repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif", "an id and hold some reference in workspace??? # dir_path = os.path.dirname(path) #", "'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata)", "not None: # if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\",", "offset, size) def _put(self, obj, *args, directory: str, **kwargs): code = obj if", "backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata", "git an id and hold some reference in workspace??? 
# dir_path = os.path.dirname(path)", "obj, *args, directory: str, **kwargs): code = obj if isinstance(code, Function): # TODO", "variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier)", "path=None, init_repo=False, **kwargs): # # code_hash = git_hash(path=path) # if code_hash is None", "def _create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier =", "= metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package", "isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\") # if code.file is not None:", "wants to create a repo then # # Create a repo # #", "META_FILE, code.metadata) # if store_code: # if isinstance(code, CodeFile): # code_dir = os.path.join(directory,", "None: # if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file))", "directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return self._create_object(metadata, directory) def", "print('Directory not copied. 
Error: %s' % e) NAME = \"code\" META_FILE = File(\"metadata.json\",", "if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version,", "# if store_code: # if isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\") #", "re import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier,", "re.escape(name) return super().list(search, offset, size) def _put(self, obj, *args, directory: str, **kwargs): code", "import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import", "CodeFile): # code_dir = os.path.join(directory, \"code\") # if code.file is not None: #", "== errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied. Error: %s' % e) NAME", "import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import", "def to_folder_name(self, code): # TODO only name for folder okay? 
(maybe a uuid,", "not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory,", "**kwargs): code = obj if isinstance(code, Function): # TODO fn repository if not", "from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from", "PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier", "os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory,", "from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest) except OSError as", "directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir),", "+ \" couldn't load from type.\") return code def to_folder_name(self, code): # TODO", "= PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata,", "**kwargs): # # code_hash = git_hash(path=path) # if code_hash is None and init_repo", "def 
copy(src, dest): try: shutil.copytree(src, dest) except OSError as e: # If the", "# Add any untracked files and commit those files # # Get the", "os import re import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\", "present in the meta information of code object in directory \" + directory)", "metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package =", "# code_hash = git_hash(path=path) # if code_hash is None and init_repo is True:", "code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code: # if isinstance(code, CodeFile): #", "% e) NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\",", "load from type.\") return code def to_folder_name(self, code): # TODO only name for", "the source wasn't a directory if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory", "glob import os import re import shutil from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile,", "pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend", "# # code_hash = git_hash(path=path) # if code_hash is None and init_repo is", "as e: # If the error was caused because the source wasn't a", "identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise", "else: # copy(code.path, code_dir) # def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # #", "str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), 
variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code", "know name is the folder name. We don't have to search in metadata.json", "identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash", "information of code object in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function):", "# Get the code_hash of the repo # # TODO give git an", "self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER)", "= os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # # if", "str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code", "search: # Shortcut because we know name is the folder name. 
We don't", "and init_repo is True: # # if there is no repository present in", "CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest) except OSError", "OSError as e: # If the error was caused because the source wasn't", "fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package):", "None and init_repo is True: # # if there is no repository present", "is True: # # if there is no repository present in the path,", "@staticmethod def placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def", "\"Identifier is not present in the meta information of code object in directory", "list(self, search, offset=0, size=100): if search is not None and \"name\" in search:", "git_hash(path=path) # if code_hash is None and init_repo is True: # # if", "None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..',", "we know name is the folder name. 
We don't have to search in", "# code_hash = git_hash(path=dir_path) # # if obj is not None: # obj.set_hash(code_hash)", "repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE)", "don't have to search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return", "get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # # code_hash = git_hash(path=path) # if code_hash", "repo then # # Create a repo # # Add any untracked files", "a uuid, a digest of a config or similar?) return str(code.id) def list(self,", "NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\") return code def to_folder_name(self, code): #", "the path, but the user wants to create a repo then # #", "'..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE,", "RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash = identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier", "from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from", "# if code_hash is None and init_repo is True: # # if there", "pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest)", "code_dir = os.path.join(directory, \"code\") # if code.file 
is not None: # if not", "os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # # if obj", "# TODO fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory,", "If the error was caused because the source wasn't a directory if e.errno", "PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository", "# copy(code.path, code_dir) # def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # # code_hash", "name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size) def _put(self, obj,", "but the user wants to create a repo then # # Create a", "pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository", "GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\") return", "JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest) except OSError as e: # If", "obj=None, path=None, init_repo=False, **kwargs): # # code_hash = git_hash(path=path) # if code_hash is", "os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else: # copy(code.path, code_dir) #", "identifier_data.get(GitIdentifier.GIT_HASH) identifier = GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise ValueError( \"Identifier is", "in metadata.json name = search.pop(\"name\") 
search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size) def", "import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import", "def list(self, search, offset=0, size=100): if search is not None and \"name\" in", "import JSonSerializer def copy(src, dest): try: shutil.copytree(src, dest) except OSError as e: #", "if there is no repository present in the path, but the user wants", "code.file)) # else: # copy(code.path, code_dir) # def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs):", "str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load", "search, offset=0, size=100): if search is not None and \"name\" in search: #", "raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\") return code def to_folder_name(self, code):", "'function')), CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code: # if isinstance(code,", "Get the code_hash of the repo # # TODO give git an id", "identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type == RepositoryIdentifier._RepositoryType.git: path = identifier_data.get(GitIdentifier.PATH) git_hash =", "+ directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir =", "directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type", "os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else: # 
copy(code.path,", "CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata,", "= Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE),", "super().list(search, offset, size) def _put(self, obj, *args, directory: str, **kwargs): code = obj", "if code.file is not None: # if not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path,", "# # TODO give git an id and hold some reference in workspace???", "repo # # Add any untracked files and commit those files # #", "create_repo(dir_path) # add_and_commit(dir_path) # code_hash = git_hash(path=dir_path) # # if obj is not", "if identifier is None: raise ValueError( \"Identifier is not present in the meta", "RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION) pip_package = identifier_data.get(PipIdentifier.PIP_PACKAGE) identifier = PipIdentifier(version=version, pip_package=pip_package) if identifier_type", "if store_code: # if isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\") # if", "else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code =", "PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import", "for folder okay? (maybe a uuid, a digest of a config or similar?)", "give git an id and hold some reference in workspace??? 
# dir_path =", "self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code =", "if e.errno == errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied. Error: %s' %", "cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \" couldn't load from type.\") return code", "some reference in workspace??? # dir_path = os.path.dirname(path) # create_repo(dir_path) # add_and_commit(dir_path) #", "init_repo=False, **kwargs): # # code_hash = git_hash(path=path) # if code_hash is None and", "copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else: # copy(code.path, code_dir) # def get_code_hash(self,", "repository present in the path, but the user wants to create a repo", "return super().list(search, offset, size) def _put(self, obj, *args, directory: str, **kwargs): code =", "TODO fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..',", "# Create a repo # # Add any untracked files and commit those", "import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser import", "package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE),", "identifier is None: raise ValueError( \"Identifier 
is not present in the meta information", "TODO only name for folder okay? (maybe a uuid, a digest of a", "metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else: raise NotImplementedError(metadata.get(CodeMixin.CODE_TYPE) + \"", "META_FILE) return self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data", "'..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE)", "NAME), backend=backend) def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE)", "return self._create_object(metadata, directory) def _create_object(self, metadata, directory, root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data =", "\"code\") # if code.file is not None: # if not os.path.exists(code_dir): # os.mkdir(code_dir)", "search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size) def _put(self, obj, *args, directory:", "of code object in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if", "name. We don't have to search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] =", "to create a repo then # # Create a repo # # Add", "copy(code.path, code_dir) # def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # # code_hash =", "code.metadata) # if store_code: # if isinstance(code, CodeFile): # code_dir = os.path.join(directory, \"code\")", "not copied. 
Error: %s' % e) NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer)", "def placeholder(): return '{CODE_ID}' def __init__(self, backend: IPadreBackend): super().__init__(root_dir=os.path.join(backend.root_dir, NAME), backend=backend) def _get_by_dir(self,", "'..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE)", "size) def _put(self, obj, *args, directory: str, **kwargs): code = obj if isinstance(code,", "raise ValueError( \"Identifier is not present in the meta information of code object", "CODE_FILE, code.fn, mode=\"wb\") self.write_file(directory, META_FILE, code.metadata) # if store_code: # if isinstance(code, CodeFile):", "str, **kwargs): code = obj if isinstance(code, Function): # TODO fn repository if", "Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE),", "and hold some reference in workspace??? 
# dir_path = os.path.dirname(path) # create_repo(dir_path) #", "search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search, offset, size) def _put(self, obj, *args, directory: str,", "ValueError( \"Identifier is not present in the meta information of code object in", "RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository", "META_FILE = File(\"metadata.json\", JSonSerializer) # CODE_FILE = File(\"code.bin\", DillSerializer) class CodeFileRepository(IGitRepository, ICodeRepository): @staticmethod", "package=metadata.get(PythonFile.PACKAGE), variable=metadata.get(PythonFile.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.file): code = GenericCall(metadata=metadata, cmd=metadata.get(GenericCall.CMD), repository_identifier=identifier) else:", "pypadre.pod.repository.local.file.generic.i_file_repository import File from pypadre.pod.repository.local.file.generic.i_git_repository import IGitRepository from pypadre.pod.repository.local.file.project_code_repository import CODE_FILE from pypadre.pod.repository.serializer.serialiser", "= GitIdentifier(path=path, git_hash=git_hash) if identifier is None: raise ValueError( \"Identifier is not present", "and \"name\" in search: # Shortcut because we know name is the folder", "root_dir=None): identifier_type = metadata.get(CodeMixin.REPOSITORY_TYPE) identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type ==", "error was caused because the source wasn't a directory if e.errno == errno.ENOTDIR:", "copied. 
Error: %s' % e) NAME = \"code\" META_FILE = File(\"metadata.json\", JSonSerializer) #", "the code_hash of the repo # # TODO give git an id and", "os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir,", "path, but the user wants to create a repo then # # Create", "os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn = self.get_file(fn_dir, CODE_FILE) code = Function(fn=fn, metadata=metadata, repository_identifier=identifier) elif", "from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import ICodeRepository from pypadre.pod.repository.local.file.generic.i_file_repository import File from", "code_dir) # def get_code_hash(self, obj=None, path=None, init_repo=False, **kwargs): # # code_hash = git_hash(path=path)", "# TODO only name for folder okay? (maybe a uuid, a digest of", "in directory \" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not", "to_folder_name(self, code): # TODO only name for folder okay? (maybe a uuid, a", "path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.python_file): code = PythonFile(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonFile.PACKAGE),", "name is the folder name. We don't have to search in metadata.json name", "not os.path.exists(code_dir): # os.mkdir(code_dir) # copy(os.path.join(code.path, code.file), os.path.join(directory, \"code\", code.file)) # else: #", "repo # # TODO give git an id and hold some reference in", "uuid, a digest of a config or similar?) 
return str(code.id) def list(self, search,", "def _get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return self._create_object(metadata,", "offset=0, size=100): if search is not None and \"name\" in search: # Shortcut", "\" + directory) if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir", "errno.ENOTDIR: shutil.copy(src, dest) else: print('Directory not copied. Error: %s' % e) NAME =", "glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] fn =", "_get_by_dir(self, directory): path = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir), directory))[0] metadata = self.get_file(path, META_FILE) return self._create_object(metadata, directory)", "# if there is no repository present in the path, but the user", "have to search in metadata.json name = search.pop(\"name\") search[self.FOLDER_SEARCH] = re.escape(name) return super().list(search,", "from pypadre.core.model.code.code_mixin import CodeMixin, PythonPackage, PythonFile, GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from", "if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))): os.mkdir(os.path.abspath(os.path.join(directory, '..', 'function'))) self.write_file(os.path.abspath(os.path.join(directory, '..', 'function')), CODE_FILE, code.fn,", "copy(src, dest): try: shutil.copytree(src, dest) except OSError as e: # If the error", "GenericCall, \\ GitIdentifier, RepositoryIdentifier, PipIdentifier, Function from pypadre.pod.backend.i_padre_backend import IPadreBackend from pypadre.pod.repository.i_repository import", 
"if metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory,", "if search is not None and \"name\" in search: # Shortcut because we", "os.path.join(directory, \"code\") # if code.file is not None: # if not os.path.exists(code_dir): #", "obj if isinstance(code, Function): # TODO fn repository if not os.path.exists(os.path.abspath(os.path.join(directory, '..', 'function'))):", "metadata=metadata, repository_identifier=identifier) elif metadata.get(CodeMixin.CODE_TYPE) == str(CodeMixin._CodeType.package): code = PythonPackage(metadata=metadata, path=metadata.get(PythonFile.PATH), package=metadata.get(PythonPackage.PACKAGE), variable=metadata.get(PythonPackage.VARIABLE), repository_identifier=identifier)", "is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(self.root_dir),", "def _put(self, obj, *args, directory: str, **kwargs): code = obj if isinstance(code, Function):", "folder okay? (maybe a uuid, a digest of a config or similar?) return", "str(CodeMixin._CodeType.function): if root_dir is not None: fn_dir = glob.glob(os.path.join(self._replace_placeholders_with_wildcard(root_dir), os.path.abspath(os.path.join(directory, '..', 'function'))))[0] else:", "None and \"name\" in search: # Shortcut because we know name is the", "is no repository present in the path, but the user wants to create", "identifier_data = metadata.get(CodeMixin.IDENTIFIER) identifier = None if identifier_type == RepositoryIdentifier._RepositoryType.pip: version = identifier_data.get(PipIdentifier.VERSION)" ]
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "and # limitations under the License. \"\"\"Module containing classes related to Azure disks.", "from VMs. At this time, Azure only supports one disk type, so the", "fail while the associated VM ' 'is deleted, but will be retried.', self.name)", "= 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK =", "== PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock:", "self.disk_type == disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = {", "disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS,", "\"\"\" pass # TODO(user): Implement Attach() # (not critical because disks are attached", "is None: return False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout,", "whether the local disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in", "\"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk", "pass # TODO(user): Implement Attach() # (not critical because disks are attached to", "this file except in compliance with the License. # You may obtain a", "azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because that is handled by", "Disk.\"\"\" _lock = threading.Lock() num_disks = {} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk,", "the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s.", "'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. 
This may fail while the associated", "(self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk to", "self.name is None: return False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name]", "may fail while the associated VM ' 'is deleted, but will be retried.',", "ANY KIND, either express or implied. # See the License for the specific", "'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name]", "retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the disk exists.\"\"\" if self.name", "self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return False", "FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' %", "representing an Azure Disk.\"\"\" _lock = threading.Lock() num_disks = {} def __init__(self, disk_spec,", "= 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE)", "detached from VMs. At this time, Azure only supports one disk type, so", "disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL: media = disk.SSD if", "== 0 and 'logicalUnitNumber' not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name =", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name = vm_name", "not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm):", "if the disk exists.\"\"\" if self.name is None and self.created: return True elif", "num_disks = {} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching", "in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches", "reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "only supports one disk type, so the disk spec's disk type is ignored.", "vm: The AzureVirtualMachine instance to which the disk will be attached. \"\"\" pass", "the disk from a VM.\"\"\" pass # TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns", "to VMs when created) def Detach(self): \"\"\"Detaches the disk from a VM.\"\"\" pass", "Azure Disk.\"\"\" _lock = threading.Lock() num_disks = {} def __init__(self, disk_spec, vm_name, machine_type):", "# TODO(user): Implement Attach() # (not critical because disks are attached to VMs", "# Deliberately omitting PLRS, because that is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS,", "OF ANY KIND, either express or implied. 
# See the License for the", "def Detach(self): \"\"\"Detaches the disk from a VM.\"\"\" pass # TODO(user): Implement Detach()", "True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show',", "'--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return", "= PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type],", "type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks. \"\"\" import", "classes related to Azure disks. Disks can be created, deleted, attached to VMs,", "ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks. \"\"\" import json import", "self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd)", "instance to which the disk will be attached. \"\"\" pass # TODO(user): Implement", "perfkitbenchmarker import disk from perfkitbenchmarker import flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure", "true if the disk exists.\"\"\" if self.name is None and self.created: return True", "LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL }", "which the disk will be attached. \"\"\" pass # TODO(user): Implement Attach() #", "# PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are # asynchronously replicated. } LOCAL_SSD_PREFIXES =", "Copyright 2014 PerfKitBenchmarker Authors. All rights reserved. 
# # Licensed under the Apache", "AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd", "or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk", "flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD:", "the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _, _", "= json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber' not in", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "import vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH = 'azure' FLAGS =", "def GetDevicePath(self): \"\"\"Returns the path to the device inside the VM.\"\"\" if self.disk_type", "} LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE}", "return False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd =", "associated VM ' 'is deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self):", "suppress_warning=True) try: json.loads(stdout) except ValueError: return False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get", "self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION:", "= 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True def _Delete(self):", 
"self.name] logging.info('Deleting disk %s. This may fail while the associated VM ' 'is", "((self.lun == 0 and 'logicalUnitNumber' not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name", "attached. \"\"\" pass # TODO(user): Implement Attach() # (not critical because disks are", "Detach() def GetDevicePath(self): \"\"\"Returns the path to the device inside the VM.\"\"\" if", "perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH = 'azure' FLAGS", "= FLAGS.azure_host_caching self.name = None self.vm_name = vm_name self.lun = None if self.disk_type", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "logging import threading from perfkitbenchmarker import disk from perfkitbenchmarker import flags from perfkitbenchmarker", "stdout, _, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "those are # asynchronously replicated. } LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def", "disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type", "required by applicable law or agreed to in writing, software # distributed under", "disks. 
Disks can be created, deleted, attached to VMs, and detached from VMs.", "if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL", "= vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0 and", "the path to the device inside the VM.\"\"\" if self.disk_type == disk.LOCAL: return", "from perfkitbenchmarker import disk from perfkitbenchmarker import flags from perfkitbenchmarker import vm_util from", "applicable law or agreed to in writing, software # distributed under the License", "VM ' 'is deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns", "not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created", "response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber' not", "to a VM. Args: vm: The AzureVirtualMachine instance to which the disk will", "azure disks. \"\"\" import json import logging import threading from perfkitbenchmarker import disk", "inside the VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb' else: return '/dev/sd%s' %", "or agreed to in writing, software # distributed under the License is distributed", "def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name]", "disk %s. This may fail while the associated VM ' 'is deleted, but", "TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the path to the device inside the", "Deliberately omitting PLRS, because that is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because", "limitations under the License. \"\"\"Module containing classes related to Azure disks. 
Disks can", "else disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def", "machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name = vm_name self.lun", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "All rights reserved. # # Licensed under the Apache License, Version 2.0 (the", "disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS:", "self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type ==", "omitting PLRS, because that is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those", "vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return False return True @vm_util.Retry() def _PostCreate(self):", "'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD }", "disk.REGION, # Deliberately omitting PLRS, because that is handled by # PREMIUM_STORAGE_METADATA, and", "import json import logging import threading from perfkitbenchmarker import disk from perfkitbenchmarker import", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "json import logging import threading from perfkitbenchmarker import disk from perfkitbenchmarker import flags", "writing, software # distributed under the License is distributed on an \"AS IS\"", "None: return False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout, _,", "= threading.Lock() num_disks = {} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching", "You may obtain a copy of the 
License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD }", "License. # You may obtain a copy of the License at # #", "PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = {", "DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA =", "threading from perfkitbenchmarker import disk from perfkitbenchmarker import flags from perfkitbenchmarker import vm_util", "<gh_stars>0 # Copyright 2014 PerfKitBenchmarker Authors. All rights reserved. # # Licensed under", "compliance with the License. # You may obtain a copy of the License", "disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. This", "None and self.created: return True elif self.name is None: return False show_cmd =", "disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE:", "None self.vm_name = vm_name self.lun = None if self.disk_type == PREMIUM_STORAGE: self.metadata =", "stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return False return", "the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type", "attached to VMs, and detached from VMs. 
At this time, Azure only supports", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with", "!= azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching,", "{} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name =", "disk type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks. \"\"\"", "http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks. \"\"\" import json import logging import", "governing permissions and # limitations under the License. \"\"\"Module containing classes related to", "'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is an SSD drive.\"\"\"", "ValueError: return False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd", "the device inside the VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb' else: return", "False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH,", "# TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the path to the device inside", "This may fail while the associated VM ' 'is deleted, but will be", "= flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE =", "not use this file except in compliance with the License. # You may", "disks. 
\"\"\" import json import logging import threading from perfkitbenchmarker import disk from", "' 'is deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true", "# asynchronously replicated. } LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check", "handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are # asynchronously replicated. }", "_PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout,", "License, Version 2.0 (the \"License\"); # you may not use this file except", "vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name]", "disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd)", "return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\"", "'--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] =", "True elif self.name is None: return False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show',", "= response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber' not in data_disk) or (self.lun", "PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd", "# you may not use this file except in compliance 
with the License.", "self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name = vm_name self.lun = None if", "return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm',", "agreed to in writing, software # distributed under the License is distributed on", "# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved. # # Licensed under the", "= { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the", "one disk type, so the disk spec's disk type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx", "PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE:", "(the \"License\"); # you may not use this file except in compliance with", "License. \"\"\"Module containing classes related to Azure disks. 
Disks can be created, deleted,", "return True elif self.name is None: return False show_cmd = [AZURE_PATH, 'vm', 'disk',", "try: json.loads(stdout) except ValueError: return False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the", "'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA", "TODO(user): Implement Attach() # (not critical because disks are attached to VMs when", "import logging import threading from perfkitbenchmarker import disk from perfkitbenchmarker import flags from", "1 self.created = True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm',", "for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock =", "= None self.vm_name = vm_name self.lun = None if self.disk_type == PREMIUM_STORAGE: self.metadata", "this time, Azure only supports one disk type, so the disk spec's disk", "0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True def _Delete(self): \"\"\"Deletes", "= [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response =", "# Unless required by applicable law or agreed to in writing, software #", "the disk spec's disk type is ignored. 
See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about", "by applicable law or agreed to in writing, software # distributed under the", "AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK", "self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name = vm_name self.lun = None", "data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber' not in data_disk) or", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "type, so the disk spec's disk type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more", "the disk exists.\"\"\" if self.name is None and self.created: return True elif self.name", "json.loads(stdout) except ValueError: return False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's", "to which the disk will be attached. \"\"\" pass # TODO(user): Implement Attach()", "in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created =", "= { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because that is", "def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix)", "self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata = {", "self.vm_name = vm_name self.lun = None if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA", "media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION:", "The AzureVirtualMachine instance to which the disk will be attached. 
\"\"\" pass #", "== disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA:", "deleted, attached to VMs, and detached from VMs. At this time, Azure only", "vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber'", "0 and 'logicalUnitNumber' not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name']", "file except in compliance with the License. # You may obtain a copy", "will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the disk exists.\"\"\"", "because that is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are #", "self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the disk exists.\"\"\" if self.name is", "PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE", "= { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is", "information about azure disks. \"\"\" import json import logging import threading from perfkitbenchmarker", "Azure disks. Disks can be created, deleted, attached to VMs, and detached from", "License for the specific language governing permissions and # limitations under the License.", "specific language governing permissions and # limitations under the License. 
\"\"\"Module containing classes", "= {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = {", "= None if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK:", "pass # TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the path to the device", "to in writing, software # distributed under the License is distributed on an", "+= 1 self.created = True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH,", "implied. # See the License for the specific language governing permissions and #", "= vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return False return True @vm_util.Retry() def", "LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "for the specific language governing permissions and # limitations under the License. \"\"\"Module", "[AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. 
This may fail while", "else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new',", "True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete',", "exists.\"\"\" if self.name is None and self.created: return True elif self.name is None:", "def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None", "STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type", "disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _, _ =", "\"\"\"Attaches the disk to a VM. Args: vm: The AzureVirtualMachine instance to which", "disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting", "def Attach(self, vm): \"\"\"Attaches the disk to a VM. Args: vm: The AzureVirtualMachine", "(RA)GRS, because those are # asynchronously replicated. } LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G'", "disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert", "be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the disk exists.\"\"\" if", "import threading from perfkitbenchmarker import disk from perfkitbenchmarker import flags from perfkitbenchmarker import", "or implied. 
# See the License for the specific language governing permissions and", "self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert", "FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE", "if self.disk_type == disk.LOCAL: return '/dev/sdb' else: return '/dev/sd%s' % chr(ord(DRIVE_START_LETTER) + self.lun)", "VM.\"\"\" pass # TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the path to the", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in", "perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER =", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "== STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif", "with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)]", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type !=", "to Azure disks. 
Disks can be created, deleted, attached to VMs, and detached", "= vm_name self.lun = None if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif", "def _Exists(self): \"\"\"Returns true if the disk exists.\"\"\" if self.name is None and", "spec's disk type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks.", "'vm', 'show', '--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk", "device inside the VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb' else: return '/dev/sd%s'", "self.name is None and self.created: return True elif self.name is None: return False", "} def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type ==", "= 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE =", "== azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm',", "create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "{disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA:", "LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk", "disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE:", "and detached from VMs. 
At this time, Azure only supports one disk type,", "azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk',", "disk type, so the disk spec's disk type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for", "'--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun]", "azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because that is handled by # PREMIUM_STORAGE_METADATA,", "} def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is an SSD drive.\"\"\" return", "= [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True)", "{ disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\"", "AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True", "for more information about azure disks. \"\"\" import json import logging import threading", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "flags as azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE", "DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP =", "disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION,", "VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb' else: return '/dev/sd%s' % chr(ord(DRIVE_START_LETTER) +", "PerfKitBenchmarker Authors. All rights reserved. 
# # Licensed under the Apache License, Version", "and self.created: return True elif self.name is None: return False show_cmd = [AZURE_PATH,", "from a VM.\"\"\" pass # TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the path", "\"\"\" import json import logging import threading from perfkitbenchmarker import disk from perfkitbenchmarker", "is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object", "= { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS:", "an Azure Disk.\"\"\" _lock = threading.Lock() num_disks = {} def __init__(self, disk_spec, vm_name,", "and 'logicalUnitNumber' not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def", "disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately", "use this file except in compliance with the License. # You may obtain", "a VM.\"\"\" pass # TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the path to", "PLRS, because that is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are", "the associated VM ' 'is deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def", "show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd,", "delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. This may", "self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True def _Delete(self): \"\"\"Deletes the", "disk spec's disk type is ignored. 
See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure", "Implement Detach() def GetDevicePath(self): \"\"\"Returns the path to the device inside the VM.\"\"\"", "@vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json',", "vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the disk exists.\"\"\" if self.name is None", "prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock = threading.Lock()", "except ValueError: return False return True @vm_util.Retry() def _PostCreate(self): \"\"\"Get the disk's name.\"\"\"", "disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk):", "At this time, Azure only supports one disk type, so the disk spec's", "more information about azure disks. \"\"\" import json import logging import threading from", "disk exists.\"\"\" if self.name is None and self.created: return True elif self.name is", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "will be attached. \"\"\" pass # TODO(user): Implement Attach() # (not critical because", "'--blob-delete', self.name] logging.info('Deleting disk %s. This may fail while the associated VM '", "disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self):", "FLAGS.azure_host_caching self.name = None self.vm_name = vm_name self.lun = None if self.disk_type ==", "VMs. At this time, Azure only supports one disk type, so the disk", "containing classes related to Azure disks. Disks can be created, deleted, attached to", "Args: vm: The AzureVirtualMachine instance to which the disk will be attached. \"\"\"", "self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk to a VM. 
Args:", "def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name]", "AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because that", "import flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH", "import disk from perfkitbenchmarker import flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import", "when created) def Detach(self): \"\"\"Detaches the disk from a VM.\"\"\" pass # TODO(user):", "threading.Lock() num_disks = {} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching =", "2.0 (the \"License\"); # you may not use this file except in compliance", "assert ((self.lun == 0 and 'logicalUnitNumber' not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber'])))", "'is deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if", "# limitations under the License. \"\"\"Module containing classes related to Azure disks. Disks", "GetDevicePath(self): \"\"\"Returns the path to the device inside the VM.\"\"\" if self.disk_type ==", "if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata =", "# (not critical because disks are attached to VMs when created) def Detach(self):", "= 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD:", "LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock = threading.Lock() num_disks =", "asynchronously replicated. 
} LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether", "= AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True def _Delete(self): \"\"\"Deletes the disk.\"\"\"", "} elif self.disk_type == disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata", "to VMs, and detached from VMs. At this time, Azure only supports one", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL: media = disk.SSD", "= True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete',", "critical because disks are attached to VMs when created) def Detach(self): \"\"\"Detaches the", "Detach(self): \"\"\"Detaches the disk from a VM.\"\"\" pass # TODO(user): Implement Detach() def", "by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are # asynchronously replicated. } LOCAL_SSD_PREFIXES", "DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK,", "None if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata", "data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches the", "AzureVirtualMachine instance to which the disk will be attached. \"\"\" pass # TODO(user):", "# # Unless required by applicable law or agreed to in writing, software", "self.created: return True elif self.name is None: return False show_cmd = [AZURE_PATH, 'vm',", "express or implied. 
# See the License for the specific language governing permissions", "AzureDisk.num_disks[self.vm_name] += 1 self.created = True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd =", "is None and self.created: return True elif self.name is None: return False show_cmd", "the License. \"\"\"Module containing classes related to Azure disks. Disks can be created,", "int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk to a VM.", "either express or implied. # See the License for the specific language governing", "[AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout)", "assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s'", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0", "any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock", "%s. This may fail while the associated VM ' 'is deleted, but will", "as azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE =", "\"\"\"Detaches the disk from a VM.\"\"\" pass # TODO(user): Implement Detach() def GetDevicePath(self):", "disk from perfkitbenchmarker import flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags", "super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name = vm_name self.lun =", "the License. 
# You may obtain a copy of the License at #", "(not critical because disks are attached to VMs when created) def Detach(self): \"\"\"Detaches", "'logicalUnitNumber' not in data_disk) or (self.lun == int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self,", "data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk to a VM. Args: vm: The", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA: media,", "SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an", "self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1", "\"\"\"Returns the path to the device inside the VM.\"\"\" if self.disk_type == disk.LOCAL:", "{ azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because that is handled", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "while the associated VM ' 'is deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd)", "self.lun = None if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type ==", "disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP", "str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name]", "elif self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD", "== PREMIUM_STORAGE: 
self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA:", "\"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert", "'premium-storage' STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure'", "VMs when created) def Detach(self): \"\"\"Detaches the disk from a VM.\"\"\" pass #", "= [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. This may fail", "\"\"\"Get the disk's name.\"\"\" show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _,", "response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber' not in data_disk) or (self.lun ==", "disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else", "the VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb' else: return '/dev/sd%s' % chr(ord(DRIVE_START_LETTER)", "azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage'", "} AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because", "with the License. # You may obtain a copy of the License at", "_lock = threading.Lock() num_disks = {} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec)", "permissions and # limitations under the License. \"\"\"Module containing classes related to Azure", "and (RA)GRS, because those are # asynchronously replicated. 
} LOCAL_SSD_PREFIXES = { 'Standard_D',", "azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name,", "FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd = [AZURE_PATH,", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "because those are # asynchronously replicated. } LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' }", "AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock = threading.Lock() num_disks = {} def", "is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are # asynchronously replicated.", "created, deleted, attached to VMs, and detached from VMs. At this time, Azure", "See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks. \"\"\" import json import logging", "the disk to a VM. Args: vm: The AzureVirtualMachine instance to which the", "self.name = None self.vm_name = vm_name self.lun = None if self.disk_type == PREMIUM_STORAGE:", "rights reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "self.created = True def _Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk',", "be attached. \"\"\" pass # TODO(user): Implement Attach() # (not critical because disks", "self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun", "'show', '--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError:", "'show', '--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk =", "related to Azure disks. 
Disks can be created, deleted, attached to VMs, and", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "Disks can be created, deleted, attached to VMs, and detached from VMs. At", "_, _ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun ==", "disk to a VM. Args: vm: The AzureVirtualMachine instance to which the disk", "AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE:", "VMs, and detached from VMs. At this time, Azure only supports one disk", "the disk will be attached. \"\"\" pass # TODO(user): Implement Attach() # (not", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "disk from a VM.\"\"\" pass # TODO(user): Implement Detach() def GetDevicePath(self): \"\"\"Returns the", "2014 PerfKitBenchmarker Authors. All rights reserved. # # Licensed under the Apache License,", "from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER", "AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] += 1 self.created = True def", "'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. This may fail while the", "STANDARD_DISK = 'standard-disk' DISK_TYPE = {disk.STANDARD: STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE,", "the local disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES))", "perfkitbenchmarker import flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags", "in compliance with the License. 
# You may obtain a copy of the", "disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL: media =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks:", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "__init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name = None self.vm_name", "% self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout, _, _ =", "json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0 and 'logicalUnitNumber' not in data_disk)", "disk.STANDARD } elif self.disk_type == disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD", "an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing", "logging.info('Deleting disk %s. This may fail while the associated VM ' 'is deleted,", "replicated. } LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the", "Attach(self, vm): \"\"\"Attaches the disk to a VM. 
Args: vm: The AzureVirtualMachine instance", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "VM. Args: vm: The AzureVirtualMachine instance to which the disk will be attached.", "vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS", "PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE,", "Azure only supports one disk type, so the disk spec's disk type is", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "created) def Detach(self): \"\"\"Detaches the disk from a VM.\"\"\" pass # TODO(user): Implement", "that is handled by # PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are # asynchronously", "the specific language governing permissions and # limitations under the License. \"\"\"Module containing", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "[AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try:", "elif self.name is None: return False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json',", "{ 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is an", "AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type)", "if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS", "disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type ==", "a VM. 
Args: vm: The AzureVirtualMachine instance to which the disk will be", "are attached to VMs when created) def Detach(self): \"\"\"Detaches the disk from a", "self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates", "_Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS else:", "_Exists(self): \"\"\"Returns true if the disk exists.\"\"\" if self.name is None and self.created:", "is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information about azure disks. \"\"\" import json", "\"\"\"Returns true if the disk exists.\"\"\" if self.name is None and self.created: return", "disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION:", "show_cmd = [AZURE_PATH, 'vm', 'show', '--json', self.vm_name] stdout, _, _ = vm_util.IssueCommand(show_cmd) response", "local disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class", "if self.name is None and self.created: return True elif self.name is None: return", "about azure disks. \"\"\" import json import logging import threading from perfkitbenchmarker import", "= {} def __init__(self, disk_spec, vm_name, machine_type): super(AzureDisk, self).__init__(disk_spec) self.host_caching = FLAGS.azure_host_caching self.name", "disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE, azure_flags.ZRS: disk.REGION, #", "'disk', 'show', '--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except", "media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type", "under the License. 
\"\"\"Module containing classes related to Azure disks. Disks can be", "drive.\"\"\" return any((machine_type.startswith(prefix) for prefix in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure", "Version 2.0 (the \"License\"); # you may not use this file except in", "from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH = 'azure'", "except in compliance with the License. # You may obtain a copy of", "'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c' PREMIUM_STORAGE = 'premium-storage' STANDARD_DISK = 'standard-disk'", "STANDARD_DISK, disk.REMOTE_SSD: PREMIUM_STORAGE} AZURE = 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD,", "'delete', '--blob-delete', self.name] logging.info('Deleting disk %s. This may fail while the associated VM", "Attach() # (not critical because disks are attached to VMs when created) def", "[AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not", "vm_name self.lun = None if self.disk_type == PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type", "'vm', 'disk', 'show', '--json', self.name] stdout, _, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout)", "def _Create(self): \"\"\"Creates the disk.\"\"\" if self.disk_type == PREMIUM_STORAGE: assert FLAGS.azure_storage_type == azure_flags.PLRS", "to the device inside the VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb' else:", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "attached to VMs when created) def Detach(self): \"\"\"Detaches the disk from a VM.\"\"\"", "so the disk spec's disk type is ignored. See http://msdn.microsoft.com/en-us/library/azure/dn790303.aspx for more information", "\"\"\"Check whether the local disk is an SSD drive.\"\"\" return any((machine_type.startswith(prefix) for prefix", "'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type): \"\"\"Check whether the local disk is an SSD", "= { disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL:", "path to the device inside the VM.\"\"\" if self.disk_type == disk.LOCAL: return '/dev/sdb'", "= 'Azure' disk.RegisterDiskTypeMap(AZURE, DISK_TYPE) PREMIUM_STORAGE_METADATA = { disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD", "PREMIUM_STORAGE: self.metadata = PREMIUM_STORAGE_METADATA elif self.disk_type == STANDARD_DISK: self.metadata = { disk.MEDIA: disk.HDD,", "\"\"\"Object representing an Azure Disk.\"\"\" _lock = threading.Lock() num_disks = {} def __init__(self,", "return False show_cmd = [AZURE_PATH, 'vm', 'disk', 'show', '--json', self.name] stdout, _, _", "class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock = threading.Lock() num_disks = {}", "_ = vm_util.IssueCommand(show_cmd) response = json.loads(stdout) data_disk = response['DataDisks'][self.lun] assert ((self.lun == 0", "{ disk.MEDIA: disk.HDD, disk.REPLICATION: AZURE_REPLICATION_MAP[FLAGS.azure_storage_type], disk.LEGACY_DISK_TYPE: disk.STANDARD } elif self.disk_type == disk.LOCAL: media", "because disks are attached to VMs when created) def Detach(self): \"\"\"Detaches the disk", "PREMIUM_STORAGE_METADATA, and (RA)GRS, because those are # asynchronously replicated. 
} LOCAL_SSD_PREFIXES = {", "\"\"\"Module containing classes related to Azure disks. Disks can be created, deleted, attached", "flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags as azure_flags AZURE_PATH =", "_ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return False return True @vm_util.Retry()", "vm): \"\"\"Attaches the disk to a VM. Args: vm: The AzureVirtualMachine instance to", "from perfkitbenchmarker import flags from perfkitbenchmarker import vm_util from perfkitbenchmarker.providers.azure import flags as", "supports one disk type, so the disk spec's disk type is ignored. See", "deleted, but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the", "assert FLAGS.azure_storage_type == azure_flags.PLRS else: assert FLAGS.azure_storage_type != azure_flags.PLRS with self._lock: create_cmd =", "time, Azure only supports one disk type, so the disk spec's disk type", "_Delete(self): \"\"\"Deletes the disk.\"\"\" delete_cmd = [AZURE_PATH, 'vm', 'disk', 'delete', '--blob-delete', self.name] logging.info('Deleting", "can be created, deleted, attached to VMs, and detached from VMs. 
At this", "self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun =", "= disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata = { disk.MEDIA: media, disk.REPLICATION: disk.NONE,", "disk.ZONE, azure_flags.ZRS: disk.REGION, # Deliberately omitting PLRS, because that is handled by #", "import flags as azure_flags AZURE_PATH = 'azure' FLAGS = flags.FLAGS DRIVE_START_LETTER = 'c'", "but will be retried.', self.name) vm_util.IssueCommand(delete_cmd) def _Exists(self): \"\"\"Returns true if the disk", "== int(data_disk['logicalUnitNumber']))) self.name = data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk to a", "Implement Attach() # (not critical because disks are attached to VMs when created)", "be created, deleted, attached to VMs, and detached from VMs. At this time,", "disks are attached to VMs when created) def Detach(self): \"\"\"Detaches the disk from", "disk will be attached. \"\"\" pass # TODO(user): Implement Attach() # (not critical", "language governing permissions and # limitations under the License. 
\"\"\"Module containing classes related", "if self.vm_name not in AzureDisk.num_disks: AzureDisk.num_disks[self.vm_name] = 0 self.lun = AzureDisk.num_disks[self.vm_name] AzureDisk.num_disks[self.vm_name] +=", "{ disk.MEDIA: disk.SSD, disk.REPLICATION: disk.ZONE, disk.LEGACY_DISK_TYPE: disk.REMOTE_SSD } AZURE_REPLICATION_MAP = { azure_flags.LRS: disk.ZONE,", "in LOCAL_SSD_PREFIXES)) class AzureDisk(disk.BaseDisk): \"\"\"Object representing an Azure Disk.\"\"\" _lock = threading.Lock() num_disks", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "elif self.disk_type == disk.LOCAL: media = disk.SSD if LocalDiskIsSSD(machine_type) else disk.HDD self.metadata =", "_, _ = vm_util.IssueCommand(show_cmd, suppress_warning=True) try: json.loads(stdout) except ValueError: return False return True", "= data_disk['name'] def Attach(self, vm): \"\"\"Attaches the disk to a VM. Args: vm:", "are # asynchronously replicated. } LOCAL_SSD_PREFIXES = { 'Standard_D', 'Standard_G' } def LocalDiskIsSSD(machine_type):", "= [AZURE_PATH, 'vm', 'disk', 'attach-new', '--host-caching=%s' % self.host_caching, self.vm_name, str(self.disk_size)] vm_util.IssueRetryableCommand(create_cmd) if self.vm_name", "disk.MEDIA: media, disk.REPLICATION: disk.NONE, disk.LEGACY_DISK_TYPE: disk.LOCAL } def _Create(self): \"\"\"Creates the disk.\"\"\" if" ]
[ "<reponame>fbitti/Bioinformatics-with-Python-Cookbook-Second-Edition<gh_stars>100-1000 import sys sys.stdout.write('ID_1 ID_2 missing\\n0 0 0 \\n') for line in sys.stdin:", "ID_2 missing\\n0 0 0 \\n') for line in sys.stdin: ind = line.rstrip() sys.stdout.write('%s", "sys.stdout.write('ID_1 ID_2 missing\\n0 0 0 \\n') for line in sys.stdin: ind = line.rstrip()", "import sys sys.stdout.write('ID_1 ID_2 missing\\n0 0 0 \\n') for line in sys.stdin: ind", "missing\\n0 0 0 \\n') for line in sys.stdin: ind = line.rstrip() sys.stdout.write('%s %s", "sys sys.stdout.write('ID_1 ID_2 missing\\n0 0 0 \\n') for line in sys.stdin: ind =", "0 0 \\n') for line in sys.stdin: ind = line.rstrip() sys.stdout.write('%s %s 0\\n'", "0 \\n') for line in sys.stdin: ind = line.rstrip() sys.stdout.write('%s %s 0\\n' %", "\\n') for line in sys.stdin: ind = line.rstrip() sys.stdout.write('%s %s 0\\n' % (ind,", "for line in sys.stdin: ind = line.rstrip() sys.stdout.write('%s %s 0\\n' % (ind, ind))" ]
[ "open(fich_json, 'w')) def do_local(self): # Recorre la lista y descarga recursos remotos for", "+ nombre + '=' + '\"' + valor + '\"' print(linea) def to_json(self,", "open(sys.argv[1]) except (ValueError, IndexError, FileNotFoundError): sys.exit(\"Usage: python3 karaoke.py file.smil\") fichero = sys.argv[1] fich_json", "fich.split('.')[0] + '.json' json.dump(self.lista, open(fich_json, 'w')) def do_local(self): # Recorre la lista y", "posi in atrib.items(): if atributos == \"src\" and posi[0:7] == \"http://\": atrib_Nuevo =", "urlretrieve(posi, atrib_Nuevo) print(\"Descargando %s ...\" % posi) if __name__ == \"__main__\": if len(sys.argv)", "la lista de etiquetas \"\"\" linea = \" \" for elem in self.lista:", "andrea \"\"\" import sys import json from xml.sax import make_parser from urllib.request import", "karaoke.py file.smil\") try: obj = open(sys.argv[1]) except (ValueError, IndexError, FileNotFoundError): sys.exit(\"Usage: python3 karaoke.py", "para crear la lista de etiquetas \"\"\" linea = \" \" for elem", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Oct 8 20:54:26 2018", "diccs[1] for atributos, posi in atrib.items(): if atributos == \"src\" and posi[0:7] ==", "class KaraokeLocal(SmallSMILHandler): def __init__(self, fichero): # Inicializo y construyo la lista parser =", "== \"__main__\": if len(sys.argv) != 2: sys.exit(\"Usage: python3 karaoke.py file.smil\") try: obj =", "manejador parser.parse(open(fichero)) self.lista = cHandler.get_tags() def __str__(self): \"\"\" Método para crear la lista", "atributos, posi in atrib.items(): if atributos == \"src\" and posi[0:7] == \"http://\": atrib_Nuevo", "python3 karaoke.py file.smil\") fichero = sys.argv[1] fich_json = sys.argv[1].replace(\".smil\", \".json\") obj = KaraokeLocal(fichero)", "Inicializo y construyo la lista parser = make_parser() # Creo parser cHandler =", "= elem[1].items() for nombre, valor in atributos: if valor != '': linea =", "linea + '\\t' + nombre + '=' + '\"' + valor + '\"'", "valor 
in atributos: if valor != '': linea = linea + '\\t' +", "la lista parser = make_parser() # Creo parser cHandler = SmallSMILHandler() # Creo", "Le paso el parser al manejador parser.parse(open(fichero)) self.lista = cHandler.get_tags() def __str__(self): \"\"\"", "# Creamos un fichero en formato json fich_json = json.dumps(self.lista) if fich_json is", "xml.sax import make_parser from urllib.request import urlretrieve from smallsmilhandler import SmallSMILHandler class KaraokeLocal(SmallSMILHandler):", "Creo parser cHandler = SmallSMILHandler() # Creo manejador parser.setContentHandler(cHandler) # Le paso el", "do_local(self): # Recorre la lista y descarga recursos remotos for diccs in self.lista:", "atrib.items(): if atributos == \"src\" and posi[0:7] == \"http://\": atrib_Nuevo = posi.split('/')[-1] urlretrieve(posi,", "+ '\"' print(linea) def to_json(self, fich, fich_json=None): # Creamos un fichero en formato", "un fichero en formato json fich_json = json.dumps(self.lista) if fich_json is None: fich_json", "\" \" for elem in self.lista: linea = linea + elem[0] atributos =", "\" for elem in self.lista: linea = linea + elem[0] atributos = elem[1].items()", "and posi[0:7] == \"http://\": atrib_Nuevo = posi.split('/')[-1] urlretrieve(posi, atrib_Nuevo) print(\"Descargando %s ...\" %", "on Mon Oct 8 20:54:26 2018 @author: andrea \"\"\" import sys import json", "-*- \"\"\" Created on Mon Oct 8 20:54:26 2018 @author: andrea \"\"\" import", "self.lista = cHandler.get_tags() def __str__(self): \"\"\" Método para crear la lista de etiquetas", "#!/usr/bin/python3 # -*- coding: utf-8 -*- \"\"\" Created on Mon Oct 8 20:54:26", "None: fich_json = fich.split('.')[0] + '.json' json.dump(self.lista, open(fich_json, 'w')) def do_local(self): # Recorre", "SmallSMILHandler() # Creo manejador parser.setContentHandler(cHandler) # Le paso el parser al manejador parser.parse(open(fichero))", "self.lista: linea = linea + elem[0] atributos = elem[1].items() for nombre, valor in", "for 
# -*- coding: utf-8 -*-
"""
Karaoke SMIL helper: parse a SMIL file, print its tags, export the tag
list to JSON, and download any remote (http://) resources locally.

Created on Mon Oct 8 20:54:26 2018

@author: andrea
"""
import sys
import json
from xml.sax import make_parser
from urllib.request import urlretrieve
from smallsmilhandler import SmallSMILHandler


class KaraokeLocal(SmallSMILHandler):
    """Holds the tag list of one SMIL file, parsed via SmallSMILHandler."""

    def __init__(self, fichero):
        # Build the tag list by SAX-parsing the SMIL file.
        parser = make_parser()
        c_handler = SmallSMILHandler()
        parser.setContentHandler(c_handler)
        # Use a context manager so the file handle is closed (the
        # original leaked the handle from parser.parse(open(fichero))).
        with open(fichero) as smil_file:
            parser.parse(smil_file)
        # List of (tag_name, {attr: value}) pairs — presumably what
        # SmallSMILHandler.get_tags() returns; verify against that module.
        self.lista = c_handler.get_tags()

    def __str__(self):
        """Print one line listing every tag and its non-empty attributes.

        NOTE(review): this prints and returns None (original behavior kept
        — callers invoke it for the print side effect, not for a string).
        """
        linea = " "
        for elem in self.lista:
            linea = linea + elem[0]
            for nombre, valor in elem[1].items():
                if valor != '':
                    linea = linea + '\t' + nombre + '=' + '"' + valor + '"'
        print(linea)

    def to_json(self, fich, fich_json=None):
        """Write the tag list as JSON to *fich_json*.

        If *fich_json* is None, the name is derived from *fich* by
        replacing its extension with ``.json``.

        Bug fix: the original overwrote *fich_json* with
        ``json.dumps(self.lista)`` before the None check, so the explicit
        file name passed by callers was ignored and the output file was
        named after the serialized JSON text itself.
        """
        if fich_json is None:
            fich_json = fich.split('.')[0] + '.json'
        # Context manager closes the output file (original leaked it).
        with open(fich_json, 'w') as salida:
            json.dump(self.lista, salida)

    def do_local(self):
        # Walk the tag list and download every remote "src" resource,
        # saving it under its basename in the current directory.
        for diccs in self.lista:
            for atributo, valor in diccs[1].items():
                # startswith() replaces the original valor[0:7] slice check.
                if atributo == "src" and valor.startswith("http://"):
                    nombre_local = valor.split('/')[-1]
                    urlretrieve(valor, nombre_local)
                    print("Descargando %s ..." % valor)


if __name__ == "__main__":
    if len(sys.argv) != 2:
        sys.exit("Usage: python3 karaoke.py file.smil")
    try:
        # Fail early with the usage message if the file cannot be opened;
        # the handle is closed immediately (the original leaked it).
        with open(sys.argv[1]):
            pass
    except (ValueError, IndexError, FileNotFoundError):
        sys.exit("Usage: python3 karaoke.py file.smil")
    fichero = sys.argv[1]
    fich_json = sys.argv[1].replace(".smil", ".json")
    # Construct once — the original called obj.__init__(fichero) again
    # right after the constructor, re-parsing the file for no reason.
    obj = KaraokeLocal(fichero)
    obj.__str__()
    obj.to_json(fich_json)
    obj.do_local()
    obj.to_json(fich_json, 'local.json')
    obj.__str__()