body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
d31717748aae71f61a2ea6b77637b419bcfcfd0ba5564e2fdd78ad01957a833c
def __call__(self) -> pd.DataFrame: 'Loads the provided (path to a) dataframe.' if (self.df is not None): result = self.df else: result = pd.read_csv(self.csv_path, encoding='latin', low_memory=False) if self.remove_id_substring: result.columns = self._replace_id_in_col_name(result) return result
Loads the provided (path to a) dataframe.
foreshadow/smart/intent_resolving/core/data_set_parsers/lazy_dataframe_loader.py
__call__
adithyabsk/foreshadow
25
python
def __call__(self) -> pd.DataFrame: if (self.df is not None): result = self.df else: result = pd.read_csv(self.csv_path, encoding='latin', low_memory=False) if self.remove_id_substring: result.columns = self._replace_id_in_col_name(result) return result
def __call__(self) -> pd.DataFrame: if (self.df is not None): result = self.df else: result = pd.read_csv(self.csv_path, encoding='latin', low_memory=False) if self.remove_id_substring: result.columns = self._replace_id_in_col_name(result) return result<|docstring|>Loads the provided (path to a) dataframe.<|endoftext|>
9c7893bbe585e1244faa2317d9f70ffb5e3363564dd6edcccbb7f9dc2a40f265
def substitute(self, s: str) -> str: '\n Get a random alphabetical or numerical character.\n\n This is a pure function —\xa0results are deterministic,\n and uses the md5 hash function.\n\n Returns:\n str -- A character from self.chars\n ' hashed = int(md5(s.encode()).hexdigest(), 16) return self.chars[(hashed % len(self.chars))]
Get a random alphabetical or numerical character. This is a pure function — results are deterministic, and uses the md5 hash function. Returns: str -- A character from self.chars
foreshadow/smart/intent_resolving/core/data_set_parsers/lazy_dataframe_loader.py
substitute
adithyabsk/foreshadow
25
python
def substitute(self, s: str) -> str: '\n Get a random alphabetical or numerical character.\n\n This is a pure function —\xa0results are deterministic,\n and uses the md5 hash function.\n\n Returns:\n str -- A character from self.chars\n ' hashed = int(md5(s.encode()).hexdigest(), 16) return self.chars[(hashed % len(self.chars))]
def substitute(self, s: str) -> str: '\n Get a random alphabetical or numerical character.\n\n This is a pure function —\xa0results are deterministic,\n and uses the md5 hash function.\n\n Returns:\n str -- A character from self.chars\n ' hashed = int(md5(s.encode()).hexdigest(), 16) return self.chars[(hashed % len(self.chars))]<|docstring|>Get a random alphabetical or numerical character. This is a pure function — results are deterministic, and uses the md5 hash function. Returns: str -- A character from self.chars<|endoftext|>
0cc886c628e401709e722674f7c6225e456568320a130857776fb86b29a903a8
def _CreatePatchedSetUp(func): 'Create a setUp function that includes the SOAP implementation.\n\n Args:\n func: A reference to the original setUp function.\n\n Returns:\n The result of func called with soap_impl.\n ' @functools.wraps(func) def SetUp(self): soap_impl = ('suds' if ('_suds' in str(self)) else 'zeep') return func(self, soap_impl) return SetUp
Create a setUp function that includes the SOAP implementation. Args: func: A reference to the original setUp function. Returns: The result of func called with soap_impl.
tests/testing.py
_CreatePatchedSetUp
khanhnhk/googleads-python-lib
2
python
def _CreatePatchedSetUp(func): 'Create a setUp function that includes the SOAP implementation.\n\n Args:\n func: A reference to the original setUp function.\n\n Returns:\n The result of func called with soap_impl.\n ' @functools.wraps(func) def SetUp(self): soap_impl = ('suds' if ('_suds' in str(self)) else 'zeep') return func(self, soap_impl) return SetUp
def _CreatePatchedSetUp(func): 'Create a setUp function that includes the SOAP implementation.\n\n Args:\n func: A reference to the original setUp function.\n\n Returns:\n The result of func called with soap_impl.\n ' @functools.wraps(func) def SetUp(self): soap_impl = ('suds' if ('_suds' in str(self)) else 'zeep') return func(self, soap_impl) return SetUp<|docstring|>Create a setUp function that includes the SOAP implementation. Args: func: A reference to the original setUp function. Returns: The result of func called with soap_impl.<|endoftext|>
e7bf385f25b1f38ac0987ba0b0a77bc21fefca6497e7a8ea4070ab57571bc5af
def MultiBackendTest(cls): 'A decorator that patches a test suite to test with zeep and suds.\n\n Args:\n cls: The test suite to patch\n\n Returns:\n The patched suite.\n ' for (name, func) in list(cls.__dict__.items()): if name.startswith('test'): for backend in _SOAP_BACKENDS: setattr(cls, ('%s_%s' % (name, backend)), func) delattr(cls, name) cls.setUp = _CreatePatchedSetUp(cls.setUp) return cls
A decorator that patches a test suite to test with zeep and suds. Args: cls: The test suite to patch Returns: The patched suite.
tests/testing.py
MultiBackendTest
khanhnhk/googleads-python-lib
2
python
def MultiBackendTest(cls): 'A decorator that patches a test suite to test with zeep and suds.\n\n Args:\n cls: The test suite to patch\n\n Returns:\n The patched suite.\n ' for (name, func) in list(cls.__dict__.items()): if name.startswith('test'): for backend in _SOAP_BACKENDS: setattr(cls, ('%s_%s' % (name, backend)), func) delattr(cls, name) cls.setUp = _CreatePatchedSetUp(cls.setUp) return cls
def MultiBackendTest(cls): 'A decorator that patches a test suite to test with zeep and suds.\n\n Args:\n cls: The test suite to patch\n\n Returns:\n The patched suite.\n ' for (name, func) in list(cls.__dict__.items()): if name.startswith('test'): for backend in _SOAP_BACKENDS: setattr(cls, ('%s_%s' % (name, backend)), func) delattr(cls, name) cls.setUp = _CreatePatchedSetUp(cls.setUp) return cls<|docstring|>A decorator that patches a test suite to test with zeep and suds. Args: cls: The test suite to patch Returns: The patched suite.<|endoftext|>
966663c01583210a0b54a748bd0f11e73f528d4f6365313fb6dbfcddebe94552
def tearDown(self): 'Ensures that the UtilityRegistry is cleared between tests.' googleads.common._utility_registry.Clear()
Ensures that the UtilityRegistry is cleared between tests.
tests/testing.py
tearDown
khanhnhk/googleads-python-lib
2
python
def tearDown(self): googleads.common._utility_registry.Clear()
def tearDown(self): googleads.common._utility_registry.Clear()<|docstring|>Ensures that the UtilityRegistry is cleared between tests.<|endoftext|>
ba0cb7d10bd1ac2f26e67248747cb452fcb99de7656e8e9697b9a00387bba729
def _add_reference(obj, loader: SafeLineLoader, node: yaml.nodes.Node): 'Add file reference information to an object.' if isinstance(obj, list): obj = NodeListClass(obj) if isinstance(obj, str): obj = NodeStrClass(obj) setattr(obj, '__config_file__', loader.name) setattr(obj, '__line__', node.start_mark.line) return obj
Add file reference information to an object.
xknx/config/yaml_loader.py
_add_reference
onkelbeh/xknx
1
python
def _add_reference(obj, loader: SafeLineLoader, node: yaml.nodes.Node): if isinstance(obj, list): obj = NodeListClass(obj) if isinstance(obj, str): obj = NodeStrClass(obj) setattr(obj, '__config_file__', loader.name) setattr(obj, '__line__', node.start_mark.line) return obj
def _add_reference(obj, loader: SafeLineLoader, node: yaml.nodes.Node): if isinstance(obj, list): obj = NodeListClass(obj) if isinstance(obj, str): obj = NodeStrClass(obj) setattr(obj, '__config_file__', loader.name) setattr(obj, '__line__', node.start_mark.line) return obj<|docstring|>Add file reference information to an object.<|endoftext|>
0ad6a98b1560d469b3977556bc0335d03528f628987e9d6de1f61581a37cf355
def load_yaml(fname: str) -> JSON_TYPE: 'Load a YAML file.' try: with open(fname, encoding='utf-8') as conf_file: return (yaml.load(conf_file, Loader=SafeLineLoader) or OrderedDict()) except yaml.YAMLError as exc: logger.error(str(exc)) raise XKNXException(exc) from exc except UnicodeDecodeError as exc: logger.error('Unable to read file %s: %s', fname, exc) raise XKNXException(exc) from exc
Load a YAML file.
xknx/config/yaml_loader.py
load_yaml
onkelbeh/xknx
1
python
def load_yaml(fname: str) -> JSON_TYPE: try: with open(fname, encoding='utf-8') as conf_file: return (yaml.load(conf_file, Loader=SafeLineLoader) or OrderedDict()) except yaml.YAMLError as exc: logger.error(str(exc)) raise XKNXException(exc) from exc except UnicodeDecodeError as exc: logger.error('Unable to read file %s: %s', fname, exc) raise XKNXException(exc) from exc
def load_yaml(fname: str) -> JSON_TYPE: try: with open(fname, encoding='utf-8') as conf_file: return (yaml.load(conf_file, Loader=SafeLineLoader) or OrderedDict()) except yaml.YAMLError as exc: logger.error(str(exc)) raise XKNXException(exc) from exc except UnicodeDecodeError as exc: logger.error('Unable to read file %s: %s', fname, exc) raise XKNXException(exc) from exc<|docstring|>Load a YAML file.<|endoftext|>
ccc81816ca539e044b8575e89dbfec05cef2727afcc21a4c0478a1b25d25dfb5
def _include_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE: 'Load another YAML file and embeds it using the !include tag.\n\n Example:\n device_tracker: !include device_tracker.yaml\n\n ' fname = os.path.join(os.path.dirname(loader.name), node.value) try: return _add_reference(load_yaml(fname), loader, node) except FileNotFoundError as exc: raise XKNXException(f'{node.start_mark}: Unable to read file {fname}.') from exc
Load another YAML file and embeds it using the !include tag. Example: device_tracker: !include device_tracker.yaml
xknx/config/yaml_loader.py
_include_yaml
onkelbeh/xknx
1
python
def _include_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE: 'Load another YAML file and embeds it using the !include tag.\n\n Example:\n device_tracker: !include device_tracker.yaml\n\n ' fname = os.path.join(os.path.dirname(loader.name), node.value) try: return _add_reference(load_yaml(fname), loader, node) except FileNotFoundError as exc: raise XKNXException(f'{node.start_mark}: Unable to read file {fname}.') from exc
def _include_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE: 'Load another YAML file and embeds it using the !include tag.\n\n Example:\n device_tracker: !include device_tracker.yaml\n\n ' fname = os.path.join(os.path.dirname(loader.name), node.value) try: return _add_reference(load_yaml(fname), loader, node) except FileNotFoundError as exc: raise XKNXException(f'{node.start_mark}: Unable to read file {fname}.') from exc<|docstring|>Load another YAML file and embeds it using the !include tag. Example: device_tracker: !include device_tracker.yaml<|endoftext|>
9b07fd0c5612de8eb75da1068a2d93e86ba25cc6788028112a733d218724f019
def _is_file_valid(name: str) -> bool: 'Decide if a file is valid.' return (not name.startswith('.'))
Decide if a file is valid.
xknx/config/yaml_loader.py
_is_file_valid
onkelbeh/xknx
1
python
def _is_file_valid(name: str) -> bool: return (not name.startswith('.'))
def _is_file_valid(name: str) -> bool: return (not name.startswith('.'))<|docstring|>Decide if a file is valid.<|endoftext|>
833d526eb808883ca089a6eb846b7d29c6dc244c12ca1a3858264b1cdf454e93
def _find_files(directory: str, pattern: str) -> Iterator[str]: 'Recursively load files in a directory.' for (root, dirs, files) in os.walk(directory, topdown=True): dirs[:] = [d for d in dirs if _is_file_valid(d)] for basename in sorted(files): if (_is_file_valid(basename) and fnmatch.fnmatch(basename, pattern)): filename = os.path.join(root, basename) (yield filename)
Recursively load files in a directory.
xknx/config/yaml_loader.py
_find_files
onkelbeh/xknx
1
python
def _find_files(directory: str, pattern: str) -> Iterator[str]: for (root, dirs, files) in os.walk(directory, topdown=True): dirs[:] = [d for d in dirs if _is_file_valid(d)] for basename in sorted(files): if (_is_file_valid(basename) and fnmatch.fnmatch(basename, pattern)): filename = os.path.join(root, basename) (yield filename)
def _find_files(directory: str, pattern: str) -> Iterator[str]: for (root, dirs, files) in os.walk(directory, topdown=True): dirs[:] = [d for d in dirs if _is_file_valid(d)] for basename in sorted(files): if (_is_file_valid(basename) and fnmatch.fnmatch(basename, pattern)): filename = os.path.join(root, basename) (yield filename)<|docstring|>Recursively load files in a directory.<|endoftext|>
759ff5261e419c03a619b69236ae0ac5d0b8b41ea362a9543ed913f03424c628
def _construct_seq(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE: 'Add line number and file name to Load YAML sequence.' (obj,) = loader.construct_yaml_seq(node) return _add_reference(obj, loader, node)
Add line number and file name to Load YAML sequence.
xknx/config/yaml_loader.py
_construct_seq
onkelbeh/xknx
1
python
def _construct_seq(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE: (obj,) = loader.construct_yaml_seq(node) return _add_reference(obj, loader, node)
def _construct_seq(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE: (obj,) = loader.construct_yaml_seq(node) return _add_reference(obj, loader, node)<|docstring|>Add line number and file name to Load YAML sequence.<|endoftext|>
5f2102da9021293bf98b37a305b607e0b7c6e4798f7078485349fa8e9f753191
def _include_dir_list_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> List[JSON_TYPE]: 'Load multiple files from directory as a list.' loc = os.path.join(os.path.dirname(loader.name), node.value) return [load_yaml(f) for f in _find_files(loc, '*.yaml')]
Load multiple files from directory as a list.
xknx/config/yaml_loader.py
_include_dir_list_yaml
onkelbeh/xknx
1
python
def _include_dir_list_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> List[JSON_TYPE]: loc = os.path.join(os.path.dirname(loader.name), node.value) return [load_yaml(f) for f in _find_files(loc, '*.yaml')]
def _include_dir_list_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> List[JSON_TYPE]: loc = os.path.join(os.path.dirname(loader.name), node.value) return [load_yaml(f) for f in _find_files(loc, '*.yaml')]<|docstring|>Load multiple files from directory as a list.<|endoftext|>
61c3cd3934210dbd766674de2ae4817f9d2c1deb9d6cb051f4a9f6ca5ed8689d
def _env_var_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> str: 'Load environment variables and embed it into the configuration YAML.' args = node.value.split() if (len(args) > 1): return os.getenv(args[0], ' '.join(args[1:])) if (args[0] in os.environ): return os.environ[args[0]] logger.error('Environment variable %s not defined', node.value) raise XKNXException(node.value)
Load environment variables and embed it into the configuration YAML.
xknx/config/yaml_loader.py
_env_var_yaml
onkelbeh/xknx
1
python
def _env_var_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> str: args = node.value.split() if (len(args) > 1): return os.getenv(args[0], ' '.join(args[1:])) if (args[0] in os.environ): return os.environ[args[0]] logger.error('Environment variable %s not defined', node.value) raise XKNXException(node.value)
def _env_var_yaml(loader: SafeLineLoader, node: yaml.nodes.Node) -> str: args = node.value.split() if (len(args) > 1): return os.getenv(args[0], ' '.join(args[1:])) if (args[0] in os.environ): return os.environ[args[0]] logger.error('Environment variable %s not defined', node.value) raise XKNXException(node.value)<|docstring|>Load environment variables and embed it into the configuration YAML.<|endoftext|>
cacf579e982b986d216adbd2fb7544720026fbebae7e53dc36829c57aced99cf
def compose_node(self, parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node: 'Annotate a node with the first line it was seen.' last_line: int = self.line node: yaml.nodes.Node = super().compose_node(parent, index) node.__line__ = (last_line + 1) return node
Annotate a node with the first line it was seen.
xknx/config/yaml_loader.py
compose_node
onkelbeh/xknx
1
python
def compose_node(self, parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node: last_line: int = self.line node: yaml.nodes.Node = super().compose_node(parent, index) node.__line__ = (last_line + 1) return node
def compose_node(self, parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node: last_line: int = self.line node: yaml.nodes.Node = super().compose_node(parent, index) node.__line__ = (last_line + 1) return node<|docstring|>Annotate a node with the first line it was seen.<|endoftext|>
836c130961dc516c2388edd237027135a4070785785f302ea99db30fe9b07cae
def main(): 'Transliterate string from command line' if (len(sys.argv) < 3): print('usage: iuliia SCHEMA SOURCE') print('Supported schemas:') print('\n'.join(iuliia.Schemas.names())) sys.exit(1) schema_name = sys.argv[1] schema = iuliia.Schemas.get(schema_name) if (schema is None): print("Schema '{schema_name}' does not exist. Supported schemas:".format(**locals())) print('\n'.join(iuliia.Schemas.names())) sys.exit(1) source = sys.argv[2] result = iuliia.translate(source, schema) print(result)
Transliterate string from command line
iuliia/__main__.py
main
kamiram/iuliia-py27
0
python
def main(): if (len(sys.argv) < 3): print('usage: iuliia SCHEMA SOURCE') print('Supported schemas:') print('\n'.join(iuliia.Schemas.names())) sys.exit(1) schema_name = sys.argv[1] schema = iuliia.Schemas.get(schema_name) if (schema is None): print("Schema '{schema_name}' does not exist. Supported schemas:".format(**locals())) print('\n'.join(iuliia.Schemas.names())) sys.exit(1) source = sys.argv[2] result = iuliia.translate(source, schema) print(result)
def main(): if (len(sys.argv) < 3): print('usage: iuliia SCHEMA SOURCE') print('Supported schemas:') print('\n'.join(iuliia.Schemas.names())) sys.exit(1) schema_name = sys.argv[1] schema = iuliia.Schemas.get(schema_name) if (schema is None): print("Schema '{schema_name}' does not exist. Supported schemas:".format(**locals())) print('\n'.join(iuliia.Schemas.names())) sys.exit(1) source = sys.argv[2] result = iuliia.translate(source, schema) print(result)<|docstring|>Transliterate string from command line<|endoftext|>
81986d96802ecedabfa95f0f2d60a999ff6f9db73a65fd99656d0e810a30a3cd
def get_shape_obs_logits(self, shape_id, shape_pose, num_rows=None, num_cols=None): 'p_S(obs | shape_pose)\n\n Args\n shape_id (int)\n shape_pose\n raw_position [*shape, 2]\n raw_scale [*shape]\n num_rows (int)\n num_cols (int)\n\n Returns: [*shape, num_rows, num_cols]\n ' if (num_rows is None): num_rows = self.im_size if (num_cols is None): num_cols = self.im_size (raw_position, raw_scale) = shape_pose position = (raw_position.sigmoid() - 0.5) scale = ((raw_scale.sigmoid() * 0.8) + 0.1) shape = scale.shape (position_x, position_y) = (position[(..., 0)], position[(..., 1)]) (canvas_x, canvas_y) = render.get_canvas_xy(num_rows, num_cols, self.device) canvas_x = ((canvas_x[None] - position_x.view((- 1), 1, 1)) / scale.view((- 1), 1, 1)) canvas_y = ((canvas_y[None] - position_y.view((- 1), 1, 1)) / scale.view((- 1), 1, 1)) mlp_input = torch.atan2(canvas_y, canvas_x).view((- 1), 1) logits = (self.logit_multipliers_raw[shape_id].exp() * (self.mlps[shape_id](mlp_input).view(*[*shape, num_rows, num_cols]).exp() - torch.sqrt(((canvas_x ** 2) + (canvas_y ** 2))).view(*[*shape, num_rows, num_cols]))) if torch.isnan(logits).any(): raise RuntimeError('nan') return logits
p_S(obs | shape_pose) Args shape_id (int) shape_pose raw_position [*shape, 2] raw_scale [*shape] num_rows (int) num_cols (int) Returns: [*shape, num_rows, num_cols]
cmws/examples/csg/models/universal.py
get_shape_obs_logits
tuananhle7/hmws
0
python
def get_shape_obs_logits(self, shape_id, shape_pose, num_rows=None, num_cols=None): 'p_S(obs | shape_pose)\n\n Args\n shape_id (int)\n shape_pose\n raw_position [*shape, 2]\n raw_scale [*shape]\n num_rows (int)\n num_cols (int)\n\n Returns: [*shape, num_rows, num_cols]\n ' if (num_rows is None): num_rows = self.im_size if (num_cols is None): num_cols = self.im_size (raw_position, raw_scale) = shape_pose position = (raw_position.sigmoid() - 0.5) scale = ((raw_scale.sigmoid() * 0.8) + 0.1) shape = scale.shape (position_x, position_y) = (position[(..., 0)], position[(..., 1)]) (canvas_x, canvas_y) = render.get_canvas_xy(num_rows, num_cols, self.device) canvas_x = ((canvas_x[None] - position_x.view((- 1), 1, 1)) / scale.view((- 1), 1, 1)) canvas_y = ((canvas_y[None] - position_y.view((- 1), 1, 1)) / scale.view((- 1), 1, 1)) mlp_input = torch.atan2(canvas_y, canvas_x).view((- 1), 1) logits = (self.logit_multipliers_raw[shape_id].exp() * (self.mlps[shape_id](mlp_input).view(*[*shape, num_rows, num_cols]).exp() - torch.sqrt(((canvas_x ** 2) + (canvas_y ** 2))).view(*[*shape, num_rows, num_cols]))) if torch.isnan(logits).any(): raise RuntimeError('nan') return logits
def get_shape_obs_logits(self, shape_id, shape_pose, num_rows=None, num_cols=None): 'p_S(obs | shape_pose)\n\n Args\n shape_id (int)\n shape_pose\n raw_position [*shape, 2]\n raw_scale [*shape]\n num_rows (int)\n num_cols (int)\n\n Returns: [*shape, num_rows, num_cols]\n ' if (num_rows is None): num_rows = self.im_size if (num_cols is None): num_cols = self.im_size (raw_position, raw_scale) = shape_pose position = (raw_position.sigmoid() - 0.5) scale = ((raw_scale.sigmoid() * 0.8) + 0.1) shape = scale.shape (position_x, position_y) = (position[(..., 0)], position[(..., 1)]) (canvas_x, canvas_y) = render.get_canvas_xy(num_rows, num_cols, self.device) canvas_x = ((canvas_x[None] - position_x.view((- 1), 1, 1)) / scale.view((- 1), 1, 1)) canvas_y = ((canvas_y[None] - position_y.view((- 1), 1, 1)) / scale.view((- 1), 1, 1)) mlp_input = torch.atan2(canvas_y, canvas_x).view((- 1), 1) logits = (self.logit_multipliers_raw[shape_id].exp() * (self.mlps[shape_id](mlp_input).view(*[*shape, num_rows, num_cols]).exp() - torch.sqrt(((canvas_x ** 2) + (canvas_y ** 2))).view(*[*shape, num_rows, num_cols]))) if torch.isnan(logits).any(): raise RuntimeError('nan') return logits<|docstring|>p_S(obs | shape_pose) Args shape_id (int) shape_pose raw_position [*shape, 2] raw_scale [*shape] num_rows (int) num_cols (int) Returns: [*shape, num_rows, num_cols]<|endoftext|>
a50482b1988543f1ab7fd3e5bf1d693f6dbc301f3d82178a80cc26e55b7e005d
def forward(self, obs): '\n Args:\n obs [batch_size, num_rows, num_cols]\n ' pyro.module('generative_model', self) (batch_size, num_rows, num_cols) = obs.shape for batch_id in pyro.plate('batch', batch_size): shape_id = pyro.sample(f'shape_id_{batch_id}', pyro.distributions.Categorical(logits=torch.ones(self.num_primitives, device=self.device))).long() tag = f'shape_pose_{batch_id}' raw_position = pyro.sample(f'{tag}_raw_position', pyro.distributions.Independent(pyro.distributions.Normal(torch.zeros((2,), device=self.device), torch.ones((2,), device=self.device)), reinterpreted_batch_ndims=1)) if self.has_shape_scale: raw_scale = pyro.sample(f'{tag}_raw_scale', pyro.distributions.Normal(torch.tensor(0.0, device=self.device), torch.tensor(1.0, device=self.device))) else: raw_scale = torch.tensor(0.0, device=self.device) pyro.sample(f'obs_{batch_id}', pyro.distributions.Independent(pyro.distributions.Bernoulli(logits=self.get_shape_obs_logits(shape_id, (raw_position, raw_scale), num_rows, num_cols)), reinterpreted_batch_ndims=2), obs=obs[batch_id])
Args: obs [batch_size, num_rows, num_cols]
cmws/examples/csg/models/universal.py
forward
tuananhle7/hmws
0
python
def forward(self, obs): '\n Args:\n obs [batch_size, num_rows, num_cols]\n ' pyro.module('generative_model', self) (batch_size, num_rows, num_cols) = obs.shape for batch_id in pyro.plate('batch', batch_size): shape_id = pyro.sample(f'shape_id_{batch_id}', pyro.distributions.Categorical(logits=torch.ones(self.num_primitives, device=self.device))).long() tag = f'shape_pose_{batch_id}' raw_position = pyro.sample(f'{tag}_raw_position', pyro.distributions.Independent(pyro.distributions.Normal(torch.zeros((2,), device=self.device), torch.ones((2,), device=self.device)), reinterpreted_batch_ndims=1)) if self.has_shape_scale: raw_scale = pyro.sample(f'{tag}_raw_scale', pyro.distributions.Normal(torch.tensor(0.0, device=self.device), torch.tensor(1.0, device=self.device))) else: raw_scale = torch.tensor(0.0, device=self.device) pyro.sample(f'obs_{batch_id}', pyro.distributions.Independent(pyro.distributions.Bernoulli(logits=self.get_shape_obs_logits(shape_id, (raw_position, raw_scale), num_rows, num_cols)), reinterpreted_batch_ndims=2), obs=obs[batch_id])
def forward(self, obs): '\n Args:\n obs [batch_size, num_rows, num_cols]\n ' pyro.module('generative_model', self) (batch_size, num_rows, num_cols) = obs.shape for batch_id in pyro.plate('batch', batch_size): shape_id = pyro.sample(f'shape_id_{batch_id}', pyro.distributions.Categorical(logits=torch.ones(self.num_primitives, device=self.device))).long() tag = f'shape_pose_{batch_id}' raw_position = pyro.sample(f'{tag}_raw_position', pyro.distributions.Independent(pyro.distributions.Normal(torch.zeros((2,), device=self.device), torch.ones((2,), device=self.device)), reinterpreted_batch_ndims=1)) if self.has_shape_scale: raw_scale = pyro.sample(f'{tag}_raw_scale', pyro.distributions.Normal(torch.tensor(0.0, device=self.device), torch.tensor(1.0, device=self.device))) else: raw_scale = torch.tensor(0.0, device=self.device) pyro.sample(f'obs_{batch_id}', pyro.distributions.Independent(pyro.distributions.Bernoulli(logits=self.get_shape_obs_logits(shape_id, (raw_position, raw_scale), num_rows, num_cols)), reinterpreted_batch_ndims=2), obs=obs[batch_id])<|docstring|>Args: obs [batch_size, num_rows, num_cols]<|endoftext|>
854df5497fc76e2af435112304849547e258718e6c2b5612e4c25d10f6a7b2b6
def get_cnn_features(self, obs, pose_or_shape_id): '\n Args:\n obs: [batch_size, im_size, im_size]\n pose_or_shape_id: str\n\n Returns: [batch_size, cnn_features_dim]\n ' batch_size = obs.shape[0] if (pose_or_shape_id == 'pose'): cnn = self.pose_net elif (pose_or_shape_id == 'shape_id'): cnn = self.shape_id_net return cnn(obs[(:, None)]).view(batch_size, (- 1))
Args: obs: [batch_size, im_size, im_size] pose_or_shape_id: str Returns: [batch_size, cnn_features_dim]
cmws/examples/csg/models/universal.py
get_cnn_features
tuananhle7/hmws
0
python
def get_cnn_features(self, obs, pose_or_shape_id): '\n Args:\n obs: [batch_size, im_size, im_size]\n pose_or_shape_id: str\n\n Returns: [batch_size, cnn_features_dim]\n ' batch_size = obs.shape[0] if (pose_or_shape_id == 'pose'): cnn = self.pose_net elif (pose_or_shape_id == 'shape_id'): cnn = self.shape_id_net return cnn(obs[(:, None)]).view(batch_size, (- 1))
def get_cnn_features(self, obs, pose_or_shape_id): '\n Args:\n obs: [batch_size, im_size, im_size]\n pose_or_shape_id: str\n\n Returns: [batch_size, cnn_features_dim]\n ' batch_size = obs.shape[0] if (pose_or_shape_id == 'pose'): cnn = self.pose_net elif (pose_or_shape_id == 'shape_id'): cnn = self.shape_id_net return cnn(obs[(:, None)]).view(batch_size, (- 1))<|docstring|>Args: obs: [batch_size, im_size, im_size] pose_or_shape_id: str Returns: [batch_size, cnn_features_dim]<|endoftext|>
6de0b70cd42536e91a1388f81be3bd9e503b840454caf1a609bd3fd9b8951603
def forward(self, obs): '\n Args:\n obs [batch_size, num_rows, num_cols]\n\n Returns:\n shape_id [batch_size]\n raw_position [batch_size, 2]\n raw_scale [batch_size]\n ' pyro.module('guide', self) (batch_size, num_rows, num_cols) = obs.shape pose_cnn_features = self.get_cnn_features(obs, 'pose') logits = self.shape_id_mlp(cnn_features) (shape_id, raw_position, raw_scale) = ([], [], []) for batch_id in pyro.plate('batch', batch_size): shape_id.append(pyro.sample(f'shape_id_{batch_id}', pyro.distributions.Categorical(logits=logits[batch_id])).long()) (position_raw_loc, position_raw_scale) = self.raw_position_mlps[shape_id[(- 1)]](cnn_features[batch_id][None]).chunk(2, dim=(- 1)) (position_loc, position_scale) = (position_raw_loc.view((- 1)), position_raw_scale.exp().view((- 1))) raw_position.append(pyro.sample(f'shape_pose_{batch_id}_raw_position', pyro.distributions.Independent(pyro.distributions.Normal(position_loc, position_scale), reinterpreted_batch_ndims=1))) if self.has_shape_scale: (scale_raw_loc, scale_raw_scale) = self.raw_scale_mlps[shape_id[(- 1)]](cnn_features[batch_id][None]).chunk(2, dim=(- 1)) (scale_loc, scale_scale) = (scale_raw_loc[(0, 0)], scale_raw_scale.exp()[(0, 0)]) raw_scale.append(pyro.sample(f'shape_pose_{batch_id}_raw_scale', pyro.distributions.Normal(scale_loc, scale_scale))) else: raw_scale.append(torch.tensor(0.0, device=self.device)) return (torch.stack(shape_id), (torch.stack(raw_position), torch.stack(raw_scale)))
Args: obs [batch_size, num_rows, num_cols] Returns: shape_id [batch_size] raw_position [batch_size, 2] raw_scale [batch_size]
cmws/examples/csg/models/universal.py
forward
tuananhle7/hmws
0
python
def forward(self, obs): '\n Args:\n obs [batch_size, num_rows, num_cols]\n\n Returns:\n shape_id [batch_size]\n raw_position [batch_size, 2]\n raw_scale [batch_size]\n ' pyro.module('guide', self) (batch_size, num_rows, num_cols) = obs.shape pose_cnn_features = self.get_cnn_features(obs, 'pose') logits = self.shape_id_mlp(cnn_features) (shape_id, raw_position, raw_scale) = ([], [], []) for batch_id in pyro.plate('batch', batch_size): shape_id.append(pyro.sample(f'shape_id_{batch_id}', pyro.distributions.Categorical(logits=logits[batch_id])).long()) (position_raw_loc, position_raw_scale) = self.raw_position_mlps[shape_id[(- 1)]](cnn_features[batch_id][None]).chunk(2, dim=(- 1)) (position_loc, position_scale) = (position_raw_loc.view((- 1)), position_raw_scale.exp().view((- 1))) raw_position.append(pyro.sample(f'shape_pose_{batch_id}_raw_position', pyro.distributions.Independent(pyro.distributions.Normal(position_loc, position_scale), reinterpreted_batch_ndims=1))) if self.has_shape_scale: (scale_raw_loc, scale_raw_scale) = self.raw_scale_mlps[shape_id[(- 1)]](cnn_features[batch_id][None]).chunk(2, dim=(- 1)) (scale_loc, scale_scale) = (scale_raw_loc[(0, 0)], scale_raw_scale.exp()[(0, 0)]) raw_scale.append(pyro.sample(f'shape_pose_{batch_id}_raw_scale', pyro.distributions.Normal(scale_loc, scale_scale))) else: raw_scale.append(torch.tensor(0.0, device=self.device)) return (torch.stack(shape_id), (torch.stack(raw_position), torch.stack(raw_scale)))
def forward(self, obs): '\n Args:\n obs [batch_size, num_rows, num_cols]\n\n Returns:\n shape_id [batch_size]\n raw_position [batch_size, 2]\n raw_scale [batch_size]\n ' pyro.module('guide', self) (batch_size, num_rows, num_cols) = obs.shape pose_cnn_features = self.get_cnn_features(obs, 'pose') logits = self.shape_id_mlp(cnn_features) (shape_id, raw_position, raw_scale) = ([], [], []) for batch_id in pyro.plate('batch', batch_size): shape_id.append(pyro.sample(f'shape_id_{batch_id}', pyro.distributions.Categorical(logits=logits[batch_id])).long()) (position_raw_loc, position_raw_scale) = self.raw_position_mlps[shape_id[(- 1)]](cnn_features[batch_id][None]).chunk(2, dim=(- 1)) (position_loc, position_scale) = (position_raw_loc.view((- 1)), position_raw_scale.exp().view((- 1))) raw_position.append(pyro.sample(f'shape_pose_{batch_id}_raw_position', pyro.distributions.Independent(pyro.distributions.Normal(position_loc, position_scale), reinterpreted_batch_ndims=1))) if self.has_shape_scale: (scale_raw_loc, scale_raw_scale) = self.raw_scale_mlps[shape_id[(- 1)]](cnn_features[batch_id][None]).chunk(2, dim=(- 1)) (scale_loc, scale_scale) = (scale_raw_loc[(0, 0)], scale_raw_scale.exp()[(0, 0)]) raw_scale.append(pyro.sample(f'shape_pose_{batch_id}_raw_scale', pyro.distributions.Normal(scale_loc, scale_scale))) else: raw_scale.append(torch.tensor(0.0, device=self.device)) return (torch.stack(shape_id), (torch.stack(raw_position), torch.stack(raw_scale)))<|docstring|>Args: obs [batch_size, num_rows, num_cols] Returns: shape_id [batch_size] raw_position [batch_size, 2] raw_scale [batch_size]<|endoftext|>
a37620790c40cb5f3c7f71533d7d97809a4e48fc686137119acf7f5fe8442bf6
def start(update: Update, _: CallbackContext) -> None:
    """Greet the sender when the /start command arrives."""
    user = update.effective_user
    # Mention the user in MarkdownV2; ForceReply prompts them to answer.
    update.message.reply_markdown_v2(
        f'Hi {user.mention_markdown_v2()}\!',
        reply_markup=ForceReply(selective=True),
    )
Send a message when the command /start is issued.
echobot.py
start
wertherm/Auto-Trade-Crypto-Bot
0
python
def start(update: Update, _: CallbackContext) -> None: user = update.effective_user update.message.reply_markdown_v2(f'Hi {user.mention_markdown_v2()}\!', reply_markup=ForceReply(selective=True))
def start(update: Update, _: CallbackContext) -> None: user = update.effective_user update.message.reply_markdown_v2(f'Hi {user.mention_markdown_v2()}\!', reply_markup=ForceReply(selective=True))<|docstring|>Send a message when the command /start is issued.<|endoftext|>
0d625c0d8b4c10509757846fb1b3c4172418d1a2098b4bcc0b2901788646cba6
def help_command(update: Update, _: CallbackContext) -> None:
    """Reply with a fixed help text when /help is issued."""
    # Static response; the CommandHandler wires this to /help.
    update.message.reply_text('Help!')
Send a message when the command /help is issued.
echobot.py
help_command
wertherm/Auto-Trade-Crypto-Bot
0
python
def help_command(update: Update, _: CallbackContext) -> None: update.message.reply_text('Help!')
def help_command(update: Update, _: CallbackContext) -> None: update.message.reply_text('Help!')<|docstring|>Send a message when the command /help is issued.<|endoftext|>
49a07824090288fc39ad4de91f5caef910a8e00c3f2e3cb27ef6b851a8966b57
def btc_command(update: Update, _: CallbackContext) -> None:
    """Reply with the current BTC/USD quote when /btc is issued."""
    # Query the CoinGecko client for bitcoin's USD spot price.
    quote = cg.get_price(ids='bitcoin', vs_currencies='usd')
    update.message.reply_text(quote)
Send a message when the command /btc is issued.
echobot.py
btc_command
wertherm/Auto-Trade-Crypto-Bot
0
python
def btc_command(update: Update, _: CallbackContext) -> None: btc = cg.get_price(ids='bitcoin', vs_currencies='usd') update.message.reply_text(btc)
def btc_command(update: Update, _: CallbackContext) -> None: btc = cg.get_price(ids='bitcoin', vs_currencies='usd') update.message.reply_text(btc)<|docstring|>Send a message when the command /btc is issued.<|endoftext|>
3a6fc50855b27425c293692a6a0b86c6b1a70a02d7b7323f01339e0919f7d954
def oportunidades_command(update: Update, _: CallbackContext) -> None:
    """Reply with filtered market opportunities when /oportunidades is issued."""
    # Full market snapshot including 1h/24h/7d percentage changes.
    mercado = cg.get_coins_markets(vs_currency='usd', price_change_percentage='1h,24h,7d')
    # Keep coins up more than 20% over 7 days AND more than 10% over 24 hours.
    selecao = coin.filtrarMaiorQue(mercado, 'price_change_percentage_7d_in_currency', 20)
    selecao = coin.filtrarMaiorQue(selecao, 'price_change_percentage_24h_in_currency', 10)
    update.message.reply_html(coin.mostrar(selecao))
Send a message when the command /oportunidades is issued.
echobot.py
oportunidades_command
wertherm/Auto-Trade-Crypto-Bot
0
python
def oportunidades_command(update: Update, _: CallbackContext) -> None: listaCryptos = cg.get_coins_markets(vs_currency='usd', price_change_percentage='1h,24h,7d') lista = coin.filtrarMaiorQue(listaCryptos, 'price_change_percentage_7d_in_currency', 20) lista = coin.filtrarMaiorQue(lista, 'price_change_percentage_24h_in_currency', 10) retorno = coin.mostrar(lista) update.message.reply_html(retorno)
def oportunidades_command(update: Update, _: CallbackContext) -> None: listaCryptos = cg.get_coins_markets(vs_currency='usd', price_change_percentage='1h,24h,7d') lista = coin.filtrarMaiorQue(listaCryptos, 'price_change_percentage_7d_in_currency', 20) lista = coin.filtrarMaiorQue(lista, 'price_change_percentage_24h_in_currency', 10) retorno = coin.mostrar(lista) update.message.reply_html(retorno)<|docstring|>Send a message when the command /oportunidades is issued.<|endoftext|>
25f4ee25ebabe6ccce9619dc8e5f1450ee242433068736c79243678e7ce896d0
def echo(update: Update, _: CallbackContext) -> None:
    """Send the incoming text message straight back to the chat."""
    update.message.reply_text(update.message.text)
Echo the user message.
echobot.py
echo
wertherm/Auto-Trade-Crypto-Bot
0
python
def echo(update: Update, _: CallbackContext) -> None: update.message.reply_text(update.message.text)
def echo(update: Update, _: CallbackContext) -> None: update.message.reply_text(update.message.text)<|docstring|>Echo the user message.<|endoftext|>
88f5660e52d42360bd8594613e8d72e97893656a8786a2ad93c818898bda625f
def main() -> None:
    """Start the bot: register handlers and poll until interrupted."""
    # SECURITY: the bot token was committed to source. Prefer the
    # TELEGRAM_BOT_TOKEN environment variable; fall back to the original
    # literal so existing deployments keep working. The leaked token should
    # be rotated via @BotFather.
    import os
    token = os.environ.get('TELEGRAM_BOT_TOKEN',
                           '1740492837:AAHU2KPSlgPtaQsPCgABZAWh4sFtE78B-MA')
    updater = Updater(token)
    dispatcher = updater.dispatcher
    # Command handlers.
    dispatcher.add_handler(CommandHandler('start', start))
    dispatcher.add_handler(CommandHandler('help', help_command))
    dispatcher.add_handler(CommandHandler('btc', btc_command))
    dispatcher.add_handler(CommandHandler('oportunidades', oportunidades_command))
    # Any non-command text message is echoed back.
    dispatcher.add_handler(MessageHandler((Filters.text & (~ Filters.command)), echo))
    updater.start_polling()
    # Block until Ctrl-C / SIGTERM so polling keeps running.
    updater.idle()
Start the bot.
echobot.py
main
wertherm/Auto-Trade-Crypto-Bot
0
python
def main() -> None: updater = Updater('1740492837:AAHU2KPSlgPtaQsPCgABZAWh4sFtE78B-MA') dispatcher = updater.dispatcher dispatcher.add_handler(CommandHandler('start', start)) dispatcher.add_handler(CommandHandler('help', help_command)) dispatcher.add_handler(CommandHandler('btc', btc_command)) dispatcher.add_handler(CommandHandler('oportunidades', oportunidades_command)) dispatcher.add_handler(MessageHandler((Filters.text & (~ Filters.command)), echo)) updater.start_polling() updater.idle()
def main() -> None: updater = Updater('1740492837:AAHU2KPSlgPtaQsPCgABZAWh4sFtE78B-MA') dispatcher = updater.dispatcher dispatcher.add_handler(CommandHandler('start', start)) dispatcher.add_handler(CommandHandler('help', help_command)) dispatcher.add_handler(CommandHandler('btc', btc_command)) dispatcher.add_handler(CommandHandler('oportunidades', oportunidades_command)) dispatcher.add_handler(MessageHandler((Filters.text & (~ Filters.command)), echo)) updater.start_polling() updater.idle()<|docstring|>Start the bot.<|endoftext|>
ed9bcd21314d25fab26d6d005751a2ad348fad240033b3f4fb14e8200139a5ac
def parse_args():
    """Parse command-line arguments and return them as a plain dict."""
    parser = ArgumentParser()
    arg = parser.add_argument
    # Required locations.
    arg('-d', '--data_dir', type=str, help='The directrory of test data.', required=True)
    arg('-r', '--root', type=str, help='The objective metric directrory.', required=True)
    # Optional outputs / references.
    arg('-o', '--output_dir', type=str, default=None, help='The output path.')
    arg('-t', '--target_dir', type=str, default=None, help='The directrory of label data.')
    arg('-th', '--threshold_path', type=str, default=None, help='The path of threshold.')
    arg('-m', '--metadata_path', type=str, default=None, help='The path of metadata.')
    arg('-l', '--language', type=str, default='EN', help='The language for ASR.')
    return vars(parser.parse_args())
Parse command-line arguments.
Voice-conversion-evaluation/calculate_objective_metric.py
parse_args
ga642381/robust-vc
12
python
def parse_args(): parser = ArgumentParser() parser.add_argument('-d', '--data_dir', type=str, help='The directrory of test data.', required=True) parser.add_argument('-r', '--root', type=str, help='The objective metric directrory.', required=True) parser.add_argument('-o', '--output_dir', type=str, default=None, help='The output path.') parser.add_argument('-t', '--target_dir', type=str, default=None, help='The directrory of label data.') parser.add_argument('-th', '--threshold_path', type=str, default=None, help='The path of threshold.') parser.add_argument('-m', '--metadata_path', type=str, default=None, help='The path of metadata.') parser.add_argument('-l', '--language', type=str, default='EN', help='The language for ASR.') return vars(parser.parse_args())
def parse_args(): parser = ArgumentParser() parser.add_argument('-d', '--data_dir', type=str, help='The directrory of test data.', required=True) parser.add_argument('-r', '--root', type=str, help='The objective metric directrory.', required=True) parser.add_argument('-o', '--output_dir', type=str, default=None, help='The output path.') parser.add_argument('-t', '--target_dir', type=str, default=None, help='The directrory of label data.') parser.add_argument('-th', '--threshold_path', type=str, default=None, help='The path of threshold.') parser.add_argument('-m', '--metadata_path', type=str, default=None, help='The path of metadata.') parser.add_argument('-l', '--language', type=str, default='EN', help='The language for ASR.') return vars(parser.parse_args())<|docstring|>Parse command-line arguments.<|endoftext|>
af3c1bd4053a6d2db1b9802a483753ef57cb618757172011e837d2309eb5ea68
def main(data_dir, output_dir, root, target_dir, threshold_path, metadata_path, language):
    """Load the metric module under `root` and score the data in `data_dir`."""
    device = torch.device(('cuda' if torch.cuda.is_available() else 'cpu'))
    # Turn "<root>/inference" into a dotted module path and fetch its two
    # entry points (import_module caches, so one call suffices).
    module_name = str(Path(root) / 'inference').replace('/', '.')
    metric_module = importlib.import_module(module_name)
    load_model = getattr(metric_module, 'load_model')
    calculate_score = getattr(metric_module, 'calculate_score')
    print(f'[INFO]: The metric is used from {root}.')
    # The CER metric wants a language tag instead of a directory.
    if (root.find('character_error_rate') > (- 1)):
        root = language
    model = load_model(root, device)
    with torch.no_grad():
        print(f'[INFO]: The testing waveform is loaded from {data_dir}.')
        step_moment = datetime.now()
        calculate_score(
            model=model,
            device=device,
            data_dir=data_dir,
            output_dir=output_dir,
            metadata_path=metadata_path,
            target_dir=target_dir,
            threshold_path=threshold_path,
        )
        elapsed = (datetime.now() - step_moment)
        print('[INFO]: The time of calculate score', elapsed.total_seconds())
        print(('-' * 100))
Main function
Voice-conversion-evaluation/calculate_objective_metric.py
main
ga642381/robust-vc
12
python
def main(data_dir, output_dir, root, target_dir, threshold_path, metadata_path, language): device = torch.device(('cuda' if torch.cuda.is_available() else 'cpu')) inference_path = (Path(root) / 'inference') inference_path = str(inference_path).replace('/', '.') load_model = getattr(importlib.import_module(inference_path), 'load_model') calculate_score = getattr(importlib.import_module(inference_path), 'calculate_score') print(f'[INFO]: The metric is used from {root}.') if (root.find('character_error_rate') > (- 1)): root = language model = load_model(root, device) with torch.no_grad(): print(f'[INFO]: The testing waveform is loaded from {data_dir}.') step_moment = datetime.now() arguments = {'model': model, 'device': device, 'data_dir': data_dir, 'output_dir': output_dir, 'metadata_path': metadata_path, 'target_dir': target_dir, 'threshold_path': threshold_path} calculate_score(**arguments) elaspe_time = (datetime.now() - step_moment) print('[INFO]: The time of calculate score', elaspe_time.total_seconds()) print(('-' * 100))
def main(data_dir, output_dir, root, target_dir, threshold_path, metadata_path, language): device = torch.device(('cuda' if torch.cuda.is_available() else 'cpu')) inference_path = (Path(root) / 'inference') inference_path = str(inference_path).replace('/', '.') load_model = getattr(importlib.import_module(inference_path), 'load_model') calculate_score = getattr(importlib.import_module(inference_path), 'calculate_score') print(f'[INFO]: The metric is used from {root}.') if (root.find('character_error_rate') > (- 1)): root = language model = load_model(root, device) with torch.no_grad(): print(f'[INFO]: The testing waveform is loaded from {data_dir}.') step_moment = datetime.now() arguments = {'model': model, 'device': device, 'data_dir': data_dir, 'output_dir': output_dir, 'metadata_path': metadata_path, 'target_dir': target_dir, 'threshold_path': threshold_path} calculate_score(**arguments) elaspe_time = (datetime.now() - step_moment) print('[INFO]: The time of calculate score', elaspe_time.total_seconds()) print(('-' * 100))<|docstring|>Main function<|endoftext|>
f8bd505acb2dc0b4e171bdc61df3f744f0737637bbd9707f366cedb88a76d388
def batch_pix_accuracy(output, target):
    """PixAcc: count correctly classified pixels among the labeled ones.

    Args:
        output: raw per-class scores, shape (N, C, ...).
        target: integer label map, shape (N, ...); after the +1 shift below,
            0 marks unlabeled pixels.

    Returns:
        (pixel_correct, pixel_labeled) pixel counts.
    """
    # argmax over the class axis; +1 aligns predictions with the shifted labels.
    predicted = np.argmax(output.astype(np.int64), 1) + 1
    shifted = target.astype(np.int64) + 1
    labeled_mask = shifted > 0
    pixel_labeled = labeled_mask.sum()
    pixel_correct = ((predicted == shifted) * labeled_mask).sum()
    assert pixel_correct <= pixel_labeled, 'Correct area should be smaller than Labeled'
    return pixel_correct, pixel_labeled
PixAcc
official/cv/fastscnn/src/score.py
batch_pix_accuracy
Li-kewei/models
1
python
def batch_pix_accuracy(output, target): predict = (np.argmax(output.astype(np.int64), 1) + 1) target = (target.astype(np.int64) + 1) pixel_labeled = (target > 0).sum() pixel_correct = ((predict == target) * (target > 0)).sum() assert (pixel_correct <= pixel_labeled), 'Correct area should be smaller than Labeled' return (pixel_correct, pixel_labeled)
def batch_pix_accuracy(output, target): predict = (np.argmax(output.astype(np.int64), 1) + 1) target = (target.astype(np.int64) + 1) pixel_labeled = (target > 0).sum() pixel_correct = ((predict == target) * (target > 0)).sum() assert (pixel_correct <= pixel_labeled), 'Correct area should be smaller than Labeled' return (pixel_correct, pixel_labeled)<|docstring|>PixAcc<|endoftext|>
017fe9c568556776f0ded12929097ae28c8b1928aded26d32ba00f0d309550db
def batch_intersection_union(output, target, nclass):
    """mIoU statistics: per-class intersection and union pixel counts.

    Args:
        output: raw per-class scores, shape (N, C, ...).
        target: integer label map, shape (N, ...); -1 / negative = unlabeled.
        nclass: number of classes (histogram bins).

    Returns:
        (area_inter, area_union) float32 arrays of length `nclass`.
    """
    lo, hi = 1, nclass
    predicted = np.argmax(output.astype(np.float32), 1) + 1
    shifted = target.astype(np.float32) + 1
    # Zero out predictions on unlabeled pixels so they fall outside [lo, hi].
    predicted = predicted.astype(np.float32) * (shifted > 0).astype(np.float32)
    overlap = predicted * (predicted == shifted).astype(np.float32)
    area_inter, _ = np.histogram(overlap, bins=nclass, range=(lo, hi))
    area_pred, _ = np.histogram(predicted, bins=nclass, range=(lo, hi))
    area_lab, _ = np.histogram(shifted, bins=nclass, range=(lo, hi))
    area_union = area_pred + area_lab - area_inter
    assert (area_inter > area_union).sum() == 0, 'Intersection area should be smaller than Union area'
    return area_inter.astype(np.float32), area_union.astype(np.float32)
mIoU
official/cv/fastscnn/src/score.py
batch_intersection_union
Li-kewei/models
1
python
def batch_intersection_union(output, target, nclass): mini = 1 maxi = nclass nbins = nclass predict = (np.argmax(output.astype(np.float32), 1) + 1) target = (target.astype(np.float32) + 1) predict = (predict.astype(np.float32) * (target > 0).astype(np.float32)) intersection = (predict * (predict == target).astype(np.float32)) (area_inter, _) = np.histogram(intersection, bins=nbins, range=(mini, maxi)) (area_pred, _) = np.histogram(predict, bins=nbins, range=(mini, maxi)) (area_lab, _) = np.histogram(target, bins=nbins, range=(mini, maxi)) area_union = ((area_pred + area_lab) - area_inter) assert ((area_inter > area_union).sum() == 0), 'Intersection area should be smaller than Union area' return (area_inter.astype(np.float32), area_union.astype(np.float32))
def batch_intersection_union(output, target, nclass): mini = 1 maxi = nclass nbins = nclass predict = (np.argmax(output.astype(np.float32), 1) + 1) target = (target.astype(np.float32) + 1) predict = (predict.astype(np.float32) * (target > 0).astype(np.float32)) intersection = (predict * (predict == target).astype(np.float32)) (area_inter, _) = np.histogram(intersection, bins=nbins, range=(mini, maxi)) (area_pred, _) = np.histogram(predict, bins=nbins, range=(mini, maxi)) (area_lab, _) = np.histogram(target, bins=nbins, range=(mini, maxi)) area_union = ((area_pred + area_lab) - area_inter) assert ((area_inter > area_union).sum() == 0), 'Intersection area should be smaller than Union area' return (area_inter.astype(np.float32), area_union.astype(np.float32))<|docstring|>mIoU<|endoftext|>
7edc81285c9677f6676387bbbbb3937b73d4ee1f1cad10ba4bec5e3cf20d6d5d
def pixelAccuracy(imPred, imLab):
    """
    This function takes the prediction and label of a single image, returns pixel-wise accuracy
    To compute over many images do:
        for i = range(Nimages):
            (pixel_accuracy[i], pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i])
        mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))
    """
    # Pixels with a negative label are ignored.
    valid = imLab >= 0
    labeled = np.sum(valid)
    correct = np.sum((imPred == imLab) * valid)
    accuracy = 1.0 * correct / labeled
    return accuracy, correct, labeled
This function takes the prediction and label of a single image, returns pixel-wise accuracy To compute over many images do: for i = range(Nimages): (pixel_accuracy[i], pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i]) mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))
official/cv/fastscnn/src/score.py
pixelAccuracy
Li-kewei/models
1
python
def pixelAccuracy(imPred, imLab): '\n This function takes the prediction and label of a single image, returns pixel-wise accuracy\n To compute over many images do:\n for i = range(Nimages):\n (pixel_accuracy[i], pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i])\n mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))\n ' pixel_labeled = np.sum((imLab >= 0)) pixel_correct = np.sum(((imPred == imLab) * (imLab >= 0))) pixel_accuracy = ((1.0 * pixel_correct) / pixel_labeled) return (pixel_accuracy, pixel_correct, pixel_labeled)
def pixelAccuracy(imPred, imLab): '\n This function takes the prediction and label of a single image, returns pixel-wise accuracy\n To compute over many images do:\n for i = range(Nimages):\n (pixel_accuracy[i], pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i])\n mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))\n ' pixel_labeled = np.sum((imLab >= 0)) pixel_correct = np.sum(((imPred == imLab) * (imLab >= 0))) pixel_accuracy = ((1.0 * pixel_correct) / pixel_labeled) return (pixel_accuracy, pixel_correct, pixel_labeled)<|docstring|>This function takes the prediction and label of a single image, returns pixel-wise accuracy To compute over many images do: for i = range(Nimages): (pixel_accuracy[i], pixel_correct[i], pixel_labeled[i]) = pixelAccuracy(imPred[i], imLab[i]) mean_pixel_accuracy = 1.0 * np.sum(pixel_correct) / (np.spacing(1) + np.sum(pixel_labeled))<|endoftext|>
11dc7bcd60c908cb32ba254a560348b8cff916b74bd18b6b1533a6e2099299ef
def intersectionAndUnion(imPred, imLab, numClass):
    """
    This function takes the prediction and label of a single image,
    returns intersection and union areas for each class
    To compute over many images do:
        for i in range(Nimages):
            (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i])
        IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)
    """
    # Zero out predictions where the label is negative (unlabeled); class 0
    # then falls outside the histogram range below.
    masked_pred = imPred * (imLab >= 0)
    overlap = masked_pred * (masked_pred == imLab)
    area_intersection, _ = np.histogram(overlap, bins=numClass, range=(1, numClass))
    area_pred, _ = np.histogram(masked_pred, bins=numClass, range=(1, numClass))
    area_lab, _ = np.histogram(imLab, bins=numClass, range=(1, numClass))
    area_union = area_pred + area_lab - area_intersection
    return area_intersection, area_union
This function takes the prediction and label of a single image, returns intersection and union areas for each class To compute over many images do: for i in range(Nimages): (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i]) IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)
official/cv/fastscnn/src/score.py
intersectionAndUnion
Li-kewei/models
1
python
def intersectionAndUnion(imPred, imLab, numClass): '\n This function takes the prediction and label of a single image,\n returns intersection and union areas for each class\n To compute over many images do:\n for i in range(Nimages):\n (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i])\n IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)\n ' imPred = (imPred * (imLab >= 0)) intersection = (imPred * (imPred == imLab)) (area_intersection, _) = np.histogram(intersection, bins=numClass, range=(1, numClass)) (area_pred, _) = np.histogram(imPred, bins=numClass, range=(1, numClass)) (area_lab, _) = np.histogram(imLab, bins=numClass, range=(1, numClass)) area_union = ((area_pred + area_lab) - area_intersection) return (area_intersection, area_union)
def intersectionAndUnion(imPred, imLab, numClass): '\n This function takes the prediction and label of a single image,\n returns intersection and union areas for each class\n To compute over many images do:\n for i in range(Nimages):\n (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i])\n IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)\n ' imPred = (imPred * (imLab >= 0)) intersection = (imPred * (imPred == imLab)) (area_intersection, _) = np.histogram(intersection, bins=numClass, range=(1, numClass)) (area_pred, _) = np.histogram(imPred, bins=numClass, range=(1, numClass)) (area_lab, _) = np.histogram(imLab, bins=numClass, range=(1, numClass)) area_union = ((area_pred + area_lab) - area_intersection) return (area_intersection, area_union)<|docstring|>This function takes the prediction and label of a single image, returns intersection and union areas for each class To compute over many images do: for i in range(Nimages): (area_intersection[:,i], area_union[:,i]) = intersectionAndUnion(imPred[i], imLab[i]) IoU = 1.0 * np.sum(area_intersection, axis=1) / np.sum(np.spacing(1)+area_union, axis=1)<|endoftext|>
3cf892bf0bf26e35ecc1a8182477c3d1c31a8d3ac979215ced30a43a9b8d65c4
def update(self, preds, labels):
    """Updates the internal evaluation result.

    Parameters
    ----------
    labels : 'NumpyArray' or list of `NumpyArray`
        The labels of the data.
    preds : 'NumpyArray' or list of `NumpyArray`
        Predicted values.
    """
    def _accumulate(pred, label):
        # Fold one (prediction, label) pair into the running totals.
        correct, labeled = batch_pix_accuracy(pred.asnumpy(), label.asnumpy())
        inter, union = batch_intersection_union(pred.asnumpy(), label.asnumpy(), self.nclass)
        self.total_correct += correct
        self.total_label += labeled
        self.total_inter += inter
        self.total_union += union

    if isinstance(preds, Tensor):
        _accumulate(preds, labels)
    elif isinstance(preds, (list, tuple)):
        for pred, label in zip(preds, labels):
            _accumulate(pred, label)
Updates the internal evaluation result. Parameters ---------- labels : 'NumpyArray' or list of `NumpyArray` The labels of the data. preds : 'NumpyArray' or list of `NumpyArray` Predicted values.
official/cv/fastscnn/src/score.py
update
Li-kewei/models
1
python
def update(self, preds, labels): "Updates the internal evaluation result.\n\n Parameters\n ----------\n labels : 'NumpyArray' or list of `NumpyArray`\n The labels of the data.\n preds : 'NumpyArray' or list of `NumpyArray`\n Predicted values.\n " def evaluate_worker(self, pred, label): (correct, labeled) = batch_pix_accuracy(pred.asnumpy(), label.asnumpy()) (inter, union) = batch_intersection_union(pred.asnumpy(), label.asnumpy(), self.nclass) self.total_correct += correct self.total_label += labeled self.total_inter += inter self.total_union += union if isinstance(preds, Tensor): evaluate_worker(self, preds, labels) elif isinstance(preds, (list, tuple)): for (pred, label) in zip(preds, labels): evaluate_worker(self, pred, label)
def update(self, preds, labels): "Updates the internal evaluation result.\n\n Parameters\n ----------\n labels : 'NumpyArray' or list of `NumpyArray`\n The labels of the data.\n preds : 'NumpyArray' or list of `NumpyArray`\n Predicted values.\n " def evaluate_worker(self, pred, label): (correct, labeled) = batch_pix_accuracy(pred.asnumpy(), label.asnumpy()) (inter, union) = batch_intersection_union(pred.asnumpy(), label.asnumpy(), self.nclass) self.total_correct += correct self.total_label += labeled self.total_inter += inter self.total_union += union if isinstance(preds, Tensor): evaluate_worker(self, preds, labels) elif isinstance(preds, (list, tuple)): for (pred, label) in zip(preds, labels): evaluate_worker(self, pred, label)<|docstring|>Updates the internal evaluation result. Parameters ---------- labels : 'NumpyArray' or list of `NumpyArray` The labels of the data. preds : 'NumpyArray' or list of `NumpyArray` Predicted values.<|endoftext|>
5a948b4b8d1d81ff546160a5389137710d49b9ce4a7bf3592551fb630b8d0654
def get(self, return_category_iou=False):
    """Gets the current evaluation result.

    Returns
    -------
    metrics : tuple of float
        pixAcc and mIoU (plus the per-class IoU array when requested).
    """
    # Machine epsilon in the denominators guards against division by zero
    # before any sample has been accumulated.
    eps = 2.220446049250313e-16
    pixAcc = 1.0 * self.total_correct / (eps + self.total_label)
    IoU = 1.0 * self.total_inter / (eps + self.total_union)
    mIoU = IoU.mean().item()
    return (pixAcc, mIoU, IoU) if return_category_iou else (pixAcc, mIoU)
Gets the current evaluation result. Returns ------- metrics : tuple of float pixAcc and mIoU
official/cv/fastscnn/src/score.py
get
Li-kewei/models
1
python
def get(self, return_category_iou=False): 'Gets the current evaluation result.\n\n Returns\n -------\n metrics : tuple of float\n pixAcc and mIoU\n ' pixAcc = ((1.0 * self.total_correct) / (2.220446049250313e-16 + self.total_label)) IoU = ((1.0 * self.total_inter) / (2.220446049250313e-16 + self.total_union)) mIoU = IoU.mean().item() if return_category_iou: return (pixAcc, mIoU, IoU) return (pixAcc, mIoU)
def get(self, return_category_iou=False): 'Gets the current evaluation result.\n\n Returns\n -------\n metrics : tuple of float\n pixAcc and mIoU\n ' pixAcc = ((1.0 * self.total_correct) / (2.220446049250313e-16 + self.total_label)) IoU = ((1.0 * self.total_inter) / (2.220446049250313e-16 + self.total_union)) mIoU = IoU.mean().item() if return_category_iou: return (pixAcc, mIoU, IoU) return (pixAcc, mIoU)<|docstring|>Gets the current evaluation result. Returns ------- metrics : tuple of float pixAcc and mIoU<|endoftext|>
6440441b62678c9ae29d1e5ba363e5cf249a6b5fda3808456102fb63f21cf141
def reset(self):
    """Resets the internal evaluation result to initial state."""
    # Global pixel counters.
    self.total_correct = 0
    self.total_label = 0
    # Per-class accumulators, one slot per class.
    self.total_inter = np.zeros(self.nclass)
    self.total_union = np.zeros(self.nclass)
Resets the internal evaluation result to initial state.
official/cv/fastscnn/src/score.py
reset
Li-kewei/models
1
python
def reset(self): self.total_inter = np.zeros(self.nclass) self.total_union = np.zeros(self.nclass) self.total_correct = 0 self.total_label = 0
def reset(self): self.total_inter = np.zeros(self.nclass) self.total_union = np.zeros(self.nclass) self.total_correct = 0 self.total_label = 0<|docstring|>Resets the internal evaluation result to initial state.<|endoftext|>
e9b99c8a832809588c86bb061b0cb36352ac22b9d621c3831f32ee5f025b6533
def tcp_listener(address, backlog=50, reuse_addr=True):
    """A shortcut to create a TCP socket, bind it and put it into listening state.

    Args:
        address: (host, port) pair to bind to.
        backlog: listen() backlog length.
        reuse_addr: whether to set SO_REUSEADDR before binding.

    Returns:
        The listening socket.
    """
    from gevent import socket
    sock = socket.socket()
    # Bug fix: the original called bind_and_listen(sock) bare, so `address`,
    # `backlog` and `reuse_addr` were silently ignored and the helper's
    # defaults were used instead. Forward the caller's arguments.
    bind_and_listen(sock, address, backlog=backlog, reuse_addr=reuse_addr)
    return sock
A shortcut to create a TCP socket, bind it and put it into listening state.
src/greentest/greentest.py
tcp_listener
snjypl/gevent
1
python
def tcp_listener(address, backlog=50, reuse_addr=True): from gevent import socket sock = socket.socket() bind_and_listen(sock) return sock
def tcp_listener(address, backlog=50, reuse_addr=True): from gevent import socket sock = socket.socket() bind_and_listen(sock) return sock<|docstring|>A shortcut to create a TCP socket, bind it and put it into listening state.<|endoftext|>
cdcfff725c9680b41007993c5e6886aa5feed714bf65797f527999fa8dcb3eb2
def get_open_files():
    """
    Return a list of popenfile and pconn objects.

    Note that other than `fd`, they have different attributes.
    """
    proc = psutil.Process()
    # Open files plus sockets of every address family/protocol.
    entries = proc.open_files() + proc.connections('all')
    results = {'data': entries}
    # Also index each entry by its file descriptor for direct lookup.
    for entry in entries:
        results[entry.fd] = entry
    results['data'] += ['From psutil', proc]
    return results
Return a list of popenfile and pconn objects. Note that other than `fd`, they have different attributes.
src/greentest/greentest.py
get_open_files
snjypl/gevent
1
python
def get_open_files(): '\n Return a list of popenfile and pconn objects.\n\n Note that other than `fd`, they have different attributes.\n ' results = dict() process = psutil.Process() results['data'] = (process.open_files() + process.connections('all')) for x in results['data']: results[x.fd] = x results['data'] += ['From psutil', process] return results
def get_open_files(): '\n Return a list of popenfile and pconn objects.\n\n Note that other than `fd`, they have different attributes.\n ' results = dict() process = psutil.Process() results['data'] = (process.open_files() + process.connections('all')) for x in results['data']: results[x.fd] = x results['data'] += ['From psutil', process] return results<|docstring|>Return a list of popenfile and pconn objects. Note that other than `fd`, they have different attributes.<|endoftext|>
b6cceab3900cad1a2b8c9f761fc1d2c6c115f2c6e3bc64c7d15524dca70628ae
def user_can_authenticate(self, user):
    """
    Reject users with is_active=False. Custom user models that don't have
    that attribute are allowed.
    """
    status = getattr(user, 'is_active', None)
    # A model without the attribute (status is None) is allowed through;
    # otherwise the truthiness of the flag decides.
    return (status or (status is None))
Reject users with is_active=False. Custom user models that don't have that attribute are allowed.
customizing_auth_practice/backend.py
user_can_authenticate
yunqingqing/django_practice
0
python
def user_can_authenticate(self, user): "\n Reject users with is_active=False. Custom user models that don't have\n that attribute are allowed.\n " is_active = getattr(user, 'is_active', None) return (is_active or (is_active is None))
def user_can_authenticate(self, user): "\n Reject users with is_active=False. Custom user models that don't have\n that attribute are allowed.\n " is_active = getattr(user, 'is_active', None) return (is_active or (is_active is None))<|docstring|>Reject users with is_active=False. Custom user models that don't have that attribute are allowed.<|endoftext|>
a44584b660013e985683fef013574dd73f220c845edd5f1628725a4a1c66889a
def get_lightfield_obj(lf_config):
    """Return the lightfield object for the configured parametrization."""
    # Only the two-plane "lightslab" parametrization is implemented.
    if lf_config.name != 'lightslab':
        raise ValueError('Parametrization:{} not supported for light field'.format(lf_config.name))
    return LightSlab(lf_config)
Return the lightfield object
light_field_neural_rendering/src/utils/lf_utils.py
get_lightfield_obj
suryatmodulus/google-research
2
python
def get_lightfield_obj(lf_config): if (lf_config.name == 'lightslab'): lightfield_obj = LightSlab(lf_config) else: raise ValueError('Parametrization:{} not supported for light field'.format(lf_config.name)) return lightfield_obj
def get_lightfield_obj(lf_config): if (lf_config.name == 'lightslab'): lightfield_obj = LightSlab(lf_config) else: raise ValueError('Parametrization:{} not supported for light field'.format(lf_config.name)) return lightfield_obj<|docstring|>Return the lightfield object<|endoftext|>
bb4091f7d7ff127c4c700aa0bbf6d16e805ef27990f2c9aeee9938ee9b73159d
def __init__(self, config):
    """Store the light-field configuration on the instance."""
    self.config = config
Init Method.
light_field_neural_rendering/src/utils/lf_utils.py
__init__
suryatmodulus/google-research
2
python
def __init__(self, config): self.config = config
def __init__(self, config): self.config = config<|docstring|>Init Method.<|endoftext|>
a5824f1a39c39ac6df139d43494bc1a772ce4a0956d126a4dfa8ae865d883013
def get_lf_encoding(self, rays):
    """Return the light-field representation of `rays` and its encoding."""
    lf_samples, non_intersect_mask = self.ray2lightfield(rays)
    encoded = self.encode(lf_samples)
    return lf_samples, encoded, non_intersect_mask
Return the light field and its encoding
light_field_neural_rendering/src/utils/lf_utils.py
get_lf_encoding
suryatmodulus/google-research
2
python
def get_lf_encoding(self, rays): (lf_samples, non_intersect_mask) = self.ray2lightfield(rays) lf_samples_enc = self.encode(lf_samples) return (lf_samples, lf_samples_enc, non_intersect_mask)
def get_lf_encoding(self, rays): (lf_samples, non_intersect_mask) = self.ray2lightfield(rays) lf_samples_enc = self.encode(lf_samples) return (lf_samples, lf_samples_enc, non_intersect_mask)<|docstring|>Return the light field and its encoding<|endoftext|>
1f1f29fdc0199cb0d4644e229c6b7d3032f0e32cb86fcc09a1750c0fc5eb39b0
def ray2lightfield(self, rays): 'Convert the rays to light field representation.\n\n Args:\n rays: data_types.Rays\n\n Returns:\n lf_samples: Light field representation of rays\n non_intersect_mask: [Optional] To indcate rays that dont intersect the\n light field manifold.\n ' raise NotImplementedError
Convert the rays to light field representation. Args: rays: data_types.Rays Returns: lf_samples: Light field representation of rays non_intersect_mask: [Optional] To indcate rays that dont intersect the light field manifold.
light_field_neural_rendering/src/utils/lf_utils.py
ray2lightfield
suryatmodulus/google-research
2
python
def ray2lightfield(self, rays): 'Convert the rays to light field representation.\n\n Args:\n rays: data_types.Rays\n\n Returns:\n lf_samples: Light field representation of rays\n non_intersect_mask: [Optional] To indcate rays that dont intersect the\n light field manifold.\n ' raise NotImplementedError
def ray2lightfield(self, rays): 'Convert the rays to light field representation.\n\n Args:\n rays: data_types.Rays\n\n Returns:\n lf_samples: Light field representation of rays\n non_intersect_mask: [Optional] To indcate rays that dont intersect the\n light field manifold.\n ' raise NotImplementedError<|docstring|>Convert the rays to light field representation. Args: rays: data_types.Rays Returns: lf_samples: Light field representation of rays non_intersect_mask: [Optional] To indcate rays that dont intersect the light field manifold.<|endoftext|>
64953a2bdcbb24bd50212bf29f4586d3ee102b6c7aac30374e32bee308eea4aa
def encode(self, lf_samples): 'Feature encoding for the light field samples.\n\n Args:\n lf_samples: Light field input.\n\n Returns:\n lf_samples_enc : Encoded light field representation.\n ' if (self.config.encoding_name == 'positional_encoding'): lf_samples_enc = model_utils.posenc(lf_samples, self.config.min_deg_point, self.config.max_deg_point) elif (self.config.encoding_name == 'identity'): lf_samples_enc = lf_samples else: raise ValueError('Mapping type {} not implemented'.format(self.config.encoding_name)) return lf_samples_enc
Feature encoding for the light field samples. Args: lf_samples: Light field input. Returns: lf_samples_enc : Encoded light field representation.
light_field_neural_rendering/src/utils/lf_utils.py
encode
suryatmodulus/google-research
2
python
def encode(self, lf_samples): 'Feature encoding for the light field samples.\n\n Args:\n lf_samples: Light field input.\n\n Returns:\n lf_samples_enc : Encoded light field representation.\n ' if (self.config.encoding_name == 'positional_encoding'): lf_samples_enc = model_utils.posenc(lf_samples, self.config.min_deg_point, self.config.max_deg_point) elif (self.config.encoding_name == 'identity'): lf_samples_enc = lf_samples else: raise ValueError('Mapping type {} not implemented'.format(self.config.encoding_name)) return lf_samples_enc
def encode(self, lf_samples): 'Feature encoding for the light field samples.\n\n Args:\n lf_samples: Light field input.\n\n Returns:\n lf_samples_enc : Encoded light field representation.\n ' if (self.config.encoding_name == 'positional_encoding'): lf_samples_enc = model_utils.posenc(lf_samples, self.config.min_deg_point, self.config.max_deg_point) elif (self.config.encoding_name == 'identity'): lf_samples_enc = lf_samples else: raise ValueError('Mapping type {} not implemented'.format(self.config.encoding_name)) return lf_samples_enc<|docstring|>Feature encoding for the light field samples. Args: lf_samples: Light field input. Returns: lf_samples_enc : Encoded light field representation.<|endoftext|>
9d329e65a9621bb74cccf59d10e6d7ff9841a25e57961cefb560df5d277f0fad
def ray_plane_intersection(self, zconst, rays): 'Compute intersection of the ray with a plane of the form z=const.\n\n Args:\n zconst: Fixed z-value for the plane.\n rays: data_type.Rays.\n\n Returns:\n xy: The free-coordinates of intersection.\n ' t1 = ((zconst - rays.origins[(Ellipsis, (- 1))]) / rays.directions[(Ellipsis, (- 1))]) xy = (rays.origins[(Ellipsis, :2)] + (t1[(Ellipsis, None)] * rays.directions)[(Ellipsis, :2)]) return xy
Compute intersection of the ray with a plane of the form z=const. Args: zconst: Fixed z-value for the plane. rays: data_type.Rays. Returns: xy: The free-coordinates of intersection.
light_field_neural_rendering/src/utils/lf_utils.py
ray_plane_intersection
suryatmodulus/google-research
2
python
def ray_plane_intersection(self, zconst, rays): 'Compute intersection of the ray with a plane of the form z=const.\n\n Args:\n zconst: Fixed z-value for the plane.\n rays: data_type.Rays.\n\n Returns:\n xy: The free-coordinates of intersection.\n ' t1 = ((zconst - rays.origins[(Ellipsis, (- 1))]) / rays.directions[(Ellipsis, (- 1))]) xy = (rays.origins[(Ellipsis, :2)] + (t1[(Ellipsis, None)] * rays.directions)[(Ellipsis, :2)]) return xy
def ray_plane_intersection(self, zconst, rays): 'Compute intersection of the ray with a plane of the form z=const.\n\n Args:\n zconst: Fixed z-value for the plane.\n rays: data_type.Rays.\n\n Returns:\n xy: The free-coordinates of intersection.\n ' t1 = ((zconst - rays.origins[(Ellipsis, (- 1))]) / rays.directions[(Ellipsis, (- 1))]) xy = (rays.origins[(Ellipsis, :2)] + (t1[(Ellipsis, None)] * rays.directions)[(Ellipsis, :2)]) return xy<|docstring|>Compute intersection of the ray with a plane of the form z=const. Args: zconst: Fixed z-value for the plane. rays: data_type.Rays. Returns: xy: The free-coordinates of intersection.<|endoftext|>
33ca9017ebb9dc7a3e8ba7ca3f067ba4136add8c952344bbff552036ab12f901
def ray2lightfield(self, rays): 'Compute the lightslab representation.' st = self.ray_plane_intersection(self.config.st_plane, rays) uv = self.ray_plane_intersection(self.config.uv_plane, rays) lf_samples = jnp.concatenate([st, uv], (- 1)) non_intersect_mask = jnp.array(([False] * lf_samples.shape[0]))[(:, None)] return (lf_samples, non_intersect_mask)
Compute the lightslab representation.
light_field_neural_rendering/src/utils/lf_utils.py
ray2lightfield
suryatmodulus/google-research
2
python
def ray2lightfield(self, rays): st = self.ray_plane_intersection(self.config.st_plane, rays) uv = self.ray_plane_intersection(self.config.uv_plane, rays) lf_samples = jnp.concatenate([st, uv], (- 1)) non_intersect_mask = jnp.array(([False] * lf_samples.shape[0]))[(:, None)] return (lf_samples, non_intersect_mask)
def ray2lightfield(self, rays): st = self.ray_plane_intersection(self.config.st_plane, rays) uv = self.ray_plane_intersection(self.config.uv_plane, rays) lf_samples = jnp.concatenate([st, uv], (- 1)) non_intersect_mask = jnp.array(([False] * lf_samples.shape[0]))[(:, None)] return (lf_samples, non_intersect_mask)<|docstring|>Compute the lightslab representation.<|endoftext|>
0ddfb12925f44607efc586211a85a3976e7d0da39d14fc25461f668e5a8f2481
def Write_Vasp_KPOINTS(cal_loc, structure_filename, workflow): '\n Write or modify KPOINTS in cal_loc as follows:\n step I: Check the presence of file KPOINTS in folder cal_loc.\n If present, no need to write KPOINTS,\n If missing, write KPOINTS according to tag kpoints_tag\n step II: If KPOINTS is written and the system is 2D according to tag 2d_system, modify KPOINTS such that K_z = 0 for all kpoints\n step III: tag denser_kpoints defaults to (1, 1, 1). If denser_kpoints is set, then modfiy KPOINTS accordingly and save the old\n KPOINTS as KPOINTS.sparse. Note that this function is only active for automatically generated KPOINTS, namely,\n kpoints_type = "MPRelaxSet" or "MPStaticSet"\n Input arguments:\n cal_loc (str): the absolute path\n structure_filename (str): the file from which the structure is read using pymatgen.Structure.from_file\n workflow\n ' vasp_kpoints = Vasp_Kpoints(cal_loc, structure_filename, workflow) firework = vasp_kpoints.current_firework if os.path.isfile(os.path.join(cal_loc, 'KPOINTS')): kpoints_type = None else: kpoints_type = vasp_kpoints.current_firework['kpoints_type'] if (kpoints_type == 'Line-mode'): vasp_kpoints.Write_line_mode_KPOINTS(intersections=firework['intersections'], twoD_system=workflow[0]['2d_system']) with open(vasp_kpoints.log_txt, 'a') as f: f.write('{} INFO: write KPOINTS in the line mode based on pymatgen.symmetry.bandstructure.HighSymmKpath\n'.format(get_time_str())) if workflow[0]['2d_system']: f.write('\t\t\tKPOINTS is already suitable for 2D systems\n') elif (kpoints_type == 'MPNonSCFSet_line'): vasp_kpoints.Write_NONSCF_KPOINTS(mode='line', kpoints_line_density=firework['kpoints_line_density']) elif (kpoints_type == 'MPNonSCFSet_uniform'): vasp_kpoints.Write_NONSCF_KPOINTS(mode='uniform', reciprocal_density=firework['reciprocal_density']) elif (kpoints_type == 'MPRelaxSet'): vasp_kpoints.Write_MPRelax_KPOINTS(force_gamma=workflow[0]['force_gamma']) elif (kpoints_type == 'MPStaticSet'): 
vasp_kpoints.Write_MPStatic_KPOINTS(force_gamma=workflow[0]['force_gamma']) if (kpoints_type in ['MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'MPRelaxSet', 'MPStaticSet']): with open(vasp_kpoints.log_txt, 'a') as f: f.write('{} INFO: use pymatgen.io.vasp.{} '.format(get_time_str(), kpoints_type)) f.write('to write KPOINTS under {}\n'.format(vasp_kpoints.firework_name)) if workflow[0]['2d_system']: new_name = vasp_kpoints.modify_vasp_kpoints_for_2D(rename_old_kpoints=('KPOINTS.pymatgen_' + kpoints_type)) with open(vasp_kpoints.log_txt, 'a') as f: f.write('\t\t\tKPOINTS is modified for 2D systems\n') f.write('\t\t\t\told KPOINTS --> {}\n'.format(new_name)) kpoints_type = firework['kpoints_type'] if (isinstance(firework['denser_kpoints'][0], float) and (kpoints_type in ['MPRelaxSet', 'MPStaticSet'])): vasp_kpoints.make_KPOINTS_denser(denser_kpoints=firework['denser_kpoints']) with open(vasp_kpoints.log_txt, 'a') as f: f.write('\t\t\ttag denser_kpoints has been set to {}\n'.format(firework['denser_kpoints'])) f.write('\t\t\t\tSo change KPOINTS according to denser_kpoints\n') f.write('\t\t\t\told KPOINTS --> KPOINTS.sparse\n')
Write or modify KPOINTS in cal_loc as follows: step I: Check the presence of file KPOINTS in folder cal_loc. If present, no need to write KPOINTS, If missing, write KPOINTS according to tag kpoints_tag step II: If KPOINTS is written and the system is 2D according to tag 2d_system, modify KPOINTS such that K_z = 0 for all kpoints step III: tag denser_kpoints defaults to (1, 1, 1). If denser_kpoints is set, then modfiy KPOINTS accordingly and save the old KPOINTS as KPOINTS.sparse. Note that this function is only active for automatically generated KPOINTS, namely, kpoints_type = "MPRelaxSet" or "MPStaticSet" Input arguments: cal_loc (str): the absolute path structure_filename (str): the file from which the structure is read using pymatgen.Structure.from_file workflow
HTC_lib/Write_VASP_KPOINTS.py
Write_Vasp_KPOINTS
tair-ai/deephtc
0
python
def Write_Vasp_KPOINTS(cal_loc, structure_filename, workflow): '\n Write or modify KPOINTS in cal_loc as follows:\n step I: Check the presence of file KPOINTS in folder cal_loc.\n If present, no need to write KPOINTS,\n If missing, write KPOINTS according to tag kpoints_tag\n step II: If KPOINTS is written and the system is 2D according to tag 2d_system, modify KPOINTS such that K_z = 0 for all kpoints\n step III: tag denser_kpoints defaults to (1, 1, 1). If denser_kpoints is set, then modfiy KPOINTS accordingly and save the old\n KPOINTS as KPOINTS.sparse. Note that this function is only active for automatically generated KPOINTS, namely,\n kpoints_type = "MPRelaxSet" or "MPStaticSet"\n Input arguments:\n cal_loc (str): the absolute path\n structure_filename (str): the file from which the structure is read using pymatgen.Structure.from_file\n workflow\n ' vasp_kpoints = Vasp_Kpoints(cal_loc, structure_filename, workflow) firework = vasp_kpoints.current_firework if os.path.isfile(os.path.join(cal_loc, 'KPOINTS')): kpoints_type = None else: kpoints_type = vasp_kpoints.current_firework['kpoints_type'] if (kpoints_type == 'Line-mode'): vasp_kpoints.Write_line_mode_KPOINTS(intersections=firework['intersections'], twoD_system=workflow[0]['2d_system']) with open(vasp_kpoints.log_txt, 'a') as f: f.write('{} INFO: write KPOINTS in the line mode based on pymatgen.symmetry.bandstructure.HighSymmKpath\n'.format(get_time_str())) if workflow[0]['2d_system']: f.write('\t\t\tKPOINTS is already suitable for 2D systems\n') elif (kpoints_type == 'MPNonSCFSet_line'): vasp_kpoints.Write_NONSCF_KPOINTS(mode='line', kpoints_line_density=firework['kpoints_line_density']) elif (kpoints_type == 'MPNonSCFSet_uniform'): vasp_kpoints.Write_NONSCF_KPOINTS(mode='uniform', reciprocal_density=firework['reciprocal_density']) elif (kpoints_type == 'MPRelaxSet'): vasp_kpoints.Write_MPRelax_KPOINTS(force_gamma=workflow[0]['force_gamma']) elif (kpoints_type == 'MPStaticSet'): 
vasp_kpoints.Write_MPStatic_KPOINTS(force_gamma=workflow[0]['force_gamma']) if (kpoints_type in ['MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'MPRelaxSet', 'MPStaticSet']): with open(vasp_kpoints.log_txt, 'a') as f: f.write('{} INFO: use pymatgen.io.vasp.{} '.format(get_time_str(), kpoints_type)) f.write('to write KPOINTS under {}\n'.format(vasp_kpoints.firework_name)) if workflow[0]['2d_system']: new_name = vasp_kpoints.modify_vasp_kpoints_for_2D(rename_old_kpoints=('KPOINTS.pymatgen_' + kpoints_type)) with open(vasp_kpoints.log_txt, 'a') as f: f.write('\t\t\tKPOINTS is modified for 2D systems\n') f.write('\t\t\t\told KPOINTS --> {}\n'.format(new_name)) kpoints_type = firework['kpoints_type'] if (isinstance(firework['denser_kpoints'][0], float) and (kpoints_type in ['MPRelaxSet', 'MPStaticSet'])): vasp_kpoints.make_KPOINTS_denser(denser_kpoints=firework['denser_kpoints']) with open(vasp_kpoints.log_txt, 'a') as f: f.write('\t\t\ttag denser_kpoints has been set to {}\n'.format(firework['denser_kpoints'])) f.write('\t\t\t\tSo change KPOINTS according to denser_kpoints\n') f.write('\t\t\t\told KPOINTS --> KPOINTS.sparse\n')
def Write_Vasp_KPOINTS(cal_loc, structure_filename, workflow): '\n Write or modify KPOINTS in cal_loc as follows:\n step I: Check the presence of file KPOINTS in folder cal_loc.\n If present, no need to write KPOINTS,\n If missing, write KPOINTS according to tag kpoints_tag\n step II: If KPOINTS is written and the system is 2D according to tag 2d_system, modify KPOINTS such that K_z = 0 for all kpoints\n step III: tag denser_kpoints defaults to (1, 1, 1). If denser_kpoints is set, then modfiy KPOINTS accordingly and save the old\n KPOINTS as KPOINTS.sparse. Note that this function is only active for automatically generated KPOINTS, namely,\n kpoints_type = "MPRelaxSet" or "MPStaticSet"\n Input arguments:\n cal_loc (str): the absolute path\n structure_filename (str): the file from which the structure is read using pymatgen.Structure.from_file\n workflow\n ' vasp_kpoints = Vasp_Kpoints(cal_loc, structure_filename, workflow) firework = vasp_kpoints.current_firework if os.path.isfile(os.path.join(cal_loc, 'KPOINTS')): kpoints_type = None else: kpoints_type = vasp_kpoints.current_firework['kpoints_type'] if (kpoints_type == 'Line-mode'): vasp_kpoints.Write_line_mode_KPOINTS(intersections=firework['intersections'], twoD_system=workflow[0]['2d_system']) with open(vasp_kpoints.log_txt, 'a') as f: f.write('{} INFO: write KPOINTS in the line mode based on pymatgen.symmetry.bandstructure.HighSymmKpath\n'.format(get_time_str())) if workflow[0]['2d_system']: f.write('\t\t\tKPOINTS is already suitable for 2D systems\n') elif (kpoints_type == 'MPNonSCFSet_line'): vasp_kpoints.Write_NONSCF_KPOINTS(mode='line', kpoints_line_density=firework['kpoints_line_density']) elif (kpoints_type == 'MPNonSCFSet_uniform'): vasp_kpoints.Write_NONSCF_KPOINTS(mode='uniform', reciprocal_density=firework['reciprocal_density']) elif (kpoints_type == 'MPRelaxSet'): vasp_kpoints.Write_MPRelax_KPOINTS(force_gamma=workflow[0]['force_gamma']) elif (kpoints_type == 'MPStaticSet'): 
vasp_kpoints.Write_MPStatic_KPOINTS(force_gamma=workflow[0]['force_gamma']) if (kpoints_type in ['MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'MPRelaxSet', 'MPStaticSet']): with open(vasp_kpoints.log_txt, 'a') as f: f.write('{} INFO: use pymatgen.io.vasp.{} '.format(get_time_str(), kpoints_type)) f.write('to write KPOINTS under {}\n'.format(vasp_kpoints.firework_name)) if workflow[0]['2d_system']: new_name = vasp_kpoints.modify_vasp_kpoints_for_2D(rename_old_kpoints=('KPOINTS.pymatgen_' + kpoints_type)) with open(vasp_kpoints.log_txt, 'a') as f: f.write('\t\t\tKPOINTS is modified for 2D systems\n') f.write('\t\t\t\told KPOINTS --> {}\n'.format(new_name)) kpoints_type = firework['kpoints_type'] if (isinstance(firework['denser_kpoints'][0], float) and (kpoints_type in ['MPRelaxSet', 'MPStaticSet'])): vasp_kpoints.make_KPOINTS_denser(denser_kpoints=firework['denser_kpoints']) with open(vasp_kpoints.log_txt, 'a') as f: f.write('\t\t\ttag denser_kpoints has been set to {}\n'.format(firework['denser_kpoints'])) f.write('\t\t\t\tSo change KPOINTS according to denser_kpoints\n') f.write('\t\t\t\told KPOINTS --> KPOINTS.sparse\n')<|docstring|>Write or modify KPOINTS in cal_loc as follows: step I: Check the presence of file KPOINTS in folder cal_loc. If present, no need to write KPOINTS, If missing, write KPOINTS according to tag kpoints_tag step II: If KPOINTS is written and the system is 2D according to tag 2d_system, modify KPOINTS such that K_z = 0 for all kpoints step III: tag denser_kpoints defaults to (1, 1, 1). If denser_kpoints is set, then modfiy KPOINTS accordingly and save the old KPOINTS as KPOINTS.sparse. Note that this function is only active for automatically generated KPOINTS, namely, kpoints_type = "MPRelaxSet" or "MPStaticSet" Input arguments: cal_loc (str): the absolute path structure_filename (str): the file from which the structure is read using pymatgen.Structure.from_file workflow<|endoftext|>
379d0b1e888ab3a3acb2fec9d3b022bd61299bc48c702b4d5cfd78bd8a1a0d17
def Write_MPRelax_KPOINTS(self, **kwargs): '\n generate KPOINTS for scf or structural relaxations cal by pymatgen.io.vasp.set.MPRelaxSet.\n ' vis = MPRelaxSet(structure=self.structure, **kwargs) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))
generate KPOINTS for scf or structural relaxations cal by pymatgen.io.vasp.set.MPRelaxSet.
HTC_lib/Write_VASP_KPOINTS.py
Write_MPRelax_KPOINTS
tair-ai/deephtc
0
python
def Write_MPRelax_KPOINTS(self, **kwargs): '\n \n ' vis = MPRelaxSet(structure=self.structure, **kwargs) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))
def Write_MPRelax_KPOINTS(self, **kwargs): '\n \n ' vis = MPRelaxSet(structure=self.structure, **kwargs) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))<|docstring|>generate KPOINTS for scf or structural relaxations cal by pymatgen.io.vasp.set.MPRelaxSet.<|endoftext|>
c4c727469bf7ed932b942d663038ea12400ec0b37910127197d43d10647a73cc
def Write_MPStatic_KPOINTS(self, **kwargs): '\n generate KPOINTS for scf cal by pymatgen.io.vasp.set.MPStaticSet.\n ' vis = MPStaticSet(structure=self.structure, **kwargs) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))
generate KPOINTS for scf cal by pymatgen.io.vasp.set.MPStaticSet.
HTC_lib/Write_VASP_KPOINTS.py
Write_MPStatic_KPOINTS
tair-ai/deephtc
0
python
def Write_MPStatic_KPOINTS(self, **kwargs): '\n \n ' vis = MPStaticSet(structure=self.structure, **kwargs) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))
def Write_MPStatic_KPOINTS(self, **kwargs): '\n \n ' vis = MPStaticSet(structure=self.structure, **kwargs) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))<|docstring|>generate KPOINTS for scf cal by pymatgen.io.vasp.set.MPStaticSet.<|endoftext|>
8a63c2bd83f9444efd996b6ee9d29e5ad8a5ff0dad9df835ed4d34d51da2e23b
def Write_NONSCF_KPOINTS(self, mode='line', nedos=601, reciprocal_density=100, sym_prec=0.1, kpoints_line_density=20, optics=False, **kwargs): '\n generate KPOINTS for DOS (mode="uniform") or band struture (mode="line") by pymatgen.io.vasp.set.MPNonSCFSet\n input arguments:\n -mode (str): \'line\' or \'uniform\'\n -nedos (int): default 601. Only valid at mode=\'uniform\'\n -reciprocal_density (int): default 100. Only valid at mode=\'uniform\'\n -sym_prec (float): default 0.1\n -kpoints_line_density (int): default 20. Only valid at mode=\'line\'\n -optics (bool)\n ' vis = MPNonSCFSet(structure=self.structure, mode=mode, nedos=nedos, reciprocal_density=reciprocal_density, sym_prec=sym_prec, kpoints_line_density=kpoints_line_density, optics=optics) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))
generate KPOINTS for DOS (mode="uniform") or band struture (mode="line") by pymatgen.io.vasp.set.MPNonSCFSet input arguments: -mode (str): 'line' or 'uniform' -nedos (int): default 601. Only valid at mode='uniform' -reciprocal_density (int): default 100. Only valid at mode='uniform' -sym_prec (float): default 0.1 -kpoints_line_density (int): default 20. Only valid at mode='line' -optics (bool)
HTC_lib/Write_VASP_KPOINTS.py
Write_NONSCF_KPOINTS
tair-ai/deephtc
0
python
def Write_NONSCF_KPOINTS(self, mode='line', nedos=601, reciprocal_density=100, sym_prec=0.1, kpoints_line_density=20, optics=False, **kwargs): '\n generate KPOINTS for DOS (mode="uniform") or band struture (mode="line") by pymatgen.io.vasp.set.MPNonSCFSet\n input arguments:\n -mode (str): \'line\' or \'uniform\'\n -nedos (int): default 601. Only valid at mode=\'uniform\'\n -reciprocal_density (int): default 100. Only valid at mode=\'uniform\'\n -sym_prec (float): default 0.1\n -kpoints_line_density (int): default 20. Only valid at mode=\'line\'\n -optics (bool)\n ' vis = MPNonSCFSet(structure=self.structure, mode=mode, nedos=nedos, reciprocal_density=reciprocal_density, sym_prec=sym_prec, kpoints_line_density=kpoints_line_density, optics=optics) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))
def Write_NONSCF_KPOINTS(self, mode='line', nedos=601, reciprocal_density=100, sym_prec=0.1, kpoints_line_density=20, optics=False, **kwargs): '\n generate KPOINTS for DOS (mode="uniform") or band struture (mode="line") by pymatgen.io.vasp.set.MPNonSCFSet\n input arguments:\n -mode (str): \'line\' or \'uniform\'\n -nedos (int): default 601. Only valid at mode=\'uniform\'\n -reciprocal_density (int): default 100. Only valid at mode=\'uniform\'\n -sym_prec (float): default 0.1\n -kpoints_line_density (int): default 20. Only valid at mode=\'line\'\n -optics (bool)\n ' vis = MPNonSCFSet(structure=self.structure, mode=mode, nedos=nedos, reciprocal_density=reciprocal_density, sym_prec=sym_prec, kpoints_line_density=kpoints_line_density, optics=optics) vis.kpoints.write_file(os.path.join(self.cal_loc, 'KPOINTS'))<|docstring|>generate KPOINTS for DOS (mode="uniform") or band struture (mode="line") by pymatgen.io.vasp.set.MPNonSCFSet input arguments: -mode (str): 'line' or 'uniform' -nedos (int): default 601. Only valid at mode='uniform' -reciprocal_density (int): default 100. Only valid at mode='uniform' -sym_prec (float): default 0.1 -kpoints_line_density (int): default 20. Only valid at mode='line' -optics (bool)<|endoftext|>
cef27987da9687453aaabbeccb24cde5c7db08aedd6eb0bd83b1e1be9f9ed0a4
def Write_line_mode_KPOINTS(self, intersections, twoD_system=False): '\n Write a kpath along the high symmetry kpoints in the line mode into KPOINTS under dir cal_loc for the band structure calculation.\n input arguments:\n - intersections (int): For every segment, there are intersections equally spaced kpionts, including the starting and ending high symmetry k-points\n - twoD_system (bool): If True, the kpath only includes the kpoints whose z component are zero. Default: False\n see https://cms.mpi.univie.ac.at/vasp/vasp/Strings_k_points_bandstructure_calculations.html\n Note that the reciprocal coordinates are adopted.\n Note that if twoD_system is True, the vacuum layer is assumed to be along the Z direction and the lattice vector c must be normal to the surface.\n ' try: kpath = HighSymmKpath(structure=self.structure).get_kpoints(line_density=1, coords_are_cartesian=False) except Exception as e: with open(os.path.join(self.log_txt), 'a') as f: f.write('{} Error: {}\n'.format(get_time_str(), self.firework_name)) f.write("\t\tfail to find high-symmetry kpoints using pymatgen's HighSymmKpath\n") f.write('\t\t{}\n'.format(e)) f.write("\t\tcreate __manual__ and write down '__HighSymmKpath__'.\n") with open(os.path.join(self.cal_loc, '__manual__'), 'w') as f: f.write('__HighSymmKpath__') return False kpoints = [] for (k_, k_label) in zip(*kpath): if k_label: kpoints.append((list(k_) + [k_label])) starting_kpoints = kpoints[::2] ending_kpoints = kpoints[1::2] with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: f.write('k-points along high symmetry lines\n') f.write('{}\n'.format(intersections)) f.write('Line-mode\n') f.write('rec\n') for (start_k, end_k) in zip(starting_kpoints, ending_kpoints): if (twoD_system and ((abs(start_k[2]) + abs(end_k[2])) > 1e-05)): continue f.write(' {} {} {} {}\n'.format(*start_k)) f.write(' {} {} {} {}\n'.format(*end_k)) f.write('\n')
Write a kpath along the high symmetry kpoints in the line mode into KPOINTS under dir cal_loc for the band structure calculation. input arguments: - intersections (int): For every segment, there are intersections equally spaced kpionts, including the starting and ending high symmetry k-points - twoD_system (bool): If True, the kpath only includes the kpoints whose z component are zero. Default: False see https://cms.mpi.univie.ac.at/vasp/vasp/Strings_k_points_bandstructure_calculations.html Note that the reciprocal coordinates are adopted. Note that if twoD_system is True, the vacuum layer is assumed to be along the Z direction and the lattice vector c must be normal to the surface.
HTC_lib/Write_VASP_KPOINTS.py
Write_line_mode_KPOINTS
tair-ai/deephtc
0
python
def Write_line_mode_KPOINTS(self, intersections, twoD_system=False): '\n Write a kpath along the high symmetry kpoints in the line mode into KPOINTS under dir cal_loc for the band structure calculation.\n input arguments:\n - intersections (int): For every segment, there are intersections equally spaced kpionts, including the starting and ending high symmetry k-points\n - twoD_system (bool): If True, the kpath only includes the kpoints whose z component are zero. Default: False\n see https://cms.mpi.univie.ac.at/vasp/vasp/Strings_k_points_bandstructure_calculations.html\n Note that the reciprocal coordinates are adopted.\n Note that if twoD_system is True, the vacuum layer is assumed to be along the Z direction and the lattice vector c must be normal to the surface.\n ' try: kpath = HighSymmKpath(structure=self.structure).get_kpoints(line_density=1, coords_are_cartesian=False) except Exception as e: with open(os.path.join(self.log_txt), 'a') as f: f.write('{} Error: {}\n'.format(get_time_str(), self.firework_name)) f.write("\t\tfail to find high-symmetry kpoints using pymatgen's HighSymmKpath\n") f.write('\t\t{}\n'.format(e)) f.write("\t\tcreate __manual__ and write down '__HighSymmKpath__'.\n") with open(os.path.join(self.cal_loc, '__manual__'), 'w') as f: f.write('__HighSymmKpath__') return False kpoints = [] for (k_, k_label) in zip(*kpath): if k_label: kpoints.append((list(k_) + [k_label])) starting_kpoints = kpoints[::2] ending_kpoints = kpoints[1::2] with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: f.write('k-points along high symmetry lines\n') f.write('{}\n'.format(intersections)) f.write('Line-mode\n') f.write('rec\n') for (start_k, end_k) in zip(starting_kpoints, ending_kpoints): if (twoD_system and ((abs(start_k[2]) + abs(end_k[2])) > 1e-05)): continue f.write(' {} {} {} {}\n'.format(*start_k)) f.write(' {} {} {} {}\n'.format(*end_k)) f.write('\n')
def Write_line_mode_KPOINTS(self, intersections, twoD_system=False): '\n Write a kpath along the high symmetry kpoints in the line mode into KPOINTS under dir cal_loc for the band structure calculation.\n input arguments:\n - intersections (int): For every segment, there are intersections equally spaced kpionts, including the starting and ending high symmetry k-points\n - twoD_system (bool): If True, the kpath only includes the kpoints whose z component are zero. Default: False\n see https://cms.mpi.univie.ac.at/vasp/vasp/Strings_k_points_bandstructure_calculations.html\n Note that the reciprocal coordinates are adopted.\n Note that if twoD_system is True, the vacuum layer is assumed to be along the Z direction and the lattice vector c must be normal to the surface.\n ' try: kpath = HighSymmKpath(structure=self.structure).get_kpoints(line_density=1, coords_are_cartesian=False) except Exception as e: with open(os.path.join(self.log_txt), 'a') as f: f.write('{} Error: {}\n'.format(get_time_str(), self.firework_name)) f.write("\t\tfail to find high-symmetry kpoints using pymatgen's HighSymmKpath\n") f.write('\t\t{}\n'.format(e)) f.write("\t\tcreate __manual__ and write down '__HighSymmKpath__'.\n") with open(os.path.join(self.cal_loc, '__manual__'), 'w') as f: f.write('__HighSymmKpath__') return False kpoints = [] for (k_, k_label) in zip(*kpath): if k_label: kpoints.append((list(k_) + [k_label])) starting_kpoints = kpoints[::2] ending_kpoints = kpoints[1::2] with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: f.write('k-points along high symmetry lines\n') f.write('{}\n'.format(intersections)) f.write('Line-mode\n') f.write('rec\n') for (start_k, end_k) in zip(starting_kpoints, ending_kpoints): if (twoD_system and ((abs(start_k[2]) + abs(end_k[2])) > 1e-05)): continue f.write(' {} {} {} {}\n'.format(*start_k)) f.write(' {} {} {} {}\n'.format(*end_k)) f.write('\n')<|docstring|>Write a kpath along the high symmetry kpoints in the line mode into KPOINTS under 
dir cal_loc for the band structure calculation. input arguments: - intersections (int): For every segment, there are intersections equally spaced kpionts, including the starting and ending high symmetry k-points - twoD_system (bool): If True, the kpath only includes the kpoints whose z component are zero. Default: False see https://cms.mpi.univie.ac.at/vasp/vasp/Strings_k_points_bandstructure_calculations.html Note that the reciprocal coordinates are adopted. Note that if twoD_system is True, the vacuum layer is assumed to be along the Z direction and the lattice vector c must be normal to the surface.<|endoftext|>
8943ecd683badd42a33710305d58835ce4260953905a36970bb26751434e03f3
def make_KPOINTS_denser(self, denser_kpoints=[]): "\n Modify KPOINTS such that the kpiont along the axes in certain directions is denser_kpoints denser. Rename old KPOINTS as KPOINTS.sparse\n input arguments:\n denser_kpoints (list of float): This tag is only active at kpoints_type='MPRelaxSet', 'MPStaticSet',\n namely only for automatically generated KPOINTS\n Note that denser_kpoints must consist of three float numbers:\n - The first number is associated to the 'x' axis\n - The second number is associated to the 'y' axis\n - The third number is associated to the 'z' axis\n " if (self.current_firework['kpoints_type'] not in ['MPRelaxSet', 'MPStaticSet']): return False assert (len(denser_kpoints) == 3), 'Error: tag denser_kpoints must be three float/integer numbers separated by commas.' with open(os.path.join(self.cal_loc, 'KPOINTS'), 'r') as f: kpoints = [line.strip() for line in f if line.strip()] assert (('gam' in kpoints[2].lower()) or ('mon' in kpoints[2].lower())), 'Error: fail to make KPOINTS denser at {}'.format(self.cal_loc) nk_list = [int(k) for k in kpoints[3].split() if k] for i in range(3): nk_list[i] = int(round((nk_list[i] * denser_kpoints[i]))) if (nk_list[i] == 0): nk_list[i] = 1 kpoints[3] = '{} {} {}'.format(*nk_list) decorated_os_rename(loc=self.cal_loc, old_filename='KPOINTS', new_filename='KPOINTS.sparse') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: for line in kpoints: f.write((line + '\n'))
Modify KPOINTS such that the kpiont along the axes in certain directions is denser_kpoints denser. Rename old KPOINTS as KPOINTS.sparse input arguments: denser_kpoints (list of float): This tag is only active at kpoints_type='MPRelaxSet', 'MPStaticSet', namely only for automatically generated KPOINTS Note that denser_kpoints must consist of three float numbers: - The first number is associated to the 'x' axis - The second number is associated to the 'y' axis - The third number is associated to the 'z' axis
HTC_lib/Write_VASP_KPOINTS.py
make_KPOINTS_denser
tair-ai/deephtc
0
python
def make_KPOINTS_denser(self, denser_kpoints=[]): "\n Modify KPOINTS such that the kpiont along the axes in certain directions is denser_kpoints denser. Rename old KPOINTS as KPOINTS.sparse\n input arguments:\n denser_kpoints (list of float): This tag is only active at kpoints_type='MPRelaxSet', 'MPStaticSet',\n namely only for automatically generated KPOINTS\n Note that denser_kpoints must consist of three float numbers:\n - The first number is associated to the 'x' axis\n - The second number is associated to the 'y' axis\n - The third number is associated to the 'z' axis\n " if (self.current_firework['kpoints_type'] not in ['MPRelaxSet', 'MPStaticSet']): return False assert (len(denser_kpoints) == 3), 'Error: tag denser_kpoints must be three float/integer numbers separated by commas.' with open(os.path.join(self.cal_loc, 'KPOINTS'), 'r') as f: kpoints = [line.strip() for line in f if line.strip()] assert (('gam' in kpoints[2].lower()) or ('mon' in kpoints[2].lower())), 'Error: fail to make KPOINTS denser at {}'.format(self.cal_loc) nk_list = [int(k) for k in kpoints[3].split() if k] for i in range(3): nk_list[i] = int(round((nk_list[i] * denser_kpoints[i]))) if (nk_list[i] == 0): nk_list[i] = 1 kpoints[3] = '{} {} {}'.format(*nk_list) decorated_os_rename(loc=self.cal_loc, old_filename='KPOINTS', new_filename='KPOINTS.sparse') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: for line in kpoints: f.write((line + '\n'))
def make_KPOINTS_denser(self, denser_kpoints=[]): "\n Modify KPOINTS such that the kpiont along the axes in certain directions is denser_kpoints denser. Rename old KPOINTS as KPOINTS.sparse\n input arguments:\n denser_kpoints (list of float): This tag is only active at kpoints_type='MPRelaxSet', 'MPStaticSet',\n namely only for automatically generated KPOINTS\n Note that denser_kpoints must consist of three float numbers:\n - The first number is associated to the 'x' axis\n - The second number is associated to the 'y' axis\n - The third number is associated to the 'z' axis\n " if (self.current_firework['kpoints_type'] not in ['MPRelaxSet', 'MPStaticSet']): return False assert (len(denser_kpoints) == 3), 'Error: tag denser_kpoints must be three float/integer numbers separated by commas.' with open(os.path.join(self.cal_loc, 'KPOINTS'), 'r') as f: kpoints = [line.strip() for line in f if line.strip()] assert (('gam' in kpoints[2].lower()) or ('mon' in kpoints[2].lower())), 'Error: fail to make KPOINTS denser at {}'.format(self.cal_loc) nk_list = [int(k) for k in kpoints[3].split() if k] for i in range(3): nk_list[i] = int(round((nk_list[i] * denser_kpoints[i]))) if (nk_list[i] == 0): nk_list[i] = 1 kpoints[3] = '{} {} {}'.format(*nk_list) decorated_os_rename(loc=self.cal_loc, old_filename='KPOINTS', new_filename='KPOINTS.sparse') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: for line in kpoints: f.write((line + '\n'))<|docstring|>Modify KPOINTS such that the kpiont along the axes in certain directions is denser_kpoints denser. Rename old KPOINTS as KPOINTS.sparse input arguments: denser_kpoints (list of float): This tag is only active at kpoints_type='MPRelaxSet', 'MPStaticSet', namely only for automatically generated KPOINTS Note that denser_kpoints must consist of three float numbers: - The first number is associated to the 'x' axis - The second number is associated to the 'y' axis - The third number is associated to the 'z' axis<|endoftext|>
875fcdb15ac8c3180c303a929d8a09b2d6a9b38dad0616c1a8b8634e4aa9a168
def modify_vasp_kpoints_for_2D(self, rename_old_kpoints=True, tolerance=1e-05): "\n modify KPOINTS properly for 2D structures.\n support kpoints_type for KPOINTS modifications: 'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'automatic'\n - 'MPRelaxSet': pymatgen.io.vasp.sets.MPRelaxSet generates KPOINTS.\n - 'MPStaticSet': pymatgen.io.vasp.sets.MPStaticSet generates KPOINTS.\n - 'MPNonSCFSet_uniform': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the uniform mode for DOS\n - 'MPNonSCFSet_line': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the line mode for band str\n input arguments:\n - rename_old_kpoints (bool or str):\n - if it is True, rename the old KPOINTS like KPOINTS_0, KPOINTS_1, KPINTS_2, ...\n - if it is False, the old KPOINTS will be overwritten\n - if it a string, the string is the new name of the old KPOINTS\n Default: True\n - tolerance (float): if abs(k_z) < tolerance for uniform and line modes, \n we think this kpoint is valid; otherwise remove it.\n Default: 1.0e-5\n if old KPOINTS is saved, return the new name of the old KPOINTS.\n " kpoints_type = self.current_firework['kpoints_type'] if (kpoints_type.strip() not in ['MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform']): print('Error: for func modify_vasp_kpoints_for_2D, the input argument kpoints_tag must be on the those below:') print("'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform'") raise Exception('See above for the error information') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'r') as f: kpoints = [line.strip() for line in f if line.strip()] if (kpoints_type in ['MPRelaxSet', 'MPStaticSet']): assert (('gam' in kpoints[2].lower()) or ('mon' in kpoints[2].lower())), 'Error: fail to modify KPOINTS generated by pymatgen at {}'.format(cal_loc) nk_list = [int(k) for k in kpoints[3].split()] if (nk_list[2] != 1): nk_list[2] = 1 kpoints[3] = '{} {} {}'.format(*nk_list) else: assert ('reci' in 
kpoints[2].lower()), 'Error: fail to modify KPOINTS generated by pymatgen in the uniform/line mode at {}'.format(cal_loc) new_kpoints = [] for kpoint in kpoints[3:]: k = kpoint.split() k = [float(k[0]), float(k[1]), float(k[2]), int(k[3])] if (abs(k[2]) < tolerance): new_kpoints.append(kpoint) kpoints = (kpoints[:3] + new_kpoints) kpoints[1] = str(len(new_kpoints)) if isinstance(rename_old_kpoints, bool): if rename_old_kpoints: new_name = find_next_name(cal_loc=self.cal_loc, orig_name='KPOINTS')['next_name'] shutil.move(os.path.join(self.cal_loc, 'KPOINTS'), os.path.join(self.cal_loc, new_name)) elif isinstance(rename_old_kpoints, str): shutil.move(os.path.join(self.cal_loc, 'KPOINTS'), os.path.join(self.cal_loc, rename_old_kpoints)) else: raise Exception('rename_old_kpoints must be either bool or str for func modify_vasp_kpoints_for_2D') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: for line in kpoints: f.write((line + '\n')) if (rename_old_kpoints == True): return new_name elif isinstance(rename_old_kpoints, str): return rename_old_kpoints
modify KPOINTS properly for 2D structures. support kpoints_type for KPOINTS modifications: 'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'automatic' - 'MPRelaxSet': pymatgen.io.vasp.sets.MPRelaxSet generates KPOINTS. - 'MPStaticSet': pymatgen.io.vasp.sets.MPStaticSet generates KPOINTS. - 'MPNonSCFSet_uniform': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the uniform mode for DOS - 'MPNonSCFSet_line': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the line mode for band str input arguments: - rename_old_kpoints (bool or str): - if it is True, rename the old KPOINTS like KPOINTS_0, KPOINTS_1, KPINTS_2, ... - if it is False, the old KPOINTS will be overwritten - if it a string, the string is the new name of the old KPOINTS Default: True - tolerance (float): if abs(k_z) < tolerance for uniform and line modes, we think this kpoint is valid; otherwise remove it. Default: 1.0e-5 if old KPOINTS is saved, return the new name of the old KPOINTS.
HTC_lib/Write_VASP_KPOINTS.py
modify_vasp_kpoints_for_2D
tair-ai/deephtc
0
python
def modify_vasp_kpoints_for_2D(self, rename_old_kpoints=True, tolerance=1e-05): "\n modify KPOINTS properly for 2D structures.\n support kpoints_type for KPOINTS modifications: 'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'automatic'\n - 'MPRelaxSet': pymatgen.io.vasp.sets.MPRelaxSet generates KPOINTS.\n - 'MPStaticSet': pymatgen.io.vasp.sets.MPStaticSet generates KPOINTS.\n - 'MPNonSCFSet_uniform': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the uniform mode for DOS\n - 'MPNonSCFSet_line': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the line mode for band str\n input arguments:\n - rename_old_kpoints (bool or str):\n - if it is True, rename the old KPOINTS like KPOINTS_0, KPOINTS_1, KPINTS_2, ...\n - if it is False, the old KPOINTS will be overwritten\n - if it a string, the string is the new name of the old KPOINTS\n Default: True\n - tolerance (float): if abs(k_z) < tolerance for uniform and line modes, \n we think this kpoint is valid; otherwise remove it.\n Default: 1.0e-5\n if old KPOINTS is saved, return the new name of the old KPOINTS.\n " kpoints_type = self.current_firework['kpoints_type'] if (kpoints_type.strip() not in ['MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform']): print('Error: for func modify_vasp_kpoints_for_2D, the input argument kpoints_tag must be on the those below:') print("'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform'") raise Exception('See above for the error information') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'r') as f: kpoints = [line.strip() for line in f if line.strip()] if (kpoints_type in ['MPRelaxSet', 'MPStaticSet']): assert (('gam' in kpoints[2].lower()) or ('mon' in kpoints[2].lower())), 'Error: fail to modify KPOINTS generated by pymatgen at {}'.format(cal_loc) nk_list = [int(k) for k in kpoints[3].split()] if (nk_list[2] != 1): nk_list[2] = 1 kpoints[3] = '{} {} {}'.format(*nk_list) else: assert ('reci' in 
kpoints[2].lower()), 'Error: fail to modify KPOINTS generated by pymatgen in the uniform/line mode at {}'.format(cal_loc) new_kpoints = [] for kpoint in kpoints[3:]: k = kpoint.split() k = [float(k[0]), float(k[1]), float(k[2]), int(k[3])] if (abs(k[2]) < tolerance): new_kpoints.append(kpoint) kpoints = (kpoints[:3] + new_kpoints) kpoints[1] = str(len(new_kpoints)) if isinstance(rename_old_kpoints, bool): if rename_old_kpoints: new_name = find_next_name(cal_loc=self.cal_loc, orig_name='KPOINTS')['next_name'] shutil.move(os.path.join(self.cal_loc, 'KPOINTS'), os.path.join(self.cal_loc, new_name)) elif isinstance(rename_old_kpoints, str): shutil.move(os.path.join(self.cal_loc, 'KPOINTS'), os.path.join(self.cal_loc, rename_old_kpoints)) else: raise Exception('rename_old_kpoints must be either bool or str for func modify_vasp_kpoints_for_2D') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: for line in kpoints: f.write((line + '\n')) if (rename_old_kpoints == True): return new_name elif isinstance(rename_old_kpoints, str): return rename_old_kpoints
def modify_vasp_kpoints_for_2D(self, rename_old_kpoints=True, tolerance=1e-05): "\n modify KPOINTS properly for 2D structures.\n support kpoints_type for KPOINTS modifications: 'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'automatic'\n - 'MPRelaxSet': pymatgen.io.vasp.sets.MPRelaxSet generates KPOINTS.\n - 'MPStaticSet': pymatgen.io.vasp.sets.MPStaticSet generates KPOINTS.\n - 'MPNonSCFSet_uniform': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the uniform mode for DOS\n - 'MPNonSCFSet_line': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the line mode for band str\n input arguments:\n - rename_old_kpoints (bool or str):\n - if it is True, rename the old KPOINTS like KPOINTS_0, KPOINTS_1, KPINTS_2, ...\n - if it is False, the old KPOINTS will be overwritten\n - if it a string, the string is the new name of the old KPOINTS\n Default: True\n - tolerance (float): if abs(k_z) < tolerance for uniform and line modes, \n we think this kpoint is valid; otherwise remove it.\n Default: 1.0e-5\n if old KPOINTS is saved, return the new name of the old KPOINTS.\n " kpoints_type = self.current_firework['kpoints_type'] if (kpoints_type.strip() not in ['MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform']): print('Error: for func modify_vasp_kpoints_for_2D, the input argument kpoints_tag must be on the those below:') print("'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform'") raise Exception('See above for the error information') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'r') as f: kpoints = [line.strip() for line in f if line.strip()] if (kpoints_type in ['MPRelaxSet', 'MPStaticSet']): assert (('gam' in kpoints[2].lower()) or ('mon' in kpoints[2].lower())), 'Error: fail to modify KPOINTS generated by pymatgen at {}'.format(cal_loc) nk_list = [int(k) for k in kpoints[3].split()] if (nk_list[2] != 1): nk_list[2] = 1 kpoints[3] = '{} {} {}'.format(*nk_list) else: assert ('reci' in 
kpoints[2].lower()), 'Error: fail to modify KPOINTS generated by pymatgen in the uniform/line mode at {}'.format(cal_loc) new_kpoints = [] for kpoint in kpoints[3:]: k = kpoint.split() k = [float(k[0]), float(k[1]), float(k[2]), int(k[3])] if (abs(k[2]) < tolerance): new_kpoints.append(kpoint) kpoints = (kpoints[:3] + new_kpoints) kpoints[1] = str(len(new_kpoints)) if isinstance(rename_old_kpoints, bool): if rename_old_kpoints: new_name = find_next_name(cal_loc=self.cal_loc, orig_name='KPOINTS')['next_name'] shutil.move(os.path.join(self.cal_loc, 'KPOINTS'), os.path.join(self.cal_loc, new_name)) elif isinstance(rename_old_kpoints, str): shutil.move(os.path.join(self.cal_loc, 'KPOINTS'), os.path.join(self.cal_loc, rename_old_kpoints)) else: raise Exception('rename_old_kpoints must be either bool or str for func modify_vasp_kpoints_for_2D') with open(os.path.join(self.cal_loc, 'KPOINTS'), 'w') as f: for line in kpoints: f.write((line + '\n')) if (rename_old_kpoints == True): return new_name elif isinstance(rename_old_kpoints, str): return rename_old_kpoints<|docstring|>modify KPOINTS properly for 2D structures. support kpoints_type for KPOINTS modifications: 'MPRelaxSet', 'MPStaticSet', 'MPNonSCFSet_line', 'MPNonSCFSet_uniform', 'automatic' - 'MPRelaxSet': pymatgen.io.vasp.sets.MPRelaxSet generates KPOINTS. - 'MPStaticSet': pymatgen.io.vasp.sets.MPStaticSet generates KPOINTS. - 'MPNonSCFSet_uniform': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the uniform mode for DOS - 'MPNonSCFSet_line': pymatgen.io.vasp.sets.MPNonSCFSet generates KPOINTS in the line mode for band str input arguments: - rename_old_kpoints (bool or str): - if it is True, rename the old KPOINTS like KPOINTS_0, KPOINTS_1, KPINTS_2, ... - if it is False, the old KPOINTS will be overwritten - if it a string, the string is the new name of the old KPOINTS Default: True - tolerance (float): if abs(k_z) < tolerance for uniform and line modes, we think this kpoint is valid; otherwise remove it. 
Default: 1.0e-5 if old KPOINTS is saved, return the new name of the old KPOINTS.<|endoftext|>
7f02785a7244e77e0cbc6272293a946d64f5b55c0588293392793f6c2ed0f327
def create(self): '\n Creates a Order to the database\n ' logger.info('Creating %s', self.name) self.id = None db.session.add(self) db.session.commit()
Creates a Order to the database
service/models.py
create
NYU-Devops-orders/orders
0
python
def create(self): '\n \n ' logger.info('Creating %s', self.name) self.id = None db.session.add(self) db.session.commit()
def create(self): '\n \n ' logger.info('Creating %s', self.name) self.id = None db.session.add(self) db.session.commit()<|docstring|>Creates a Order to the database<|endoftext|>
2b4d77c8b2a313b13c182a06729ce0632263f32a40e41e73469d69eca9c9937e
def save(self): ' Updates a Order to the database ' logger.info('Saving %s', self.name) db.session.commit()
Updates a Order to the database
service/models.py
save
NYU-Devops-orders/orders
0
python
def save(self): ' ' logger.info('Saving %s', self.name) db.session.commit()
def save(self): ' ' logger.info('Saving %s', self.name) db.session.commit()<|docstring|>Updates a Order to the database<|endoftext|>
1de4d91c0c487be8f60fc625d11900282ddfb814bab94ac713778338a351e77a
def delete(self): ' Removes a Order from the data store ' logger.info('Deleting %s', self.name) db.session.delete(self) db.session.commit()
Removes a Order from the data store
service/models.py
delete
NYU-Devops-orders/orders
0
python
def delete(self): ' ' logger.info('Deleting %s', self.name) db.session.delete(self) db.session.commit()
def delete(self): ' ' logger.info('Deleting %s', self.name) db.session.delete(self) db.session.commit()<|docstring|>Removes a Order from the data store<|endoftext|>
a923b08d18931a993b928501cf9208f37882523fc9f60b83964613ce22d239cc
@classmethod def init_db(cls, app): ' Initializes the database session ' logger.info('Initializing database') cls.app = app db.init_app(app) app.app_context().push() db.create_all()
Initializes the database session
service/models.py
init_db
NYU-Devops-orders/orders
0
python
@classmethod def init_db(cls, app): ' ' logger.info('Initializing database') cls.app = app db.init_app(app) app.app_context().push() db.create_all()
@classmethod def init_db(cls, app): ' ' logger.info('Initializing database') cls.app = app db.init_app(app) app.app_context().push() db.create_all()<|docstring|>Initializes the database session<|endoftext|>
17f4eb3ba5bfcf75b8826ae1d7014ffc5bb3633464ac1270d25b5612d2e1a927
@classmethod def all(cls): ' Returns all of the records in the database ' logger.info('Processing all records') return cls.query.all()
Returns all of the records in the database
service/models.py
all
NYU-Devops-orders/orders
0
python
@classmethod def all(cls): ' ' logger.info('Processing all records') return cls.query.all()
@classmethod def all(cls): ' ' logger.info('Processing all records') return cls.query.all()<|docstring|>Returns all of the records in the database<|endoftext|>
cac60ce2ffdf3bc0d99ed7d91298b4ae3ce0899b83987ef5c4b156105e7831be
@classmethod def find(cls, by_id): " Finds a record by it's ID " logger.info('Processing lookup for id %s ...', by_id) return cls.query.get(by_id)
Finds a record by it's ID
service/models.py
find
NYU-Devops-orders/orders
0
python
@classmethod def find(cls, by_id): " " logger.info('Processing lookup for id %s ...', by_id) return cls.query.get(by_id)
@classmethod def find(cls, by_id): " " logger.info('Processing lookup for id %s ...', by_id) return cls.query.get(by_id)<|docstring|>Finds a record by it's ID<|endoftext|>
14d26d193842c42367414888f83f0dd33ee5d66f7adf8a0dcbb5bc3a8826943d
@classmethod def find_or_404(cls, by_id): " Find a record by it's id " logger.info('Processing lookup or 404 for id %s ...', by_id) return cls.query.get_or_404(by_id)
Find a record by it's id
service/models.py
find_or_404
NYU-Devops-orders/orders
0
python
@classmethod def find_or_404(cls, by_id): " " logger.info('Processing lookup or 404 for id %s ...', by_id) return cls.query.get_or_404(by_id)
@classmethod def find_or_404(cls, by_id): " " logger.info('Processing lookup or 404 for id %s ...', by_id) return cls.query.get_or_404(by_id)<|docstring|>Find a record by it's id<|endoftext|>
ab40ad8240e8b6e833d1037b29ba94691874bdcdb27171fa857d1d34cc57ef71
@classmethod def remove_all(cls): ' Removes all documents from the database (use for testing) ' cls.query.delete() db.session.commit()
Removes all documents from the database (use for testing)
service/models.py
remove_all
NYU-Devops-orders/orders
0
python
@classmethod def remove_all(cls): ' ' cls.query.delete() db.session.commit()
@classmethod def remove_all(cls): ' ' cls.query.delete() db.session.commit()<|docstring|>Removes all documents from the database (use for testing)<|endoftext|>
05a0fc37211eb5e49daa8ab6689cda1f7af72119d7c6532d243e3211349ffe92
def serialize(self): ' Serializes a Product into a dictionary ' return {'product_id': self.product_id, 'order_id': self.order_id, 'name': self.name, 'quantity': self.quantity, 'price': self.price}
Serializes a Product into a dictionary
service/models.py
serialize
NYU-Devops-orders/orders
0
python
def serialize(self): ' ' return {'product_id': self.product_id, 'order_id': self.order_id, 'name': self.name, 'quantity': self.quantity, 'price': self.price}
def serialize(self): ' ' return {'product_id': self.product_id, 'order_id': self.order_id, 'name': self.name, 'quantity': self.quantity, 'price': self.price}<|docstring|>Serializes a Product into a dictionary<|endoftext|>
644ab791a403cc190be654c81031be15189e8330822cf0da959151638d3627ca
def deserialize(self, data): '\n Deserializes a Product from a dictionary\n Args:\n data (dict): A dictionary containing the resource data\n ' try: self.order_id = data['order_id'] self.name = data['name'] self.quantity = data['quantity'] self.price = data['price'] except KeyError as error: raise DataValidationError(('Invalid Product: missing ' + error.args[0])) except TypeError as error: raise DataValidationError('Invalid Product: body of request containedbad or no data') return self
Deserializes a Product from a dictionary Args: data (dict): A dictionary containing the resource data
service/models.py
deserialize
NYU-Devops-orders/orders
0
python
def deserialize(self, data): '\n Deserializes a Product from a dictionary\n Args:\n data (dict): A dictionary containing the resource data\n ' try: self.order_id = data['order_id'] self.name = data['name'] self.quantity = data['quantity'] self.price = data['price'] except KeyError as error: raise DataValidationError(('Invalid Product: missing ' + error.args[0])) except TypeError as error: raise DataValidationError('Invalid Product: body of request containedbad or no data') return self
def deserialize(self, data): '\n Deserializes a Product from a dictionary\n Args:\n data (dict): A dictionary containing the resource data\n ' try: self.order_id = data['order_id'] self.name = data['name'] self.quantity = data['quantity'] self.price = data['price'] except KeyError as error: raise DataValidationError(('Invalid Product: missing ' + error.args[0])) except TypeError as error: raise DataValidationError('Invalid Product: body of request containedbad or no data') return self<|docstring|>Deserializes a Product from a dictionary Args: data (dict): A dictionary containing the resource data<|endoftext|>
778bf4cadff51670afd435449fbb41f2e0de6b9174ce250fcb19d9f7ef29d79e
def serialize(self): ' Serializes a Account into a dictionary ' order = {'id': self.id, 'name': self.name, 'status': self.status, 'products': [product.serialize() for product in self.products]} return order
Serializes a Account into a dictionary
service/models.py
serialize
NYU-Devops-orders/orders
0
python
def serialize(self): ' ' order = {'id': self.id, 'name': self.name, 'status': self.status, 'products': [product.serialize() for product in self.products]} return order
def serialize(self): ' ' order = {'id': self.id, 'name': self.name, 'status': self.status, 'products': [product.serialize() for product in self.products]} return order<|docstring|>Serializes a Account into a dictionary<|endoftext|>
65e20ca154b148cd313e348d902650766a0919da6ad31a272ee714cfce7cae08
def deserialize(self, data): '\n Deserializes a Order from a dictionary\n Args:\n data (dict): A dictionary containing the resource data\n ' try: self.name = data['name'] self.status = data['status'] product_list = data.get('products') for product in data['products']: self.products.append(Product(product_id=product['product_id'], name=product['name'], quantity=product['quantity'], price=float(product['price']))) except KeyError as error: raise DataValidationError(('Invalid Order: missing ' + error.args[0])) except TypeError as error: raise DataValidationError('Invalid Order: body of request containedbad or no data') return self
Deserializes a Order from a dictionary Args: data (dict): A dictionary containing the resource data
service/models.py
deserialize
NYU-Devops-orders/orders
0
python
def deserialize(self, data): '\n Deserializes a Order from a dictionary\n Args:\n data (dict): A dictionary containing the resource data\n ' try: self.name = data['name'] self.status = data['status'] product_list = data.get('products') for product in data['products']: self.products.append(Product(product_id=product['product_id'], name=product['name'], quantity=product['quantity'], price=float(product['price']))) except KeyError as error: raise DataValidationError(('Invalid Order: missing ' + error.args[0])) except TypeError as error: raise DataValidationError('Invalid Order: body of request containedbad or no data') return self
def deserialize(self, data): '\n Deserializes a Order from a dictionary\n Args:\n data (dict): A dictionary containing the resource data\n ' try: self.name = data['name'] self.status = data['status'] product_list = data.get('products') for product in data['products']: self.products.append(Product(product_id=product['product_id'], name=product['name'], quantity=product['quantity'], price=float(product['price']))) except KeyError as error: raise DataValidationError(('Invalid Order: missing ' + error.args[0])) except TypeError as error: raise DataValidationError('Invalid Order: body of request containedbad or no data') return self<|docstring|>Deserializes a Order from a dictionary Args: data (dict): A dictionary containing the resource data<|endoftext|>
8dc40e52a1dcf0952ecb457710dc3e99827ba3e803d6c9433a6f4be17bf41f81
@classmethod def find_by_name(cls, name): ' Returns all Orders with the given customer_id\n Args:\n name(string): the name on the Orders you want to match\n ' logger.info('Processing name query for %s ...', name) return cls.query.filter((cls.name == name))
Returns all Orders with the given customer_id Args: name(string): the name on the Orders you want to match
service/models.py
find_by_name
NYU-Devops-orders/orders
0
python
@classmethod def find_by_name(cls, name): ' Returns all Orders with the given customer_id\n Args:\n name(string): the name on the Orders you want to match\n ' logger.info('Processing name query for %s ...', name) return cls.query.filter((cls.name == name))
@classmethod def find_by_name(cls, name): ' Returns all Orders with the given customer_id\n Args:\n name(string): the name on the Orders you want to match\n ' logger.info('Processing name query for %s ...', name) return cls.query.filter((cls.name == name))<|docstring|>Returns all Orders with the given customer_id Args: name(string): the name on the Orders you want to match<|endoftext|>
b4360c2bcc68ee3a69708df8fd4383c2fc5cd2132667064b3f594681b128dc56
def __init__(self, *args, **kwds): '\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n speed\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n ' if (args or kwds): super(SetSpeedRequest, self).__init__(*args, **kwds) if (self.speed is None): self.speed = 0.0 else: self.speed = 0.0
Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: speed :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields.
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
__init__
4ndreas/ROSCoffeButler
2
python
def __init__(self, *args, **kwds): '\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n speed\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n ' if (args or kwds): super(SetSpeedRequest, self).__init__(*args, **kwds) if (self.speed is None): self.speed = 0.0 else: self.speed = 0.0
def __init__(self, *args, **kwds): '\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n speed\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n ' if (args or kwds): super(SetSpeedRequest, self).__init__(*args, **kwds) if (self.speed is None): self.speed = 0.0 else: self.speed = 0.0<|docstring|>Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: speed :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields.<|endoftext|>
1fb6b2b708db1f101aab56633ecd49b6f4087e60f5bbe6926e83ee92f9106530
def _get_types(self): '\n internal API method\n ' return self._slot_types
internal API method
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
_get_types
4ndreas/ROSCoffeButler
2
python
def _get_types(self): '\n \n ' return self._slot_types
def _get_types(self): '\n \n ' return self._slot_types<|docstring|>internal API method<|endoftext|>
d0ef664657cfb3e51eef1d55399799946296044b7e93b465f6c21009e6649484
def serialize(self, buff): '\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n ' try: buff.write(_struct_d.pack(self.speed)) except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
serialize message into buffer :param buff: buffer, ``StringIO``
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
serialize
4ndreas/ROSCoffeButler
2
python
def serialize(self, buff): '\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n ' try: buff.write(_struct_d.pack(self.speed)) except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
def serialize(self, buff): '\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n ' try: buff.write(_struct_d.pack(self.speed)) except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))<|docstring|>serialize message into buffer :param buff: buffer, ``StringIO``<|endoftext|>
2fd18ed67ba6b6ec351979f03a6522eee9f2b8aa01faaa3f83085918a1a57061
def deserialize(self, str): '\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n ' try: end = 0 start = end end += 8 (self.speed,) = _struct_d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e)
unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str``
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
deserialize
4ndreas/ROSCoffeButler
2
python
def deserialize(self, str): '\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n ' try: end = 0 start = end end += 8 (self.speed,) = _struct_d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e)
def deserialize(self, str): '\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n ' try: end = 0 start = end end += 8 (self.speed,) = _struct_d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e)<|docstring|>unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str``<|endoftext|>
95fd821dd2cd6252dc8bed451390743e2fd9af27da70c376ba894a2f88071660
def serialize_numpy(self, buff, numpy): '\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n ' try: buff.write(_struct_d.pack(self.speed)) except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
serialize_numpy
4ndreas/ROSCoffeButler
2
python
def serialize_numpy(self, buff, numpy): '\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n ' try: buff.write(_struct_d.pack(self.speed)) except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
def serialize_numpy(self, buff, numpy): '\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n ' try: buff.write(_struct_d.pack(self.speed)) except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))<|docstring|>serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module<|endoftext|>
68020b6113cb73191b7b49e338da541f48d99b2a889a1781c34bb1f0d0270216
def deserialize_numpy(self, str, numpy): '\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n ' try: end = 0 start = end end += 8 (self.speed,) = _struct_d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e)
unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
deserialize_numpy
4ndreas/ROSCoffeButler
2
python
def deserialize_numpy(self, str, numpy): '\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n ' try: end = 0 start = end end += 8 (self.speed,) = _struct_d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e)
def deserialize_numpy(self, str, numpy): '\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n ' try: end = 0 start = end end += 8 (self.speed,) = _struct_d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e)<|docstring|>unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module<|endoftext|>
65644acc6e90e988a7bc88d21ca72f01d8292d898fd43f54320d22b567f3a666
def __init__(self, *args, **kwds): '\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n \n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n ' if (args or kwds): super(SetSpeedResponse, self).__init__(*args, **kwds)
Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields.
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
__init__
4ndreas/ROSCoffeButler
2
python
def __init__(self, *args, **kwds): '\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n \n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n ' if (args or kwds): super(SetSpeedResponse, self).__init__(*args, **kwds)
def __init__(self, *args, **kwds): '\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n \n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n ' if (args or kwds): super(SetSpeedResponse, self).__init__(*args, **kwds)<|docstring|>Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields.<|endoftext|>
1fb6b2b708db1f101aab56633ecd49b6f4087e60f5bbe6926e83ee92f9106530
def _get_types(self): '\n internal API method\n ' return self._slot_types
internal API method
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
_get_types
4ndreas/ROSCoffeButler
2
python
def _get_types(self): '\n \n ' return self._slot_types
def _get_types(self): '\n \n ' return self._slot_types<|docstring|>internal API method<|endoftext|>
967528c1ea0ec77a2f07a51fffd66e7a566a791f10269b97e99caa47530f9091
def serialize(self, buff): '\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n ' try: pass except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
serialize message into buffer :param buff: buffer, ``StringIO``
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
serialize
4ndreas/ROSCoffeButler
2
python
def serialize(self, buff): '\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n ' try: pass except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
def serialize(self, buff): '\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n ' try: pass except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))<|docstring|>serialize message into buffer :param buff: buffer, ``StringIO``<|endoftext|>
f5909ac2f0a581255411b9f2ca14a49585a6356491faad1787348097a5a30e4c
def deserialize(self, str): '\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n ' try: end = 0 return self except struct.error as e: raise genpy.DeserializationError(e)
unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str``
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
deserialize
4ndreas/ROSCoffeButler
2
python
def deserialize(self, str): '\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n ' try: end = 0 return self except struct.error as e: raise genpy.DeserializationError(e)
def deserialize(self, str): '\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n ' try: end = 0 return self except struct.error as e: raise genpy.DeserializationError(e)<|docstring|>unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str``<|endoftext|>
1d1b95d014dc7a04fd037ae9ba22ba06f8a180a3153ebc5d5060f45f0cb8f111
def serialize_numpy(self, buff, numpy): '\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n ' try: pass except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
serialize_numpy
4ndreas/ROSCoffeButler
2
python
def serialize_numpy(self, buff, numpy): '\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n ' try: pass except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))
def serialize_numpy(self, buff, numpy): '\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n ' try: pass except struct.error as se: self._check_types(struct.error(("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))) except TypeError as te: self._check_types(ValueError(("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x)))))<|docstring|>serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module<|endoftext|>
351d9988f85d0e1344ab2fcf905df2f4f72798a7bd2e2e574070ebc6a7ef341b
def deserialize_numpy(self, str, numpy): '\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n ' try: end = 0 return self except struct.error as e: raise genpy.DeserializationError(e)
unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module
devel/lib/python2.7/dist-packages/arbotix_msgs/srv/_SetSpeed.py
deserialize_numpy
4ndreas/ROSCoffeButler
2
python
def deserialize_numpy(self, str, numpy): '\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n ' try: end = 0 return self except struct.error as e: raise genpy.DeserializationError(e)
def deserialize_numpy(self, str, numpy): '\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n ' try: end = 0 return self except struct.error as e: raise genpy.DeserializationError(e)<|docstring|>unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module<|endoftext|>
846dac85613d4c287793c9d29d2860834038c65a8f243d172c507936b640f6e8
def foo(): 'Test äöü' pass
Test äöü
bpython/test/fodder/encoding_utf8.py
foo
ocurero/bpython
2,168
python
def foo(): pass
def foo(): pass<|docstring|>Test äöü<|endoftext|>
595f2e1f56f800b43f8724955a43af248740704df6ef6926363056ab10df5378
def dflt_kwargs(k, dflt, **kwargs): '\n counts = dflt_kwargs("counts",100,**kwargs)\n ' if (k in kwargs): v = kwargs[k] else: v = dflt return v
counts = dflt_kwargs("counts",100,**kwargs)
elist/elist.py
dflt_kwargs
ihgazni2/elist
0
python
def dflt_kwargs(k, dflt, **kwargs): '\n \n ' if (k in kwargs): v = kwargs[k] else: v = dflt return v
def dflt_kwargs(k, dflt, **kwargs): '\n \n ' if (k in kwargs): v = kwargs[k] else: v = dflt return v<|docstring|>counts = dflt_kwargs("counts",100,**kwargs)<|endoftext|>
6783fcc5ec423012bfb2f65f8458a40bdde78b43f66200988bfd20ae1ba69abb
def mapfivo(ol, *args, **kwargs): '\n #mapfivo f,i,v,o四元决定 fivo-4-tuple-engine\n #map_func diff_func(index,value,*diff_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] elif (lngth == 1): if ('map_func_args_array' in kwargs): diff_funcs_arr = args[0] diff_args_arr = kwargs['map_func_args_array'] else: diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = args[0] else: diff_funcs_arr = args[0] diff_args_arr = args[1] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(index, value, *args) rslt.append(ele) return rslt
#mapfivo f,i,v,o四元决定 fivo-4-tuple-engine #map_func diff_func(index,value,*diff_args)
elist/elist.py
mapfivo
ihgazni2/elist
0
python
def mapfivo(ol, *args, **kwargs): '\n #mapfivo f,i,v,o四元决定 fivo-4-tuple-engine\n #map_func diff_func(index,value,*diff_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] elif (lngth == 1): if ('map_func_args_array' in kwargs): diff_funcs_arr = args[0] diff_args_arr = kwargs['map_func_args_array'] else: diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = args[0] else: diff_funcs_arr = args[0] diff_args_arr = args[1] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(index, value, *args) rslt.append(ele) return rslt
def mapfivo(ol, *args, **kwargs): '\n #mapfivo f,i,v,o四元决定 fivo-4-tuple-engine\n #map_func diff_func(index,value,*diff_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] elif (lngth == 1): if ('map_func_args_array' in kwargs): diff_funcs_arr = args[0] diff_args_arr = kwargs['map_func_args_array'] else: diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = args[0] else: diff_funcs_arr = args[0] diff_args_arr = args[1] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(index, value, *args) rslt.append(ele) return rslt<|docstring|>#mapfivo f,i,v,o四元决定 fivo-4-tuple-engine #map_func diff_func(index,value,*diff_args)<|endoftext|>
85738f490a368e593a6265eb0c8cc9a0ea33a05dc61addd8fe53b43679d6d3cc
def mapfiv(ol, map_func_args, **kwargs): '\n #mapfiv 共享相同的o share common other_args\n #map_func diff_func(index,value,*common_args)\n ' lngth = ol.__len__() diff_funcs_arr = kwargs['map_funcs'] common_args_arr = init(lngth, map_func_args) rslt = mapfivo(ol, map_funcs=diff_funcs_arr, map_func_args_array=common_args_arr) return rslt
#mapfiv 共享相同的o share common other_args #map_func diff_func(index,value,*common_args)
elist/elist.py
mapfiv
ihgazni2/elist
0
python
def mapfiv(ol, map_func_args, **kwargs): '\n #mapfiv 共享相同的o share common other_args\n #map_func diff_func(index,value,*common_args)\n ' lngth = ol.__len__() diff_funcs_arr = kwargs['map_funcs'] common_args_arr = init(lngth, map_func_args) rslt = mapfivo(ol, map_funcs=diff_funcs_arr, map_func_args_array=common_args_arr) return rslt
def mapfiv(ol, map_func_args, **kwargs): '\n #mapfiv 共享相同的o share common other_args\n #map_func diff_func(index,value,*common_args)\n ' lngth = ol.__len__() diff_funcs_arr = kwargs['map_funcs'] common_args_arr = init(lngth, map_func_args) rslt = mapfivo(ol, map_funcs=diff_funcs_arr, map_func_args_array=common_args_arr) return rslt<|docstring|>#mapfiv 共享相同的o share common other_args #map_func diff_func(index,value,*common_args)<|endoftext|>
030f1b2eb0798cc56c4a6d26b072153a058124f0d6e387aa8974c8af5073fa67
def mapfio(ol, **kwargs): '\n #mapfio v不作为map_func参数 NOT take value as a param for map_func\n #map_func diff_func(index,*diff_args)\n ' diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(index, *args) rslt.append(ele) return rslt
#mapfio v不作为map_func参数 NOT take value as a param for map_func #map_func diff_func(index,*diff_args)
elist/elist.py
mapfio
ihgazni2/elist
0
python
def mapfio(ol, **kwargs): '\n #mapfio v不作为map_func参数 NOT take value as a param for map_func\n #map_func diff_func(index,*diff_args)\n ' diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(index, *args) rslt.append(ele) return rslt
def mapfio(ol, **kwargs): '\n #mapfio v不作为map_func参数 NOT take value as a param for map_func\n #map_func diff_func(index,*diff_args)\n ' diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(index, *args) rslt.append(ele) return rslt<|docstring|>#mapfio v不作为map_func参数 NOT take value as a param for map_func #map_func diff_func(index,*diff_args)<|endoftext|>
5dd9b40a28b3487d9b9c5e43f764e5dbef74d74e8f571d58d05a3726ea577133
def mapfvo(ol, **kwargs): '\n #mapfvo i不作为map_func参数 NOT take index as a param for map_func\n #map_func diff_func(value,*diff_args)\n ' diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt
#mapfvo i不作为map_func参数 NOT take index as a param for map_func #map_func diff_func(value,*diff_args)
elist/elist.py
mapfvo
ihgazni2/elist
0
python
def mapfvo(ol, **kwargs): '\n #mapfvo i不作为map_func参数 NOT take index as a param for map_func\n #map_func diff_func(value,*diff_args)\n ' diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt
def mapfvo(ol, **kwargs): '\n #mapfvo i不作为map_func参数 NOT take index as a param for map_func\n #map_func diff_func(value,*diff_args)\n ' diff_funcs_arr = kwargs['map_funcs'] diff_args_arr = kwargs['map_func_args_array'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt<|docstring|>#mapfvo i不作为map_func参数 NOT take index as a param for map_func #map_func diff_func(value,*diff_args)<|endoftext|>
3e8e67101c8316b647e861ee41143f15701f3d25c06fd9ac29573af161387f6f
def mapivo(ol, map_func, **kwargs): '\n #mapivo 共享相同的f share common map_func\n #map_func common_func(index,value,*diff_args)\n ' lngth = ol.__len__() common_funcs_arr = init(lngth, map_func) diff_args_arr = kwargs['map_func_args_array'] rslt = mapfivo(ol, map_funcs=common_funcs_arr, map_func_args_array=diff_args_arr) return rslt
#mapivo 共享相同的f share common map_func #map_func common_func(index,value,*diff_args)
elist/elist.py
mapivo
ihgazni2/elist
0
python
def mapivo(ol, map_func, **kwargs): '\n #mapivo 共享相同的f share common map_func\n #map_func common_func(index,value,*diff_args)\n ' lngth = ol.__len__() common_funcs_arr = init(lngth, map_func) diff_args_arr = kwargs['map_func_args_array'] rslt = mapfivo(ol, map_funcs=common_funcs_arr, map_func_args_array=diff_args_arr) return rslt
def mapivo(ol, map_func, **kwargs): '\n #mapivo 共享相同的f share common map_func\n #map_func common_func(index,value,*diff_args)\n ' lngth = ol.__len__() common_funcs_arr = init(lngth, map_func) diff_args_arr = kwargs['map_func_args_array'] rslt = mapfivo(ol, map_funcs=common_funcs_arr, map_func_args_array=diff_args_arr) return rslt<|docstring|>#mapivo 共享相同的f share common map_func #map_func common_func(index,value,*diff_args)<|endoftext|>
00821e91f8f4bad1be82fff783319f67b6deec0b1a9c95b29c420bb39ab9f87a
def array_dualmap(ol, value_map_func, **kwargs): "\n from elist.elist import *\n ol = ['a','b','c','d']\n def index_map_func(index,prefix,suffix):\n s = prefix +str(index+97)+ suffix\n return(s)\n \n def value_map_func(mapped_index,ele,prefix,suffix):\n s = prefix+mapped_index+': ' + str(ele) + suffix\n return(s)\n \n ####\n rslt = array_dualmap2(ol,index_map_func=index_map_func,index_map_func_args=[': ',' is '],value_map_func=value_map_func,value_map_func_args=['ord',' yes?'])\n pobj(rslt)\n " def get_self(obj): return obj if ('index_map_func_args' in kwargs): index_map_func_args = kwargs['index_map_func_args'] else: index_map_func_args = [] if ('value_map_func_args' in kwargs): value_map_func_args = kwargs['value_map_func_args'] else: value_map_func_args = [] if ('index_map_func' in kwargs): index_map_func = kwargs['index_map_func'] else: index_map_func = get_self length = ol.__len__() il = list(range(0, length)) nil = list(map((lambda ele: index_map_func(ele, *index_map_func_args)), il)) nvl = [] for i in range(0, length): ele = ol[i] v = value_map_func(nil[i], ele, *value_map_func_args) nvl.append(v) return nvl
from elist.elist import * ol = ['a','b','c','d'] def index_map_func(index,prefix,suffix): s = prefix +str(index+97)+ suffix return(s) def value_map_func(mapped_index,ele,prefix,suffix): s = prefix+mapped_index+': ' + str(ele) + suffix return(s) #### rslt = array_dualmap2(ol,index_map_func=index_map_func,index_map_func_args=[': ',' is '],value_map_func=value_map_func,value_map_func_args=['ord',' yes?']) pobj(rslt)
elist/elist.py
array_dualmap
ihgazni2/elist
0
python
def array_dualmap(ol, value_map_func, **kwargs): "\n from elist.elist import *\n ol = ['a','b','c','d']\n def index_map_func(index,prefix,suffix):\n s = prefix +str(index+97)+ suffix\n return(s)\n \n def value_map_func(mapped_index,ele,prefix,suffix):\n s = prefix+mapped_index+': ' + str(ele) + suffix\n return(s)\n \n ####\n rslt = array_dualmap2(ol,index_map_func=index_map_func,index_map_func_args=[': ',' is '],value_map_func=value_map_func,value_map_func_args=['ord',' yes?'])\n pobj(rslt)\n " def get_self(obj): return obj if ('index_map_func_args' in kwargs): index_map_func_args = kwargs['index_map_func_args'] else: index_map_func_args = [] if ('value_map_func_args' in kwargs): value_map_func_args = kwargs['value_map_func_args'] else: value_map_func_args = [] if ('index_map_func' in kwargs): index_map_func = kwargs['index_map_func'] else: index_map_func = get_self length = ol.__len__() il = list(range(0, length)) nil = list(map((lambda ele: index_map_func(ele, *index_map_func_args)), il)) nvl = [] for i in range(0, length): ele = ol[i] v = value_map_func(nil[i], ele, *value_map_func_args) nvl.append(v) return nvl
def array_dualmap(ol, value_map_func, **kwargs): "\n from elist.elist import *\n ol = ['a','b','c','d']\n def index_map_func(index,prefix,suffix):\n s = prefix +str(index+97)+ suffix\n return(s)\n \n def value_map_func(mapped_index,ele,prefix,suffix):\n s = prefix+mapped_index+': ' + str(ele) + suffix\n return(s)\n \n ####\n rslt = array_dualmap2(ol,index_map_func=index_map_func,index_map_func_args=[': ',' is '],value_map_func=value_map_func,value_map_func_args=['ord',' yes?'])\n pobj(rslt)\n " def get_self(obj): return obj if ('index_map_func_args' in kwargs): index_map_func_args = kwargs['index_map_func_args'] else: index_map_func_args = [] if ('value_map_func_args' in kwargs): value_map_func_args = kwargs['value_map_func_args'] else: value_map_func_args = [] if ('index_map_func' in kwargs): index_map_func = kwargs['index_map_func'] else: index_map_func = get_self length = ol.__len__() il = list(range(0, length)) nil = list(map((lambda ele: index_map_func(ele, *index_map_func_args)), il)) nvl = [] for i in range(0, length): ele = ol[i] v = value_map_func(nil[i], ele, *value_map_func_args) nvl.append(v) return nvl<|docstring|>from elist.elist import * ol = ['a','b','c','d'] def index_map_func(index,prefix,suffix): s = prefix +str(index+97)+ suffix return(s) def value_map_func(mapped_index,ele,prefix,suffix): s = prefix+mapped_index+': ' + str(ele) + suffix return(s) #### rslt = array_dualmap2(ol,index_map_func=index_map_func,index_map_func_args=[': ',' is '],value_map_func=value_map_func,value_map_func_args=['ord',' yes?']) pobj(rslt)<|endoftext|>
8ae7e627d9d0643396eef2e717542d0d0395de3ea8b3440bfc1fb83c33684437
def array_dualmap2(*refls, **kwargs): '\n from elist.elist import *\n ol = [1,2,3,4]\n refl1 = [\'+\',\'+\',\'+\',\'+\']\n refl2 = [7,7,7,7]\n refl3 = [\'=\',\'=\',\'=\',\'=\']\n def index_map_func(index):\n s ="<"+str(index)+">"\n return(s)\n \n def value_map_func(mapped_index,ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):\n s = prefix+mapped_index+\': \' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix\n return(s)\n \n ####\n rslt = array_dualmap2(ol,refl1,refl2,refl3,index_map_func=index_map_func,value_map_func=value_map_func,value_map_func_args=[\'Q\',\'?\'])\n pobj(rslt)\n ' def get_self(obj, *args): return obj if ('value_map_func_args' in kwargs): value_map_func_args = kwargs['value_map_func_args'] else: value_map_func_args = [] if ('index_map_func' in kwargs): index_map_func = kwargs['index_map_func'] else: index_map_func = get_self if ('index_map_func_args' in kwargs): index_map_func_args = kwargs['index_map_func_args'] else: index_map_func_args = [] length = ol.__len__() il = list(range(0, length)) nil = list(map((lambda ele: index_map_func(ele, *index_map_func_args)), il)) refls = list(refls) refls = prepend(refls, nil) nvl = array_map2(*refls, map_func=value_map_func, map_func_args=value_map_func_args) return nvl
from elist.elist import * ol = [1,2,3,4] refl1 = ['+','+','+','+'] refl2 = [7,7,7,7] refl3 = ['=','=','=','='] def index_map_func(index): s ="<"+str(index)+">" return(s) def value_map_func(mapped_index,ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix): s = prefix+mapped_index+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix return(s) #### rslt = array_dualmap2(ol,refl1,refl2,refl3,index_map_func=index_map_func,value_map_func=value_map_func,value_map_func_args=['Q','?']) pobj(rslt)
elist/elist.py
array_dualmap2
ihgazni2/elist
0
python
def array_dualmap2(*refls, **kwargs): '\n from elist.elist import *\n ol = [1,2,3,4]\n refl1 = [\'+\',\'+\',\'+\',\'+\']\n refl2 = [7,7,7,7]\n refl3 = [\'=\',\'=\',\'=\',\'=\']\n def index_map_func(index):\n s ="<"+str(index)+">"\n return(s)\n \n def value_map_func(mapped_index,ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):\n s = prefix+mapped_index+\': \' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix\n return(s)\n \n ####\n rslt = array_dualmap2(ol,refl1,refl2,refl3,index_map_func=index_map_func,value_map_func=value_map_func,value_map_func_args=[\'Q\',\'?\'])\n pobj(rslt)\n ' def get_self(obj, *args): return obj if ('value_map_func_args' in kwargs): value_map_func_args = kwargs['value_map_func_args'] else: value_map_func_args = [] if ('index_map_func' in kwargs): index_map_func = kwargs['index_map_func'] else: index_map_func = get_self if ('index_map_func_args' in kwargs): index_map_func_args = kwargs['index_map_func_args'] else: index_map_func_args = [] length = ol.__len__() il = list(range(0, length)) nil = list(map((lambda ele: index_map_func(ele, *index_map_func_args)), il)) refls = list(refls) refls = prepend(refls, nil) nvl = array_map2(*refls, map_func=value_map_func, map_func_args=value_map_func_args) return nvl
def array_dualmap2(*refls, **kwargs): '\n from elist.elist import *\n ol = [1,2,3,4]\n refl1 = [\'+\',\'+\',\'+\',\'+\']\n refl2 = [7,7,7,7]\n refl3 = [\'=\',\'=\',\'=\',\'=\']\n def index_map_func(index):\n s ="<"+str(index)+">"\n return(s)\n \n def value_map_func(mapped_index,ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):\n s = prefix+mapped_index+\': \' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix\n return(s)\n \n ####\n rslt = array_dualmap2(ol,refl1,refl2,refl3,index_map_func=index_map_func,value_map_func=value_map_func,value_map_func_args=[\'Q\',\'?\'])\n pobj(rslt)\n ' def get_self(obj, *args): return obj if ('value_map_func_args' in kwargs): value_map_func_args = kwargs['value_map_func_args'] else: value_map_func_args = [] if ('index_map_func' in kwargs): index_map_func = kwargs['index_map_func'] else: index_map_func = get_self if ('index_map_func_args' in kwargs): index_map_func_args = kwargs['index_map_func_args'] else: index_map_func_args = [] length = ol.__len__() il = list(range(0, length)) nil = list(map((lambda ele: index_map_func(ele, *index_map_func_args)), il)) refls = list(refls) refls = prepend(refls, nil) nvl = array_map2(*refls, map_func=value_map_func, map_func_args=value_map_func_args) return nvl<|docstring|>from elist.elist import * ol = [1,2,3,4] refl1 = ['+','+','+','+'] refl2 = [7,7,7,7] refl3 = ['=','=','=','='] def index_map_func(index): s ="<"+str(index)+">" return(s) def value_map_func(mapped_index,ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix): s = prefix+mapped_index+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix return(s) #### rslt = array_dualmap2(ol,refl1,refl2,refl3,index_map_func=index_map_func,value_map_func=value_map_func,value_map_func_args=['Q','?']) pobj(rslt)<|endoftext|>
d7aaa67192ec5b26682fd0e63f4ecf95c8fe8ef8a15ac2c64a7a321ed2ca327e
def mapfi(ol, map_func_args, **kwargs): '\n #mapfi 共享相同的o,v不作为map_func参数\n # share common other_args,NOT take value as a param for map_func\n #map_func diff_func(index,*common_args)\n ' diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(index, *args) rslt.append(ele) return rslt
#mapfi 共享相同的o,v不作为map_func参数 # share common other_args,NOT take value as a param for map_func #map_func diff_func(index,*common_args)
elist/elist.py
mapfi
ihgazni2/elist
0
python
def mapfi(ol, map_func_args, **kwargs): '\n #mapfi 共享相同的o,v不作为map_func参数\n # share common other_args,NOT take value as a param for map_func\n #map_func diff_func(index,*common_args)\n ' diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(index, *args) rslt.append(ele) return rslt
def mapfi(ol, map_func_args, **kwargs): '\n #mapfi 共享相同的o,v不作为map_func参数\n # share common other_args,NOT take value as a param for map_func\n #map_func diff_func(index,*common_args)\n ' diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(index, *args) rslt.append(ele) return rslt<|docstring|>#mapfi 共享相同的o,v不作为map_func参数 # share common other_args,NOT take value as a param for map_func #map_func diff_func(index,*common_args)<|endoftext|>
ecbad8e76647bc4f64d66d023e7388f0d8c7fa753dcf0a42d08edce06b101ab1
def mapfv(ol, map_func_args, *args, **kwargs): '\n #mapfv 共享相同的o,i不作为map_func参数\n # share common other_args,NOT take value as a param for map_func\n #map_func diff_func(value,*common_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['map_funcs'] else: diff_funcs_arr = args[0] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(value, *args) rslt.append(ele) return rslt
#mapfv 共享相同的o,i不作为map_func参数 # share common other_args,NOT take value as a param for map_func #map_func diff_func(value,*common_args)
elist/elist.py
mapfv
ihgazni2/elist
0
python
def mapfv(ol, map_func_args, *args, **kwargs): '\n #mapfv 共享相同的o,i不作为map_func参数\n # share common other_args,NOT take value as a param for map_func\n #map_func diff_func(value,*common_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['map_funcs'] else: diff_funcs_arr = args[0] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(value, *args) rslt.append(ele) return rslt
def mapfv(ol, map_func_args, *args, **kwargs): '\n #mapfv 共享相同的o,i不作为map_func参数\n # share common other_args,NOT take value as a param for map_func\n #map_func diff_func(value,*common_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['map_funcs'] else: diff_funcs_arr = args[0] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(value, *args) rslt.append(ele) return rslt<|docstring|>#mapfv 共享相同的o,i不作为map_func参数 # share common other_args,NOT take value as a param for map_func #map_func diff_func(value,*common_args)<|endoftext|>
c6ddd2aaf2049f189d7f0cda22d2820d539b5a62ee1efdf8b2c990a649769bf9
def mapfo(ol, **kwargs): '\n #mapfo i不作为map_func参数,v不作为map_func参数\n # NOT take value as a param for map_func,NOT take index as a param for map_func\n #map_func diff_func(*diff_args)\n ' diff_args_arr = kwargs['map_func_args_array'] diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt
#mapfo i不作为map_func参数,v不作为map_func参数 # NOT take value as a param for map_func,NOT take index as a param for map_func #map_func diff_func(*diff_args)
elist/elist.py
mapfo
ihgazni2/elist
0
python
def mapfo(ol, **kwargs): '\n #mapfo i不作为map_func参数,v不作为map_func参数\n # NOT take value as a param for map_func,NOT take index as a param for map_func\n #map_func diff_func(*diff_args)\n ' diff_args_arr = kwargs['map_func_args_array'] diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt
def mapfo(ol, **kwargs): '\n #mapfo i不作为map_func参数,v不作为map_func参数\n # NOT take value as a param for map_func,NOT take index as a param for map_func\n #map_func diff_func(*diff_args)\n ' diff_args_arr = kwargs['map_func_args_array'] diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt<|docstring|>#mapfo i不作为map_func参数,v不作为map_func参数 # NOT take value as a param for map_func,NOT take index as a param for map_func #map_func diff_func(*diff_args)<|endoftext|>
992ac0b52174d2eb745a0ba95824a9064d704f324a5476fde268ccc6bef3312a
def mapiv(ol, map_func, map_func_args=[]): '\n #mapiv 共享相同的o,共享相同的f share common map_func,share common other_args\n #map_func common_func(index,value,*common_args)\n ' lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = map_func_args ele = func(index, value, *args) rslt.append(ele) return rslt
#mapiv 共享相同的o,共享相同的f share common map_func,share common other_args #map_func common_func(index,value,*common_args)
elist/elist.py
mapiv
ihgazni2/elist
0
python
def mapiv(ol, map_func, map_func_args=[]): '\n #mapiv 共享相同的o,共享相同的f share common map_func,share common other_args\n #map_func common_func(index,value,*common_args)\n ' lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = map_func_args ele = func(index, value, *args) rslt.append(ele) return rslt
def mapiv(ol, map_func, map_func_args=[]): '\n #mapiv 共享相同的o,共享相同的f share common map_func,share common other_args\n #map_func common_func(index,value,*common_args)\n ' lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = map_func_args ele = func(index, value, *args) rslt.append(ele) return rslt<|docstring|>#mapiv 共享相同的o,共享相同的f share common map_func,share common other_args #map_func common_func(index,value,*common_args)<|endoftext|>
0ad0827be387e44daf31f8383b7d0e413dc4fe76442f32d30dd410b258e20ffb
def mapiv2(ol, map_func, *args, **kwargs): "\n from elist.elist import *\n ol = ['a','b','c','d']\n #1\n def map_func(index,value,*others):\n return(value * index + others[0] +others[-1])\n mapiv(ol,map_func,'tailA-','tailB')\n #2\n mapiv2(ol,lambda index,value,other:(value*index+other),['-'])\n mapiv2(ol,lambda index,value,other:(value*index+other),'-')\n mapiv2(ol,lambda index,value:(value*index))\n " args = list(args) if (args.__len__() > 0): map_func_args = args elif ('map_func_args' in kwargs): map_func_args = kwargs['map_func_args'] else: map_func_args = [] lngth = ol.__len__() rslt = [] for i in range(0, lngth): ele = map_func(i, ol[i], *map_func_args) rslt.append(ele) return rslt
from elist.elist import * ol = ['a','b','c','d'] #1 def map_func(index,value,*others): return(value * index + others[0] +others[-1]) mapiv(ol,map_func,'tailA-','tailB') #2 mapiv2(ol,lambda index,value,other:(value*index+other),['-']) mapiv2(ol,lambda index,value,other:(value*index+other),'-') mapiv2(ol,lambda index,value:(value*index))
elist/elist.py
mapiv2
ihgazni2/elist
0
python
def mapiv2(ol, map_func, *args, **kwargs): "\n from elist.elist import *\n ol = ['a','b','c','d']\n #1\n def map_func(index,value,*others):\n return(value * index + others[0] +others[-1])\n mapiv(ol,map_func,'tailA-','tailB')\n #2\n mapiv2(ol,lambda index,value,other:(value*index+other),['-'])\n mapiv2(ol,lambda index,value,other:(value*index+other),'-')\n mapiv2(ol,lambda index,value:(value*index))\n " args = list(args) if (args.__len__() > 0): map_func_args = args elif ('map_func_args' in kwargs): map_func_args = kwargs['map_func_args'] else: map_func_args = [] lngth = ol.__len__() rslt = [] for i in range(0, lngth): ele = map_func(i, ol[i], *map_func_args) rslt.append(ele) return rslt
def mapiv2(ol, map_func, *args, **kwargs): "\n from elist.elist import *\n ol = ['a','b','c','d']\n #1\n def map_func(index,value,*others):\n return(value * index + others[0] +others[-1])\n mapiv(ol,map_func,'tailA-','tailB')\n #2\n mapiv2(ol,lambda index,value,other:(value*index+other),['-'])\n mapiv2(ol,lambda index,value,other:(value*index+other),'-')\n mapiv2(ol,lambda index,value:(value*index))\n " args = list(args) if (args.__len__() > 0): map_func_args = args elif ('map_func_args' in kwargs): map_func_args = kwargs['map_func_args'] else: map_func_args = [] lngth = ol.__len__() rslt = [] for i in range(0, lngth): ele = map_func(i, ol[i], *map_func_args) rslt.append(ele) return rslt<|docstring|>from elist.elist import * ol = ['a','b','c','d'] #1 def map_func(index,value,*others): return(value * index + others[0] +others[-1]) mapiv(ol,map_func,'tailA-','tailB') #2 mapiv2(ol,lambda index,value,other:(value*index+other),['-']) mapiv2(ol,lambda index,value,other:(value*index+other),'-') mapiv2(ol,lambda index,value:(value*index))<|endoftext|>
2072243124d823095203d7b2c5b19fbb1e2ae9d4bce09b82699a6eb6d44ec9f4
def mapio(ol, map_func, **kwargs): '\n #mapvo 共享相同的f,i不作为map_func参数\n # share common map_func,NOT take index as a param for map_func\n # common_func(value,*priv_args)\n ' lngth = ol.__len__() diff_args_arr = kwargs['map_func_args_array'] rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(index, *args) rslt.append(ele) return rslt
#mapvo 共享相同的f,i不作为map_func参数 # share common map_func,NOT take index as a param for map_func # common_func(value,*priv_args)
elist/elist.py
mapio
ihgazni2/elist
0
python
def mapio(ol, map_func, **kwargs): '\n #mapvo 共享相同的f,i不作为map_func参数\n # share common map_func,NOT take index as a param for map_func\n # common_func(value,*priv_args)\n ' lngth = ol.__len__() diff_args_arr = kwargs['map_func_args_array'] rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(index, *args) rslt.append(ele) return rslt
def mapio(ol, map_func, **kwargs): '\n #mapvo 共享相同的f,i不作为map_func参数\n # share common map_func,NOT take index as a param for map_func\n # common_func(value,*priv_args)\n ' lngth = ol.__len__() diff_args_arr = kwargs['map_func_args_array'] rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(index, *args) rslt.append(ele) return rslt<|docstring|>#mapvo 共享相同的f,i不作为map_func参数 # share common map_func,NOT take index as a param for map_func # common_func(value,*priv_args)<|endoftext|>
a578e3282e8a73c4baf633a3243d40a83758d666860fc60af155fcfcfdbc86f8
def mapvo(ol, map_func, *args, **kwargs): '\n #mapvo 共享相同的f,i不作为map_func参数\n # share common map_func,NOT take index as a param for map_func\n # common_func(value,*priv_args)\n ' lngth = ol.__len__() args = list(args) if (args.__len__() == 0): diff_args_arr = kwargs['map_func_args_array'] else: diff_args_arr = args[0] rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt
#mapvo 共享相同的f,i不作为map_func参数 # share common map_func,NOT take index as a param for map_func # common_func(value,*priv_args)
elist/elist.py
mapvo
ihgazni2/elist
0
python
def mapvo(ol, map_func, *args, **kwargs): '\n #mapvo 共享相同的f,i不作为map_func参数\n # share common map_func,NOT take index as a param for map_func\n # common_func(value,*priv_args)\n ' lngth = ol.__len__() args = list(args) if (args.__len__() == 0): diff_args_arr = kwargs['map_func_args_array'] else: diff_args_arr = args[0] rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt
def mapvo(ol, map_func, *args, **kwargs): '\n #mapvo 共享相同的f,i不作为map_func参数\n # share common map_func,NOT take index as a param for map_func\n # common_func(value,*priv_args)\n ' lngth = ol.__len__() args = list(args) if (args.__len__() == 0): diff_args_arr = kwargs['map_func_args_array'] else: diff_args_arr = args[0] rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(value, *args) rslt.append(ele) return rslt<|docstring|>#mapvo 共享相同的f,i不作为map_func参数 # share common map_func,NOT take index as a param for map_func # common_func(value,*priv_args)<|endoftext|>
f1c370f0d3e7443ba585dca47a37dcf34e3f72d7099df8212d9f27683ee9f52f
def array_map2(*referls, **kwargs): "\n obseleted just for compatible\n from elist.elist import *\n ol = [1,2,3,4]\n refl1 = ['+','+','+','+']\n refl2 = [7,7,7,7]\n refl3 = ['=','=','=','=']\n def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):\n s = prefix+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix\n return(s)\n\n ####\n rslt = array_map2(ol,refl1,refl2,refl3,map_func=map_func,map_func_args=['Q','?'])\n pobj(rslt)\n " map_func = kwargs['map_func'] if ('map_func_args' in kwargs): map_func_args = kwargs['map_func_args'] else: map_func_args = [] length = referls.__len__() rslt = [] anum = list(referls)[0].__len__() for j in range(0, anum): args = [] for i in range(0, length): refl = referls[i] args.append(refl[j]) args.extend(map_func_args) v = map_func(*args) rslt.append(v) return rslt
obseleted just for compatible from elist.elist import * ol = [1,2,3,4] refl1 = ['+','+','+','+'] refl2 = [7,7,7,7] refl3 = ['=','=','=','='] def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix): s = prefix+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix return(s) #### rslt = array_map2(ol,refl1,refl2,refl3,map_func=map_func,map_func_args=['Q','?']) pobj(rslt)
elist/elist.py
array_map2
ihgazni2/elist
0
python
def array_map2(*referls, **kwargs): "\n obseleted just for compatible\n from elist.elist import *\n ol = [1,2,3,4]\n refl1 = ['+','+','+','+']\n refl2 = [7,7,7,7]\n refl3 = ['=','=','=','=']\n def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):\n s = prefix+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix\n return(s)\n\n ####\n rslt = array_map2(ol,refl1,refl2,refl3,map_func=map_func,map_func_args=['Q','?'])\n pobj(rslt)\n " map_func = kwargs['map_func'] if ('map_func_args' in kwargs): map_func_args = kwargs['map_func_args'] else: map_func_args = [] length = referls.__len__() rslt = [] anum = list(referls)[0].__len__() for j in range(0, anum): args = [] for i in range(0, length): refl = referls[i] args.append(refl[j]) args.extend(map_func_args) v = map_func(*args) rslt.append(v) return rslt
def array_map2(*referls, **kwargs): "\n obseleted just for compatible\n from elist.elist import *\n ol = [1,2,3,4]\n refl1 = ['+','+','+','+']\n refl2 = [7,7,7,7]\n refl3 = ['=','=','=','=']\n def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix):\n s = prefix+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix\n return(s)\n\n ####\n rslt = array_map2(ol,refl1,refl2,refl3,map_func=map_func,map_func_args=['Q','?'])\n pobj(rslt)\n " map_func = kwargs['map_func'] if ('map_func_args' in kwargs): map_func_args = kwargs['map_func_args'] else: map_func_args = [] length = referls.__len__() rslt = [] anum = list(referls)[0].__len__() for j in range(0, anum): args = [] for i in range(0, length): refl = referls[i] args.append(refl[j]) args.extend(map_func_args) v = map_func(*args) rslt.append(v) return rslt<|docstring|>obseleted just for compatible from elist.elist import * ol = [1,2,3,4] refl1 = ['+','+','+','+'] refl2 = [7,7,7,7] refl3 = ['=','=','=','='] def map_func(ele,ref_ele1,ref_ele2,ref_ele3,prefix,suffix): s = prefix+': ' + str(ele) + str(ref_ele1) + str(ref_ele2) + str(ref_ele3) + suffix return(s) #### rslt = array_map2(ol,refl1,refl2,refl3,map_func=map_func,map_func_args=['Q','?']) pobj(rslt)<|endoftext|>
aee5ef127f3221b04b57c0960c2600468da10828f7761c2119ca3d4587a18c02
def mapf(ol, map_func_args, **kwargs): '\n #mapf i不作为map_func参数,v不作为map_func参数,共享相同的o\n # NOT take value as a param for map_func\n # NOT take index as a param for map_func\n # share common other_args\n # diff_func(*common_args)\n ' diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(*args) rslt.append(ele) return rslt
#mapf i不作为map_func参数,v不作为map_func参数,共享相同的o # NOT take value as a param for map_func # NOT take index as a param for map_func # share common other_args # diff_func(*common_args)
elist/elist.py
mapf
ihgazni2/elist
0
python
def mapf(ol, map_func_args, **kwargs): '\n #mapf i不作为map_func参数,v不作为map_func参数,共享相同的o\n # NOT take value as a param for map_func\n # NOT take index as a param for map_func\n # share common other_args\n # diff_func(*common_args)\n ' diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(*args) rslt.append(ele) return rslt
def mapf(ol, map_func_args, **kwargs): '\n #mapf i不作为map_func参数,v不作为map_func参数,共享相同的o\n # NOT take value as a param for map_func\n # NOT take index as a param for map_func\n # share common other_args\n # diff_func(*common_args)\n ' diff_funcs_arr = kwargs['map_funcs'] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = map_func_args ele = func(*args) rslt.append(ele) return rslt<|docstring|>#mapf i不作为map_func参数,v不作为map_func参数,共享相同的o # NOT take value as a param for map_func # NOT take index as a param for map_func # share common other_args # diff_func(*common_args)<|endoftext|>
6caf3f4405aee96ad7073a4635adfee38024c594a52be336a39459fd8b10245b
def mapi(ol, map_func, map_func_args=[]): '\n #mapi v不作为map_func参数,共享相同的f,共享相同的o\n # NOT take value as a param for map_func\n # share common other_args\n # share common map_func\n # common_func(index,*common_args)\n ' lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = map_func_args ele = func(index, *args) rslt.append(ele) return rslt
#mapi v不作为map_func参数,共享相同的f,共享相同的o # NOT take value as a param for map_func # share common other_args # share common map_func # common_func(index,*common_args)
elist/elist.py
mapi
ihgazni2/elist
0
python
def mapi(ol, map_func, map_func_args=[]): '\n #mapi v不作为map_func参数,共享相同的f,共享相同的o\n # NOT take value as a param for map_func\n # share common other_args\n # share common map_func\n # common_func(index,*common_args)\n ' lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = map_func_args ele = func(index, *args) rslt.append(ele) return rslt
def mapi(ol, map_func, map_func_args=[]): '\n #mapi v不作为map_func参数,共享相同的f,共享相同的o\n # NOT take value as a param for map_func\n # share common other_args\n # share common map_func\n # common_func(index,*common_args)\n ' lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = map_func_args ele = func(index, *args) rslt.append(ele) return rslt<|docstring|>#mapi v不作为map_func参数,共享相同的f,共享相同的o # NOT take value as a param for map_func # share common other_args # share common map_func # common_func(index,*common_args)<|endoftext|>
1971fcdca704a845d5c77d06f4d9b8ec99d3f177d53f1ccc150b549aceecb598
def mapv(ol, map_func, map_func_args=[]): '\n #mapv i不作为map_func参数,共享相同的f,共享相同的o\n # NOT take index as a param for map_func\n # share common other_args\n # share common map_func\n # common_func(value,*common_args)\n\n ' rslt = list(map((lambda ele: map_func(ele, *map_func_args)), ol)) return rslt
#mapv i不作为map_func参数,共享相同的f,共享相同的o # NOT take index as a param for map_func # share common other_args # share common map_func # common_func(value,*common_args)
elist/elist.py
mapv
ihgazni2/elist
0
python
def mapv(ol, map_func, map_func_args=[]): '\n #mapv i不作为map_func参数,共享相同的f,共享相同的o\n # NOT take index as a param for map_func\n # share common other_args\n # share common map_func\n # common_func(value,*common_args)\n\n ' rslt = list(map((lambda ele: map_func(ele, *map_func_args)), ol)) return rslt
def mapv(ol, map_func, map_func_args=[]): '\n #mapv i不作为map_func参数,共享相同的f,共享相同的o\n # NOT take index as a param for map_func\n # share common other_args\n # share common map_func\n # common_func(value,*common_args)\n\n ' rslt = list(map((lambda ele: map_func(ele, *map_func_args)), ol)) return rslt<|docstring|>#mapv i不作为map_func参数,共享相同的f,共享相同的o # NOT take index as a param for map_func # share common other_args # share common map_func # common_func(value,*common_args)<|endoftext|>
b7b0a1fe621059eb5ccb2d869b2bafea44f64c6556e84c37fc02088779d4e853
def array_map(ol, map_func, *args): '\n obseleted,just for compatible\n from elist.elist import *\n ol = [1,2,3,4]\n def map_func(ele,mul,plus):\n return(ele*mul+plus)\n\n array_map(ol,map_func,2,100)\n ' rslt = list(map((lambda ele: map_func(ele, *args)), ol)) return rslt
obseleted,just for compatible from elist.elist import * ol = [1,2,3,4] def map_func(ele,mul,plus): return(ele*mul+plus) array_map(ol,map_func,2,100)
elist/elist.py
array_map
ihgazni2/elist
0
python
def array_map(ol, map_func, *args): '\n obseleted,just for compatible\n from elist.elist import *\n ol = [1,2,3,4]\n def map_func(ele,mul,plus):\n return(ele*mul+plus)\n\n array_map(ol,map_func,2,100)\n ' rslt = list(map((lambda ele: map_func(ele, *args)), ol)) return rslt
def array_map(ol, map_func, *args): '\n obseleted,just for compatible\n from elist.elist import *\n ol = [1,2,3,4]\n def map_func(ele,mul,plus):\n return(ele*mul+plus)\n\n array_map(ol,map_func,2,100)\n ' rslt = list(map((lambda ele: map_func(ele, *args)), ol)) return rslt<|docstring|>obseleted,just for compatible from elist.elist import * ol = [1,2,3,4] def map_func(ele,mul,plus): return(ele*mul+plus) array_map(ol,map_func,2,100)<|endoftext|>
453074c2028e18d9b276f2307ae00d0ba64fd6f0764c48986e9a8e9d38bf9211
def mapo(ol, map_func, *params, **kwargs): '\n #mapo i不作为map_func参数,v不作为map_func参数,共享相同的f\n # NOT take index as a param for map_func\n # NOT take value as a param for map_func\n # share common map_func\n # common_func(*priv_args)\n ' params = list(params) if (params.__len__() == 0): diff_args_arr = kwargs['map_func_args_array'] elif isinstance(params[0], list): diff_args_arr = params[0] else: diff_args_arr = params lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(*args) rslt.append(ele) return rslt
#mapo i不作为map_func参数,v不作为map_func参数,共享相同的f # NOT take index as a param for map_func # NOT take value as a param for map_func # share common map_func # common_func(*priv_args)
elist/elist.py
mapo
ihgazni2/elist
0
python
def mapo(ol, map_func, *params, **kwargs): '\n #mapo i不作为map_func参数,v不作为map_func参数,共享相同的f\n # NOT take index as a param for map_func\n # NOT take value as a param for map_func\n # share common map_func\n # common_func(*priv_args)\n ' params = list(params) if (params.__len__() == 0): diff_args_arr = kwargs['map_func_args_array'] elif isinstance(params[0], list): diff_args_arr = params[0] else: diff_args_arr = params lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(*args) rslt.append(ele) return rslt
def mapo(ol, map_func, *params, **kwargs): '\n #mapo i不作为map_func参数,v不作为map_func参数,共享相同的f\n # NOT take index as a param for map_func\n # NOT take value as a param for map_func\n # share common map_func\n # common_func(*priv_args)\n ' params = list(params) if (params.__len__() == 0): diff_args_arr = kwargs['map_func_args_array'] elif isinstance(params[0], list): diff_args_arr = params[0] else: diff_args_arr = params lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = map_func args = diff_args_arr[i] ele = func(*args) rslt.append(ele) return rslt<|docstring|>#mapo i不作为map_func参数,v不作为map_func参数,共享相同的f # NOT take index as a param for map_func # NOT take value as a param for map_func # share common map_func # common_func(*priv_args)<|endoftext|>
f3b27f8e0965d32b2c5edf72e4a6d8c996b6af6694edaf6e27eba9d87ebe342c
def findfivo(ol, *args, **kwargs): '\n #findfivo f,i,v,o四元决定 fivo-4-tuple-engine\n #cond_func diff_func(index,value,*diff_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['cond_funcs'] diff_args_arr = kwargs['cond_func_args_array'] elif (lngth == 1): if ('cond_func_args_array' in kwargs): diff_funcs_arr = args[0] diff_args_arr = kwargs['cond_func_args_array'] else: diff_funcs_arr = kwargs['cond_funcs'] diff_args_arr = args[0] else: diff_funcs_arr = args[0] diff_args_arr = args[1] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] cond = func(index, value, *args) if cond: rslt.append((index, value)) else: pass return rslt
#findfivo f,i,v,o四元决定 fivo-4-tuple-engine #cond_func diff_func(index,value,*diff_args)
elist/elist.py
findfivo
ihgazni2/elist
0
python
def findfivo(ol, *args, **kwargs): '\n #findfivo f,i,v,o四元决定 fivo-4-tuple-engine\n #cond_func diff_func(index,value,*diff_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['cond_funcs'] diff_args_arr = kwargs['cond_func_args_array'] elif (lngth == 1): if ('cond_func_args_array' in kwargs): diff_funcs_arr = args[0] diff_args_arr = kwargs['cond_func_args_array'] else: diff_funcs_arr = kwargs['cond_funcs'] diff_args_arr = args[0] else: diff_funcs_arr = args[0] diff_args_arr = args[1] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] cond = func(index, value, *args) if cond: rslt.append((index, value)) else: pass return rslt
def findfivo(ol, *args, **kwargs): '\n #findfivo f,i,v,o四元决定 fivo-4-tuple-engine\n #cond_func diff_func(index,value,*diff_args)\n ' args = list(args) lngth = args.__len__() if (lngth == 0): diff_funcs_arr = kwargs['cond_funcs'] diff_args_arr = kwargs['cond_func_args_array'] elif (lngth == 1): if ('cond_func_args_array' in kwargs): diff_funcs_arr = args[0] diff_args_arr = kwargs['cond_func_args_array'] else: diff_funcs_arr = kwargs['cond_funcs'] diff_args_arr = args[0] else: diff_funcs_arr = args[0] diff_args_arr = args[1] lngth = ol.__len__() rslt = [] for i in range(0, lngth): index = i value = ol[i] func = diff_funcs_arr[i] args = diff_args_arr[i] cond = func(index, value, *args) if cond: rslt.append((index, value)) else: pass return rslt<|docstring|>#findfivo f,i,v,o四元决定 fivo-4-tuple-engine #cond_func diff_func(index,value,*diff_args)<|endoftext|>
703d429cbe994bd98ff79e5b72fb3dc00449c87796aacf378ddb79017709bd35
def findfiv(ol, cond_func_args, **kwargs): '\n #findfiv 共享相同的o share common other_args\n #cond_func diff_func(index,value,*common_args)\n ' lngth = ol.__len__() diff_funcs_arr = kwargs['cond_funcs'] common_args_arr = init(lngth, map_func_args) rslt = findfivo(ol, cond_funcs=diff_funcs_arr, cond_func_args_array=common_args_arr) return rslt
#findfiv 共享相同的o share common other_args #cond_func diff_func(index,value,*common_args)
elist/elist.py
findfiv
ihgazni2/elist
0
python
def findfiv(ol, cond_func_args, **kwargs): '\n #findfiv 共享相同的o share common other_args\n #cond_func diff_func(index,value,*common_args)\n ' lngth = ol.__len__() diff_funcs_arr = kwargs['cond_funcs'] common_args_arr = init(lngth, map_func_args) rslt = findfivo(ol, cond_funcs=diff_funcs_arr, cond_func_args_array=common_args_arr) return rslt
def findfiv(ol, cond_func_args, **kwargs): '\n #findfiv 共享相同的o share common other_args\n #cond_func diff_func(index,value,*common_args)\n ' lngth = ol.__len__() diff_funcs_arr = kwargs['cond_funcs'] common_args_arr = init(lngth, map_func_args) rslt = findfivo(ol, cond_funcs=diff_funcs_arr, cond_func_args_array=common_args_arr) return rslt<|docstring|>#findfiv 共享相同的o share common other_args #cond_func diff_func(index,value,*common_args)<|endoftext|>